-rw-r--r-- scripts/context/lua/mtx-context.lua | 9
-rw-r--r-- scripts/context/lua/mtx-update.lua | 11
-rw-r--r-- scripts/context/lua/mtxrun.lua | 473
-rw-r--r-- scripts/context/stubs/mswin/mtxrun.lua | 473
-rwxr-xr-x scripts/context/stubs/unix/mtxrun | 473
-rw-r--r-- tex/context/base/back-swf.mkiv | 61
-rw-r--r-- tex/context/base/buff-ini.lua | 45
-rw-r--r-- tex/context/base/buff-ini.mkiv | 4
-rw-r--r-- tex/context/base/buff-ver.lua | 19
-rw-r--r-- tex/context/base/buff-ver.mkiv | 15
-rw-r--r-- tex/context/base/catc-ctx.mkiv | 12
-rw-r--r-- tex/context/base/catc-def.mkiv | 10
-rw-r--r-- tex/context/base/catc-ini.lua | 7
-rw-r--r-- tex/context/base/catc-ini.mkiv | 2
-rw-r--r-- tex/context/base/char-act.mkiv | 20
-rw-r--r-- tex/context/base/char-def.lua | 243
-rw-r--r-- tex/context/base/char-ini.lua | 32
-rw-r--r-- tex/context/base/char-ini.mkiv | 8
-rw-r--r-- tex/context/base/chem-str.lua | 17
-rw-r--r-- tex/context/base/cldf-com.lua | 34
-rw-r--r-- tex/context/base/cldf-ini.lua | 451
-rw-r--r-- tex/context/base/cldf-int.lua | 67
-rw-r--r-- tex/context/base/cldf-int.mkiv | 8
-rw-r--r-- tex/context/base/cldf-ver.lua | 2
-rw-r--r-- tex/context/base/cldf-ver.mkiv | 2
-rw-r--r-- tex/context/base/colo-ini.lua | 26
-rw-r--r-- tex/context/base/colo-ini.mkiv | 2
-rw-r--r-- tex/context/base/colo-run.mkiv | 4
-rw-r--r-- tex/context/base/cont-log.mkiv | 26
-rw-r--r-- tex/context/base/cont-new.mkii | 2
-rw-r--r-- tex/context/base/cont-new.mkiv | 6
-rw-r--r-- tex/context/base/context.mkii | 2
-rw-r--r-- tex/context/base/context.mkiv | 14
-rw-r--r-- tex/context/base/core-con.lua | 2
-rw-r--r-- tex/context/base/core-env.mkiv | 6
-rw-r--r-- tex/context/base/core-job.lua | 212
-rw-r--r-- tex/context/base/core-job.mkiv | 313
-rw-r--r-- tex/context/base/core-sys.mkiv | 50
-rw-r--r-- tex/context/base/core-two.lua | 17
-rw-r--r-- tex/context/base/data-exp.lua | 117
-rw-r--r-- tex/context/base/data-res.lua | 13
-rw-r--r-- tex/context/base/file-ini.lua | 38
-rw-r--r-- tex/context/base/file-ini.mkvi | 229
-rw-r--r-- tex/context/base/file-job.lua | 622
-rw-r--r-- tex/context/base/file-job.mkvi | 195
-rw-r--r-- tex/context/base/file-lib.lua | 58
-rw-r--r-- tex/context/base/file-lib.mkvi | 20
-rw-r--r-- tex/context/base/file-mod.lua | 171
-rw-r--r-- tex/context/base/file-mod.mkvi (renamed from tex/context/base/core-fil.mkiv) | 149
-rw-r--r-- tex/context/base/file-res.lua | 107
-rw-r--r-- tex/context/base/file-res.mkvi | 147
-rw-r--r-- tex/context/base/file-syn.lua | 46
-rw-r--r-- tex/context/base/file-syn.mkvi | 66
-rw-r--r-- tex/context/base/font-afm.lua | 2
-rw-r--r-- tex/context/base/font-chk.lua | 92
-rw-r--r-- tex/context/base/font-col.lua | 15
-rw-r--r-- tex/context/base/font-col.mkiv | 15
-rw-r--r-- tex/context/base/font-ctx.lua | 47
-rw-r--r-- tex/context/base/font-gds.lua | 11
-rw-r--r-- tex/context/base/font-gds.mkiv | 7
-rw-r--r-- tex/context/base/font-ini.mkiv | 84
-rw-r--r-- tex/context/base/font-mis.lua | 2
-rw-r--r-- tex/context/base/font-otc.lua | 166
-rw-r--r-- tex/context/base/font-otf.lua | 60
-rw-r--r-- tex/context/base/font-syn.lua | 32
-rw-r--r-- tex/context/base/font-tfm.lua | 2
-rw-r--r-- tex/context/base/grph-inc.lua | 16
-rw-r--r-- tex/context/base/grph-inc.mkiv | 6
-rw-r--r-- tex/context/base/java-ini.lua | 31
-rw-r--r-- tex/context/base/l-file.lua | 38
-rw-r--r-- tex/context/base/l-lpeg.lua | 10
-rw-r--r-- tex/context/base/l-unicode.lua | 17
-rw-r--r-- tex/context/base/lang-ini.lua | 5
-rw-r--r-- tex/context/base/lang-ini.mkiv | 4
-rw-r--r-- tex/context/base/lang-lab.lua | 17
-rw-r--r-- tex/context/base/lang-lab.mkiv | 4
-rw-r--r-- tex/context/base/lang-url.lua | 2
-rw-r--r-- tex/context/base/lpdf-ini.lua | 4
-rw-r--r-- tex/context/base/lpdf-mov.lua | 4
-rw-r--r-- tex/context/base/lpdf-swf.lua | 11
-rw-r--r-- tex/context/base/lpdf-wid.lua | 2
-rw-r--r-- tex/context/base/luat-cod.mkiv | 4
-rw-r--r-- tex/context/base/luat-fio.lua | 37
-rw-r--r-- tex/context/base/luat-ini.lua | 9
-rw-r--r-- tex/context/base/luat-ini.mkiv | 12
-rw-r--r-- tex/context/base/luat-lua.lua | 14
-rw-r--r-- tex/context/base/luat-mac.lua | 40
-rw-r--r-- tex/context/base/luat-run.lua | 7
-rw-r--r-- tex/context/base/luat-sto.lua | 18
-rw-r--r-- tex/context/base/lxml-ctx.mkiv | 60
-rw-r--r-- tex/context/base/lxml-dir.lua | 11
-rw-r--r-- tex/context/base/lxml-ent.lua | 5
-rw-r--r-- tex/context/base/lxml-tab.lua | 208
-rw-r--r-- tex/context/base/lxml-tex.lua | 269
-rw-r--r-- tex/context/base/lxml-xml.lua | 50
-rw-r--r-- tex/context/base/m-barcodes.mkiv | 4
-rw-r--r-- tex/context/base/m-database.lua | 68
-rw-r--r-- tex/context/base/m-graph.mkiv | 2
-rw-r--r-- tex/context/base/m-timing.mkiv | 14
-rw-r--r-- tex/context/base/math-ini.lua | 51
-rw-r--r-- tex/context/base/math-ini.mkiv | 157
-rw-r--r-- tex/context/base/math-noa.lua | 62
-rw-r--r-- tex/context/base/math-vfu.lua | 72
-rw-r--r-- tex/context/base/meta-fun.lua | 23
-rw-r--r-- tex/context/base/meta-ini.lua | 30
-rw-r--r-- tex/context/base/meta-ini.mkiv | 46
-rw-r--r-- tex/context/base/meta-pdh.lua | 7
-rw-r--r-- tex/context/base/mlib-ctx.lua | 3
-rw-r--r-- tex/context/base/mult-chk.lua | 1
-rw-r--r-- tex/context/base/mult-de.mkii | 1
-rw-r--r-- tex/context/base/mult-def.lua | 4
-rw-r--r-- tex/context/base/mult-en.mkii | 1
-rw-r--r-- tex/context/base/mult-fr.mkii | 1
-rw-r--r-- tex/context/base/mult-ini.lua | 14
-rw-r--r-- tex/context/base/mult-ini.mkiv | 4
-rw-r--r-- tex/context/base/mult-it.mkii | 1
-rw-r--r-- tex/context/base/mult-nl.mkii | 1
-rw-r--r-- tex/context/base/mult-pe.mkii | 1
-rw-r--r-- tex/context/base/mult-ro.mkii | 1
-rw-r--r-- tex/context/base/node-rul.lua | 6
-rw-r--r-- tex/context/base/node-ser.lua | 10
-rw-r--r-- tex/context/base/node-spl.lua | 4
-rw-r--r-- tex/context/base/node-tra.lua | 2
-rw-r--r-- tex/context/base/pack-obj.lua | 6
-rw-r--r-- tex/context/base/pack-obj.mkiv | 10
-rw-r--r-- tex/context/base/page-flt.lua | 78
-rw-r--r-- tex/context/base/page-flt.mkiv | 47
-rw-r--r-- tex/context/base/page-ini.mkiv | 25
-rw-r--r-- tex/context/base/page-lay.mkiv | 12
-rw-r--r-- tex/context/base/page-lin.mkiv | 2
-rw-r--r-- tex/context/base/page-mrk.mkiv | 6
-rw-r--r-- tex/context/base/page-run.mkiv | 111
-rw-r--r-- tex/context/base/phys-dim.lua | 188
-rw-r--r-- tex/context/base/phys-dim.mkiv | 89
-rw-r--r-- tex/context/base/regi-ini.lua | 2
-rw-r--r-- tex/context/base/regi-ini.mkiv | 2
-rw-r--r-- tex/context/base/s-fnt-23.mkiv | 63
-rw-r--r-- tex/context/base/s-fnt-26.mkiv | 6
-rw-r--r-- tex/context/base/s-fnt-28.mkiv | 2
-rw-r--r-- tex/context/base/s-mag-01.tex | 10
-rw-r--r-- tex/context/base/s-mod-02.mkiv | 19
-rw-r--r-- tex/context/base/s-pre-30.mkiv | 4
-rw-r--r-- tex/context/base/s-pre-69.mkiv | 3
-rw-r--r-- tex/context/base/scrn-fld.mkvi | 26
-rw-r--r-- tex/context/base/scrp-ini.lua | 2
-rw-r--r-- tex/context/base/sort-ini.lua | 2
-rw-r--r-- tex/context/base/spac-hor.mkiv | 4
-rw-r--r-- tex/context/base/spac-ver.lua | 5
-rw-r--r-- tex/context/base/spac-ver.mkiv | 8
-rw-r--r-- tex/context/base/status-files.pdf | bin 23625 -> 23763 bytes
-rw-r--r-- tex/context/base/status-lua.pdf | bin 156579 -> 161767 bytes
-rw-r--r-- tex/context/base/strc-bkm.lua | 6
-rw-r--r-- tex/context/base/strc-bkm.mkiv | 9
-rw-r--r-- tex/context/base/strc-blk.lua | 9
-rw-r--r-- tex/context/base/strc-blk.mkiv | 10
-rw-r--r-- tex/context/base/strc-doc.lua | 102
-rw-r--r-- tex/context/base/strc-doc.mkiv | 14
-rw-r--r-- tex/context/base/strc-flt.mkiv | 34
-rw-r--r-- tex/context/base/strc-ini.lua | 94
-rw-r--r-- tex/context/base/strc-ini.mkiv | 8
-rw-r--r-- tex/context/base/strc-lst.lua | 25
-rw-r--r-- tex/context/base/strc-lst.mkiv | 34
-rw-r--r-- tex/context/base/strc-mar.lua | 95
-rw-r--r-- tex/context/base/strc-mar.mkiv | 8
-rw-r--r-- tex/context/base/strc-mat.lua | 2
-rw-r--r-- tex/context/base/strc-mat.mkiv | 2
-rw-r--r-- tex/context/base/strc-num.lua | 71
-rw-r--r-- tex/context/base/strc-num.mkiv | 121
-rw-r--r-- tex/context/base/strc-pag.lua | 39
-rw-r--r-- tex/context/base/strc-prc.mkiv | 4
-rw-r--r-- tex/context/base/strc-ref.mkiv | 4
-rw-r--r-- tex/context/base/strc-reg.lua | 12
-rw-r--r-- tex/context/base/strc-ren.mkiv | 24
-rw-r--r-- tex/context/base/strc-sec.mkiv | 10
-rw-r--r-- tex/context/base/strc-syn.lua | 2
-rw-r--r-- tex/context/base/strc-syn.mkiv | 11
-rw-r--r-- tex/context/base/supp-fil.lua | 336
-rw-r--r-- tex/context/base/supp-fil.mkiv | 462
-rw-r--r-- tex/context/base/supp-ran.lua | 7
-rw-r--r-- tex/context/base/symb-ini.lua | 30
-rw-r--r-- tex/context/base/syst-aux.mkiv | 83
-rw-r--r-- tex/context/base/syst-con.lua | 22
-rw-r--r-- tex/context/base/syst-con.mkiv | 10
-rw-r--r-- tex/context/base/syst-lua.lua | 56
-rw-r--r-- tex/context/base/syst-lua.mkiv | 4
-rw-r--r-- tex/context/base/task-ini.lua | 1
-rw-r--r-- tex/context/base/toks-ini.lua | 6
-rw-r--r-- tex/context/base/trac-inf.lua | 2
-rw-r--r-- tex/context/base/trac-log.lua | 14
-rw-r--r-- tex/context/base/type-ini.lua | 34
-rw-r--r-- tex/context/base/type-ini.mkiv | 21
-rw-r--r-- tex/context/base/type-one.mkii | 3
-rw-r--r-- tex/context/base/type-otf.mkiv | 11
-rw-r--r-- tex/context/base/typo-dir.mkiv | 2
-rw-r--r-- tex/context/base/typo-mar.mkiv | 51
-rw-r--r-- tex/context/base/util-sto.lua | 4
-rw-r--r-- tex/context/base/x-asciimath.lua | 6
-rw-r--r-- tex/context/base/x-cals.lua | 61
-rw-r--r-- tex/context/base/x-ct.lua | 52
-rw-r--r-- tex/context/base/x-dir-05.mkiv | 2
-rw-r--r-- tex/context/base/x-mathml.lua | 83
-rw-r--r-- tex/context/fonts/asana-math.lfg | 18
-rw-r--r-- tex/context/fonts/cambria-math.lfg | 27
-rw-r--r-- tex/context/fonts/lm-math.lfg | 59
-rw-r--r-- tex/context/fonts/xits-math.lfg | 39
-rw-r--r-- tex/context/interface/keys-cs.xml | 1
-rw-r--r-- tex/context/interface/keys-de.xml | 1
-rw-r--r-- tex/context/interface/keys-en.xml | 1
-rw-r--r-- tex/context/interface/keys-fr.xml | 1
-rw-r--r-- tex/context/interface/keys-it.xml | 1
-rw-r--r-- tex/context/interface/keys-nl.xml | 1
-rw-r--r-- tex/context/interface/keys-pe.xml | 1
-rw-r--r-- tex/context/interface/keys-ro.xml | 1
-rw-r--r-- tex/generic/context/luatex/luatex-fonts-merged.lua | 110
214 files changed, 6605 insertions, 3700 deletions
diff --git a/scripts/context/lua/mtx-context.lua b/scripts/context/lua/mtx-context.lua
index 8f2363229..c014f9d78 100644
--- a/scripts/context/lua/mtx-context.lua
+++ b/scripts/context/lua/mtx-context.lua
@@ -643,9 +643,11 @@ local function analyze(filename) -- only files on current path
if f then
local t = { }
local line = f:read("*line") or ""
- local preamble = match(line,"[\254\255]*%%%s+(.+)$") -- there can be an utf bomb in front
+ -- there can be an utf bomb in front: \254\255 or \255\254
+ -- a template line starts with % or %% (used in asciimode) followed by one or more spaces
+ local preamble = match(line,"^[\254\255]*%%%%?%s+(.+)$")
if preamble then
- for key, value in gmatch(preamble,"(%S+)=(%S+)") do
+ for key, value in gmatch(preamble,"(%S+)%s*=%s*(%S+)") do
t[key] = value
end
t.type = "tex"
@@ -661,7 +663,6 @@ local function analyze(filename) -- only files on current path
f:close()
return t
end
- return nil
end
local function makestub(wrap,template,filename,prepname)
@@ -898,7 +899,7 @@ function scripts.context.run(ctxdata,filename)
end
flags[#flags+1] = format('--backend="%s"',backend)
--
- local command = format("luatex %s %s", concat(flags," "), quote(filename))
+ local command = format("luatex %s %s \\stoptext", concat(flags," "), quote(filename))
local oldhash, newhash = scripts.context.multipass.hashfiles(jobname), { }
local once = environment.argument("once")
local maxnofruns = (once and 1) or scripts.context.multipass.nofruns
diff --git a/scripts/context/lua/mtx-update.lua b/scripts/context/lua/mtx-update.lua
index 74baed938..099a7218f 100644
--- a/scripts/context/lua/mtx-update.lua
+++ b/scripts/context/lua/mtx-update.lua
@@ -93,7 +93,6 @@ scripts.update.base = {
{ "fonts/common/", "texmf" },
{ "fonts/other/", "texmf" }, -- not *really* needed, but helpful
{ "context/<version>/", "texmf-context" },
- { "context/img/", "texmf-context" },
{ "misc/setuptex/", "." },
{ "misc/web2c", "texmf" },
{ "bin/common/<platform>/", "texmf-<platform>" },
@@ -526,7 +525,7 @@ if scripts.savestate then
states.set("info.version",0.1) -- ok
states.set("info.count",(states.get("info.count") or 0) + 1,1,false) -- ok
- states.set("info.comment","this file contains the settings of the last 'mtxrun --script update ' run",false) -- ok
+ states.set("info.comment","this file contains the settings of the last 'mtxrun --script update' run",false) -- ok
states.set("info.date",os.date("!%Y-%m-%d %H:%M:%S")) -- ok
states.set("rsync.program", environment.argument("rsync"), "rsync", true) -- ok
@@ -574,14 +573,14 @@ if scripts.savestate then
states.set("formats.metafun", true)
for r in gmatch(environment.argument("extras") or "","([^, ]+)") do -- for old times sake
- if not find(r,"^[a-z]%-") then
- r= "t-" .. r
+ if r ~= "all" and not find(r,"^[a-z]%-") then
+ r = "t-" .. r
end
states.set("modules." .. r, true)
end
for r in gmatch(environment.argument("modules") or "","([^, ]+)") do
- if not find(r,"^[a-z]%-") then
- r= "t-" .. r
+ if r ~= "all" and not find(r,"^[a-z]%-") then
+ r = "t-" .. r
end
states.set("modules." .. r, true)
end
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 6b0e18ff0..04f8c21c4 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -1654,7 +1654,7 @@ end
local sort, fastcopy, sortedpairs = table.sort, table.fastcopy, table.sortedpairs -- dependency!
-function lpeg.append(list,pp)
+function lpeg.append(list,pp,delayed)
local p = pp
if #list > 0 then
list = fastcopy(list)
@@ -1666,6 +1666,14 @@ function lpeg.append(list,pp)
p = P(list[l])
end
end
+ elseif delayed then
+ for k, v in sortedpairs(list) do
+ if p then
+ p = P(k)/list + p
+ else
+ p = P(k)/list
+ end
+ end
else
for k, v in sortedpairs(list) do
if p then
@@ -2813,10 +2821,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -3719,14 +3759,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3741,17 +3781,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3766,7 +3806,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3861,6 +3901,7 @@ end
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -4138,8 +4179,8 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
+local function f_empty() return "" end -- t,k
+local function f_self(t,k) t[k] = k return k end
local function f_ignore() end -- t,k,v
local t_empty = { __index = empty }
@@ -5059,7 +5100,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -5464,6 +5505,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5569,6 +5614,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5628,6 +5677,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5984,6 +6034,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6513,10 +6568,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6629,9 +6685,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6797,7 +6866,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6808,6 +6877,57 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = {
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";"
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6849,7 +6969,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6877,12 +6997,12 @@ local function handle_any_entity(str)
a = entities[str]
end
if a then
-if type(a) == "function" then
- if trace_entities then
- report_xml("expanding entity &%s; (function)",str)
- end
- a = a(str) or ""
-end
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
a = lpegmatch(parsedentity,a) or a
if trace_entities then
report_xml("resolved entity &%s; -> %s (internal)",str,a)
@@ -6918,18 +7038,25 @@ end
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -7069,17 +7196,29 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -7131,7 +7270,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -7139,6 +7278,11 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+unify_predefined, cleanup, entities = nil, nil, nil
+stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+acache, hcache, dcache = nil, nil, nil
+reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
@@ -7285,7 +7429,7 @@ local function verbose_element(e,handlers)
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7301,7 +7445,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7322,11 +7466,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e))
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7361,7 +7505,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7489,20 +7633,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7511,28 +7668,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -9615,15 +9778,16 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9718,10 +9882,39 @@ local function raw(collected) -- hybrid
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
if collected then
local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ return (e and xmltotext(e.dt)) or ""
else
return ""
end
@@ -9869,10 +10062,10 @@ function xml.text(id,pattern)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return (collected and xmltotext(collected[1].dt)) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id.dt) or ""
else
return ""
end
@@ -9880,6 +10073,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -10178,7 +10373,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower, char = string.format, string.find, string.gmatch, string.lower, string.char
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -10422,13 +10617,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10466,15 +10670,17 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-local cache = { }
+local fullcache = { }
function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(cache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
if usecache then
- local files = cache[path]
+ local files = fullcache[realpath]
if files then
if trace_locating then
report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
@@ -10485,26 +10691,100 @@ function resolvers.scanfiles(path,branch,usecache)
if trace_locating then
report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
if usecache then
- cache[path] = files
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
end
- statistics.stoptiming(cache)
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
-function resolvers.scantime()
- return statistics.elapsedtime(cache)
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
@@ -12144,6 +12424,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -12403,6 +12684,7 @@ end
local function find_intree(filename,filetype,wantedfiles,allresults)
local typespec = resolvers.variableofformat(filetype)
local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
if pathlist and #pathlist > 0 then
-- list search
local filelist = collect_files(wantedfiles)
@@ -12425,7 +12707,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
local done = false
-- using file list
- if filelist then
+ if filelist then -- database
-- compare list entries with permitted pattern -- /xx /xx//
local expression = makepathexpression(pathname)
if trace_detail then
@@ -12454,7 +12736,10 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
end
end
- if not done then
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
pathname = gsub(pathname,"/+$","")
pathname = resolvers.resolve(pathname)
local scheme = url.hasscheme(pathname)
@@ -12476,7 +12761,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
if not done and doscan then
-- collect files in path (and cache the result)
- local files = resolvers.scanfiles(pname,false,true)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
local subpath = files[w]
@@ -12525,7 +12810,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
end
if #result > 0 then
- return "intree", result
+ return method, result
end
end
end
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 6b0e18ff0..04f8c21c4 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -1654,7 +1654,7 @@ end
local sort, fastcopy, sortedpairs = table.sort, table.fastcopy, table.sortedpairs -- dependency!
-function lpeg.append(list,pp)
+function lpeg.append(list,pp,delayed)
local p = pp
if #list > 0 then
list = fastcopy(list)
@@ -1666,6 +1666,14 @@ function lpeg.append(list,pp)
p = P(list[l])
end
end
+ elseif delayed then
+ for k, v in sortedpairs(list) do
+ if p then
+ p = P(k)/list + p
+ else
+ p = P(k)/list
+ end
+ end
else
for k, v in sortedpairs(list) do
if p then
@@ -2813,10 +2821,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -3719,14 +3759,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3741,17 +3781,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3766,7 +3806,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3861,6 +3901,7 @@ end
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -4138,8 +4179,8 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
+local function f_empty() return "" end -- t,k
+local function f_self(t,k) t[k] = k return k end
local function f_ignore() end -- t,k,v
local t_empty = { __index = empty }
@@ -5059,7 +5100,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -5464,6 +5505,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5569,6 +5614,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5628,6 +5677,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5984,6 +6034,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6513,10 +6568,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6629,9 +6685,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6797,7 +6866,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6808,6 +6877,57 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = {
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";"
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6849,7 +6969,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6877,12 +6997,12 @@ local function handle_any_entity(str)
a = entities[str]
end
if a then
-if type(a) == "function" then
- if trace_entities then
- report_xml("expanding entity &%s; (function)",str)
- end
- a = a(str) or ""
-end
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
a = lpegmatch(parsedentity,a) or a
if trace_entities then
report_xml("resolved entity &%s; -> %s (internal)",str,a)
@@ -6918,18 +7038,25 @@ end
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -7069,17 +7196,29 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -7131,7 +7270,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -7139,6 +7278,11 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+unify_predefined, cleanup, entities = nil, nil, nil
+stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+acache, hcache, dcache = nil, nil, nil
+reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
@@ -7285,7 +7429,7 @@ local function verbose_element(e,handlers)
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7301,7 +7445,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7322,11 +7466,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e))
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7361,7 +7505,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7489,20 +7633,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7511,28 +7668,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -9615,15 +9778,16 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9718,10 +9882,39 @@ local function raw(collected) -- hybrid
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
if collected then
local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ return (e and xmltotext(e.dt)) or ""
else
return ""
end
@@ -9869,10 +10062,10 @@ function xml.text(id,pattern)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return (collected and xmltotext(collected[1].dt)) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id.dt) or ""
else
return ""
end
@@ -9880,6 +10073,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -10178,7 +10373,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower, char = string.format, string.find, string.gmatch, string.lower, string.char
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -10422,13 +10617,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10466,15 +10670,17 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-local cache = { }
+local fullcache = { }
function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(cache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
if usecache then
- local files = cache[path]
+ local files = fullcache[realpath]
if files then
if trace_locating then
report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
@@ -10485,26 +10691,100 @@ function resolvers.scanfiles(path,branch,usecache)
if trace_locating then
report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
if usecache then
- cache[path] = files
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
end
- statistics.stoptiming(cache)
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
-function resolvers.scantime()
- return statistics.elapsedtime(cache)
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
@@ -12144,6 +12424,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -12403,6 +12684,7 @@ end
local function find_intree(filename,filetype,wantedfiles,allresults)
local typespec = resolvers.variableofformat(filetype)
local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
if pathlist and #pathlist > 0 then
-- list search
local filelist = collect_files(wantedfiles)
@@ -12425,7 +12707,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
local done = false
-- using file list
- if filelist then
+ if filelist then -- database
-- compare list entries with permitted pattern -- /xx /xx//
local expression = makepathexpression(pathname)
if trace_detail then
@@ -12454,7 +12736,10 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
end
end
- if not done then
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
pathname = gsub(pathname,"/+$","")
pathname = resolvers.resolve(pathname)
local scheme = url.hasscheme(pathname)
@@ -12476,7 +12761,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
if not done and doscan then
-- collect files in path (and cache the result)
- local files = resolvers.scanfiles(pname,false,true)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
local subpath = files[w]
@@ -12525,7 +12810,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
end
if #result > 0 then
- return "intree", result
+ return method, result
end
end
end
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 6b0e18ff0..04f8c21c4 100755
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -1654,7 +1654,7 @@ end
local sort, fastcopy, sortedpairs = table.sort, table.fastcopy, table.sortedpairs -- dependency!
-function lpeg.append(list,pp)
+function lpeg.append(list,pp,delayed)
local p = pp
if #list > 0 then
list = fastcopy(list)
@@ -1666,6 +1666,14 @@ function lpeg.append(list,pp)
p = P(list[l])
end
end
+ elseif delayed then
+ for k, v in sortedpairs(list) do
+ if p then
+ p = P(k)/list + p
+ else
+ p = P(k)/list
+ end
+ end
else
for k, v in sortedpairs(list) do
if p then
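
The new third argument turns the replacement from an eager value into a deferred table lookup: P(k)/v bakes the current value into the pattern, while P(k)/list indexes the table when the match happens, so later changes to the table are still seen. A minimal sketch of the difference, outside ConTeXt (plain LPeg, made-up map table and input):

    local lpeg = require("lpeg")
    local P, Cs, match = lpeg.P, lpeg.Cs, lpeg.match

    local map = { foo = "FOO" }

    local eager   = Cs((P("foo")/map.foo + P(1))^0)  -- value captured at build time
    local delayed = Cs((P("foo")/map     + P(1))^0)  -- table consulted at match time

    map.foo = "CHANGED"

    print(match(eager,   "a foo b"))   -- a FOO b       (old value was baked in)
    print(match(delayed, "a foo b"))   -- a CHANGED b   (looked up when matching)
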
@@ -2813,10 +2821,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
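
The reworked splitname takes an optional splitdrive flag (four return values instead of three), and the new file.nametotable returns the same information as a table. A hedged usage sketch; the file name is made up, and the exact shape of the drive and path fields (colon, trailing slash) follows the lpeg patterns above:

    -- with splitdrive: drive, path, base, suffix -- without: path, base, suffix
    local drive, path, base, suffix = file.splitname("c:/data/article.tex",true)
    local path2, base2, suffix2     = file.splitname("c:/data/article.tex")

    -- table variant; with splitdrive the full path is additionally split into
    -- drive and subpath
    local t = file.nametotable("c:/data/article.tex",true)
    -- fields: t.path, t.drive, t.subpath, t.name, t.base, t.suffix
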
@@ -3719,14 +3759,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3741,17 +3781,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3766,7 +3806,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3861,6 +3901,7 @@ end
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -4138,8 +4179,8 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
+local function f_empty() return "" end -- t,k
+local function f_self(t,k) t[k] = k return k end
local function f_ignore() end -- t,k,v
local t_empty = { __index = empty }
@@ -5059,7 +5100,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -5464,6 +5505,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5569,6 +5614,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5628,6 +5677,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5984,6 +6034,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6513,10 +6568,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6629,9 +6685,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6797,7 +6866,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6808,6 +6877,57 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = {
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";"
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
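
The privates mechanism parks unknown entities on private-use code points (counting up from 0xF0000) while parsing, and turns them back into &name; references when the tree is serialized; escaped additionally protects the predefined characters in text and attribute values. A simplified, standalone sketch of that round trip, using the Lua 5.3 utf8 library instead of ConTeXt's utf helpers, so all names here are illustrative only:

    local nofprivates = 0xF0000
    local privates_n  = { }      -- entity name -> private character
    local privates_p  = { }      -- private character -> "&name;"

    local function privatetoken(name)        -- cf. xml.privatetoken above
        local p = privates_n[name]
        if not p then
            nofprivates = nofprivates + 1
            p = utf8.char(nofprivates)
            privates_n[name] = p
            privates_p[p] = "&" .. name .. ";"
        end
        return p
    end

    local function unprivatized(s)           -- cf. xml.unprivatized above
        return (s:gsub(utf8.charpattern,privates_p))
    end

    local token = privatetoken("myentity")   -- what the parser stores in the tree
    print(unprivatized("before " .. token .. " after"))
    -- before &myentity; after
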
@@ -6849,7 +6969,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6877,12 +6997,12 @@ local function handle_any_entity(str)
a = entities[str]
end
if a then
-if type(a) == "function" then
- if trace_entities then
- report_xml("expanding entity &%s; (function)",str)
- end
- a = a(str) or ""
-end
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
a = lpegmatch(parsedentity,a) or a
if trace_entities then
report_xml("resolved entity &%s; -> %s (internal)",str,a)
@@ -6918,18 +7038,25 @@ end
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -7069,17 +7196,29 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -7131,7 +7270,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -7139,6 +7278,11 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+unify_predefined, cleanup, entities = nil, nil, nil
+stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+acache, hcache, dcache = nil, nil, nil
+reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
@@ -7285,7 +7429,7 @@ local function verbose_element(e,handlers)
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7301,7 +7445,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7322,11 +7466,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e))
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7361,7 +7505,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7489,20 +7633,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7511,28 +7668,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -9615,15 +9778,16 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9718,10 +9882,39 @@ local function raw(collected) -- hybrid
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
if collected then
local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ return (e and xmltotext(e.dt)) or ""
else
return ""
end
@@ -9869,10 +10062,10 @@ function xml.text(id,pattern)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return (collected and xmltotext(collected[1].dt)) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id.dt) or ""
else
return ""
end
@@ -9880,6 +10073,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -10178,7 +10373,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower, char = string.format, string.find, string.gmatch, string.lower, string.char
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -10422,13 +10617,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10466,15 +10670,17 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-local cache = { }
+local fullcache = { }
function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(cache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
if usecache then
- local files = cache[path]
+ local files = fullcache[realpath]
if files then
if trace_locating then
report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
@@ -10485,26 +10691,100 @@ function resolvers.scanfiles(path,branch,usecache)
if trace_locating then
report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
if usecache then
- cache[path] = files
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
end
- statistics.stoptiming(cache)
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
-function resolvers.scantime()
- return statistics.elapsedtime(cache)
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
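
Compared to resolvers.scanfiles, which records remapped names and the __files__, __directories__ and __remappings__ metadata, the new resolvers.simplescanfiles only keeps the first subpath per file name and can fall back on a tree already visited by the full scan (scancache), which is what the shared counter measures; resolvers.scandata exposes the bookkeeping for statistics. A hedged usage sketch from the Lua end of a run (the path and file name are made up):

    -- the result maps a bare file name onto the first subpath it was found in
    local files   = resolvers.simplescanfiles("/data/project/texmf-project",false,true)
    local subpath = files["mydocument.tex"]     -- e.g. "doc/articles", or nil

    -- accumulated bookkeeping, e.g. for a statistics report at the end of a run
    local data = resolvers.scandata()
    -- data.n      : number of scans done
    -- data.shared : simple scans that could reuse a full scan
    -- data.time   : accumulated scan time
    -- data.paths  : sorted list of cached (scanned) paths
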
@@ -12144,6 +12424,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -12403,6 +12684,7 @@ end
local function find_intree(filename,filetype,wantedfiles,allresults)
local typespec = resolvers.variableofformat(filetype)
local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
if pathlist and #pathlist > 0 then
-- list search
local filelist = collect_files(wantedfiles)
@@ -12425,7 +12707,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
local done = false
-- using file list
- if filelist then
+ if filelist then -- database
-- compare list entries with permitted pattern -- /xx /xx//
local expression = makepathexpression(pathname)
if trace_detail then
@@ -12454,7 +12736,10 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
end
end
- if not done then
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
pathname = gsub(pathname,"/+$","")
pathname = resolvers.resolve(pathname)
local scheme = url.hasscheme(pathname)
@@ -12476,7 +12761,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
if not done and doscan then
-- collect files in path (and cache the result)
- local files = resolvers.scanfiles(pname,false,true)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
local subpath = files[w]
@@ -12525,7 +12810,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
end
if #result > 0 then
- return "intree", result
+ return method, result
end
end
end
diff --git a/tex/context/base/back-swf.mkiv b/tex/context/base/back-swf.mkiv
new file mode 100644
index 000000000..82c95f898
--- /dev/null
+++ b/tex/context/base/back-swf.mkiv
@@ -0,0 +1,61 @@
+%D \module
+%D [ file=back-swf,
+%D version=2009.12.31,
+%D title=\CONTEXT\ Backend Macros,
+%D subtitle=Shockwave Experiment,
+%D author=Hans Hagen \& Luigi Scarso,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% This is only a placeholder that demonstrates the usage of swf
+% resources.
+
+\endinput
+
+\starttext
+
+\startluaparameterset [swf:myset:display:1]
+ toolbar = true,
+ preview = "assets/images/posterframes/SPT_14-16_Ra_01_PN_LE01_02_DoLikeMeLater_posterframe.jpg",
+ -- preview = "t:/sources/cow.pdf",
+ -- preview = "t:/sources/hacker.jpg",
+ open = "click", -- click page focus
+ close = "focus", -- click page focus
+\stopluaparameterset
+
+\startluaparameterset [swf:myset:resources:1]
+ paths = {
+ "assets"
+ },
+ files = {
+ -- "somename_1"
+ -- "somename_1"
+ }
+\stopluaparameterset
+
+\startluaparameterset [swf:dolikemelater:resources]
+ paths = {
+ "assets"
+ },
+\stopluaparameterset
+
+\starttext
+
+% preview=swf:myset:display:1
+% controls=swf:myset:controls:1
+% resources=swf:myset:resources:1
+
+\placefigure
+ {flash demo}
+ {\startcombination[2*2]
+ {\externalfigure[trasf1.swf][width=0.45\textwidth,height=0.25\textheight]} {one}
+ {\externalfigure[trasf2.swf][width=0.45\textwidth,height=0.25\textheight]} {two}
+ {\externalfigure[trasf3.swf][width=0.45\textwidth,height=0.25\textheight]} {three}
+ {\externalfigure[trasf4.swf][width=0.45\textwidth,height=0.25\textheight]} {four}
+ \stopcombination}
+
+\stoptext
diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua
index c52753cf9..4f2e27db0 100644
--- a/tex/context/base/buff-ini.lua
+++ b/tex/context/base/buff-ini.lua
@@ -13,11 +13,11 @@ local report_buffers = logs.reporter("buffers","usage")
local concat = table.concat
local type, next = type, next
-local sub, format, count, splitlines = string.sub, string.format, string.count, string.splitlines
+local sub, format, match, find = string.sub, string.format, string.match, string.find
+local count, splitlines = string.count, string.splitlines
local variables = interfaces.variables
local settings_to_array = utilities.parsers.settings_to_array
-local texprint, ctxcatcodes = tex.print, tex.ctxcatcodes
buffers = { }
@@ -103,8 +103,9 @@ local function countnesting(b,e)
return p
end
-local counters = { }
-local nesting = 0
+local counters = { }
+local nesting = 0
+local autoundent = true
function commands.grabbuffer(name,begintag,endtag,bufferdata) -- maybe move \\ to call
local dn = getcontent(name)
@@ -123,6 +124,7 @@ function commands.grabbuffer(name,begintag,endtag,bufferdata) -- maybe move \\ t
dn = dn .. bufferdata .. endtag
nesting = nesting - 1
else
+ -- bufferdata ends with a \
if dn == "" then
dn = sub(bufferdata,1,-2)
else
@@ -132,6 +134,27 @@ function commands.grabbuffer(name,begintag,endtag,bufferdata) -- maybe move \\ t
if last == "\n" or last == "\r" then -- \n is unlikely as \r is the endlinechar
dn = sub(dn,1,-2)
end
+ if autoundent then
+ local margin = match(dn,"[\n\r]( +)[\n\r]*$") or ""
+ local indent = #margin
+ if indent > 0 then
+ local lines = splitlines(dn)
+ local ok = true
+ local pattern = "^" .. margin
+ for i=1,#lines do
+ local l = lines[i]
+ if find(l,pattern) then
+ lines[i] = sub(l,indent+1)
+ else
+ ok = false
+ break
+ end
+ end
+ if ok then
+ dn = concat(lines,"\n")
+ end
+ end
+ end
end
assign(name,dn)
commands.testcase(more)
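
The autoundent step looks at the spaces preceding the final newline of the grabbed content (that is, the indentation in front of the \stopbuffer line) and strips that margin from every line, provided all lines share it, so buffers indented in the source do not carry the indentation into the result. A standalone approximation of the same idea, using plain Lua string functions instead of ConTeXt's splitlines; the names are illustrative:

    local function undent(str)
        local margin = str:match("[\n\r]( +)[\n\r]*$") or ""   -- margin of the last line
        if #margin == 0 then
            return str
        end
        local lines = { }
        for line in (str .. "\n"):gmatch("(.-)\r?\n") do
            lines[#lines+1] = line
        end
        for i=1,#lines do
            if lines[i]:find("^" .. margin) then
                lines[i] = lines[i]:sub(#margin + 1)
            else
                return str      -- one line lacks the margin: leave everything alone
            end
        end
        return table.concat(lines,"\n")
    end

    print(undent("    first\n      second\n    "))   -- "first\n  second\n"
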
@@ -203,10 +226,14 @@ function commands.doifelsebuffer(name)
commands.testcase(exists(name))
end
--- This only used for mp buffers and is a kludge. Don't
--- change the texprint into texsprint as it fails because
--- "p<nl>enddef" becomes "penddef" then.
+-- This is only used for mp buffers and is a kludge. Don't change the
+-- texprint into texsprint as it fails because "p<nl>enddef" becomes
+-- "penddef" then.
+
+-- function commands.feedback(names)
+-- texprint(ctxcatcodes,splitlines(collectcontent(names)))
+-- end
-function commands.feedback(names)
- texprint(ctxcatcodes,splitlines(collectcontent(names)))
+function commands.feedback(names) -- bad name, maybe rename to injectbuffercontent
+ context.printlines(collectcontent(names))
end
diff --git a/tex/context/base/buff-ini.mkiv b/tex/context/base/buff-ini.mkiv
index 4db373836..256d7f64a 100644
--- a/tex/context/base/buff-ini.mkiv
+++ b/tex/context/base/buff-ini.mkiv
@@ -172,10 +172,10 @@
%D Experimental: no expansion of commands in buffer!
% \startbuffer[what]
-% tex.print("WHAT")
+% context("WHAT")
% \stopbuffer
% \startbuffer
-% tex.print("JOBNAME")
+% context("JOBNAME")
% \stopbuffer
%
% \ctxluabuffer[what] \ctxluabuffer
diff --git a/tex/context/base/buff-ver.lua b/tex/context/base/buff-ver.lua
index bd274a36a..c76f17efc 100644
--- a/tex/context/base/buff-ver.lua
+++ b/tex/context/base/buff-ver.lua
@@ -250,10 +250,6 @@ function visualizers.load(name)
end
end
-function commands.doifelsevisualizer(name)
- commands.testcase(specifications[lower(name)])
-end
-
function visualizers.register(name,specification)
name = lower(name)
if trace_visualize then
@@ -611,15 +607,24 @@ local function filter(lines,settings) -- todo: inline or display in settings
return content, m
end
--- main functions
-
local getlines = buffers.getlines
+-- interface
+
+function commands.doifelsevisualizer(name)
+ commands.testcase(specifications[lower(name)])
+end
+
+commands.loadvisualizer = visualizers.load
+
+--~ local decodecomment = resolvers.macros.decodecomment -- experiment
+
function commands.typebuffer(settings)
local lines = getlines(settings.name)
if lines then
local content, m = filter(lines,settings)
if content and content ~= "" then
+--~ content = decodecomment(content)
content = dotabs(content,settings)
visualize(content,checkedsettings(settings,"display"))
end
@@ -642,6 +647,7 @@ end
function commands.typestring(settings)
local content = settings.data
if content and content ~= "" then
+--~ content = decodecomment(content)
-- content = dotabs(content,settings)
visualize(content,checkedsettings(settings,"inline"))
end
@@ -658,6 +664,7 @@ function commands.typefile(settings)
str = regimes.translate(str,regime)
end
if str and str~= "" then
+--~ content = decodecomment(content)
local lines = splitlines(str)
local content, m = filter(lines,settings)
if content and content ~= "" then
diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv
index 15a8d2887..7ee11cbab 100644
--- a/tex/context/base/buff-ver.mkiv
+++ b/tex/context/base/buff-ver.mkiv
@@ -153,7 +153,7 @@
\to \everysetuptyping
\def\doinitializevisualizer#1%
- {\ifproductionrun\ctxlua{visualizers.load("#1")}\fi}
+ {\ifproductionrun\ctxcommand{loadvisualizer("#1")}\fi}
% \definetype[typeTEX][option=tex]
%
@@ -565,15 +565,16 @@
\def\dododotypefile#1#2%
{\doifelsetypingfile{#2}
- {\dodododotypefile{#1}\readfilename}
+ {\dodododotypefile{#1}\askedtypingfile}
{\showmessage\m!verbatims1{#2}}}
\def\doifelsetypingfile#1% sets \readfilename (we will make this proper mkiv i.e. less messy)
- {\doiflocfileelse{#1}
- {\firstoftwoarguments}
- {\doifinputfileelse{#1}
- {\def\readfilename{#1}\firstoftwoarguments} % messy, looks wrong too
- {\secondoftwoarguments}}}
+ {\edef\askedtypingfile{\locfilename{#1}}%
+ \ifx\askedtypingfile\empty
+ \expandafter\secondoftwoarguments
+ \else
+ \expandafter\firstoftwoarguments
+ \fi}
\def\dodododotypefile#1#2% category name
{\edef\currenttyping{#1}%
diff --git a/tex/context/base/catc-ctx.mkiv b/tex/context/base/catc-ctx.mkiv
index 347c37da4..7eeee80ee 100644
--- a/tex/context/base/catc-ctx.mkiv
+++ b/tex/context/base/catc-ctx.mkiv
@@ -21,15 +21,20 @@
\ifdefined \tpacatcodes \else \newcatcodetable \tpacatcodes \fi % { }
\ifdefined \tpbcatcodes \else \newcatcodetable \tpbcatcodes \fi % < >
+\chardef\doublecommentsignal="10FF25 %% < 110000 (last valid range)
+
\startcatcodetable \ctxcatcodes
\catcode\tabasciicode \spacecatcode
\catcode\endoflineasciicode \endoflinecatcode
\catcode\formfeedasciicode \endoflinecatcode
\catcode\spaceasciicode \spacecatcode
\catcode\endoffileasciicode \ignorecatcode
- \catcode\circumflexasciicode\superscriptcatcode
- \catcode\underscoreasciicode\subscriptcatcode
- \catcode\ampersandasciicode \alignmentcatcode
+ % \catcode\circumflexasciicode\superscriptcatcode
+ % \catcode\underscoreasciicode\subscriptcatcode
+ % \catcode\ampersandasciicode \alignmentcatcode
+ \catcode\underscoreasciicode\othercatcode
+ \catcode\circumflexasciicode\othercatcode
+ \catcode\ampersandasciicode \othercatcode
\catcode\backslashasciicode \escapecatcode
\catcode\leftbraceasciicode \begingroupcatcode
\catcode\rightbraceasciicode\endgroupcatcode
@@ -112,6 +117,7 @@
\catcode\backslashasciicode \escapecatcode
\catcode\leftbraceasciicode \begingroupcatcode
\catcode\rightbraceasciicode\endgroupcatcode
+ \catcode\doublecommentsignal\commentcatcode
\stopcatcodetable
\letcatcodecommand \ctxcatcodes \barasciicode \relax
diff --git a/tex/context/base/catc-def.mkiv b/tex/context/base/catc-def.mkiv
index 0c6c5370d..a8c0f33c0 100644
--- a/tex/context/base/catc-def.mkiv
+++ b/tex/context/base/catc-def.mkiv
@@ -21,11 +21,11 @@
\ifdefined\prtcatcodes \else \newcatcodetable \prtcatcodes \fi
\startcatcodetable \nilcatcodes
- \catcode\tabasciicode \spacecatcode
- \catcode\endoflineasciicode\endoflinecatcode
- \catcode\formfeedasciicode \endoflinecatcode
- \catcode\spaceasciicode \spacecatcode
- \catcode\endoffileasciicode\ignorecatcode
+ \catcode\tabasciicode \spacecatcode
+ \catcode\endoflineasciicode \endoflinecatcode
+ \catcode\formfeedasciicode \endoflinecatcode
+ \catcode\spaceasciicode \spacecatcode
+ \catcode\endoffileasciicode \ignorecatcode
\stopcatcodetable
\startcatcodetable \texcatcodes
diff --git a/tex/context/base/catc-ini.lua b/tex/context/base/catc-ini.lua
index 2749f403c..b2c793a6a 100644
--- a/tex/context/base/catc-ini.lua
+++ b/tex/context/base/catc-ini.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['catc-ini'] = {
license = "see context related readme files"
}
+-- todo: everywhere replace tex.ctxcatcodes -> catcodes.numbers.ctxcatcodes
+
catcodes = catcodes or { }
catcodes.numbers = catcodes.numbers or { }
catcodes.names = catcodes.names or { }
@@ -31,3 +33,8 @@ end
for k, v in next, catcodes.numbers do
tex[k] = v
end
+
+-- nasty
+
+table.setmetatableindex(catcodes.numbers,function(t,k) if type(k) == "number" then t[k] = k return k end end)
+table.setmetatableindex(catcodes.names, function(t,k) if type(k) == "string" then t[k] = k return k end end)
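
The "nasty" metatables make both tables answer for the other kind of key: indexing catcodes.numbers with a number hands that number back (and caches it), and indexing catcodes.names with a string hands the string back, so code can pass either a symbolic name or a raw catcode-table number through the same lookup without checking. For instance:

    local n1 = catcodes.numbers.ctxcatcodes    -- a registered entry: the ctx table number
    local n2 = catcodes.numbers[4]             -- not registered: 4 itself comes back
    local s  = catcodes.names["whatever"]      -- not registered: "whatever" comes back
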
diff --git a/tex/context/base/catc-ini.mkiv b/tex/context/base/catc-ini.mkiv
index 9e7693203..47562f5f2 100644
--- a/tex/context/base/catc-ini.mkiv
+++ b/tex/context/base/catc-ini.mkiv
@@ -86,7 +86,7 @@
\newcount\cctcounterb
\newcount\cctcounterc
-\def\newcatcodetable#1%
+\def\newcatcodetable#1% we could move the cctdefcounter to lua
{\global\advance\cctdefcounter\plusone
\expandafter\xdef\csname @@ccn:\number\cctdefcounter\endcsname{\string#1}% logging
\newconstant#1%
diff --git a/tex/context/base/char-act.mkiv b/tex/context/base/char-act.mkiv
index 9e219bccf..f791ba9e2 100644
--- a/tex/context/base/char-act.mkiv
+++ b/tex/context/base/char-act.mkiv
@@ -123,17 +123,15 @@
% obsolete (old hack for idris)
-%D This is a hack, and only meant for special situations. We don't
-%D support this in for instance verbatim. The active characters map
-%D onto the \CONTEXT\ names and font handling etc. is up to the user.
-
-%D This feature is obsolete.
-
-\registerctxluafile{char-act}{1.001}
-
-\def\enableactiveutf {\ctxlua{characters.active.enable()}}
-\def\disableactiveutf{\ctxlua{characters.active.disable()}}
-\def\testactiveutf #1{\ctxlua{characters.active.test("#1")}}
+% %D This is a hack, and only meant for special situations. We don't
+% %D support this in for instance verbatim. The active characters map
+% %D onto the \CONTEXT\ names and font handling etc. is up to the user.
+%
+% \registerctxluafile{char-act}{1.001}
+%
+% \def\enableactiveutf {\ctxlua{characters.active.enable()}}
+% \def\disableactiveutf{\ctxlua{characters.active.disable()}}
+% \def\testactiveutf #1{\ctxlua{characters.active.test("#1")}}
%D Usage:
%D
diff --git a/tex/context/base/char-def.lua b/tex/context/base/char-def.lua
index 319b23d0e..adf0b75ed 100644
--- a/tex/context/base/char-def.lua
+++ b/tex/context/base/char-def.lua
@@ -42180,6 +42180,10 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1820,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ },
},
[0x1821]={
category="lo",
@@ -42187,6 +42191,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1821,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1822]={
category="lo",
@@ -42194,6 +42201,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1822,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1823]={
category="lo",
@@ -42201,6 +42211,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1823,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1824]={
category="lo",
@@ -42208,6 +42221,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1824,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1825]={
category="lo",
@@ -42215,6 +42231,10 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1825,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ },
},
[0x1826]={
category="lo",
@@ -42222,6 +42242,10 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1826,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ },
},
[0x1827]={
category="lo",
@@ -42236,6 +42260,11 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1828,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ [0x180D]="separate form",
+ },
},
[0x1829]={
category="lo",
@@ -42250,6 +42279,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x182A,
+ variants={
+ [0x180B]="alternative form",
+ },
},
[0x182B]={
category="lo",
@@ -42264,6 +42296,11 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x182C,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ [0x180D]="fourth form",
+ },
},
[0x182D]={
category="lo",
@@ -42271,6 +42308,11 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x182D,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ [0x180D]="feminine form",
+ },
},
[0x182E]={
category="lo",
@@ -42292,6 +42334,10 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1830,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ },
},
[0x1831]={
category="lo",
@@ -42306,6 +42352,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1832,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1833]={
category="lo",
@@ -42313,6 +42362,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1833,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1834]={
category="lo",
@@ -42327,6 +42379,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1835,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1836]={
category="lo",
@@ -42334,6 +42389,10 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1836,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ },
},
[0x1837]={
category="lo",
@@ -42348,6 +42407,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1838,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1839]={
category="lo",
@@ -42432,6 +42494,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1844,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1845]={
category="lo",
@@ -42439,6 +42504,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1845,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1846]={
category="lo",
@@ -42446,6 +42514,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1846,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1847]={
category="lo",
@@ -42453,6 +42524,10 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1847,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ },
},
[0x1848]={
category="lo",
@@ -42460,6 +42535,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1848,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1849]={
category="lo",
@@ -42467,6 +42545,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1849,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x184A]={
category="lo",
@@ -42495,6 +42576,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x184D,
+ variants={
+ [0x180B]="feminine form",
+ },
},
[0x184E]={
category="lo",
@@ -42502,6 +42586,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x184E,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x184F]={
category="lo",
@@ -42607,6 +42694,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x185D,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x185E]={
category="lo",
@@ -42614,6 +42704,10 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x185E,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ },
},
[0x185F]={
category="lo",
@@ -42628,6 +42722,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1860,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1861]={
category="lo",
@@ -42649,6 +42746,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1863,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1864]={
category="lo",
@@ -42684,6 +42784,10 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1868,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ },
},
[0x1869]={
category="lo",
@@ -42691,6 +42795,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1869,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x186A]={
category="lo",
@@ -42733,6 +42840,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x186F,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1870]={
category="lo",
@@ -42761,6 +42871,11 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1873,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ [0x180D]="fourth form",
+ },
},
[0x1874]={
category="lo",
@@ -42768,6 +42883,11 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1874,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="feminine first medial form",
+ [0x180D]="feminine second medial form",
+ },
},
[0x1875]={
category="lo",
@@ -42782,6 +42902,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1876,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1877]={
category="lo",
@@ -42796,6 +42919,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1880,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1881]={
category="lo",
@@ -42803,6 +42929,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1881,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1882]={
category="lo",
@@ -42845,6 +42974,11 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1887,
+ variants={
+ [0x180B]="second form",
+ [0x180C]="third form",
+ [0x180D]="fourth form",
+ },
},
[0x1888]={
category="lo",
@@ -42852,6 +42986,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x1888,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x1889]={
category="lo",
@@ -42866,6 +43003,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0x188A,
+ variants={
+ [0x180B]="second form",
+ },
},
[0x188B]={
category="lo",
@@ -60728,6 +60868,9 @@ characters.data={
mathclass="binary",
mathname="cap",
unicodeslot=0x2229,
+ variants={
+ [0xFE00]="with serifs",
+ },
},
[0x222A]={
adobename="union",
@@ -60739,6 +60882,9 @@ characters.data={
mathclass="binary",
mathname="cup",
unicodeslot=0x222A,
+ variants={
+ [0xFE00]="with serifs",
+ },
},
[0x222B]={
adobename="integral",
@@ -61414,6 +61560,9 @@ characters.data={
mathname="lneqq",
mirror=0x2269,
unicodeslot=0x2268,
+ variants={
+ [0xFE00]="with vertical stroke",
+ },
},
[0x2269]={
category="sm",
@@ -61424,6 +61573,9 @@ characters.data={
mathname="gneqq",
mirror=0x2268,
unicodeslot=0x2269,
+ variants={
+ [0xFE00]="with vertical stroke",
+ },
},
[0x226A]={
adobename="muchless",
@@ -61528,6 +61680,9 @@ characters.data={
mathname="lesssim",
mirror=0x2273,
unicodeslot=0x2272,
+ variants={
+ [0xFE00]="following the slant of the lower leg",
+ },
},
[0x2273]={
adobename="greaterorequivalent",
@@ -61539,6 +61694,9 @@ characters.data={
mathname="gtrsim",
mirror=0x2272,
unicodeslot=0x2273,
+ variants={
+ [0xFE00]="following the slant of the lower leg",
+ },
},
[0x2274]={
category="sm",
@@ -61797,6 +61955,9 @@ characters.data={
mathname="subsetneq",
mirror=0x228B,
unicodeslot=0x228A,
+ variants={
+ [0xFE00]="with stroke through bottom members",
+ },
},
[0x228B]={
adobename="supersetnotequal",
@@ -61808,6 +61969,9 @@ characters.data={
mathname="supsetneq",
mirror=0x228A,
unicodeslot=0x228B,
+ variants={
+ [0xFE00]="with stroke through bottom members",
+ },
},
[0x228C]={
category="sm",
@@ -61880,6 +62044,9 @@ characters.data={
mathclass="binary",
mathname="sqcap",
unicodeslot=0x2293,
+ variants={
+ [0xFE00]="with serifs",
+ },
},
[0x2294]={
category="sm",
@@ -61889,6 +62056,9 @@ characters.data={
mathclass="binary",
mathname="sqcup",
unicodeslot=0x2294,
+ variants={
+ [0xFE00]="with serifs",
+ },
},
[0x2295]={
adobename="pluscircle",
@@ -61900,6 +62070,9 @@ characters.data={
mathclass="binary",
mathname="oplus",
unicodeslot=0x2295,
+ variants={
+ [0xFE00]="with white rim",
+ },
},
[0x2296]={
adobename="minuscircle",
@@ -61920,6 +62093,9 @@ characters.data={
mathclass="binary",
mathname="otimes",
unicodeslot=0x2297,
+ variants={
+ [0xFE00]="with white rim",
+ },
},
[0x2298]={
category="sm",
@@ -61968,6 +62144,9 @@ characters.data={
mathclass="binary",
mathname="circledequals",
unicodeslot=0x229C,
+ variants={
+ [0xFE00]="with equal sign touching the circle",
+ },
},
[0x229D]={
category="sm",
@@ -62178,21 +62357,21 @@ characters.data={
},
[0x22B2]={
category="sm",
+ comment="cramped triangleleft",
description="NORMAL SUBGROUP OF",
direction="on",
linebreak="al",
mathclass="bin",
- mathname="triangleleft",
mirror=0x22B3,
unicodeslot=0x22B2,
},
[0x22B3]={
category="sm",
+ comment="cramped triangleright",
description="CONTAINS AS NORMAL SUBGROUP",
direction="on",
linebreak="al",
mathclass="bin",
- mathname="triangleright",
mirror=0x22B2,
unicodeslot=0x22B3,
},
@@ -62596,6 +62775,9 @@ characters.data={
mathname="lesseqgtr",
mirror=0x22DB,
unicodeslot=0x22DA,
+ variants={
+ [0xFE00]="with slanted equal",
+ },
},
[0x22DB]={
adobename="greaterequalorless",
@@ -62607,6 +62789,9 @@ characters.data={
mathname="gtreqless",
mirror=0x22DA,
unicodeslot=0x22DB,
+ variants={
+ [0xFE00]="with slanted equal",
+ },
},
[0x22DC]={
category="sm",
@@ -68196,6 +68381,8 @@ characters.data={
description="WHITE RIGHT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
+ mathclass="bin",
+ mathname="triangleright",
unicodeslot=0x25B7,
},
[0x25B8]={
@@ -68281,6 +68468,8 @@ characters.data={
description="WHITE LEFT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
+ mathclass="bin",
+ mathname="triangleleft",
unicodeslot=0x25C1,
},
[0x25C2]={
@@ -69522,6 +69711,8 @@ characters.data={
description="BLACK DIAMOND SUIT",
direction="on",
linebreak="al",
+ mathclass="ord",
+ mathname="blacklozenge",
unicodeslot=0x2666,
},
[0x2667]={
@@ -76066,8 +76257,6 @@ characters.data={
description="BLACK LOZENGE",
direction="on",
linebreak="al",
- mathclass="ord",
- mathname="blacklozenge",
unicodeslot=0x29EB,
},
[0x29EC]={
@@ -76220,6 +76409,8 @@ characters.data={
description="N-ARY CIRCLED DOT OPERATOR",
direction="on",
linebreak="al",
+ mathclass="limop",
+ mathname="bigodot",
unicodeslot=0x2A00,
},
[0x2A01]={
@@ -76246,7 +76437,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="limop",
- mathname="bigodot",
+ mathname="bigudot",
unicodeslot=0x2A03,
},
[0x2A04]={
@@ -76663,6 +76854,9 @@ characters.data={
linebreak="al",
mirror=0x2A3D,
unicodeslot=0x2A3C,
+ variants={
+ [0xFE00]="tall variant with narrow foot",
+ },
},
[0x2A3D]={
category="sm",
@@ -76671,6 +76865,9 @@ characters.data={
linebreak="al",
mirror=0x2A3C,
unicodeslot=0x2A3D,
+ variants={
+ [0xFE00]="tall variant with narrow foot",
+ },
},
[0x2A3E]={
category="sm",
@@ -77398,6 +77595,9 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2A9D,
+ variants={
+ [0xFE00]="with similar following the slant of the upper leg",
+ },
},
[0x2A9E]={
category="sm",
@@ -77405,6 +77605,9 @@ characters.data={
direction="on",
linebreak="al",
unicodeslot=0x2A9E,
+ variants={
+ [0xFE00]="with similar following the slant of the upper leg",
+ },
},
[0x2A9F]={
category="sm",
@@ -77512,6 +77715,9 @@ characters.data={
linebreak="al",
mirror=0x2AAD,
unicodeslot=0x2AAC,
+ variants={
+ [0xFE00]="with slanted equal",
+ },
},
[0x2AAD]={
category="sm",
@@ -77520,6 +77726,9 @@ characters.data={
linebreak="al",
mirror=0x2AAC,
unicodeslot=0x2AAD,
+ variants={
+ [0xFE00]="with slanted equal",
+ },
},
[0x2AAE]={
category="sm",
@@ -77776,6 +77985,9 @@ characters.data={
mathclass="relation",
mathname="subsetneqq",
unicodeslot=0x2ACB,
+ variants={
+ [0xFE00]="with stroke through bottom members",
+ },
},
[0x2ACC]={
category="sm",
@@ -77785,6 +77997,9 @@ characters.data={
mathclass="relation",
mathname="supsetneqq",
unicodeslot=0x2ACC,
+ variants={
+ [0xFE00]="with stroke through bottom members",
+ },
},
[0x2ACD]={
category="sm",
@@ -110243,6 +110458,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0xA856,
+ variants={
+ [0xFE00]="phags-pa letter reversed shaping small a",
+ },
},
[0xA857]={
category="lo",
@@ -110285,6 +110503,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0xA85C,
+ variants={
+ [0xFE00]="phags-pa letter reversed shaping ha",
+ },
},
[0xA85D]={
category="lo",
@@ -110299,6 +110520,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0xA85E,
+ variants={
+ [0xFE00]="phags-pa letter reversed shaping i",
+ },
},
[0xA85F]={
category="lo",
@@ -110306,6 +110530,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0xA85F,
+ variants={
+ [0xFE00]="phags-pa letter reversed shaping u",
+ },
},
[0xA860]={
category="lo",
@@ -110313,6 +110540,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0xA860,
+ variants={
+ [0xFE00]="phags-pa letter reversed shaping e",
+ },
},
[0xA861]={
category="lo",
@@ -110369,6 +110599,9 @@ characters.data={
direction="l",
linebreak="al",
unicodeslot=0xA868,
+ variants={
+ [0xFE00]="phags-pa letter reversed shaping subjoined ya",
+ },
},
[0xA869]={
category="lo",
diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua
index 3a4789821..6d17a7507 100644
--- a/tex/context/base/char-ini.lua
+++ b/tex/context/base/char-ini.lua
@@ -17,7 +17,6 @@ local utfchar, utfbyte, utfvalues = utf.char, utf.byte, string.utfvalues
local ustring = unicode.ustring
local concat, unpack, tohash = table.concat, table.unpack, table.tohash
local next, tonumber, type, rawget, rawset = next, tonumber, type, rawget, rawset
-local texsprint, texprint = tex.sprint, tex.print
local format, lower, gsub, match, gmatch = string.format, string.lower, string.gsub, string.match, string.match, string.gmatch
local P, R, lpegmatch = lpeg.P, lpeg.R, lpeg.match
@@ -27,8 +26,11 @@ local texsetlccode = tex.setlccode
local texsetuccode = tex.setuccode
local texsetsfcode = tex.setsfcode
local texsetcatcode = tex.setcatcode
+
+local contextsprint = context.sprint
local ctxcatcodes = tex.ctxcatcodes
local texcatcodes = tex.texcatcodes
+
local setmetatableindex = table.setmetatableindex
local trace_defining = false trackers.register("characters.defining", function(v) characters_defining = v end)
@@ -487,17 +489,17 @@ use the table. After all, we have this information available anyway.</p>
--ldx]]--
function characters.makeactive(n,name) -- let ?
- texsprint(ctxcatcodes,format("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name))
+ contextsprint(ctxcatcodes,format("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name))
-- context("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name)
end
function tex.uprint(c,n)
if n then
- -- texsprint(c,charfromnumber(n))
- texsprint(c,utfchar(n))
+ -- contextsprint(c,charfromnumber(n))
+ contextsprint(c,utfchar(n))
else
- -- texsprint(charfromnumber(c))
- texsprint(utfchar(c))
+ -- contextsprint(charfromnumber(c))
+ contextsprint(utfchar(c))
end
end
@@ -520,7 +522,7 @@ function characters.define(tobelettered, tobeactivated) -- catcodetables
for u, chr in next, data do -- these will be commands
local fallback = chr.fallback
if fallback then
- texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}") -- no texprint
+ contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}")
a = a + 1
activated[a] = u
else
@@ -530,16 +532,16 @@ function characters.define(tobelettered, tobeactivated) -- catcodetables
if is_character[category] then
if chr.unicodeslot < 128 then
if is_letter[category] then
- texprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u)))
+ contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s
else
- texprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u))
+ contextsprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u)) -- has no s
end
else
- texprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u)))
+ contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s
end
elseif is_command[category] then
if not temphack[u] then
- texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}") -- no texprint
+ contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}")
a = a + 1
activated[a] = u
end
@@ -681,7 +683,7 @@ end
-- xml support (moved)
function characters.remapentity(chr,slot)
- texsprint(format("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr))
+ contextsprint(format("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr))
end
characters.activeoffset = 0x10000 -- there will be remapped in that byte range
@@ -880,7 +882,7 @@ function characters.flush(n,direct)
if direct then
return c
else
- texsprint(c)
+ contextsprint(c)
end
end
@@ -1003,3 +1005,7 @@ if not characters.superscripts then
storage.register("characters/subscripts", subscripts, "characters.subscripts")
end
+
+-- interface
+
+commands.utfchar = tex.uprint
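
With commands.utfchar pointing at tex.uprint, the \utfchar macro in char-ini.mkiv can go through the generic \ctxcommand dispatcher instead of naming tex.uprint directly; the Lua side simply prints the UTF character for a code point, optionally with an explicit catcode table as the first argument. A small sketch of the call chain (the code point is arbitrary):

    -- \utfchar{937} expands to \ctxcommand{utfchar(937)} which ends up here:
    commands.utfchar(937)                     -- contextsprint(utfchar(937)), i.e. an Omega
    commands.utfchar(tex.ctxcatcodes,937)     -- the same, with an explicit catcode table
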
diff --git a/tex/context/base/char-ini.mkiv b/tex/context/base/char-ini.mkiv
index 22c233dac..d1d7574c9 100644
--- a/tex/context/base/char-ini.mkiv
+++ b/tex/context/base/char-ini.mkiv
@@ -40,7 +40,7 @@
%
% impossible in math mode so there always fallback (till we have gyre):
-\def\utfchar #1{\ctxlua{tex.uprint(\number#1)}}
+\def\utfchar #1{\ctxcommand{utfchar(\number#1)}}
\def\checkedchar {\relax\ifmmode\expandafter\checkedmathchar\else\expandafter\checkedtextchar\fi} % #1#2
\def\checkedmathchar#1#2{#2}
\def\checkedtextchar #1{\iffontchar\font#1 \expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi{\char#1}}
@@ -74,9 +74,6 @@
%number\mthcatcodes,
\number\vrbcatcodes,
\number\prtcatcodes,
-% \number\xmlcatcodesn,
-% \number\xmlcatcodese,
-% \number\xmlcatcodesr,
\number\tpacatcodes,
\number\tpbcatcodes,
\number\txtcatcodes,
@@ -85,9 +82,6 @@
\number\ctxcatcodes,
\number\notcatcodes,
\number\prtcatcodes, % new
-% \number\xmlcatcodesn,
-% \number\xmlcatcodese,
-% \number\xmlcatcodesr,
}
)
% catcodes.register("xmlcatcodes",\number\xmlcatcodes)
diff --git a/tex/context/base/chem-str.lua b/tex/context/base/chem-str.lua
index 1a6ed4dc2..043cff039 100644
--- a/tex/context/base/chem-str.lua
+++ b/tex/context/base/chem-str.lua
@@ -17,7 +17,7 @@ local report_chemistry = logs.reporter("chemistry")
local format, gmatch, match, lower, gsub = string.format, string.gmatch, string.match, string.lower, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
-local apply = structures.processors.apply
+local processor_tostring = structures.processors.tostring
local lpegmatch = lpeg.match
local settings_to_array = utilities.parsers.settings_to_array
@@ -319,7 +319,8 @@ local function process(spec,text,n,rulethickness,rulecolor,offset)
if t then
local a = align and align[si]
if a then a = "." .. a else a = "" end
- m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\dochemicaltext{%s}");',operation,a,bonds,si,molecule(apply(t)))
+ t = molecule(processor_tostring(t))
+ m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\dochemicaltext{%s}");',operation,a,bonds,si,t)
end
end
elseif upto then
@@ -329,14 +330,16 @@ local function process(spec,text,n,rulethickness,rulecolor,offset)
if t then
local s = align and align[i]
if s then s = "." .. s else s = "" end
- m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\dochemicaltext{%s}");',operation,s,bonds,i,molecule(apply(t)))
+ t = molecule(processor_tostring(t))
+ m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\dochemicaltext{%s}");',operation,s,bonds,i,t)
end
end
elseif index == 0 then
local t = text
if not t then txt, t = fetch(txt) end
if t then
- m = m + 1 ; metacode[m] = format('chem_%s_zero("\\dochemicaltext{%s}");',operation,molecule(apply(t)))
+ t = molecule(processor_tostring(t))
+ m = m + 1 ; metacode[m] = format('chem_%s_zero("\\dochemicaltext{%s}");',operation,t)
end
elseif index then
local t = text
@@ -344,7 +347,8 @@ local function process(spec,text,n,rulethickness,rulecolor,offset)
if t then
local s = align and align[index]
if s then s = "." .. s else s = "" end
- m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\dochemicaltext{%s}");',operation,s,bonds,index,molecule(apply(t)))
+ t = molecule(processor_tostring(t))
+ m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\dochemicaltext{%s}");',operation,s,bonds,index,t)
end
else
for i=1,max do
@@ -353,7 +357,8 @@ local function process(spec,text,n,rulethickness,rulecolor,offset)
if t then
local s = align and align[i]
if s then s = "." .. s else s = "" end
- m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\dochemicaltext{%s}");',operation,s,bonds,i,molecule(apply(t)))
+ t = molecule(processor_tostring(t))
+ m = m + 1 ; metacode[m] = format('chem_%s%s(%s,%s,"\\dochemicaltext{%s}");',operation,s,bonds,i,t)
end
end
end
diff --git a/tex/context/base/cldf-com.lua b/tex/context/base/cldf-com.lua
index 9f5d2bcae..47c8080d1 100644
--- a/tex/context/base/cldf-com.lua
+++ b/tex/context/base/cldf-com.lua
@@ -75,8 +75,6 @@ end
-- -- speedtest needed:
--
--- local ctxcatcodes = tex.ctxcatcodes
---
-- local flush, writer = context.getlogger()
--
-- trackers.register("context.trace",function(v)
@@ -122,3 +120,35 @@ context.vrule = context.hrule
--~ hbox(a,...)
--~ end
--~ end
+
+-- not yet used ... but will get a variant at the tex end as well
+
+function context.sethboxregister (n) context("\\setbox %s\\hbox",n) end
+function context.setvboxregister (n) context("\\setbox %s\\vbox",n) end
+
+function context.starthboxregister(n)
+ if type(n) == "number" then
+ context("\\setbox%s\\hbox\\bgroup",n)
+ else
+ context("\\setbox\\%s\\hbox\\bgroup",n)
+ end
+end
+
+function context.startvboxregister(n)
+ if type(n) == "number" then
+ context("\\setbox%s\\vbox\\bgroup",n)
+ else
+ context("\\setbox\\%s\\vbox\\bgroup",n)
+ end
+end
+
+context.stophboxregister = context.egroup
+context.stopvboxregister = context.egroup
+
+function context.flushboxregister(n)
+ if type(n) == "number" then
+ context("\\box%s ",n)
+ else
+ context("\\box\\%s",n)
+ end
+end
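+
+-- Illustrative sketch only, not part of the patch: how the new box register
+-- helpers could be used from the Lua end; the numeric register 0 and the
+-- name "scratchbox" are assumptions for the example.
+--
+-- context.starthboxregister(0)             -- \setbox0\hbox\bgroup
+-- context("some text")
+-- context.stophboxregister()               -- \egroup
+-- context.flushboxregister(0)              -- \box0
+--
+-- context.starthboxregister("scratchbox")  -- \setbox\scratchbox\hbox\bgroup
+-- context("some text")
+-- context.stophboxregister()
+-- context.flushboxregister("scratchbox")   -- \box\scratchbox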
diff --git a/tex/context/base/cldf-ini.lua b/tex/context/base/cldf-ini.lua
index b2616b664..2544f0ded 100644
--- a/tex/context/base/cldf-ini.lua
+++ b/tex/context/base/cldf-ini.lua
@@ -16,25 +16,28 @@ if not modules then modules = { } end modules ['cldf-ini'] = {
-- Todo: optional checking against interface
-- Todo: coroutine trickery
-- Todo: maybe use txtcatcodes
+-- Todo: we could always use prtcatcodes (context.a_b_c) but then we lose protection
-- tflush needs checking ... sort of weird that it's not a table
-- __flushlines is an experiment and rather ugly so it will go away
+-- tex.print == line with endlinechar appended
+
local tex = tex
context = context or { }
local context = context
-local format, find, gmatch, splitlines = string.format, string.find, string.gmatch, string.splitlines
+local format, find, gmatch, gsub = string.format, string.find, string.gmatch, string.gsub
local next, type, tostring, setmetatable = next, type, tostring, setmetatable
local insert, remove, concat = table.insert, table.remove, table.concat
-local lpegmatch = lpeg.match
+local lpegmatch, lpegC, lpegS, lpegP, lpegCc = lpeg.match, lpeg.C, lpeg.S, lpeg.P, lpeg.Cc
local texsprint = tex.sprint
local textprint = tex.tprint
local texprint = tex.print
-local texiowrite = texio.write
+local texwrite = tex.write
local texcount = tex.count
local isnode = node.is_node -- after 0.65 just node.type
@@ -49,6 +52,8 @@ local vrbcatcodes = tex.vrbcatcodes
local xmlcatcodes = tex.xmlcatcodes
local flush = texsprint
+local flushdirect = texprint
+local flushraw = texwrite
local report_context = logs.reporter("cld","tex")
local report_cld = logs.reporter("cld","stack")
@@ -137,62 +142,155 @@ local catcodes = {
xml = xmlcatcodes, xmlcatcodes = xmlcatcodes,
}
-function context.pushcatcodes(c)
+local function pushcatcodes(c)
insert(catcodestack,currentcatcodes)
currentcatcodes = (c and catcodes[c] or tonumber(c)) or currentcatcodes
contentcatcodes = currentcatcodes
end
-function context.popcatcodes()
+local function popcatcodes()
currentcatcodes = remove(catcodestack) or currentcatcodes
contentcatcodes = currentcatcodes
end
-function tex.fprint(...) -- goodie
- texsprint(currentcatcodes,format(...))
-end
+context.pushcatcodes = pushcatcodes
+context.popcatcodes = popcatcodes
-- -- -- todo: tracing
-local newline = lpeg.patterns.newline
-local space = lpeg.patterns.spacer
-local spacing = newline * space^0
-local content = lpeg.C((1-spacing)^1)
-local emptyline = space^0 * newline^2
-local endofline = space^0 * newline * space^0
-local simpleline = endofline * lpeg.P(-1)
+local newline = lpeg.patterns.newline
+local space = lpeg.patterns.spacer
+local spacing = newline * space^0
+local content = lpegC((1-spacing)^1)
+local emptyline = space^0 * newline^2
+local endofline = space^0 * newline * space^0
+local simpleline = endofline * lpegP(-1)
+
+local verbose = lpegC((1-space-newline)^1)
+local beginstripper = (lpegS(" \t")^1 * newline^1) / ""
+local endstripper = beginstripper * lpegP(-1)
local function n_content(s)
flush(contentcatcodes,s)
end
+local function n_verbose(s)
+ flush(vrbcatcodes,s)
+end
+
local function n_endofline()
- texsprint(" \r")
+ flush(currentcatcodes," \r")
end
local function n_emptyline()
- texprint("\r")
+ flushdirect(currentcatcodes,"\r")
end
local function n_simpleline()
- texprint("\r")
+ flushdirect(currentcatcodes,"\r")
+end
+
+local n_exception = ""
+
+-- better a table specification
+
+function context.newtexthandler(specification) -- can also be used for verbose
+ specification = specification or { }
+ --
+ local s_catcodes = specification.catcodes
+ --
+ local f_before = specification.before
+ local f_after = specification.after
+ --
+ local f_endofline = specification.endofline or n_endofline
+ local f_emptyline = specification.emptyline or n_emptyline
+ local f_simpleline = specification.simpleline or n_simpleline
+ local f_content = specification.content or n_content
+ --
+ local p_exception = specification.exception
+ --
+ if s_catcodes then
+ f_content = function(s)
+ flush(s_catcodes,s)
+ end
+ end
+ --
+ local pattern
+ if p_exception then
+ local content = lpegC((1-spacing-p_exception)^1)
+ pattern =
+ simpleline / f_simpleline
+ + (
+ emptyline / f_emptyline
+ + endofline / f_endofline
+ + p_exception
+ + content / f_content
+ )^0
+ else
+ local content = lpegC((1-spacing)^1)
+ pattern =
+ simpleline / f_simpleline
+ + (
+ emptyline / f_emptyline
+ + endofline / f_endofline
+ + content / f_content
+ )^0
+ end
+ --
+ if f_before then
+ pattern = (lpegP(true) / f_before) * pattern
+ end
+ --
+ if f_after then
+ pattern = pattern * (lpegP(true) / f_after)
+ end
+ --
+ return function(str) return lpegmatch(pattern,str) end, pattern
+end
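+
+-- Illustrative sketch only, not part of the patch: building a handler that
+-- flushes its content under the verbatim catcode regime; the name
+-- "flushverbatim" is an assumption for the example.
+--
+-- local flushverbatim = context.newtexthandler {
+--     catcodes = tex.vrbcatcodes,
+-- }
+-- flushverbatim("first line\nsecond line\n\nnext paragraph")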
+
+function context.newverbosehandler(specification) -- a special variant for e.g. cdata in lxml-tex
+ specification = specification or { }
+ --
+ local f_line = specification.line or function() flushdirect("\r") end
+ local f_space = specification.space or function() flush(" ") end
+ local f_content = specification.content or n_verbose
+ local f_before = specification.before
+ local f_after = specification.after
+ --
+ local pattern =
+ newline * (lpegCc("") / f_line) -- so we get call{}
+ + verbose / f_content
+ + space * (lpegCc("") / f_space) -- so we get call{}
+ --
+ if specification.strip then
+ pattern = beginstripper^0 * (endstripper + pattern)^0
+ else
+ pattern = pattern^0
+ end
+ --
+ if f_before then
+ pattern = (lpegP(true) / f_before) * pattern
+ end
+ --
+ if f_after then
+ pattern = pattern * (lpegP(true) / f_after)
+ end
+ --
+ return function(str) return lpegmatch(pattern,str) end, pattern
end
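+
+-- Illustrative sketch only, not part of the patch: a verbose handler as it
+-- could be used for cdata-like content; the wrapping macros used in before
+-- and after are hypothetical.
+--
+-- local flushcdata = context.newverbosehandler {
+--     before = function() context.starthypotheticalverbatim() end,
+--     after  = function() context.stophypotheticalverbatim() end,
+--     strip  = true,
+-- }
+-- flushcdata("  <raw data kept verbatim>\n")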
-function lpeg.texlinesplitter(f_content,f_endofline,f_emptyline,f_simpleline)
- local splitlines =
- simpleline / (f_simpleline or n_simpleline)
- + (
- emptyline / (f_emptyline or n_emptyline)
- + endofline / (f_endofline or n_emptyline)
- + content / (f_content or n_content)
- )^0
- return function(str) return lpegmatch(splitlines,str) end
-end
+local flushlines = context.newtexthandler {
+ content = n_content,
+ endofline = n_endofline,
+ emptyline = n_emptyline,
+ simpleline = n_simpleline,
+}
-local flushlines = lpeg.texlinesplitter(n_content,n_endofline,n_emptyline,n_simpleline)
+context.__flushlines = flushlines -- maybe context.helpers.flushtexlines
+context.__flush = flush
+context.__flushdirect = flushdirect
-context.__flushlines = flushlines -- maybe context.helpers.flushtexlines
-context.__flush = flush
+-- The next variant is only used in rare cases (buffer to mp):
local printlines_ctx = (
(newline) / function() texprint("") end +
@@ -204,7 +302,7 @@ local printlines_raw = (
(1-newline)^1 / function(s) texprint(s) end * newline^-1
)^0
-function context.printlines(str,raw)
+function context.printlines(str,raw) -- todo: see if via file is useable
if raw then
lpegmatch(printlines_raw,str)
else
@@ -212,19 +310,17 @@ function context.printlines(str,raw)
end
end
--- -- --
+-- This is the most reliable way to deal with nested buffers and other
+-- catcode sensitive data.
local methodhandler = resolvers.methodhandler
function context.viafile(data)
- -- this is the only way to deal with nested buffers
- -- and other catcode sensitive data
if data and data ~= "" then
local filename = resolvers.savers.byscheme("virtual","viafile",data)
- -- somewhat slow, these regime changes (todo: wrap in one command)
---~ context.startregime { "utf" }
+ -- context.startregime { "utf" }
context.input(filename)
---~ context.stopregime()
+ -- context.stopregime()
end
end
@@ -249,6 +345,7 @@ local function writer(parent,command,first,...) -- already optimized before call
elseif ti == "" then
flush(currentcatcodes,"{}")
elseif typ == "string" then
+ -- is processlines seen ?
if processlines and find(ti,"[\n\r]") then -- we can check for ti == "\n"
flush(currentcatcodes,"{")
local flushlines = parent.__flushlines or flushlines
@@ -305,8 +402,7 @@ local function writer(parent,command,first,...) -- already optimized before call
flush(currentcatcodes,"{\\cldf{",_store_f_(ti),"}}") -- todo: ctx|prt|texcatcodes
elseif typ == "boolean" then
if ti then
- -- flush(currentcatcodes,"^^M")
- texprint("")
+ flushdirect(currentcatcodes,"\r")
else
direct = true
end
@@ -335,9 +431,60 @@ local function indexer(parent,k)
return f
end
+-- Potential optimization: after the first call we know if there will be an
+-- argument. Of course there is the side effect that for instance abuse like
+-- context.NC(str) fails, as do optional arguments. So, we don't do this
+-- in practice. We just keep the next trick commented. The gain on some
+-- 100000 calls is not that large: 0.100 => 0.95, which is negligible.
+--
+-- local function constructor(parent,k,c,first,...)
+-- if first == nil then
+-- local f = function()
+-- flush(currentcatcodes,c)
+-- end
+-- parent[k] = f
+-- return f()
+-- else
+-- local f = function(...)
+-- return writer(parent,c,...)
+-- end
+-- parent[k] = f
+-- return f(first,...)
+-- end
+-- end
+--
+-- local function indexer(parent,k)
+-- local c = "\\" .. tostring(generics[k] or k)
+-- local f = function(...)
+-- return constructor(parent,k,c,...)
+-- end
+-- parent[k] = f
+-- return f
+-- end
+
+-- only for internal usage:
+
+function context.constructcsonly(k) -- not much faster than the next but more mem efficient
+ local c = "\\" .. tostring(generics[k] or k)
+ rawset(context, k, function()
+ flush(prtcatcodes,c)
+ end)
+end
+
+function context.constructcs(k)
+ local c = "\\" .. tostring(generics[k] or k)
+ rawset(context, k, function(first,...)
+ if first == nil then
+ flush(prtcatcodes,c)
+ else
+ return writer(context,c,first,...)
+ end
+ end)
+end
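+
+-- Illustrative sketch only, not part of the patch: predefining a stub so that
+-- later calls bypass the indexer; the macro names "dosomething" and "doother"
+-- are assumptions for the example.
+--
+-- context.constructcsonly("dosomething") -- always argumentless: \dosomething
+-- context.dosomething()
+--
+-- context.constructcs("doother")         -- optional arguments still work
+-- context.doother("first","second")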
+
local function caller(parent,f,a,...)
if not parent then
- -- so we don't need to test in the calling (slower but often no issue) (will go)
+ -- so we don't need to test in the calling (slower but often no issue)
elseif f ~= nil then
local typ = type(f)
if typ == "string" then
@@ -365,8 +512,7 @@ local function caller(parent,f,a,...)
flushlines(f)
-- ignore ... maybe some day
else
- -- flush(currentcatcodes,"^^M")
- texprint("")
+ flushdirect(currentcatcodes,"\r")
end
else
if a ~= nil then
@@ -410,15 +556,45 @@ function context.protect()
contentcatcodes = currentcatcodes
end
+function context.sprint(...) -- takes catcodes as first argument
+ flush(...)
+end
+
+function context.fprint(catcodes,fmt,first,...)
+ if type(catcodes) == "number" then
+ if first then
+ flush(catcodes,format(fmt,first,...))
+ else
+ flush(catcodes,fmt)
+ end
+ else
+ if fmt then
+ flush(format(catcodes,fmt,first,...))
+ else
+ flush(catcodes)
+ end
+ end
+end
+
+function tex.fprint(fmt,first,...) -- goodie
+ if first then
+ flush(currentcatcodes,format(fmt,first,...))
+ else
+ flush(currentcatcodes,fmt)
+ end
+end
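+
+-- Illustrative sketch only, not part of the patch: the two fprint variants;
+-- context.fprint takes a catcode table number first, tex.fprint formats under
+-- the current regime. The \framed usage is just an example.
+--
+-- context.fprint(tex.ctxcatcodes,"\\framed{%s}","hello")
+-- tex.fprint("%s pt",10)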
+
-- logging
local trace_stack = { }
-local normalflush = flush
-local normalwriter = writer
-local currenttrace = nil
-local nofwriters = 0
-local nofflushes = 0
+local normalflush = flush
+local normalflushdirect = flushdirect
+local normalflushraw = flushraw
+local normalwriter = writer
+local currenttrace = nil
+local nofwriters = 0
+local nofflushes = 0
statistics.register("traced context", function()
if nofwriters > 0 or nofflushes > 0 then
@@ -426,54 +602,92 @@ statistics.register("traced context", function()
end
end)
-local tracedwriter = function(parent,...)
+local tracedwriter = function(parent,...) -- also catcodes ?
nofwriters = nofwriters + 1
- local t, f, n = { "w : " }, flush, 0
- flush = function(...)
+ local savedflush = flush
+ local savedflushdirect = flushdirect -- unlikely to be used here
+ local t, n = { "w : - : " }, 1
+ local traced = function(normal,catcodes,...) -- todo: check for catcodes
+ local s = concat({...})
+ s = gsub(s,"\r","<<newline>>") -- unlikely
n = n + 1
- t[n] = concat({...},"",2)
- normalflush(...)
+ t[n] = s
+ normal(catcodes,...)
end
+ flush = function(...) traced(normalflush, ...) end
+ flushdirect = function(...) traced(normalflushdirect,...) end
normalwriter(parent,...)
- flush = f
+ flush = savedflush
+ flushdirect = savedflushdirect
currenttrace(concat(t))
end
-local tracedflush = function(...)
+-- we could reuse collapsed
+
+local traced = function(normal,one,two,...)
nofflushes = nofflushes + 1
- normalflush(...)
- local t = { ... }
- t[1] = "f : " -- replaces the catcode
- for i=2,#t do
- local ti = t[i]
- local tt = type(ti)
- if tt == "string" then
- -- ok
- elseif tt == "number" then
- -- ok
+ if two then
+ -- only catcodes if 'one' is number
+ normal(one,two,...)
+ local catcodes = type(one) == "number" and one
+ local arguments = catcodes and { two, ... } or { one, two, ... }
+ local collapsed, c = { format("f : %s : ", catcodes or '-') }, 1
+ for i=1,#arguments do
+ local argument = arguments[i]
+ local argtype = type(argument)
+ c = c + 1
+ if argtype == "string" then
+ collapsed[c] = gsub(argument,"\r","<<newline>>")
+ elseif argtype == "number" then
+ collapsed[c] = argument
+ else
+ collapsed[c] = format("<<%s>>",tostring(argument))
+ end
+ end
+ currenttrace(concat(collapsed))
+ else
+ -- no catcodes
+ normal(one)
+ local argtype = type(one)
+ if argtype == "string" then
+ currenttrace(format("f : - : %s",gsub(one,"\r","<<newline>>")))
+ elseif argtype == "number" then
+ currenttrace(format("f : - : %s",one))
else
- t[i] = format("<%s>",tostring(ti))
+ currenttrace(format("f : - : <<%s>>",tostring(one)))
end
- -- currenttrace(format("%02i: %s",i-1,tostring(t[i])))
end
- currenttrace(concat(t))
end
+local tracedflush = function(...) traced(normalflush, ...) end
+local tracedflushdirect = function(...) traced(normalflushdirect,...) end
+
local function pushlogger(trace)
+ trace = trace or report_context
insert(trace_stack,currenttrace)
currenttrace = trace
- flush, writer = tracedflush, tracedwriter
- context.__flush = flush
- return flush, writer
+ --
+ flush = tracedflush
+ flushdirect = tracedflushdirect
+ writer = tracedwriter
+ --
+ context.__flush = flush
+ context.__flushdirect = flushdirect
+ --
+ return flush, writer, flushdirect
end
local function poplogger()
currenttrace = remove(trace_stack)
if not currenttrace then
- flush, writer = normalflush, normalwriter
- context.__flush = flush
+ flush = normalflush
+ flushdirect = normalflushdirect
+ writer = normalwriter
+ --
+ context.__flush = flush
+ context.__flushdirect = flushdirect
end
- return flush, writer
+ return flush, writer, flushdirect
end
local function settracing(v)
@@ -488,12 +702,34 @@ end
trackers.register("context.trace",settracing)
-context.pushlogger = pushlogger
-context.poplogger = poplogger
-context.settracing = settracing
+context.pushlogger = pushlogger
+context.poplogger = poplogger
+context.settracing = settracing
+
+-- -- untested, no time now:
+--
+-- local tracestack, tracestacktop = { }, false
+--
+-- function context.pushtracing(v)
+-- insert(tracestack,tracestacktop)
+-- if type(v) == "function" then
+-- pushlogger(v)
+-- v = true
+-- else
+-- pushlogger()
+-- end
+-- tracestacktop = v
+-- settracing(v)
+-- end
+--
+-- function context.poptracing()
+-- poplogger()
+-- tracestacktop = remove(tracestack) or false
+-- settracing(tracestacktop)
+-- end
function context.getlogger()
- return flush, writer
+ return flush, writer, flushdirect
end
local trace_cld = false trackers.register("context.files", function(v) trace_cld = v end)
@@ -688,65 +924,6 @@ end
setmetatable(delayed, { __index = indexer, __call = caller } )
---~ Not that useful yet. Maybe something like this when the main loop
---~ is a coroutine. It also does not help taking care of nested calls.
---~ Even worse, it interferes with other mechanisms using context calls.
---~
---~ local create, yield, resume = coroutine.create, coroutine.yield, coroutine.resume
---~ local getflush, setflush = context.getflush, context.setflush
---~ local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
---~
---~ function context.getflush()
---~ return flush
---~ end
---~
---~ function context.setflush(newflush)
---~ local oldflush = flush
---~ flush = newflush or flush
---~ return oldflush
---~ end
---~
---~ function context.direct(f)
---~ local routine = create(f)
---~ local oldflush = getflush()
---~ function newflush(...)
---~ oldflush(...)
---~ yield(true)
---~ end
---~ setflush(newflush)
---~
---~ -- local function resumecontext()
---~ -- local done = resume(routine)
---~ -- if not done then
---~ -- return
---~ -- end
---~ -- resumecontext() -- stack overflow ... no tail recursion
---~ -- end
---~ -- context.resume = resumecontext
---~ -- texsprint(ctxcatcodes,"\\ctxlua{context.resume()}")
---~
---~ local function resumecontext()
---~ local done = resume(routine)
---~ if not done then
---~ return
---~ end
---~ -- texsprint(ctxcatcodes,"\\exitloop")
---~ texsprint(ctxcatcodes,"\\ctxlua{context.resume()}") -- can be simple macro call
---~ end
---~ context.resume = resumecontext
---~ -- texsprint(ctxcatcodes,"\\doloop{\\ctxlua{context.resume()}}") -- can be fast loop at the tex end
---~ texsprint(ctxcatcodes,"\\ctxlua{context.resume()}")
---~
---~ end
---~
---~ function something()
---~ context("\\setbox0")
---~ context("\\hbox{hans hagen xx}")
---~ context("\\the\\wd0/\\box0")
---~ end
---~
---~ context.direct(something)
-
-- helpers:
-- we could have faster calls here
diff --git a/tex/context/base/cldf-int.lua b/tex/context/base/cldf-int.lua
index 2291fd849..55db9fa0b 100644
--- a/tex/context/base/cldf-int.lua
+++ b/tex/context/base/cldf-int.lua
@@ -11,10 +11,13 @@ if not modules then modules = { } end modules ['mult-clm'] = {
-- needs checking
-- todo: multilingual
-local texsprint, ctxcatcodes, vrbcatcodes = tex.sprint, tex.ctxcatcodes, tex.vrbcatcodes
local format, insert, remove, concat = string.format, table.insert, table.remove, table.concat
local unpack = unpack or table.unpack
+local contextsprint = context.sprint
+local ctxcatcodes = tex.ctxcatcodes
+local vrbcatcodes = tex.vrbcatcodes
+
local trace_define = false trackers.register("context.define", function(v) trace_define = v end)
interfaces = interfaces or { }
@@ -25,6 +28,7 @@ _clma_ = utilities.parsers.settings_to_array
local starters, stoppers, macros, stack = { }, { }, { }, { }
local checkers = {
+ [0] = "",
"\\dosingleempty",
"\\dodoubleempty",
"\\dotripleempty",
@@ -55,6 +59,9 @@ end
_clmn_ = tonumber
+local estart = interfaces.elements.start
+local estop = interfaces.elements.stop
+
function interfaces.definecommand(name,specification) -- name is optional
if type(name) == "table" then
specification = name
@@ -66,53 +73,75 @@ function interfaces.definecommand(name,specification) -- name is optional
local environment = specification.environment
if na == 0 then
if environment then
- texsprint(ctxcatcodes,"\\clmb{",name,"}{\\ctxlua{_clmb_('",name,"')}}")
- texsprint(ctxcatcodes,"\\clme{",name,"}{\\ctxlua{_clme_('",name,"')}}")
- else
- texsprint(ctxcatcodes,"\\clmm{",name,"}{\\ctxlua{_clmm_('",name,"')}}")
+ contextsprint(ctxcatcodes,"\\setuvalue{",estart,name,"}{\\ctxlua{_clmb_('",name,"')}}")
+ contextsprint(ctxcatcodes,"\\setuvalue{",estop, name,"}{\\ctxlua{_clme_('",name,"')}}")
+ end
+ if not environment or environment == "both" then
+ contextsprint(ctxcatcodes,"\\setuvalue{", name,"}{\\ctxlua{_clmm_('",name,"')}}")
end
else
+ -- we could flush immediately but tracing would be bad then
stack[name] = { }
local opt, done = 0, false
+ local snippets = { } -- we can reuse it
local mkivdo = "\\mkivdo" .. name -- maybe clddo
- texsprint(ctxcatcodes,"\\def",mkivdo)
+ snippets[#snippets+1] = "\\def"
+ snippets[#snippets+1] = mkivdo
for i=1,na do
local a = arguments[i]
local variant = a[1]
if variant == "option" then
- texsprint(ctxcatcodes,"[#",i,"]")
+ snippets[#snippets+1] = "[#"
+ snippets[#snippets+1] = i
+ snippets[#snippets+1] = "]"
if not done then
opt = opt + 1
end
else
done = true -- no more optional checking after this
- texsprint(ctxcatcodes,"#",i)
+ snippets[#snippets+1] = "#"
+ snippets[#snippets+1] = i
end
end
if environment then
- texsprint(ctxcatcodes,"{\\ctxlua{_clmb_('",name,"'")
+ snippets[#snippets+1] = "{\\ctxlua{_clmb_('"
+ snippets[#snippets+1] = name
+ snippets[#snippets+1] = "'"
else
- texsprint(ctxcatcodes,"{\\ctxlua{_clmm_('",name,"'")
+ snippets[#snippets+1] = "{\\ctxlua{_clmm_('"
+ snippets[#snippets+1] = name
+ snippets[#snippets+1] = "'"
end
for i=1,na do
local a = arguments[i]
local variant = a[2]
if variant == "list" then
- texsprint(ctxcatcodes,",_clma_([[#",i,"]])")
+ snippets[#snippets+1] = ",_clma_([[#"
+ snippets[#snippets+1] = i
+ snippets[#snippets+1] = "]])"
elseif variant == "hash" then
- texsprint(ctxcatcodes,",_clmh_([[#",i,"]])")
+ snippets[#snippets+1] = ",_clmh_([[#"
+ snippets[#snippets+1] = i
+ snippets[#snippets+1] = "]])"
elseif variant == "number" then
- texsprint(ctxcatcodes,",_clmn_([[#",i,"]])")
+ snippets[#snippets+1] = ",_clmn_([[#"
+ snippets[#snippets+1] = i
+ snippets[#snippets+1] = "]])"
else
- texsprint(ctxcatcodes,",[[#",i,"]]")
+ snippets[#snippets+1] = ",[[#"
+ snippets[#snippets+1] = i
+ snippets[#snippets+1] = "]]"
end
end
- texsprint(ctxcatcodes,")}}")
+ snippets[#snippets+1] = ")}}"
+ contextsprint(ctxcatcodes,unpack(snippets))
if environment then
- texsprint(ctxcatcodes,"\\clme{",name,"}{\\ctxlua{_clme_('",name,"')}}")
- texsprint(ctxcatcodes,"\\clmb{",name,"}{",checkers[opt],mkivdo,"}")
- else
- texsprint(ctxcatcodes,"\\clmm{",name,"}{",checkers[opt],mkivdo,"}")
+ -- needs checking
+ contextsprint(ctxcatcodes,"\\setuvalue{",estart,name,"}{",checkers[opt],mkivdo,"}")
+ contextsprint(ctxcatcodes,"\\setuvalue{",estop, name,"}{\\ctxlua{_clme_('",name,"')}}")
+ end
+ if not environment or environment == "both" then
+ contextsprint(ctxcatcodes,"\\setuvalue{", name,"}{",checkers[opt],mkivdo,"}")
end
end
if environment then
diff --git a/tex/context/base/cldf-int.mkiv b/tex/context/base/cldf-int.mkiv
index e20ca54de..da684694a 100644
--- a/tex/context/base/cldf-int.mkiv
+++ b/tex/context/base/cldf-int.mkiv
@@ -17,8 +17,10 @@
\unprotect
-\unexpanded\def\clmb#1{\unexpanded\expandafter\def\csname\e!start#1\endcsname}
-\unexpanded\def\clme#1{\unexpanded\expandafter\def\csname\e!stop #1\endcsname}
-\unexpanded\def\clmm#1{\unexpanded\expandafter\def\csname #1\endcsname}
+% now done directly
+%
+% \unexpanded\def\clmb#1{\unexpanded\expandafter\def\csname\e!start#1\endcsname}
+% \unexpanded\def\clme#1{\unexpanded\expandafter\def\csname\e!stop #1\endcsname}
+% \unexpanded\def\clmm#1{\unexpanded\expandafter\def\csname #1\endcsname}
\protect \endinput
diff --git a/tex/context/base/cldf-ver.lua b/tex/context/base/cldf-ver.lua
index fc681e830..237078157 100644
--- a/tex/context/base/cldf-ver.lua
+++ b/tex/context/base/cldf-ver.lua
@@ -6,8 +6,6 @@ if not modules then modules = { } end modules ['cldf-ver'] = {
license = "see context related readme files"
}
--- tex.print == line with endlinechar appended
-
local concat, tohandle = table.concat, table.tohandle
local find, splitlines = string.find, string.splitlines
local tostring, type = tostring, type
diff --git a/tex/context/base/cldf-ver.mkiv b/tex/context/base/cldf-ver.mkiv
index 4dcefc437..8e009035d 100644
--- a/tex/context/base/cldf-ver.mkiv
+++ b/tex/context/base/cldf-ver.mkiv
@@ -1,6 +1,6 @@
%D \module
%D [ file=cldf-com,
-%D version=2010.10.19,,
+%D version=2010.10.19,
%D title=\CONTEXT\ \LUA\ Document Functions,
%D subtitle=Verbatim,
%D author=Hans Hagen,
diff --git a/tex/context/base/colo-ini.lua b/tex/context/base/colo-ini.lua
index 7e0a4ac76..5c9ad61b5 100644
--- a/tex/context/base/colo-ini.lua
+++ b/tex/context/base/colo-ini.lua
@@ -727,15 +727,25 @@ end
local patterns = { "colo-imp-%s.mkiv", "colo-imp-%s.tex", "colo-%s.mkiv", "colo-%s.tex" }
+local function action(name,foundname)
+ context.startreadingfile()
+ context.input(foundname)
+ context.showcolormessage("colors",4,name)
+ context.stopreadingfile()
+end
+
+local function failure(name)
+ context.showcolormessage("colors",5,name)
+end
+
function colors.usecolors(name)
- commands.uselibrary(name,patterns,function(name,foundname)
- context.startreadingfile()
- context.input(foundname)
- context.showcolormessage("colors",4,name)
- context.stopreadingfile()
- end, function(name)
- context.showcolormessage("colors",5,name)
- end)
+ commands.uselibrary {
+ name = name,
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
end
-- interface
diff --git a/tex/context/base/colo-ini.mkiv b/tex/context/base/colo-ini.mkiv
index 1ba6026ce..0efe5df4e 100644
--- a/tex/context/base/colo-ini.mkiv
+++ b/tex/context/base/colo-ini.mkiv
@@ -148,7 +148,7 @@
\unexpanded\def\color [#1]{\groupedcommand{\doactivatecolor{#1}}{}}
\unexpanded\def\startcolor [#1]{\begingroup\doactivatecolor{#1}}
\unexpanded\def\stopcolor {\endgroup}
-\unexpanded\def\graycolor [#1]{\groupedcommand{\dosetcolormodel{gray}\getvalue{#1}}{}}
+\unexpanded\def\graycolor [#1]{\groupedcommand{\dosetcolormodel{gray}\doactivatecolor{#1}}{}}
\unexpanded\def\colored [#1]{\groupedcommand{\definecolor[@colored@][#1]\doactivatecolor{@colored@}}{}}
\unexpanded\def\fastcolored [#1]#2{\begingroup\dodefinefastcolor[@colored@][#1]\doactivatecolor{@colored@}#2\endgroup}
\unexpanded\def\predefinecolor [#1]{\flushatshipout{\hbox{\color[#1]{}}}}
diff --git a/tex/context/base/colo-run.mkiv b/tex/context/base/colo-run.mkiv
index 02da462d3..363b39c47 100644
--- a/tex/context/base/colo-run.mkiv
+++ b/tex/context/base/colo-run.mkiv
@@ -51,7 +51,7 @@
&\doifinset{\v!name}{#2}{\strut#1}&\cr%
\processpalet[#1]\doshowpalet\crcr}}}
-\gdef\showhorizontalpalet[#1][#2]%
+\gdef\showhorizontalpalet[#1][#2]% todo: bTABLE etc
{\localvbox
{\offinterlineskip
\setuppalet[#1]
@@ -86,7 +86,7 @@
\vskip.25ex
\everypar{\strut}
\veryraggedcenter
- \let\colorformatseparator=\endgraf
+ \let\colorformatseparator\endgraf
\colorvalue{##1}}}%
\processpalet[#1]\doshowpalet}%
\crcr}}}
diff --git a/tex/context/base/cont-log.mkiv b/tex/context/base/cont-log.mkiv
index 3708d9b07..c0ed606e0 100644
--- a/tex/context/base/cont-log.mkiv
+++ b/tex/context/base/cont-log.mkiv
@@ -185,19 +185,19 @@
\let\LuaTeX \luaTeX
\let\XETEX \XeTeX
-\unexpanded\def\MkApproved % joke, not used so it might move
- {\dontleavehmode\rotate
- [\c!rotation={\ifnum\texengine=\luatexengine\ctxlua{tex.write(45-45*\the\luatexversion/100)}\else0\fi},
- \c!align=\v!middle,
- \c!foregroundstyle=\v!type,
- \c!foregroundcolor=darkred,
- \c!frame=\v!on,
- \c!offset=1ex,
- \c!background=\v!color,
- \c!backgroundcolor=lightgray,
- \c!framecolor=darkred,
- \c!rulethickness=2pt]
- {Mk\ifnum\texengine=\luatexengine IV\else II\fi\\approved}}
+% \unexpanded\def\MkApproved % joke, not used so it might move
+% {\dontleavehmode\rotate
+% [\c!rotation={\ifnum\texengine=\luatexengine\cldcontext{45-45*\the\luatexversion/100}\else0\fi},
+% \c!align=\v!middle,
+% \c!foregroundstyle=\v!type,
+% \c!foregroundcolor=darkred,
+% \c!frame=\v!on,
+% \c!offset=1ex,
+% \c!background=\v!color,
+% \c!backgroundcolor=lightgray,
+% \c!framecolor=darkred,
+% \c!rulethickness=2pt]
+% {Mk\ifnum\texengine=\luatexengine IV\else II\fi\\approved}}
% \unexpanded\def\luaTeX
% {\dontleavehmode\begingroup
diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii
index bcdc5211d..b26aabfdb 100644
--- a/tex/context/base/cont-new.mkii
+++ b/tex/context/base/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2011.06.29 09:57}
+\newcontextversion{2011.07.13 20:14}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 664170616..748e9a13b 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2011.06.29 09:57}
+\newcontextversion{2011.07.13 20:14}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
@@ -230,10 +230,12 @@
\setuplabeltext
[placeholder={, moved}]
+% move to support module, and then use context(...)
+
\startluacode
function commands.percentageof(str,dim)
local n = str:match("^(.*)%%$")
- tex.sprint(tex.ctxcatcodes,(n and (tonumber(n)/100)*dim .. "sp") or str)
+ context.sprint(tex.ctxcatcodes,(n and (tonumber(n)/100)*dim .. "sp") or str)
end
\stopluacode
diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii
index fd7be141b..d9b8ec945 100644
--- a/tex/context/base/context.mkii
+++ b/tex/context/base/context.mkii
@@ -20,7 +20,7 @@
%D your styles and modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2011.06.29 09:57}
+\edef\contextversion{2011.07.13 20:14}
%D For those who want to use this:
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 49675b67c..129660060 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -20,7 +20,7 @@
%D your styles and modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2011.06.29 09:57}
+\edef\contextversion{2011.07.13 20:14}
%D For those who want to use this:
@@ -92,7 +92,10 @@
\loadmarkfile{syst-fnt}
\loadmarkfile{syst-rtp}
-\loadmarkfile{supp-fil}
+\loadmkvifile{file-ini}
+\loadmkvifile{file-res}
+\loadmkvifile{file-lib}
+
\loadmarkfile{supp-dir}
\loadmarkfile{char-ini}
@@ -150,7 +153,10 @@
\loadmarkfile{typo-ini}
\loadmarkfile{page-ins}
-\loadmarkfile{core-fil}
+
+\loadmkvifile{file-syn}
+\loadmkvifile{file-mod}
+
\loadmarkfile{core-con}
\loadmarkfile{cont-fil}
@@ -252,7 +258,7 @@
%loadmarkfile{page-mar} % also commented code in anch-pgr.mkiv
\loadmarkfile{typo-mar}
-\loadmarkfile{core-job} % why so late?
+\loadmkvifile{file-job} % why so late?
\loadmarkfile{buff-ini}
\loadmarkfile{buff-ver}
diff --git a/tex/context/base/core-con.lua b/tex/context/base/core-con.lua
index df91a9c17..8679b6b8e 100644
--- a/tex/context/base/core-con.lua
+++ b/tex/context/base/core-con.lua
@@ -864,7 +864,7 @@ function commands.currentdate(str,currentlanguage) -- j and jj obsolete
auto = true
end
if ordinal and whatordinal then
- commands.ordinal(whatordinal,currentlanguage)
+ context("%s",converters.ordinal(whatordinal,currentlanguage))
end
end
end
diff --git a/tex/context/base/core-env.mkiv b/tex/context/base/core-env.mkiv
index ce2155153..dbf2b5944 100644
--- a/tex/context/base/core-env.mkiv
+++ b/tex/context/base/core-env.mkiv
@@ -282,7 +282,7 @@
{\doifdefinedelse{\??su:#1}} % doto: ifcsname
% \startluasetups oeps
-% tex.print("DONE")
+% context("DONE")
% a = 1
% b = 1
% \stopluasetups
@@ -302,11 +302,11 @@
% \xmlsetup{123}{zzz}
%
% \startbuffer[what]
-% tex.print("DONE")
+% context("DONE")
% \stopbuffer
%
% \startbuffer
-% tex.print("MORE")
+% context("MORE")
% \stopbuffer
%
% \ctxluabuffer[what]
diff --git a/tex/context/base/core-job.lua b/tex/context/base/core-job.lua
deleted file mode 100644
index f56d2ca1f..000000000
--- a/tex/context/base/core-job.lua
+++ /dev/null
@@ -1,212 +0,0 @@
-if not modules then modules = { } end modules ['core-job'] = {
- version = 1.001,
- comment = "companion to core-job.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local texsprint, texprint, texwrite = tex.sprint, tex.print, tex.write
-local ctxcatcodes, texcatcodes = tex.ctxcatcodes, tex.texcatcodes
-local lower, format, find, gmatch, gsub, match = string.lower, string.format, string.find, string.gmatch, string.gsub, string.match
-local concat = table.concat
-
-local commands, resolvers, context = commands, resolvers, context
-
--- main code
-
-resolvers.maxreadlevel = 3
-
-directives.register("resolvers.maxreadlevel", function(v) resolvers.maxreadlevel = tonumber(v) or resolvers.maxreadlevel end)
-
-local report_examodes = logs.reporter("system","examodes") -- maybe another category
-
-local function exists(n)
- if io.exists(n) then
- return n
- else
- n = file.addsuffix(n,'tex')
- if io.exists(n) then
- return n
- end
- end
- return nil
-end
-
-function resolvers.findctxfile(name,maxreadlevel)
- if file.is_qualified_path(name) then
- return name
- else
- -- not that efficient, too many ./ lookups
- local n = "./" .. name
- local found = exists(n)
- if found then
- return found
- else
- for i=1,maxreadlevel or resolvers.maxreadlevel or 0 do
- n = "../" .. n
- found = exists(n)
- if found then
- return found
- end
- end
- end
- return resolvers.findfile(name) or ""
- end
-end
-
-function commands.maxreadlevel()
- texwrite(resolvers.maxreadlevel)
-end
-
-function commands.processfile(name,maxreadlevel)
- name = resolvers.findctxfile(name,maxreadlevel)
- if name ~= "" then
- texsprint(ctxcatcodes,format("\\input %s\\relax",name)) -- we need \input {name}
- end
-end
-
-function commands.doifinputfileelse(name,maxreadlevel)
- commands.doifelse(resolvers.findctxfile(name,maxreadlevel) ~= "")
-end
-
-function commands.locatefilepath(name,maxreadlevel)
- texsprint(texcatcodes,file.dirname(resolvers.findctxfile(name,maxreadlevel)))
-end
-
-function commands.usepath(paths,maxreadlevel)
- resolvers.registerextrapath(paths)
- texsprint(texcatcodes,concat(resolvers.instance.extra_paths or {}, ""))
-end
-
-function commands.usesubpath(subpaths,maxreadlevel)
- resolvers.registerextrapath(nil,subpaths)
- texsprint(texcatcodes,concat(resolvers.instance.extra_paths or {}, ""))
-end
-
-function commands.usezipfile(name,tree)
- if tree and tree ~= "" then
- resolvers.usezipfile(format("zip:///%s?tree=%s",name,tree))
- else
- resolvers.usezipfile(format("zip:///%s",name))
- end
-end
-
--- for the moment here, maybe a module
-
---~ <?xml version='1.0' standalone='yes'?>
---~ <exa:variables xmlns:exa='htpp://www.pragma-ade.com/schemas/exa-variables.rng'>
---~ <exa:variable label='mode:pragma'>nee</exa:variable>
---~ <exa:variable label='mode:variant'>standaard</exa:variable>
---~ </exa:variables>
-
-local function convertexamodes(str)
- local x = xml.convert(str)
- for e in xml.collected(x,"exa:variable") do
- local label = e.at and e.at.label
- if label and label ~= "" then
- local data = xml.text(e)
- local mode = match(label,"^mode:(.+)$")
- if mode then
- context.enablemode { format("%s:%s",mode,data) }
- end
- context.setvariable("exa:variables",label,(gsub(data,"([{}])","\\%1")))
- end
- end
-end
-
--- we need a system file option: ,. .. etc + paths but no tex lookup so resolvers.findfile is wrong here
-
-function commands.loadexamodes(filename)
- if not filename or filename == "" then
- filename = file.removesuffix(tex.jobname)
- end
- filename = resolvers.findfile(file.addsuffix(filename,'ctm')) or ""
- if filename ~= "" then
- report_examodes("loading %s",filename) -- todo: message system
- convertexamodes(io.loaddata(filename))
- else
- report_examodes("no mode file %s",filename) -- todo: message system
- end
-end
-
-local report_options = logs.reporter("system","options")
-
-function commands.logoptionfile(name)
- -- todo: xml if xml logmode
- local f = io.open(name)
- if f then
- logs.pushtarget("logfile")
- report_options("begin of optionfile")
- report_options()
- for line in f:lines() do
- report_options(line)
- end
- report_options()
- report_options("end of optionfile")
- f:close()
- logs.poptarget()
- end
-end
-
---~ set functions not ok and not faster on mk runs either
---~
---~ local function doifcommonelse(a,b)
---~ local ba = find(a,",")
---~ local bb = find(b,",")
---~ if ba and bb then
---~ for sa in gmatch(a,"[^ ,]+") do
---~ for sb in gmatch(b,"[^ ,]+") do
---~ if sa == sb then
---~ context.setvalue("commalistelement",sa)
---~ return true
---~ end
---~ end
---~ end
---~ elseif ba then
---~ for sa in gmatch(a,"[^ ,]+") do
---~ if sa == b then
---~ context.setvalue("commalistelement",b)
---~ return true
---~ end
---~ end
---~ elseif bb then
---~ for sb in gmatch(b,"[^ ,]+") do
---~ if a == sb then
---~ context.setvalue("commalistelement",sb)
---~ return true
---~ end
---~ end
---~ else
---~ if a == b then
---~ context.setvalue("commalistelement",a)
---~ return true
---~ end
---~ end
---~ context.letvalueempty("commalistelement")
---~ return false
---~ end
---~ local function doifinsetelse(a,b)
---~ local bb = find(b,",")
---~ if bb then
---~ for sb in gmatch(b,"[^ ,]+") do
---~ if a == sb then
---~ texsprint(ctxcatcodes,"\\def\\commalistelement{",a,"}")
---~ return true
---~ end
---~ end
---~ else
---~ if a == b then
---~ texsprint(ctxcatcodes,"\\def\\commalistelement{",a,"}")
---~ return true
---~ end
---~ end
---~ texsprint(ctxcatcodes,"\\let\\commalistelement\\empty")
---~ return false
---~ end
---~ function commands.doifcommon (a,b) commands.doif (doifcommonelse(a,b)) end
---~ function commands.doifnotcommon (a,b) commands.doifnot (doifcommonelse(a,b)) end
---~ function commands.doifcommonelse(a,b) commands.doifelse(doifcommonelse(a,b)) end
---~ function commands.doifinset (a,b) commands.doif (doifinsetelse(a,b)) end
---~ function commands.doifnotinset (a,b) commands.doifnot (doifinsetelse(a,b)) end
---~ function commands.doifinsetelse (a,b) commands.doifelse(doifinsetelse(a,b)) end
diff --git a/tex/context/base/core-job.mkiv b/tex/context/base/core-job.mkiv
deleted file mode 100644
index a413d1d6a..000000000
--- a/tex/context/base/core-job.mkiv
+++ /dev/null
@@ -1,313 +0,0 @@
-%D \module
-%D [ file=core-job, % copied from main-001,
-%D version=1997.03.31,
-%D title=\CONTEXT\ Core Macros,
-%D subtitle=Job Handling,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D This module is still to be split and documented.
-
-\writestatus{loading}{ConTeXt Core Macros / Job Handling}
-
-\unprotect
-
-\registerctxluafile{core-job}{1.001}
-
-\let \currentproject \empty
-\let \currentproduct \empty
-\let \currentenvironment \empty
-\let \currentcomponent \empty
-
-\let \loadedfiles \empty
-\let \processedfiles \empty
-
-\let \nomorefiles \relax
-
-\let \allinputpaths \empty
-\let \locatedfilepath \empty
-
-\newcount\textlevel
-\newcount\fileprocesslevel
-
-\setvalue{\c!file::0}{\jobname}
-
-\def\processedfile % is used in styles, don't change !
- {\getvalue{\c!file::\number\fileprocesslevel}}
-
-\def\dostarttextfile#1%
- {\global\advance\fileprocesslevel\plusone
- \setxvalue{\c!file::\number\fileprocesslevel}{#1}%
- \doglobal\addtocommalist{#1}\processedfiles}
-
-\def\dostoptextfile
- {\global\advance\fileprocesslevel\minusone}
-
-\def\processlocalfile#1#2%
- {#1{#2}\donothing{\readfile{#2}\donothing\donothing}}
-
-\def\processfile #1{\ctxcommand{processfile("#1")}}
-\def\doifinputfileelse #1{\ctxcommand{doifinputfileelse("#1")}}
-\def\locatefilepath #1{\edef\locatedfilepath{\ctxcommand{locatefilepath("#1")}}}
-\def\usepath [#1]{\edef\allinputpaths{\ctxcommand{usepath("#1")}}}
-\def\usesubpath [#1]{\edef\allinputpaths{\ctxcommand{usesubpath("#1")}}}
-\def\usezipfile {\dodoubleempty\dousezipfile}
-\def\dousezipfile[#1][#2]{\ctxcommand{usezipfile("#1","#2")}} % [filename] [optional subtree]
-\def\loadexamodes {\dosingleempty\doloadexamodes}
-\def\doloadexamodes [#1]{\ctxcommand{loadexamodes("#1")}}
-
-\def\registerfileinfo[#1#2]#3% geen \showmessage ?
- {\writestatus\m!system{#1#2 file #3 at line \the\inputlineno}}
-
-\def\doloadsystemfile#1% only mkiv files
- {\readsysfile{#1.\mksuffix}{\showmessage\m!system2{#1.\mksuffix}}\donothing}
-
-\def\loadsystemfiles
- {\doloadsystemfile\f!newfilename % new code, to be integrated at some point, plus fixes posted on the list
- \doloadsystemfile\f!locfilename % new code, somewhat experimental, not distributed (outside the dev group)
- \doloadsystemfile\f!expfilename % new code, very experimental, can be engine specific, mostly for me only
- \doloadsystemfile\f!sysfilename} % local settings, but probably not that good an idea to use
-
-%D We don't want multiple jobfiles to interfere.
-
-\def\loadoptionfile % todo : mark document.* tables as storage
- {\readjobfile{\jobname.\f!optionextension}
- {\writestatus\m!system{\jobname.\f!optionextension\space loaded}%
- \ctxcommand{logoptionfile("\jobname.\f!optionextension")}}%
- {\writestatus\m!system{no \jobname.\f!optionextension}}}
-
-% Most natural ...
-%
-% \def\doateverystarttext
-% {\the\everystarttext
-% \global\let\doateverystarttext\relax}
-%
-% ... most practical, since we can load env's in a
-% something.run file (nested \starttext's; see for
-% instance x-res-08, where we definitely want to
-% open the file!).
-
-\def\doateverystarttext
- {\the\everystarttext
- \global\everystarttext\emptytoks}
-
-\unexpanded\def\starttext
- {\doateverystarttext
- \ifcase\textlevel
- \registerfileinfo[begin]\jobfilename
- \fi
- \global\advance\textlevel\plusone}
-
-\unexpanded\def\stoptext
- {\global\advance\textlevel\minusone
- \ifnum\textlevel>\zerocount \else
- \flushfinallayoutpage % optional
- \page % anyway
- \the\everystoptext
- %\the\everybye %
- %\the\everygoodbye % == \end (new)
- %\expandafter\normalend %
-\ifcase\textlevel
- \registerfileinfo[end]\jobfilename
-\fi
- \expandafter\finalend
- \fi}
-
-\def\forcequitjob#1%
- {\writestatus\m!system{forcing quit: #1}%
- \batchmode
- \dorecurse\textlevel{\stoptext}
- \normalend}
-
-\let\autostoptext\relax
-
-\def\autostarttext
- {\ifcase\textlevel
- \starttext
- \writestatus\m!system{auto \string\starttext..\string\stoptext}%
- \let\autostoptext\stoptext
- \fi}
-
-\def\finalend
- {\ifnum\textlevel>\zerocount \else
- \the\everybye
- \the\everygoodbye
- \doifsometokselse\everynotabene{\writeline\the\everynotabene\writeline}\donothing
- \global\everybye \emptytoks % rather unneeded
- \global\everygoodbye\emptytoks % but for sure
- \expandafter\normalend
- \fi}
-
-\let\end\finalend
-
-\def\emergencyend
- {\writestatus\m!system{invalid \@EA\string\csname\e!start\v!text\endcsname...\@EA\string\csname\e!stop\v!text\endcsname\space structure}%
- \stoptext}
-
-\def\currentfile{\inputfilename}
-
-\def\doexecutefileonce#1%
- {\beforesplitstring#1\at.\to\currentfile
- \doifnotinset\currentfile\loadedfiles
- {\addtocommalist\currentfile\loadedfiles
- \doexecutefile{#1}}}
-
-\def\doexecutefile#1%
- {\registerfileinfo[begin]{#1}%
- \dostarttextfile{#1}%
- \processfile{#1}%
- \dostoptextfile
- \registerfileinfo[end]{#1}}
-
-\def\donotexecutefile#1%
- {}
-
-\unexpanded\def\project {\doifnextoptionalelse\useproject \redoproject}
-\unexpanded\def\environment{\doifnextoptionalelse\useenvironment\redoenvironment}
-\unexpanded\def\product {\doifnextoptionalelse\useproduct \redoproduct}
-\unexpanded\def\component {\doifnextoptionalelse\usecomponent \redocomponent}
-
-\def\redoproject #1 {\useproject [#1]}
-\def\redoenvironment #1 {\useenvironment[#1]}
-\def\redoproduct #1 {\useproduct [#1]}
-\def\redocomponent #1 {\usecomponent [#1]}
-
-\def\useproject[#1]%
- {}
-
-\def\useenvironment[#1]% maybe commalist
- {\pushmacro\dostartenvironment % we could use a depth counter
- \pushmacro\stopenvironment
- \unexpanded\def\dostartenvironment[##1]{}%
- \let\stopenvironment\relax
- \startreadingfile
- \doexecutefileonce{#1}
- \stopreadingfile
- \popmacro\stopenvironment
- \popmacro\dostartenvironment}
-
-\def\useproduct[#1]%
- {}
-
-\def\usecomponent[#1]%
- {\dostarttextfile{#1}%
- \processfile{#1}%
- \dostoptextfile}
-
-\newcount\filelevel
-
-\let\currentcomponent \v!text
-\let\currentcomponentpath\f!currentpath
-
-\def\donextlevel#1#2#3#4#5#6#7\\% we will move this to lua
- {\pushmacro\currentcomponent
- \pushmacro\currentcomponentpath
- \let\currentcomponent#1%
- \setsystemmode\currentcomponent
- \splitfilename{#1}%
- \ifx\splitoffpath\empty
- \let\currentcomponentpath\f!currentpath
- \else
- \let\currentcomponentpath\splitoffpath
- \fi
- \beforesplitstring#7\at.\to#2\relax % can become path + base
- \ifcase\filelevel\relax
- \starttext
- \unexpanded\def\useproject [##1]{#3{##1}}%
- \unexpanded\def\useenvironment[##1]{#4{##1}}%
- \unexpanded\def\useproduct [##1]{#5{##1}}%
- \unexpanded\def\usecomponent [##1]{#6{##1}}%
- \fi
- \advance\filelevel\plusone
- \addtocommalist{#1}\loadedfiles}
-
-\def\doprevlevel
- {\popmacro\currentcomponentpath
- \popmacro\currentcomponent
- \setsystemmode\currentcomponent
- \ifnum\filelevel=\plusone
- \expandafter\stoptext
- \else
- \advance\filelevel\minusone
- \expandafter\endinput
- \fi}
-
-\unexpanded\def\startproject {\doifnextoptionalelse\dostartproject \redostartproject}
-\unexpanded\def\startenvironment{\doifnextoptionalelse\dostartenvironment\redostartenvironment}
-\unexpanded\def\startproduct {\doifnextoptionalelse\dostartproduct \redostartproduct}
-\unexpanded\def\startcomponent {\doifnextoptionalelse\dostartcomponent \redostartcomponent}
-
-\def\redostartproject #1 {\dostartproject [#1]}
-\def\redostartenvironment #1 {\dostartenvironment[#1]}
-\def\redostartproduct #1 {\dostartproduct [#1]}
-\def\redostartcomponent #1 {\dostartcomponent [#1]}
-
-\def\dostartproject[#1]%
- {\donextlevel\v!project\currentproject
- \donotexecutefile\doexecutefileonce
- \doexecutefileonce\doexecutefile#1\\}
-
-\def\dostartproduct[#1]%
- {\doateverystarttext
- \donextlevel\v!product\currentproduct
- \doexecutefileonce\doexecutefileonce
- \donotexecutefile\doexecutefile#1\\}
-
-\def\dostartcomponent[#1]%
- {\doateverystarttext
- \donextlevel\v!component\currentcomponent
- \doexecutefileonce\doexecutefileonce
- \donotexecutefile\doexecutefile#1\\}
-
-\def\dostartenvironment[#1]%
- {\donextlevel\v!environment\currentenvironment
- \donotexecutefile\doexecutefileonce
- \donotexecutefile\donotexecutefile#1\\}
-
-% \startproject test
-% 1: \startmode[*project] project \stopmode \endgraf
-% 2: \startmode[*product] product \stopmode \endgraf
-% \stopproject
-
-\unexpanded\def\stopproject {\doprevlevel}
-\unexpanded\def\stopproduct {\doprevlevel}
-\unexpanded\def\stopcomponent {\doprevlevel}
-\unexpanded\def\stopenvironment{\doprevlevel}
-
-% more or less replaced by modes
-
-\setvalue{\e!start\v!localenvironment}[#1]%
- {\let\loadedlocalenvironments\empty
- \def\docommand##1%
- {\beforesplitstring##1\at.\to\someevironment
- \addtocommalist\someevironment\loadedlocalenvironments}%
- \processcommalist[#1]\docommand
- \doifcommonelse{\currentproject,\currentproduct,\currentcomponent,\currentenvironment}\loadedlocalenvironments
- {\letvalue{\e!stop\v!localenvironment}\relax}
- {\grabuntil{\e!stop\v!localenvironment}\gobbleoneargument}}
-
-\setvalue{\v!localenvironment}{\doifnextoptionalelse\uselocalenvironment\redolocalenvironment}
-
-\def\redolocalenvironment#1 {\uselocalenvironment[#1]}
-
-\def\uselocalenvironment[#1]{\doexecutefileonce{#1}}
-
-\neverypar\emptytoks % here ?
-
-% \appendtoks \flushnotes \to \everypar
-% \appendtoks \synchronizesidefloats \to \everypar
-% \appendtoks \checkindentation \to \everypar
-% \appendtoks \showparagraphnumber \to \everypar
-% %appendtoks \flushmargincontents \to \everypar
-% \appendtoks \flushcommentanchors \to \everypar
-% \appendtoks \synchronizenotes \to \everypar
-
-% \appendtoks \flushnotes \to \everydisplay
-% \appendtoks \adjustsidefloatdisplaylines \to \everydisplay
-
-\protect \endinput
diff --git a/tex/context/base/core-sys.mkiv b/tex/context/base/core-sys.mkiv
index cf3c7f7db..e6d27e42b 100644
--- a/tex/context/base/core-sys.mkiv
+++ b/tex/context/base/core-sys.mkiv
@@ -35,14 +35,14 @@
%D End of lines to the output. \TEX\ will map this onto the platform specific
%D line ending. I hate this mess.
-\edef\operatingsystem {\ctxwrite {os.platform}}
+\edef\operatingsystem {\cldcontext{os.platform}}
-\def \jobfilename {\ctxsprint{environment.jobfilename or ""}}
-\def \jobfilesuffix {\ctxsprint{environment.jobfilesuffix or ""}}
-\def \inputfilebarename{\ctxsprint{environment.inputfilebarename or ""}}
-\def \inputfilesuffix {\ctxsprint{environment.inputfilesuffix or ""}}
-\def \inputfilename {\ctxsprint{environment.inputfilename or ""}}
-\def \outputfilename {\ctxsprint{environment.outputfilename or ""}}
+\def \jobfilename {\cldcontext{environment.jobfilename or ""}}
+\def \jobfilesuffix {\cldcontext{environment.jobfilesuffix or ""}}
+\def \inputfilebarename{\cldcontext{environment.inputfilebarename or ""}}
+\def \inputfilesuffix {\cldcontext{environment.inputfilesuffix or ""}}
+\def \inputfilename {\cldcontext{environment.inputfilename or ""}}
+\def \outputfilename {\cldcontext{environment.outputfilename or ""}}
\newtoks \everysetupsystem
@@ -85,7 +85,7 @@
\to \everysetupsystem
\appendtoks
- \ctxlua {commands.updatefilenames("\inputfilename","\outputfilename")}%
+ \ctxcommand{updatefilenames("\inputfilename","\outputfilename")}%
\to \everysetupsystem
% Some mechanisms (see x-res-01) use either \jobfilename or
@@ -287,7 +287,7 @@
\definecomplexorsimpleempty\define
% \startluacode
-% local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
+% local contextsprint, ctxcatcodes = context.sprint, tex.ctxcatcodes
% local format, match, gmatch, rep = string.format, string.match, string.gmatch, string.rep
% local empty = {
% "single",
@@ -316,38 +316,38 @@
% else
% n = 0
% end
-% texsprint(ctxcatcodes,format("\\unexpanded\\def\\%s",cmd))
+% contextsprint(ctxcatcodes,format("\\unexpanded\\def\\%s",cmd))
% if #a > 0 then
-% texsprint(ctxcatcodes,format("{\\do%sempty\\do%s}",empty[#a],cmd))
-% texsprint(ctxcatcodes,format("\\def\\do%s",cmd))
+% contextsprint(ctxcatcodes,format("{\\do%sempty\\do%s}",empty[#a],cmd))
+% contextsprint(ctxcatcodes,format("\\def\\do%s",cmd))
% for i=1,#a do
-% texsprint(ctxcatcodes,"[#",i,"]")
+% contextsprint(ctxcatcodes,"[#",i,"]")
% end
-% texsprint(ctxcatcodes,"{")
+% contextsprint(ctxcatcodes,"{")
% for i=#a,1,-1 do
-% texsprint(ctxcatcodes,format("\\if%sargument",check[i]))
-% texsprint(ctxcatcodes,format("\\def\\next{\\dodo%s",cmd))
+% contextsprint(ctxcatcodes,format("\\if%sargument",check[i]))
+% contextsprint(ctxcatcodes,format("\\def\\next{\\dodo%s",cmd))
% for j=1,#a-i do
-% texsprint(ctxcatcodes,format("[%s]",a[j]))
+% contextsprint(ctxcatcodes,format("[%s]",a[j]))
% end
% for j=1,i do
-% texsprint(ctxcatcodes,format("[#%s]",j))
+% contextsprint(ctxcatcodes,format("[#%s]",j))
% end
-% texsprint(ctxcatcodes,"}")
+% contextsprint(ctxcatcodes,"}")
% if i == 1 then
-% texsprint(ctxcatcodes,rep("\\fi",#a))
+% contextsprint(ctxcatcodes,rep("\\fi",#a))
% else
-% texsprint(ctxcatcodes,"\\else")
+% contextsprint(ctxcatcodes,"\\else")
% end
% end
-% texsprint(ctxcatcodes,"\\next}")
-% texsprint(ctxcatcodes,format("\\def\\dodo%s",cmd))
+% contextsprint(ctxcatcodes,"\\next}")
+% contextsprint(ctxcatcodes,format("\\def\\dodo%s",cmd))
% for i=1,#a do
-% texsprint(ctxcatcodes,"[#",i,"]")
+% contextsprint(ctxcatcodes,"[#",i,"]")
% end
% end
% for i=1,n do
-% texsprint(ctxcatcodes,"#",#a+i)
+% contextsprint(ctxcatcodes,"#",#a+i)
% end
% end
% \stopluacode
diff --git a/tex/context/base/core-two.lua b/tex/context/base/core-two.lua
index 51f6dd890..606030329 100644
--- a/tex/context/base/core-two.lua
+++ b/tex/context/base/core-two.lua
@@ -7,7 +7,6 @@ if not modules then modules = { } end modules ['core-two'] = {
}
local remove, concat = table.remove, table.concat
-local texprint = tex.print
local allocate = utilities.storage.allocate
--[[ldx--
@@ -64,21 +63,21 @@ end
function jobpasses.get(id)
local jti = collected[id]
if jti and #jti > 0 then
- texprint(remove(jti,1))
+ context(remove(jti,1))
end
end
function jobpasses.first(id)
local jti = collected[id]
if jti and #jti > 0 then
- texprint(jti[1])
+ context(jti[1])
end
end
function jobpasses.last(id)
local jti = collected[id]
if jti and #jti > 0 then
- texprint(jti[#jti])
+ context(jti[#jti])
end
end
@@ -87,19 +86,19 @@ jobpasses.check = jobpasses.first
function jobpasses.find(id,n)
local jti = collected[id]
if jti and jti[n] then
- texprint(jti[n])
+ context(jti[n])
end
end
function jobpasses.count(id)
local jti = collected[id]
- texprint((jti and #jti) or 0)
+ context((jti and #jti) or 0)
end
function jobpasses.list(id)
local jti = collected[id]
if jti then
- texprint(concat(jti,','))
+ context(concat(jti,','))
end
end
@@ -130,13 +129,13 @@ end
function jobpasses.getdata(id,index,default)
local jti = collected[id]
local value = jit and jti[index]
- texprint((value ~= "" and value) or default or "")
+ context((value ~= "" and value) or default or "")
end
function jobpasses.getfield(id,index,tag,default)
local jti = collected[id]
jti = jti and jti[index]
local value = jti and jti[tag]
- texprint((value ~= "" and value) or default or "")
+ context((value ~= "" and value) or default or "")
end
diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua
index 6083ffc2d..7f0805c6e 100644
--- a/tex/context/base/data-exp.lua
+++ b/tex/context/base/data-exp.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower, char = string.format, string.find, string.gmatch, string.lower, string.char
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -264,13 +264,22 @@ end
--~ test("ヒラギノ明朝 /Pro W3;")
--~ test("ヒラギノ明朝 Pro W3")
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -308,15 +317,17 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-local cache = { }
+local fullcache = { }
function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(cache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
if usecache then
- local files = cache[path]
+ local files = fullcache[realpath]
if files then
if trace_locating then
report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
@@ -327,25 +338,99 @@ function resolvers.scanfiles(path,branch,usecache)
if trace_locating then
report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
if usecache then
- cache[path] = files
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
end
- statistics.stoptiming(cache)
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
-function resolvers.scantime()
- return statistics.elapsedtime(cache)
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
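+
+-- the gathered scan statistics can be inspected in the same way as the scan
+-- itself (a sketch, in the style of the test call below):
+--
+--~ print(table.serialize(resolvers.scandata()))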
--~ print(table.serialize(resolvers.scanfiles("t:/sources")))
diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua
index 4d48c3027..38611830d 100644
--- a/tex/context/base/data-res.lua
+++ b/tex/context/base/data-res.lua
@@ -813,6 +813,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -1072,6 +1073,7 @@ end
local function find_intree(filename,filetype,wantedfiles,allresults)
local typespec = resolvers.variableofformat(filetype)
local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
if pathlist and #pathlist > 0 then
-- list search
local filelist = collect_files(wantedfiles)
@@ -1094,7 +1096,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
local done = false
-- using file list
- if filelist then
+ if filelist then -- database
-- compare list entries with permitted pattern -- /xx /xx//
local expression = makepathexpression(pathname)
if trace_detail then
@@ -1123,7 +1125,10 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
end
end
- if not done then
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
pathname = gsub(pathname,"/+$","")
pathname = resolvers.resolve(pathname)
local scheme = url.hasscheme(pathname)
@@ -1145,7 +1150,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
if not done and doscan then
-- collect files in path (and cache the result)
- local files = resolvers.scanfiles(pname,false,true)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
local subpath = files[w]
@@ -1194,7 +1199,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
end
if #result > 0 then
- return "intree", result
+ return method, result
end
end
end
diff --git a/tex/context/base/file-ini.lua b/tex/context/base/file-ini.lua
new file mode 100644
index 000000000..4f8b5b6e3
--- /dev/null
+++ b/tex/context/base/file-ini.lua
@@ -0,0 +1,38 @@
+if not modules then modules = { } end modules ['file-ini'] = {
+ version = 1.001,
+ comment = "companion to file-ini.mkvi",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+<p>It's more convenient to manipulate filenames (paths) in
+<l n='lua'/> than in <l n='tex'/>. These methods have counterparts
+at the <l n='tex'/> side.</p>
+--ldx]]--
+
+resolvers.jobs = resolvers.jobs or { }
+
+local texcount = tex.count
+local setvalue = context.setvalue
+
+function commands.splitfilename(fullname)
+ local t = file.nametotable(fullname)
+ local path = t.path
+ texcount.splitoffkind = (path == "" and 0) or (path == '.' and 1) or 2
+ setvalue("splitofffull",fullname)
+ setvalue("splitoffpath",path)
+ setvalue("splitoffname",t.name)
+ setvalue("splitoffbase",t.base)
+ setvalue("splitofftype",t.suffix)
+end
+
+function commands.doifparentfileelse(n)
+ commands.doifelse(n == environment.jobname or n == environment.jobname .. '.tex' or n == environment.outputfilename)
+end
+
+function commands.doiffileexistelse(name)
+ local foundname = resolvers.findtexfile(name)
+ commands.doifelse(foundname and foundname ~= "")
+end
diff --git a/tex/context/base/file-ini.mkvi b/tex/context/base/file-ini.mkvi
new file mode 100644
index 000000000..b8ee9e2d4
--- /dev/null
+++ b/tex/context/base/file-ini.mkvi
@@ -0,0 +1,229 @@
+%D \module
+%D [ file=file-ini, % was supp-fil,
+%D version=20110701, % 1995.10.10,
+%D title=\CONTEXT\ File Macros,
+%D subtitle=Helpers,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D \TEX\ operates on files, so it is no wonder that there
+%D is a separate module for file commands. In \CONTEXT\ files
+%D are used for several purposes:
+%D
+%D \startitemize[packed]
+%D \item general textual input
+%D \item logging status information
+%D \item saving registers, lists and references
+%D \item buffering deferred textual input
+%D \stopitemize
+%D
+%D When dealing with files we can load them as a whole, using
+%D the \type{\input} primitive or load them on a line||by||line
+%D basis, using \type{\read}. Writing is always done line by
+%D line, using \type{\write}.
+
+\writestatus{loading}{ConTeXt File Macros / Helpers}
+
+\registerctxluafile{file-ini}{1.001}
+
+\unprotect
+
+%D \macros
+%D {scratchread, scratchwrite}
+%D
+%D We define a scratch file for reading. Keep in mind that
+%D the number of files is limited to~16, so use this one when
+%D possible. We also define a scratch output file.
+
+\ifx\undefined\scratchread \newread \scratchread \fi
+\ifx\undefined\scratchwrite \newwrite\scratchwrite \fi
+
+%D Seldom needed:
+
+\def\openinputfile #handle#name{\immediate\openin #handle={#name}\relax}
+\def\openoutputfile #handle#name{\immediate\openout#handle={#name}\relax}
+
+\def\closeinputfile #handle{\immediate\closein #handle\relax}
+\def\closeoutputfile#handle{\immediate\closeout#handle\relax}
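+
+%D A minimal usage sketch (the file name is just an example):
+%D
+%D \starttyping
+%D \openinputfile\scratchread{somedata.txt}
+%D \read\scratchread to \someline
+%D \closeinputfile\scratchread
+%D \stoptyping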
+
+%D \macros
+%D {writeln}
+%D
+%D This saves a few tokens:
+
+\def\writeln#handle{\write#handle{}}
+
+%D \macros
+%D {pushendofline,popendofline}
+%D
+%D When we are loading files in the middle of the typesetting
+%D process, for instance when we load references, we have to be
+%D sure that the reading process does not generate so||called
+%D 'spurious spaces'. This can be prevented by giving the
+%D line ending character the \CATCODE\ of a comment. This is
+%D accomplished by
+%D
+%D \starttyping
+%D \pushendofline
+%D ... reading ...
+%D \popendofline
+%D \stoptyping
+
+\newcount\endoflinelevel
+
+\def\pushendofline
+ {\advance\endoflinelevel\plusone
+ \expandafter\chardef\csname :eol:\number\endoflinelevel\endcsname\catcode\endoflineasciicode
+ \catcode\endoflineasciicode\commentcatcode\relax}
+
+\def\popendofline
+ {\catcode\endoflineasciicode\csname :eol:\number\endoflinelevel\endcsname
+ \advance\endoflinelevel\minusone}
+
+\def\restoreendofline
+ {\catcode\endoflineasciicode\endoflinecatcode}
+
+%D \macros
+%D {startreadingfile,stopreadingfile}
+%D
+%D A low level capsule:
+
+\newcount\readingfilelevel
+\newtoks \everystartreadingfile
+\newtoks \everystopreadingfile
+
+\unexpanded\def\startreadingfile% better: an every and \setnormalcatcodes
+ {\global\advance\readingfilelevel\plusone
+ \the\everystartreadingfile
+ \pushcatcodetable % safeguard
+ \setcatcodetable\ctxcatcodes
+ \ctxlua{regimes.push()}}% temporarily this way
+
+\unexpanded\def\stopreadingfile
+ {\popcatcodetable % safeguard
+ \ctxlua{regimes.pop()}% temporarily this way
+ \the\everystopreadingfile
+ \global\advance\readingfilelevel\minusone}
+
+% %D \macros
+% %D {unlinkfile}
+% %D
+% %D Sometimes we want to make sure a file is deleted, so here
+% %D is a macro that does the job. It's named after the \PERL\
+% %D one.
+%
+% \def\unlinkfile#name{\ctxlua{os.remove([[#name]])}} % obsolete
+
+%D \macros
+%D {input, normalinput}
+%D
+%D Sometimes we run into trouble when \type {\input} wants to get
+%D expanded, e.g. in a \type {\write} (which happens in the metafun
+%D manual when we permit long MP lines). So, instead of fixing that,
+%D we go for a redefinition of \type {\input}. Of course it's better
+%D to use \type {\readfile} or \type {\processfile}.
+
+\unexpanded\def\input{\normalinput}
+
+\def\inputgivenfile#name{\normalinput{#name}}
+
+%D \macros
+%D {doiffileelse}
+%D
+%D The next alternative only checks if a file is present. No
+%D loading is done. This one obeys the standard \TEX\ lookup.
+%D
+%D \starttyping
+%D \doiffileelse {filename} {found} {not found}
+%D \stoptyping
+
+\def\doiffileexistselse#name{\ctxcommand{doiffileexistelse([[#name]])}}
+
+\def\doiffileelse {\doiffileexistselse}
+\def\doiffile #name{\doiffileexistselse{#name}\firstofoneargument\gobbleoneargument}
+\def\doifnotfile #name{\doiffileexistselse{#name}\gobbleoneargument\firstofoneargument}
+
+%D \macros
+%D {doifparentfileelse}
+%D
+%D \starttyping
+%D \doifparentfileelse{filename}{yes}{no}
+%D \stoptyping
+
+\ifx\outputfilename\undefined \def\outputfilename{\jobname} \fi
+
+\def\doifparentfileelse#name{\ctxcommand{doifparentfileelse([[#name]])}}
+
+%D \macros
+%D {splitfilename}
+%D
+%D \startbuffer
+%D \def\showfilesplit
+%D {\bgroup \tttf
+%D \hbox{(full: \splitofffull)}\space
+%D \hbox{(path: \splitoffpath)}\space
+%D \hbox{(base: \splitoffbase)}\space
+%D \hbox{(name: \splitoffname)}\space
+%D \hbox{(type: \splitofftype)}\space
+%D \egroup}
+%D
+%D \splitfilename{c:/aa/bb/cc/dd.ee.ff} \showfilesplit \endgraf
+%D \splitfilename{c:/aa/bb/cc/dd.ee} \showfilesplit \endgraf
+%D \splitfilename{c:/aa/bb/cc/dd} \showfilesplit \endgraf
+%D
+%D \splitfilename{dd.ee.ff} \showfilesplit \endgraf
+%D \splitfilename{dd.ee} \showfilesplit \endgraf
+%D \splitfilename{dd} \showfilesplit \endgraf
+%D \stopbuffer
+%D
+%D \start \typebuffer \getbuffer \stop
+
+\newconstant\kindoffile % 0=normal 1=full path spec (or http) / set at the lua end
+
+\def\splitoffroot{.} \newconstant\splitoffkind
+
+\let\splitofffull\empty
+\let\splitoffpath\empty
+\let\splitoffbase\empty
+\let\splitoffname\empty
+\let\splitofftype\empty
+
+\def\splitfilename#name{\ctxcommand{splitfilename([[#name]])}}
+
+%D \macros
+%D {doonlyonce, doinputonce, doendinputonce}
+%D
+%D Macro packages in particular need to be loaded only once.
+%D Repetitive loading not only costs time; relocating registers
+%D can also abort the run, because \TEX's capacity is limited.
+%D One can prevent multiple execution and loading by using one
+%D of these:
+%D
+%D \starttyping
+%D \doonlyonce{actions}
+%D \doinputonce{filename}
+%D \doendinputonce{filename}
+%D \stoptyping
+%D
+%D This command obeys the standard method for locating files.
+
+\long\def\doonlyonce#whatever%
+ {\doifundefinedelse{@@@#whatever@@@}
+ {\letgvalue{@@@#whatever@@@}\empty
+ \firstofoneargument}
+ {\gobbleoneargument}}
+
+\def\doinputonce#name%
+ {\doonlyonce{#name}{\doiffileelse{#name}{\inputgivenfile{#name}}\donothing}}
+
+\def\doendinputonce#name%
+ {\doifdefined{@@@#name@@@}\endinput}
+
+\def\forgetdoingonce#whatever%
+ {\global\letbeundefined{@@@#whatever@@@}}
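+
+%D For example, a style can guard itself against being loaded twice
+%D (a sketch with made||up names):
+%D
+%D \starttyping
+%D \doonlyonce{mystyle}{\writestatus{mystyle}{loading helpers}}
+%D \doinputonce{mystyle-helpers}
+%D \stoptyping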
+
+\protect \endinput
diff --git a/tex/context/base/file-job.lua b/tex/context/base/file-job.lua
new file mode 100644
index 000000000..21b3f069e
--- /dev/null
+++ b/tex/context/base/file-job.lua
@@ -0,0 +1,622 @@
+if not modules then modules = { } end modules ['file-job'] = {
+ version = 1.001,
+ comment = "companion to file-job.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- in retrospect it's not that bad to deal with the nesting
+-- and push/popping at the tex end
+
+local format, gsub, match = string.format, string.gsub, string.match
+local insert, remove, concat = table.insert, table.remove, table.concat
+
+local commands, resolvers, context = commands, resolvers, context
+
+local trace_jobfiles = false trackers.register("system.jobfiles", function(v) trace_jobfiles = v end)
+
+local report_jobfiles = logs.reporter("system","jobfiles")
+
+local texsetcount = tex.setcount
+local elements = interfaces.elements
+local variables = interfaces.variables
+local logsnewline = logs.newline
+local logspushtarget = logs.pushtarget
+local logspoptarget = logs.poptarget
+
+local v_outer = variables.outer
+local v_text = variables.text
+local v_project = variables.project
+local v_environment = variables.environment
+local v_product = variables.product
+local v_component = variables.component
+
+-- main code .. there is some overlap .. here we have loc://
+
+local findbyscheme = resolvers.finders.byscheme -- same helper as in file-mod.lua
+
+local function findctxfile(name) -- loc ? any ?
+ if file.is_qualified_path(name) then
+ return name
+ elseif not url.hasscheme(name) then
+ return findbyscheme("loc",name) or ""
+ else
+ return resolvers.findtexfile(name) or ""
+ end
+end
+
+resolvers.findctxfile = findctxfile
+
+function commands.processfile(name)
+ name = findctxfile(name)
+ if name ~= "" then
+ context.input(name)
+ end
+end
+
+function commands.doifinputfileelse(name)
+ commands.doifelse(findctxfile(name) ~= "")
+end
+
+function commands.locatefilepath(name)
+ context(file.dirname(findctxfile(name)))
+end
+
+function commands.usepath(paths)
+ resolvers.registerextrapath(paths)
+end
+
+function commands.usesubpath(subpaths)
+ resolvers.registerextrapath(nil,subpaths)
+end
+
+function commands.allinputpaths()
+ context(concat(resolvers.instance.extra_paths or { },","))
+end
+
+function commands.usezipfile(name,tree)
+ if tree and tree ~= "" then
+ resolvers.usezipfile(format("zip:///%s?tree=%s",name,tree))
+ else
+ resolvers.usezipfile(format("zip:///%s",name))
+ end
+end
+
+local report_system = logs.reporter("system","options")
+local report_options = logs.reporter("used options")
+
+function commands.copyfiletolog(name)
+ local f = io.open(name)
+ if f then
+ logspushtarget("logfile")
+ logsnewline()
+ report_system("start used options")
+ logsnewline()
+ for line in f:lines() do
+ report_options(line)
+ end
+ logsnewline()
+ report_system("stop used options")
+ logsnewline()
+ logspoptarget()
+ f:close()
+ end
+end
+
+-- moved from tex to lua:
+
+local texpatterns = { "%s.mkvi", "%s.mkiv", "%s.tex" }
+local luapatterns = { "%s.luc", "%s.lua" }
+local cldpatterns = { "%s.cld" }
+local xmlpatterns = { "%s.xml" }
+
+local uselibrary = commands.uselibrary
+local input = context.input
+
+-- status
+--
+-- these need to be synced with input stream:
+
+local processstack = { }
+local processedfile = ""
+local processedfiles = { }
+
+function commands.processedfile()
+ context(processedfile)
+end
+
+function commands.processedfiles()
+ context(concat(processedfiles,","))
+end
+
+function commands.dostarttextfile(name)
+ insert(processstack,name)
+ processedfile = name
+ insert(processedfiles,name)
+end
+
+function commands.dostoptextfile()
+ processedfile = remove(processstack) or ""
+end
+
+local function startprocessing(name,notext)
+ if not notext then
+ -- report_system("begin file %s at line %s",name,status.linenumber or 0)
+ context.dostarttextfile(name)
+ end
+end
+
+local function stopprocessing(notext)
+ if not notext then
+ context.dostoptextfile()
+ -- report_system("end file %s at line %s",name,status.linenumber or 0)
+ end
+end
+
+--
+
+local action = function(name,foundname) input(foundname) end
+local failure = function(name,foundname) end
+
+local function usetexfile(name,onlyonce,notext)
+ startprocessing(name,notext)
+ uselibrary {
+ name = name,
+ patterns = texpatterns,
+ action = action,
+ failure = failure,
+ onlyonce = onlyonce,
+ }
+ stopprocessing(notext)
+end
+
+local action = function(name,foundname) dofile(foundname) end
+local failure = function(name,foundname) end
+
+local function useluafile(name,onlyonce,notext)
+ uselibrary {
+ name = name,
+ patterns = luapatterns,
+ action = action,
+ failure = failure,
+ onlyonce = onlyonce,
+ }
+end
+
+local action = function(name,foundname) dofile(foundname) end
+local failure = function(name,foundname) end
+
+local function usecldfile(name,onlyonce,notext)
+ startprocessing(name,notext)
+ uselibrary {
+ name = name,
+ patterns = cldpatterns,
+ action = action,
+ failure = failure,
+ onlyonce = onlyonce,
+ }
+ stopprocessing(notext)
+end
+
+local action = function(name,foundname) context.xmlprocess(foundname,"main","") end
+local failure = function(name,foundname) end
+
+local function usexmlfile(name,onlyonce,notext)
+ startprocessing(name,notext)
+ uselibrary {
+ name = name,
+ patterns = xmlpatterns,
+ action = action,
+ failure = failure,
+ onlyonce = onlyonce,
+ }
+ stopprocessing(notext)
+end
+
+commands.usetexfile = usetexfile
+commands.useluafile = useluafile
+commands.usecldfile = usecldfile
+commands.usexmlfile = usexmlfile
+
+local suffixes = {
+ mkvi = usetexfile,
+ mkiv = usetexfile,
+ tex = usetexfile,
+ luc = useluafile,
+ lua = useluafile,
+ cld = usecldfile,
+ xml = usexmlfile,
+ [""] = usetexfile,
+}
+
+local function useanyfile(name,onlyonce)
+ local s = suffixes[file.suffix(name)]
+ if s then
+ s(file.removesuffix(name),onlyonce)
+ else
+ resolvers.readfilename(name) -- might change
+ end
+end
+
+commands.useanyfile = useanyfile
+
+function resolvers.jobs.usefile(name,onlyonce,notext)
+ local s = suffixes[file.suffix(name)]
+ if s then
+ s(file.removesuffix(name),onlyonce,notext)
+ end
+end
+
+-- document structure
+
+local report_system = logs.reporter("system")
+
+local textlevel = 0 -- inaccessible for user, we need to define counter textlevel at the tex end
+
+local function dummyfunction() end
+
+local function startstoperror()
+ report_system("invalid \\%s%s ... \\%s%s structure",elements.start,v_text,elements.stop,v_text)
+ startstoperror = dummyfunction
+end
+
+local function starttext()
+ if textlevel == 0 then
+ if trace_jobfiles then
+ report_jobfiles("starting text")
+ end
+ -- registerfileinfo[begin]jobfilename
+ context.dostarttext()
+ end
+ textlevel = textlevel + 1
+ texsetcount("global","textlevel",textlevel)
+end
+
+local function stoptext()
+ if textlevel == 0 then
+ startstoperror()
+ elseif textlevel > 0 then
+ textlevel = textlevel - 1
+ end
+ texsetcount("global","textlevel",textlevel)
+ if textlevel <= 0 then
+ if trace_jobfiles then
+ report_jobfiles("stopping text")
+ end
+ context.dostoptext()
+ -- registerfileinfo[end]jobfilename
+ context.finalend()
+ commands.stoptext = dummyfunction
+ end
+end
+
+commands.starttext = starttext
+commands.stoptext = stoptext
+
+function commands.forcequitjob(reason)
+ if reason then
+ report_system("forcing quit: %s",reason)
+ else
+ report_system("forcing quit")
+ end
+ context.batchmode()
+ for i=textlevel,0,-1 do -- a while on textlevel would not terminate here: the
+ context.stoptext() -- counter only changes once tex processes the flushed tokens
+ end
+end
+
+function commands.forceendjob()
+ report_system([[don't use \end to finish a document]])
+ context.stoptext()
+end
+
+function commands.autostarttext()
+ if textlevel == 0 then
+ report_system([[auto \starttext ... \stoptext]])
+ end
+ context.starttext()
+end
+
+commands.autostoptext = stoptext
+
+-- project structure
+
+function commands.processfilemany(name)
+ useanyfile(name,false)
+end
+
+function commands.processfileonce(name)
+ useanyfile(name,true)
+end
+
+function commands.processfilenone(name)
+ -- skip file
+end
+
+--
+
+local typestack = { }
+local pathstack = { }
+
+local currenttype = v_text
+local currentpath = "."
+
+local tree = { type = "text", name = "", branches = { } }
+local treestack = { }
+local top = tree.branches
+local root = tree
+
+local stacks = {
+ [v_project ] = { },
+ [v_product ] = { },
+ [v_component ] = { },
+ [v_environment] = { },
+}
+
+--
+
+local report_system = logs.reporter("system","structure")
+local report_structure = logs.reporter("used structure")
+
+local function pushtree(what,name)
+ local t = { }
+ top[#top+1] = { type = what, name = name, branches = t }
+ insert(treestack,top)
+ top = t
+end
+
+local function poptree()
+ top = remove(treestack)
+ -- inspect(top)
+end
+
+local function log_tree(top,depth)
+ report_structure("%s%s: %s",depth,top.type,top.name)
+ local branches = top.branches
+ if #branches > 0 then
+ depth = depth .. " "
+ for i=1,#branches do
+ log_tree(branches[i],depth)
+ end
+ end
+end
+
+local function logtree()
+ logspushtarget("logfile")
+ logsnewline()
+ report_system("start used stucture")
+ logsnewline()
+ root.name = environment.jobname
+ log_tree(root,"")
+ logsnewline()
+ report_system("stop used structure")
+ logsnewline()
+ logspoptarget()
+end
+
+luatex.registerstopactions(logtree)
+
+-- component: small unit, which may or may not contain components itself
+-- product : combination of components
+
+local processors = utilities.storage.allocate {
+ [v_outer] = {
+ [v_text] = { "many", context.processfilemany },
+ [v_project] = { "once", context.processfileonce },
+ [v_environment] = { "once", context.processfileonce },
+ [v_product] = { "many", context.processfileonce },
+ [v_component] = { "many", context.processfilemany },
+ },
+ [v_text] = {
+ [v_text] = { "many", context.processfilemany },
+ [v_project] = { "once", context.processfileonce }, -- none *
+ [v_environment] = { "once", context.processfileonce }, -- once
+ [v_product] = { "once", context.processfileonce }, -- none *
+ [v_component] = { "many", context.processfilemany }, -- many
+ },
+ [v_project] = {
+ [v_text] = { "many", context.processfilemany },
+ [v_project] = { "none", context.processfilenone }, -- none
+ [v_environment] = { "once", context.processfileonce }, -- once
+ [v_product] = { "none", context.processfilenone }, -- once *
+ [v_component] = { "none", context.processfilenone }, -- many *
+ },
+ [v_environment] = {
+ [v_text] = { "many", context.processfilemany },
+ [v_project] = { "none", context.processfilenone }, -- none
+ [v_environment] = { "once", context.processfileonce }, -- once
+ [v_product] = { "none", context.processfilenone }, -- none
+ [v_component] = { "none", context.processfilenone }, -- none
+ },
+ [v_product] = {
+ [v_text] = { "many", context.processfilemany },
+ [v_project] = { "once", context.processfileonce }, -- once
+ [v_environment] = { "once", context.processfileonce }, -- once
+ [v_product] = { "many", context.processfilemany }, -- none *
+ [v_component] = { "many", context.processfilemany }, -- many
+ },
+ [v_component] = {
+ [v_text] = { "many", context.processfilemany },
+ [v_project] = { "once", context.processfileonce }, -- once
+ [v_environment] = { "once", context.processfileonce }, -- once
+ [v_product] = { "none", context.processfilenone }, -- none
+ [v_component] = { "many", context.processfilemany }, -- many
+ }
+}
+
+local start = {
+ [v_text] = nil,
+ [v_project] = nil,
+ [v_environment] = context.startreadingfile,
+ [v_product] = context.starttext,
+ [v_component] = context.starttext,
+}
+
+local stop = {
+ [v_text] = nil,
+ [v_project] = nil,
+ [v_environment] = context.stopreadingfile,
+ [v_product] = context.stoptext,
+ [v_component] = context.stoptext,
+}
+
+resolvers.jobs.processors = processors
+
+local function topofstack(what)
+ local stack = stacks[what]
+ return stack and stack[#stack] or environment.jobname
+end
+
+local done = { }
+local tolerant = true
+
+local function process(what,name)
+ local depth = #typestack
+ local process
+ if not tolerant then
+ -- okay, would be best but not compatible with mkii
+ process = processors[currenttype][what]
+ elseif depth == 0 then
+ -- could be a component, product or (brr) project
+ if trace_jobfiles then
+ report_jobfiles("%s : %s > %s (case 1)",depth,currenttype,v_outer)
+ end
+ process = processors[v_outer][what]
+ elseif depth == 1 and typestack[1] == v_text then
+ -- we're still not doing a component or product
+ if trace_jobfiles then
+ report_jobfiles("%s : %s > %s (case 2)",depth,currenttype,v_outer)
+ end
+ process = processors[v_outer][what]
+ else
+ process = processors[currenttype][what]
+ end
+ if process then
+ local method = process[1]
+ if method == "none" then
+ if trace_jobfiles then
+ report_jobfiles("%s : %s : ignoring %s '%s' in %s '%s'",depth,method,what,name,currenttype,topofstack(currenttype))
+ end
+ elseif method == "once" and done[name] then
+ if trace_jobfiles then
+ report_jobfiles("%s : %s : skipping %s '%s' in %s '%s'",depth,method,what,name,currenttype,topofstack(currenttype))
+ end
+ else
+ -- keep in mind that we also handle "once" at the file level
+ -- so there is a double catch
+ done[name] = true
+ local before = start[what]
+ local after = stop [what]
+ if trace_jobfiles then
+ report_jobfiles("%s : %s : processing %s '%s' in %s '%s'",depth,method,what,name,currenttype,topofstack(currenttype))
+ end
+ if before then
+ before()
+ end
+ process[2](name)
+ if after then
+ after()
+ end
+ end
+ else
+ if trace_jobfiles then
+ report_jobfiles("%s : ? : ignoring %s '%s' in %s '%s'",depth,what,name,currenttype,topofstack(currenttype))
+ end
+ end
+end
+
+function commands.useproject (name) process(v_project, name) end
+function commands.useenvironment(name) process(v_environment,name) end
+function commands.useproduct (name) process(v_product, name) end
+function commands.usecomponent (name) process(v_component, name) end
+
+-- -- todo: setsystemmode to currenttype
+
+local start = {
+ [v_project] = context.starttext,
+ [v_product] = context.starttext,
+ [v_component] = context.starttext,
+}
+
+local stop = {
+ [v_project] = context.stoptext,
+ [v_product] = context.stoptext,
+ [v_component] = context.stoptext,
+}
+
+local function gotonextlevel(what,name) -- todo: something with suffix name
+ insert(stacks[what],name)
+ insert(typestack,currenttype)
+ insert(pathstack,currentpath)
+ currenttype = what
+ currentpath = file.dirname(name)
+ pushtree(what,name)
+ if start[what] then
+ start[what]()
+ end
+end
+
+local function gotopreviouslevel(what)
+ if stop[what] then
+ stop[what]()
+ end
+ poptree()
+ currentpath = remove(pathstack) or "."
+ currenttype = remove(typestack) or v_text
+ remove(stacks[what]) -- not currenttype ... weak recovery
+ context.endinput()
+end
+
+function commands.startproject (name) gotonextlevel(v_project, name) end
+function commands.startproduct (name) gotonextlevel(v_product, name) end
+function commands.startcomponent (name) gotonextlevel(v_component, name) end
+function commands.startenvironment(name) gotonextlevel(v_environment,name) end
+
+function commands.stopproject () gotopreviouslevel(v_project ) end
+function commands.stopproduct () gotopreviouslevel(v_product ) end
+function commands.stopcomponent () gotopreviouslevel(v_component ) end
+function commands.stopenvironment() gotopreviouslevel(v_environment) end
+
+function commands.currentproject () context(topofstack(v_project )) end
+function commands.currentproduct () context(topofstack(v_product )) end
+function commands.currentcomponent () context(topofstack(v_component )) end
+function commands.currentenvironment() context(topofstack(v_environment)) end
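+
+-- A typical layout at the tex end that ends up in these stacks (a sketch
+-- with made-up names):
+--
+-- \startproduct prd_mybook
+--   \project    prj_mybooks
+--   \component  c_introduction
+--   \component  c_conclusion
+-- \stopproduct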
+
+-- -- -- this will move -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
+--
+-- <?xml version='1.0' standalone='yes'?>
+-- <exa:variables xmlns:exa='htpp://www.pragma-ade.com/schemas/exa-variables.rng'>
+-- <exa:variable label='mode:pragma'>nee</exa:variable>
+-- <exa:variable label='mode:variant'>standaard</exa:variable>
+-- </exa:variables>
+--
+-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
+
+local report_examodes = logs.reporter("system","examodes")
+
+local function convertexamodes(str)
+ local x = xml.convert(str)
+ for e in xml.collected(x,"exa:variable") do
+ local label = e.at and e.at.label
+ if label and label ~= "" then
+ local data = xml.text(e)
+ local mode = match(label,"^mode:(.+)$")
+ if mode then
+ context.enablemode { format("%s:%s",mode,data) }
+ end
+ context.setvariable("exa:variables",label,(gsub(data,"([{}])","\\%1")))
+ end
+ end
+end
+
+function commands.loadexamodes(filename)
+ if not filename or filename == "" then
+ filename = file.removesuffix(tex.jobname)
+ end
+ filename = resolvers.findfile(file.addsuffix(filename,'ctm')) or ""
+ if filename ~= "" then
+ report_examodes("loading %s",filename) -- todo: message system
+ convertexamodes(io.loaddata(filename))
+ else
+ report_examodes("no mode file %s",filename) -- todo: message system
+ end
+end
diff --git a/tex/context/base/file-job.mkvi b/tex/context/base/file-job.mkvi
new file mode 100644
index 000000000..b33bb9e19
--- /dev/null
+++ b/tex/context/base/file-job.mkvi
@@ -0,0 +1,195 @@
+%D \module
+%D [ file=file-job, % copied from main-001, later core-job
+%D version=1997.03.31,
+%D title=\CONTEXT\ Core Macros,
+%D subtitle=Job Handling,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This module is still to be split and documented.
+
+\writestatus{loading}{ConTeXt File Macros / Job Handling}
+
+\unprotect
+
+\registerctxluafile{file-job}{1.001}
+
+% processing
+
+\def\doifinputfileelse#name{\ctxcommand{doifinputfileelse("#name")}}
+\def\processfile #name{\ctxcommand{processfile("#name")}}
+
+% path control
+
+\def\usepath [#path]{\ctxcommand{usepath("#path")}}
+\def\usesubpath [#path]{\ctxcommand{usesubpath("#path")}}
+\def\allinputpaths {\ctxcommand{allinputpaths()}}
+
+% helper (not really needed nowadays)
+
+\let\locatedfilepath\empty
+
+\def\locatefilepath#name{\edef\locatedfilepath{\ctxcommand{locatefilepath("#name")}}}
+
+% zip files are tree'd
+
+\def\usezipfile {\dodoubleempty\dousezipfile}
+\def\dousezipfile[#zipname][#subtree]{\ctxcommand{usezipfile("#zipname","#subtree")}}
+
+% exa stuff might become obsolete:
+
+\def\loadexamodes {\dosingleempty\doloadexamodes}
+\def\doloadexamodes[#name]{\ctxcommand{loadexamodes("#name")}}
+
+% runtime files (maybe also do this in lua?)
+
+% \def\doloadsystemfile#1% only mkiv files
+% {\readfile{sys:///#1.\mksuffix}{\showmessage\m!system2{#1.\mksuffix}}\donothing}
+
+\def\doloadsystemfile#name% only mkiv files
+ {\readsysfile{#name.\mksuffix}{\showmessage\m!system2{#name.\mksuffix}}\donothing}
+
+\def\loadsystemfiles
+ {\doloadsystemfile\f!newfilename % new code, to be integrated at some point, plus fixes posted on the list
+ \doloadsystemfile\f!locfilename % new code, somewhat experimental, not distributed (outside the dev group)
+ \doloadsystemfile\f!expfilename % new code, very experimental, can be engine specific, mostly for me only
+ \doloadsystemfile\f!sysfilename} % local settings, but probably not that good an idea to use
+
+% \def\loadoptionfile
+% {\readfile{job:///\jobname.\f!optionextension}
+% {\writestatus\m!system{\jobname.\f!optionextension\space loaded}%
+% \ctxcommand{logoptionfile("\jobname.\f!optionextension")}}%
+% {\writestatus\m!system{no \jobname.\f!optionextension}}}
+
+\def\loadoptionfile
+ {\readjobfile{\jobname.\f!optionextension}
+ {\writestatus\m!system{\jobname.\f!optionextension\space loaded}%
+ \ctxcommand{copyfiletolog("\jobname.\f!optionextension")}}%
+ {\writestatus\m!system{no \jobname.\f!optionextension}}}
+
+% document structure
+
+\ifdefined\textlevel \else \newcount\textlevel \fi % might go away
+
+\def\dostarttext
+ {\glet\dostarttext\relax
+ \the\everystarttext
+ \global\everystarttext\emptytoks}
+
+\def\dostoptext
+ {\glet\dostoptext\relax
+ \flushfinallayoutpage % optional
+ \page % anyway
+ \the\everystoptext
+ \global\everystoptext\emptytoks
+ \the\everybye
+ \global\everybye\emptytoks
+ \the\everygoodbye
+ \global\everygoodbye\emptytoks
+ \doifsometokselse\everynotabene{\writeline\the\everynotabene\writeline}\donothing
+ \normalend} % tex's \end
+
+\unexpanded\def\starttext {\ctxcommand{starttext()}}
+\unexpanded\def\stoptext {\ctxcommand{stoptext()}}
+\unexpanded\def\forcequitjob {\ctxcommand{forcequitjob()}}
+\unexpanded\def\end {\ctxcommand{forceendjob()}}
+\unexpanded\def\autostarttext{\ctxcommand{autostarttext()}}
+\unexpanded\def\autostoptext {\ctxcommand{autostoptext()}}
+
+% protect structure
+
+\unexpanded\def\processfilemany #name{\ctxcommand{processfilemany("#name")}}
+\unexpanded\def\processfileonce #name{\ctxcommand{processfileonce("#name")}}
+\unexpanded\def\processfilenone #name{\ctxcommand{processfilenone("#name")}}
+
+\unexpanded\def\project {\doifnextoptionalelse\useproject \redoproject}
+\unexpanded\def\product {\doifnextoptionalelse\useproduct \redoproduct}
+\unexpanded\def\component {\doifnextoptionalelse\usecomponent \redocomponent}
+\unexpanded\def\environment {\doifnextoptionalelse\useenvironment\redoenvironment}
+
+\def\redoproject #name {\ctxcommand{useproject ("#name")}}
+\def\redoproduct #name {\ctxcommand{useproduct ("#name")}}
+\def\redocomponent #name {\ctxcommand{usecomponent ("#name")}}
+\def\redoenvironment #name {\ctxcommand{useenvironment("#name")}}
+
+\unexpanded\def\startproject {\doifnextoptionalelse\dostartproject \redostartproject}
+\unexpanded\def\startproduct {\doifnextoptionalelse\dostartproduct \redostartproduct}
+\unexpanded\def\startcomponent {\doifnextoptionalelse\dostartcomponent \redostartcomponent}
+\unexpanded\def\startenvironment {\doifnextoptionalelse\dostartenvironment\redostartenvironment}
+
+\def\redostartproject #name {\ctxcommand{startproject ("#name")}}
+\def\redostartproduct #name {\ctxcommand{startproduct ("#name")}}
+\def\redostartcomponent #name {\ctxcommand{startcomponent ("#name")}}
+\def\redostartenvironment #name {\ctxcommand{startenvironment("#name")}}
+
+\unexpanded\def\useproject [#name]{\ctxcommand{useproject ("#name")}}
+\unexpanded\def\useproduct [#name]{\ctxcommand{useproduct ("#name")}}
+\unexpanded\def\usecomponent [#name]{\ctxcommand{usecomponent ("#name")}}
+\unexpanded\def\useenvironment [#name]{\ctxcommand{useenvironment("#name")}}
+
+\unexpanded\def\dostartproject [#name]{\ctxcommand{startproject ("#name")}}
+\unexpanded\def\dostartproduct [#name]{\ctxcommand{startproduct ("#name")}}
+\unexpanded\def\dostartcomponent [#name]{\ctxcommand{startcomponent ("#name")}}
+\unexpanded\def\dostartenvironment[#name]{\ctxcommand{startenvironment("#name")}}
+
+\unexpanded\def\stopproject {\ctxcommand{stopproject ()}}
+\unexpanded\def\stopproduct {\ctxcommand{stopproduct ()}}
+\unexpanded\def\stopcomponent {\ctxcommand{stopcomponent ()}}
+\unexpanded\def\stopenvironment {\ctxcommand{stopenvironment()}}
+
+\def\currentproject {\ctxcommand{currentproject ()}}
+\def\currentproduct {\ctxcommand{currentproduct ()}}
+\def\currentcomponent {\ctxcommand{currentcomponent ()}}
+\def\currentenvironment {\ctxcommand{currentenvironment()}}
+
+\unexpanded\def\dostarttextfile #name{\ctxcommand{dostarttextfile("#name")}}
+\unexpanded\def\dostoptextfile {\ctxcommand{dostoptextfile()}}
+\def\processedfile {\ctxcommand{processedfile()}}
+\def\processedfiles {\ctxcommand{processedfiles()}}
+
+\unexpanded\def\loadtexfile [#name]{\ctxcommand{usetexfile("#name")}}
+\unexpanded\def\loadluafile [#name]{\ctxcommand{useluafile("#name")}}
+\unexpanded\def\loadcldfile [#name]{\ctxcommand{usecldfile("#name")}}
+\unexpanded\def\loadanyfile [#name]{\ctxcommand{useanyfile("#name")}}
+
+\unexpanded\def\loadtexfileonce [#name]{\ctxcommand{usetexfile("#name",true)}}
+\unexpanded\def\loadluafileonce [#name]{\ctxcommand{useluafile("#name",true)}}
+\unexpanded\def\loadcldfileonce [#name]{\ctxcommand{usecldfile("#name",true)}}
+\unexpanded\def\loadanyfileonce [#name]{\ctxcommand{useanyfile("#name",true)}}
+
+%D Handy for modules that have a test/demo appended.
+
+\def\continueifinputfile#1{\doifnot\inputfilename{#1}{\endinput}}
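+
+%D A typical pattern at the end of a module (a sketch, with a made||up
+%D file name):
+%D
+%D \starttyping
+%D \continueifinputfile{t-demo.mkvi}
+%D
+%D \starttext
+%D     some test code
+%D \stoptext
+%D \stoptyping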
+
+% \startproject test
+% 1: \startmode[*project] project \stopmode \endgraf
+% 2: \startmode[*product] product \stopmode \endgraf
+% \stopproject
+
+% replaced by modes:
+%
+% \setvalue{\e!start\v!localenvironment}[#1]%
+% {\let\loadedlocalenvironments\empty
+% \def\docommand##1%
+% {\beforesplitstring##1\at.\to\someevironment
+% \addtocommalist\someevironment\loadedlocalenvironments}%
+% \processcommalist[#1]\docommand
+% \doifcommonelse{\currentproject,\currentproduct,\currentcomponent,\currentenvironment}\loadedlocalenvironments
+% {\letvalue{\e!stop\v!localenvironment}\relax}
+% {\grabuntil{\e!stop\v!localenvironment}\gobbleoneargument}}
+%
+% \setvalue{\v!localenvironment}{\doifnextoptionalelse\uselocalenvironment\redolocalenvironment}
+%
+% \def\redolocalenvironment#1 {\uselocalenvironment[#1]}
+% \def\uselocalenvironment[#1]{\doexecutefileonce{#1}}
+
+% weird place:
+
+\neverypar\emptytoks
+
+\protect \endinput
diff --git a/tex/context/base/file-lib.lua b/tex/context/base/file-lib.lua
new file mode 100644
index 000000000..03ae0dfb7
--- /dev/null
+++ b/tex/context/base/file-lib.lua
@@ -0,0 +1,58 @@
+if not modules then modules = { } end modules ['file-lib'] = {
+ version = 1.001,
+ comment = "companion to file-lib.mkvi",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+
+local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end)
+local report_files = logs.reporter("files","readfile")
+
+local loaded = { }
+local defaultpatterns = { "%s" }
+
+local function defaultaction(name,foundname)
+ report_files("asked name: '%s', found name: '%s'",name,foundname)
+end
+
+local function defaultfailure(name)
+ report_files("asked name: '%s', not found",name)
+end
+
+function commands.uselibrary(specification) -- todo; reporter
+ local name = specification.name
+ if name and name ~= "" then
+ local patterns = specification.patterns or defaultpatterns
+ local action = specification.action or defaultaction
+ local failure = specification.failure or defaultfailure
+ local onlyonce = specification.onlyonce
+ local files = utilities.parsers.settings_to_array(name)
+ local done = false
+ for i=1,#files do
+ local filename = files[i]
+ if not loaded[filename] then
+ if onlyonce then
+ loaded[filename] = true -- todo: base this on return value
+ end
+ for i=1,#patterns do
+ local somename = format(patterns[i],filename)
+ local foundname = resolvers.getreadfilename("any",".",somename) or ""
+ if foundname ~= "" then
+ action(name,foundname)
+ done = true
+ break
+ end
+ end
+ if done then
+ break
+ end
+ end
+ end
+ if failure and not done then
+ failure(name)
+ end
+ end
+end
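+
+-- A calling sketch (the name, patterns and handlers below are only an
+-- illustration, not an actual caller):
+--
+-- commands.uselibrary {
+--     name     = "foo",
+--     patterns = { "t-%s.mkvi", "t-%s.tex" },
+--     action   = function(name,foundname) context.input(foundname) end,
+--     failure  = function(name) report_files("unknown library '%s'",name) end,
+--     onlyonce = true,
+-- }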
diff --git a/tex/context/base/file-lib.mkvi b/tex/context/base/file-lib.mkvi
new file mode 100644
index 000000000..76137ce15
--- /dev/null
+++ b/tex/context/base/file-lib.mkvi
@@ -0,0 +1,20 @@
+%D \module
+%D [ file=file-lib, % was core-fil,
+%D version=20110701, % 1997.11.15,
+%D title=\CONTEXT\ File Macros,
+%D subtitle=Module Support,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt File Macros / Libraries}
+
+\unprotect
+
+\registerctxluafile{file-lib}{1.001}
+
+\protect \endinput
diff --git a/tex/context/base/file-mod.lua b/tex/context/base/file-mod.lua
new file mode 100644
index 000000000..4d1c9aadf
--- /dev/null
+++ b/tex/context/base/file-mod.lua
@@ -0,0 +1,171 @@
+if not modules then modules = { } end modules ['file-mod'] = {
+ version = 1.001,
+ comment = "companion to file-mod.mkvi",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This module will be redone! For instance, the prefixes will move to data-*
+-- as they are sort of generic, along with home:// etc.
+
+-- context is not defined yet! todo! (we need to load tupp-fil after cld)
+-- todo: move startreadingfile to lua and push regime there
+
+--[[ldx--
+<p>It's more convenient to manipulate filenames (paths) in
+<l n='lua'/> than in <l n='tex'/>. These methods have counterparts
+at the <l n='tex'/> side.</p>
+--ldx]]--
+
+local format, concat, tonumber = string.format, table.concat, tonumber
+
+local trace_modules = false trackers.register("modules.loading", function(v) trace_modules = v end)
+
+local report_modules = logs.reporter("resolvers","modules")
+
+commands = commands or { }
+local commands = commands
+
+local findbyscheme = resolvers.finders.byscheme -- use different one
+
+-- modules can have a specific suffix or can specify one
+
+local prefixes = { "m", "p", "s", "x", "v", "t" }
+local suffixes = { "mkvi", "mkiv", "tex", "cld", "lua" } -- order might change and how about cld
+local modstatus = { }
+
+local function usemodule(name,hasscheme)
+ local foundname
+ if hasscheme then
+ -- no auto suffix as http will return a home page or error page
+ -- so we only add one if missing
+ local fullname = file.addsuffix(name,"tex")
+ if trace_modules then
+ report_modules("checking url: '%s'",fullname)
+ end
+ foundname = resolvers.findtexfile(fullname) or ""
+ elseif file.extname(name) ~= "" then
+ if trace_modules then
+ report_modules("checking file: '%s'",name)
+ end
+ foundname = findbyscheme("any",name) or ""
+ else
+ for i=1,#suffixes do
+ local fullname = file.addsuffix(name,suffixes[i])
+ if trace_modules then
+ report_modules("checking file: '%s'",fullname)
+ end
+ foundname = findbyscheme("any",fullname) or ""
+ if foundname ~= "" then
+ break
+ end
+ end
+ end
+ if foundname ~= "" then
+ if trace_modules then
+ report_modules("loading: '%s'",foundname)
+ end
+ context.startreadingfile()
+ resolvers.jobs.usefile(foundname,true) -- once, notext
+--~ context.input(foundname)
+ context.stopreadingfile()
+ return true
+ else
+ return false
+ end
+end
+
+function commands.usemodules(prefix,askedname,truename)
+ local hasprefix = prefix and prefix ~= ""
+ local hashname = ((hasprefix and prefix) or "*") .. "-" .. truename
+ local status = modstatus[hashname]
+ if status == 0 then
+ -- not found
+ elseif status == 1 then
+ status = status + 1
+ else
+ if trace_modules then
+ report_modules("locating: prefix: '%s', askedname: '%s', truename: '%s'",prefix or "", askedname or "", truename or "")
+ end
+ local hasscheme = url.hasscheme(truename)
+ if hasscheme then
+ -- no prefix and suffix done
+ if usemodule(truename,true) then
+ status = 1
+ else
+ status = 0
+ end
+ elseif hasprefix then
+ if usemodule(prefix .. "-" .. truename) then
+ status = 1
+ else
+ status = 0
+ end
+ else
+ for i=1,#prefixes do
+ -- todo: reconstruct name i.e. basename
+ local thename = prefixes[i] .. "-" .. truename
+ if usemodule(thename) then
+ status = 1
+ break
+ end
+ end
+ if status then
+ -- ok, don't change
+ elseif usemodule(truename) then
+ status = 1
+ else
+ status = 0
+ end
+ end
+ end
+ if status == 0 then
+ report_modules("not found: '%s'",askedname)
+ elseif status == 1 then
+ report_modules("loaded: '%s'",trace_modules and truename or askedname)
+ else
+ report_modules("already loaded: '%s'",trace_modules and truename or askedname)
+ end
+ modstatus[hashname] = status
+end
+
+statistics.register("loaded tex modules", function()
+ if next(modstatus) then
+ local t, f, nt, nf = { }, { }, 0, 0
+ for k, v in table.sortedhash(modstatus) do
+ k = file.basename(k)
+ if v == 0 then
+ nf = nf + 1
+ f[nf] = k
+ else
+ nt = nt + 1
+ t[nt] = k
+ end
+ end
+ local ts = (nt>0 and format(" (%s)",concat(t," "))) or ""
+ local fs = (nf>0 and format(" (%s)",concat(f," "))) or ""
+ return format("%s requested, %s found%s, %s missing%s",nt+nf,nt,ts,nf,fs)
+ else
+ return nil
+ end
+end)
+
+-- moved from syst-lua.lua:
+
+local splitter = lpeg.tsplitat(lpeg.S(". "))
+
+function commands.doifolderversionelse(one,two) -- one >= two
+ if not two then
+ one, two = environment.version, one
+ elseif one == "" then
+ one = environment.version
+ end
+ local y_1, m_1, d_1 = lpeg.match(splitter,one)
+ local y_2, m_2, d_2 = lpeg.match(splitter,two)
+ commands.testcase (
+ (tonumber(y_1) or 0) >= (tonumber(y_2) or 0) and
+ (tonumber(m_1) or 0) >= (tonumber(m_2) or 0) and
+ (tonumber(d_1) or 0) >= (tonumber(d_2) or 0)
+ )
+end
diff --git a/tex/context/base/core-fil.mkiv b/tex/context/base/file-mod.mkvi
index 78900215f..6259a04d8 100644
--- a/tex/context/base/core-fil.mkiv
+++ b/tex/context/base/file-mod.mkvi
@@ -1,8 +1,8 @@
%D \module
-%D [ file=core-fil,
-%D version=1997.11.15,
-%D title=\CONTEXT\ Core Macros,
-%D subtitle=File Support,
+%D [ file=file-mod, % was core-fil,
+%D version=20110701, % 1997.11.15,
+%D title=\CONTEXT\ File Macros,
+%D subtitle=Module Support,
%D author=Hans Hagen,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
@@ -11,72 +11,11 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\writestatus{loading}{ConTeXt Core Macros / File Support}
+\writestatus{loading}{ConTeXt File Macros / Modules}
\unprotect
-%D \macros
-%D {definefilesynonym}
-%D
-%D One of the problems with loading files is that their names
-%D can depend on the interface language. We therefore need a
-%D method to define filesynonyms. The actual synonyms are
-%D defined elsewhere, but look like:
-%D
-%D \starttyping
-%D \definefilesynonym [chemic] [chemie]
-%D \definefilesynonym [einheit] [unit]
-%D \definefilesynonym [unit] [unit]
-%D \stoptyping
-%D
-%D So we can say in english:
-%D
-%D \starttyping
-%D \usemodules[pictex,chemic,unit]
-%D \stoptyping
-%D
-%D and in dutch:
-%D
-%D \starttyping
-%D \usemodules[pictex,chemie,unit]
-%D \stoptyping
-
-% will be redone in mkiv
-
-\unexpanded\def\definefilesynonym
- {\dodoubleempty\dodefinefilesynonym}
-
-\def\dodefinefilesynonym[#1][#2]%
- {\ifcsname\??fs#1\endcsname
- \doifnotvalue{\??fs#1}{#2}{\showmessage\m!files1{#1 (#2),\getvalue{\??fs#1}}}%
- \fi
- \doifelse{#1}{#2}{\letbeundefined{\??fs#1}{#2}}{\setevalue{\??fs#1}{#2}}}
-
-%D \macros
-%D {definefilefallback}
-
-\unexpanded\def\definefilefallback
- {\dodoubleargument\dodefinefilefallback}
-
-\def\dodefinefilefallback[#1][#2]%
- {\doifnotfile{#1}
- {\def\docommand##1{\doiffile{##1}{\definefilesynonym[#1][##1]\quitcommalist}}%
- \processcommalist[#2]\docommand}}
-
-%D \macros
-%D {truefilename}
-%D
-%D At the system level such a filename can be called upon by
-%D saying:
-%D
-%D \starttyping
-%D \truefilename{filename/filesynonym}
-%D \stoptyping
-%D
-%D The implementation shows that nesting is supported.
-
-\def\truefilename#1% this will move to lua
- {\ifcsname\??fs#1\endcsname\expandafter\truefilename\csname\??fs#1\endcsname\else#1\fi}
+\registerctxluafile{file-mod}{1.001}
%D \macros
%D {usemodule}
@@ -92,30 +31,32 @@
%D their dedicated manuals. We use \type {\next} so that we
%D can \type {\end} in modules.
-\def\dodousemodules#1#2%
- {\ctxcommand{usemodules("#1","#2","\truefilename{#2}")}}
+\def\dodousemodules#category#name%
+ {\ctxcommand{usemodules("#category","#name","\truefilename{#name}")}}
\def\usemodules
{\dotripleempty\dousemodules}
-\def\dousemodules[#1][#2][#3]%
+\let\usemodule\usemodules
+
+\def\dousemodules[#category][#name][#parameters]%
{\pushmacro\currentmodule
\pushmacro\currentmoduleparameters
\let\currentmoduleparameters\empty
\ifthirdargument
- \doifelsenothing{#2}
+ \doifelsenothing{#name}
{\let\next\relax}
- {\def\currentmoduleparameters{#3}%
- \def\next{\processcommalist[#2]{\dodousemodules{#1}}}}%
+ {\def\currentmoduleparameters{#parameters}%
+ \def\next{\processcommalist[#name]{\dodousemodules{#category}}}}%
\else\ifsecondargument
- \doifelsenothing{#2}
+ \doifelsenothing{#name}
{\let\next\relax}
- {\doifassignmentelse{#2}
- {\def\currentmoduleparameters{#2}%
- \def\next{\processcommalist[#1]{\dodousemodules{}}}}
- {\def\next{\processcommalist[#2]{\dodousemodules{#1}}}}}%
+ {\doifassignmentelse{#name}
+ {\def\currentmoduleparameters{#name}%
+ \def\next{\processcommalist[#category]{\dodousemodules{}}}}
+ {\def\next{\processcommalist[#name]{\dodousemodules{#category}}}}}%
\else
- \def\next{\processcommalist[#1]{\dodousemodules{}}}%
+ \def\next{\processcommalist[#category]{\dodousemodules{}}}%
\fi\fi
\next
\popmacro\currentmoduleparameters
@@ -127,13 +68,13 @@
\unexpanded\def\startmodule
{\doifnextoptionalelse\dostartmodule\nostartmodule}
-\def\nostartmodule #1 %
- {\dostartmodule[#1]}
+\def\nostartmodule #name %
+ {\dostartmodule[#name]}
-\def\dostartmodule[#1]%
+\def\dostartmodule[#name]%
{\pushmacro\currentmodule
\pushmacro\currentmoduleparameters
- \def\currentmodule{#1}}
+ \def\currentmodule{#name}}
\unexpanded\def\stopmodule
{\popmacro\currentmoduleparameters
@@ -142,22 +83,22 @@
\unexpanded\def\setupmodule
{\dodoubleempty\dosetupmodule}
-\def\dosetupmodule[#1][#2]%
+\def\dosetupmodule[#name][#parameters]%
{\scratchtoks\expandafter{\currentmoduleparameters}%
\ifsecondargument
- \getparameters[\??md:#1:][#2]%
- \expanded{\getparameters[\??md:#1:][\the\scratchtoks]}%
+ \getparameters[\??md:#name:][#parameters]%
+ \expanded{\getparameters[\??md:#name:][\the\scratchtoks]}%
\else
- \getparameters[\??md:\currentmodule:][#1]%
+ \getparameters[\??md:\currentmodule:][#name]%
\expanded{\getparameters[\??md:\currentmodule:][\the\scratchtoks]}%
\fi
\let\currentmoduleparameters\empty}
-\def\moduleparameter #1#2{\executeifdefined{\??md:#1:#2}{}}
-\def\currentmoduleparameter#1{\executeifdefined{\??md:\currentmodule:#1}{}}
+\def\moduleparameter #name#parameter{\executeifdefined{\??md:#name:#parameter}{}}
+\def\currentmoduleparameter #parameter{\executeifdefined{\??md:\currentmodule:#parameter}{}}
-\def\useluamodule [#1]{\ctxlua{dofile(resolvers.findctxfile("#1"))}}
-\def\luaenvironment #1 {\ctxlua{dofile(resolvers.findctxfile("#1"))}}
+\def\useluamodule [#name]{\ctxlua{dofile(resolvers.findctxfile("#name"))}}
+\def\luaenvironment #name {\ctxlua{dofile(resolvers.findctxfile("#name"))}}
% \usemodule[newmml]
% \usemodule[newmml][a=b]
@@ -190,10 +131,7 @@
\writestatus{\currentmodule}{skipping experimental code}%
\gobbleuntil\stopmoduletestsection}}
-%D We also support a singular call, which saves us for
-%D frustrations when we do a typo.
-
-\let\usemodule\usemodules
+% will become file-run
%D To save memory, we implement some seldomly used commands
%D in a lazy way. Nota bene: such runtime definitions are
@@ -222,6 +160,23 @@
\fi
#1}
+%D \macros
+%D {doifolderversionelse}
+%D
+%D We start with a macro specially for Aditya who wants to be able
+%D to use development versions of \MKIV\ for real documents.
+%D
+%D \starttyping
+%D \doifolderversionelse\contextversion{1010.10.10} {OLDER} {OKAY} => OLDER
+%D \doifolderversionelse\contextversion{2020.20.20} {OLDER} {OKAY} => OKAY
+%D \doifolderversionelse\contextversion{2020} {OLDER} {OKAY} => OKAY
+%D \stoptyping
+%D
+%D The version pattern is \type {yyyy.mm.dd} (with mm and dd being optional).
+
+\def\doifolderversionelse#parent#child{\ctxcommand{doifolderversionelse("#parent","#child")}}
+\def\doifoldercontextelse #child{\ctxcommand{doifolderversionelse("#child")}}
+
%D Experimental:
\let\checkpreprocessor\relax
@@ -236,8 +191,4 @@
\setupexternalresources
[url=]
-%D This module will be perfected / changed / weeded.
-
-\let\protectbufferstrue\relax % will go away ... now in cont-sys.tex
-
\protect \endinput
diff --git a/tex/context/base/file-res.lua b/tex/context/base/file-res.lua
new file mode 100644
index 000000000..cd301d656
--- /dev/null
+++ b/tex/context/base/file-res.lua
@@ -0,0 +1,107 @@
+if not modules then modules = { } end modules ['file-res'] = {
+ version = 1.001,
+ comment = "companion to file-res.mkvi",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+local isfile = lfs.isfile
+
+local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end)
+local report_files = logs.reporter("files","readfile")
+
+resolvers.maxreadlevel = 2
+
+directives.register("resolvers.maxreadlevel", function(v) resolvers.maxreadlevel = tonumber(v) or resolvers.maxreadlevel end)
+
+local finders, loaders, openers = resolvers.finders, resolvers.loaders, resolvers.openers
+
+local found = { } -- can best be done in the resolver itself
+
+local function readfilename(specification,backtrack,treetoo)
+ local name = specification.filename
+ local fnd = found[name]
+ if not fnd then
+ if isfile(name) then
+ if trace_files then
+ report_files("found local: %s",name)
+ end
+ fnd = name
+ end
+ if not fnd and backtrack then
+ local fname = name
+ for i=1,backtrack,1 do
+ fname = "../" .. fname
+ if isfile(fname) then
+ if trace_files then
+ report_files("found by backtracking: %s",fname)
+ end
+ fnd = fname
+ break
+ elseif trace_files then
+ report_files("not found by backtracking: %s",fname)
+ end
+ end
+ end
+ if not fnd and treetoo then
+ fnd = resolvers.findtexfile(name) or ""
+ if trace_files then
+ if fnd ~= "" then
+ report_files("found by tree lookup: %s",fnd)
+ else
+ report_files("not found by tree lookup: %s",name)
+ end
+ end
+ end
+ found[name] = fnd
+ elseif trace_files then
+ if fnd ~= "" then
+ report_files("already found: %s",fnd)
+ else
+ report_files("already not found: %s",name)
+ end
+ end
+ return fnd or ""
+end
+
+function finders.job(specification) return readfilename(specification,false, false) end -- current path, no backtracking
+function finders.loc(specification) return readfilename(specification,resolvers.maxreadlevel,false) end -- current path, backtracking
+function finders.sys(specification) return readfilename(specification,false, true ) end -- current path, obeys tex search
+function finders.fix(specification) return readfilename(specification,resolvers.maxreadlevel,false) end -- specified path, backtracking
+function finders.set(specification) return readfilename(specification,false, false) end -- specified path, no backtracking
+function finders.any(specification) return readfilename(specification,resolvers.maxreadlevel,true ) end -- loc job sys
+
+openers.job = openers.file loaders.job = loaders.file -- default anyway
+openers.loc = openers.file loaders.loc = loaders.file
+openers.sys = openers.file loaders.sys = loaders.file
+openers.fix = openers.file loaders.fix = loaders.file
+openers.set = openers.file loaders.set = loaders.file
+openers.any = openers.file loaders.any = loaders.file
+
+local function getreadfilename(scheme,path,name) -- better do a split and then pass table
+ local fullname
+ if url.hasscheme(name) then
+ fullname = name
+ else
+ fullname = ((path == "") and format("%s:///%s",scheme,name)) or format("%s:///%s/%s",scheme,path,name)
+ end
+ return resolvers.findtexfile(fullname) or "" -- can be more direct
+end
+
+resolvers.getreadfilename = getreadfilename
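+
+-- a quick check of the composed lookup (path and name are just examples):
+--
+--~ print(getreadfilename("loc",".","myfile.tex"))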
+
+function commands.getreadfilename(scheme,path,name)
+ context(getreadfilename(scheme,path,name))
+end
+
+-- a name belonging to the run but also honoring qualified
+
+function commands.locfilename(name)
+ context(getreadfilename("loc",".",name))
+end
+
+function commands.doiflocfileelse(name)
+ commands.doifelse(isfile(getreadfilename("loc",".",name)))
+end
diff --git a/tex/context/base/file-res.mkvi b/tex/context/base/file-res.mkvi
new file mode 100644
index 000000000..c2d2cdec3
--- /dev/null
+++ b/tex/context/base/file-res.mkvi
@@ -0,0 +1,147 @@
+%D \module
+%D [ file=file-res, % was supp-fil,
+%D version=20110701, % 1995.10.10,
+%D title=\CONTEXT\ File Macros,
+%D subtitle=Resolvers,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt File Macros / Resolvers}
+
+\unprotect
+
+\registerctxluafile{file-res}{1.001}
+
+%D \macros
+%D {readfile,ReadFile}
+%D
+%D One cannot be sure if a file exists. When no file can be
+%D found, the \type{\input} primitive gives an error message
+%D and switches to interactive mode. The macro \type{\readfile}
+%D takes care of non||existing files. This macro has two faces.
+%D
+%D \starttyping
+%D \ReadFile {filename}
+%D \readfile {filename} {before loading} {not found}
+%D \stoptyping
+%D
+%D Many \TEX\ implementations have laid out some strategy for
+%D locating files. This can lead to unexpected results,
+%D especially when one loads files that are not found in the
+%D current directory. Let's give an example of this. In
+%D \CONTEXT\ illustrations can be defined in an external file.
+%D The resizing macro first checks whether an illustration is
+%D defined in the local definitions file. When no such file is
+%D found, it searches for a global file and when this file is
+%D not found either, the illustration itself is scanned for
+%D dimensions. One can imagine what happens if an adapted,
+%D locally stored illustration is scaled according to
+%D dimensions stored somewhere else.
+%D
+%D When some \TEX\ implementation starts looking for a file, it
+%D normally first looks in the current directory. When no file
+%D is found, \TEX\ starts searching on the path where format
+%D and|/|or style files are stored. Depending on the implementation
+%D this can considerably slow down processing speed.
+%D
+%D In \CONTEXT, we support a project||wise ordering of files.
+%D In such an approach it seems feasible to store common files
+%D in a lower directory. When for instance searching for a
+%D general layout file, we therefore have to backtrack.
+%D
+%D These three considerations have led to a more advanced
+%D approach for loading files.
+%D
+%D We first present an earlier implementation of
+%D \type{\readfile}. This command backtracks parent
+%D directories, up to a predefined level. Users can change this
+%D level (on the command line using a directive); we default to~3.
+%D
+%D We use \type{\normalinput} instead of \type{\input}
+%D because we want to be able to redefine the original
+%D \type{\input} when needed, for instance when loading third
+%D party libraries.
+
+\def\doreadfile#protocol#path#name% #true #false
+ {\edef\readfilename{\ctxcommand{getreadfilename("#protocol","#path","#name")}}%
+ \ifx\readfilename\empty
+ \expandafter\secondoftwoarguments
+ \else
+ \expandafter\dodoreadfile
+ \fi}
+
+\long\def\dodoreadfile#true#false%
+ {#true
+ \relax
+ \normalinput{\readfilename}%
+ \relax}
+
+%D \macros
+%D {readjobfile,readlocfile,readsysfile,
+%D readfixfile,readsetfile}
+%D
+%D This implementation honors the third situation, but we
+%D can still get unwanted files loaded and/or get involved
+%D in extensive searching.
+%D
+%D Due to different needs, we decided to offer four alternative
+%D loading commands. With \type{\readjobfile} we load a local
+%D file and do no backtracking, while \type{\readlocfile}
+%D backtracks~\number\maxreadlevel\ directories, including the current
+%D one.
+%D
+%D System files can be anywhere and therefore
+%D \type{\readsysfile} is not bound to the current directory
+%D and obeys the \TEX\ implementation.
+%D
+%D Of the last two, \type{\readfixfile} searches on the
+%D directory specified and backtracks too, while
+%D \type{\readsetfile} only searches the specified path.
+%D
+%D The most liberal is \type {\readfile}.
+
+\unexpanded\def\readjobfile #name{\doreadfile{job} {.}{#name}} % current path, no backtracking
+\unexpanded\def\readlocfile #name{\doreadfile{loc} {.}{#name}} % current path, backtracking
+\unexpanded\def\readsysfile #name{\doreadfile{sys} {.}{#name}} % current path, obeys tex search
+\unexpanded\def\readfixfile#path#name{\doreadfile{fix}{#path}{#name}} % specified path, backtracking
+\unexpanded\def\readsetfile#path#name{\doreadfile{set}{#path}{#name}} % specified path, no backtracking
+\unexpanded\def\readfile #name{\doreadfile{any} {.}{#name}}
+\unexpanded\def\ReadFile #name{\doreadfile{any} {.}{#name}\donothing\donothing}
+
+%D So now we've got ourselves five file loading commands:
+%D
+%D \starttyping
+%D \readfile {filename} {before loading} {not found}
+%D
+%D \readjobfile {filename} {before loading} {not found}
+%D \readlocfile {filename} {before loading} {not found}
+%D \readfixfile {directory} {filename} {before loading} {not found}
+%D \readsysfile {filename} {before loading} {not found}
+%D \stoptyping
+
+\def\readtexfile#name#true#false%
+ {\pushcatcodetable \catcodetable \ctxcatcodes
+ \readfile{#name}{#true}{#false}%
+ \popcatcodetable}
+
+\def\readxmlfile#name#true#false%
+ {\pushcatcodetable \catcodetable \xmlcatcodes
+ \readfile{#name}{#true}{#false}%
+ \popcatcodetable}
+
+%D \macros
+%D {doiflocfileelse,locfilename}
+%D
+%D \starttyping
+%D \doiflocfileelse {filename} {before loading} {not found}
+%D \stoptyping
+
+\unexpanded\def\doiflocfileelse#name{\ctxcommand{doiflocfileelse([[#name]])}}
+ \def\locfilename #name{\ctxcommand{locfilename([[#name]])}}
+
+\protect \endinput
diff --git a/tex/context/base/file-syn.lua b/tex/context/base/file-syn.lua
new file mode 100644
index 000000000..df431f49b
--- /dev/null
+++ b/tex/context/base/file-syn.lua
@@ -0,0 +1,46 @@
+if not modules then modules = { } end modules ['file-syn'] = {
+ version = 1.001,
+ comment = "companion to file-syn.mkvi",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local report_files = logs.reporter("files")
+
+environment.filesynonyms = environment.filesynonyms or { }
+local filesynonyms = environment.filesynonyms
+
+storage.register("environment/filesynonyms", filesynonyms, "environment.filesynonyms")
+
+local function truefilename(name)
+ local realname = filesynonyms[name] or name
+ if realname ~= name then
+ return truefilename(realname)
+ else
+ return realname
+ end
+end
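+
+-- synonyms resolve recursively, so chains work; for instance (hypothetical
+-- entries):
+--
+--   filesynonyms["chemic"] = "chemie"
+--   filesynonyms["chemie"] = "chemie-new"
+--
+--   truefilename("chemic") -- returns "chemie-new"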
+
+function commands.truefilename(name)
+ context(truefilename(name))
+end
+
+function commands.definefilesynonym(name,realname)
+ local synonym = filesynonyms[name]
+ if synonym then
+ interfaces.showmessage("files",1,name,realname,synonym)
+ end
+ filesynonyms[name] = realname
+end
+
+function commands.definefilefallback(name,alternatives)
+ local names = utilities.parsers.settings_to_array(alternatives)
+ for i=1,#names do
+ local realname = resolvers.findfile(names[i])
+ if realname ~= "" then
+ filesynonyms[name] = realname
+ break
+ end
+ end
+end
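+
+-- a fallback maps a name onto the first alternative that the resolver can
+-- actually find, e.g. (hypothetical names):
+--
+--   commands.definefilefallback("whatever","name-one,name-two")
+--
+-- makes "whatever" a synonym for whichever of the two files is found first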
diff --git a/tex/context/base/file-syn.mkvi b/tex/context/base/file-syn.mkvi
new file mode 100644
index 000000000..573c582d7
--- /dev/null
+++ b/tex/context/base/file-syn.mkvi
@@ -0,0 +1,66 @@
+%D \module
+%D [ file=file-syn, % was core-fil,
+%D version=20110701, % 1997.11.15,
+%D title=\CONTEXT\ File Macros,
+%D subtitle=Synonyms,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt File Macros / Synonyms}
+
+\unprotect
+
+\registerctxluafile{file-syn}{1.001}
+
+%D \macros
+%D {definefilesynonym,definefilefallback}
+%D
+%D One of the problems with loading files is that their names
+%D can depend on the interface language. We therefore need a
+%D method to define file synonyms. The actual synonyms are
+%D defined elsewhere, but look like:
+%D
+%D \starttyping
+%D \definefilesynonym [chemic] [chemie]
+%D \definefilesynonym [einheit] [unit]
+%D \definefilesynonym [unit] [unit]
+%D \stoptyping
+%D
+%D So we can say in English:
+%D
+%D \starttyping
+%D \usemodules[pictex,chemic,unit]
+%D \stoptyping
+%D
+%D and in Dutch:
+%D
+%D \starttyping
+%D \usemodules[pictex,chemie,unit]
+%D \stoptyping
+
+\unexpanded\def\definefilesynonym {\dodoubleempty \dodefinefilesynonym}
+\unexpanded\def\definefilefallback{\dodoubleargument\dodefinefilefallback} % still used?
+
+\def\dodefinefilesynonym [#name][#realname]{\ctxcommand{definefilesynonym ("#name","#realname")}}
+\def\dodefinefilefallback[#name][#alternatives]{\ctxcommand{definefilefallback("#name","#alternatives")}}
+
+%D \macros
+%D {truefilename}
+%D
+%D At the system level such a filename can be called upon by
+%D saying:
+%D
+%D \starttyping
+%D \truefilename{filename/filesynonym}
+%D \stoptyping
+%D
+%D The implementation shows that nesting is supported.
+
+\def\truefilename#1{\ctxcommand{truefilename("#1")}}
+
+\protect \endinput
diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua
index 0aca634bb..400136d70 100644
--- a/tex/context/base/font-afm.lua
+++ b/tex/context/base/font-afm.lua
@@ -1103,7 +1103,7 @@ fonts.formats.pfb = "type1"
local function check_afm(specification,fullname)
local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure
if foundname == "" then
- foundname = fonts.names.getfilename(fullname,"afm")
+ foundname = fonts.names.getfilename(fullname,"afm") or ""
end
if foundname == "" and afm.autoprefixed then
local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.*
diff --git a/tex/context/base/font-chk.lua b/tex/context/base/font-chk.lua
index e3679f06d..fd4e2e249 100644
--- a/tex/context/base/font-chk.lua
+++ b/tex/context/base/font-chk.lua
@@ -9,27 +9,32 @@ if not modules then modules = { } end modules ['font-chk'] = {
-- possible optimization: delayed initialization of vectors
-- move to the nodes namespace
-local report_fonts = logs.reporter("fonts","checking")
+local report_fonts = logs.reporter("fonts","checking")
-local fonts = fonts
+local fonts = fonts
-fonts.checkers = fonts.checkers or { }
-local checkers = fonts.checkers
+fonts.checkers = fonts.checkers or { }
+local checkers = fonts.checkers
-local fontdata = fonts.hashes.identifiers
-local is_character = characters.is_character
-local chardata = characters.data
-local tasks = nodes.tasks
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local fontcharacters = fonthashes.characters
-local glyph = node.id('glyph')
-local traverse_id = node.traverse_id
-local remove_node = nodes.remove
+local is_character = characters.is_character
+local chardata = characters.data
+
+local tasks = nodes.tasks
+local enableaction = tasks.enableaction
+local disableaction = tasks.disableaction
+
+local glyph = node.id('glyph')
+local traverse_id = node.traverse_id
+local remove_node = nodes.remove
-- maybe in fonts namespace
-- deletion can be option
-checkers.enabled = false
-checkers.delete = false
+local cleanup = false
-- to tfmdata.properties ?
@@ -55,45 +60,48 @@ end
fonts.loggers.onetimemessage = onetimemessage
function checkers.missing(head)
- if checkers.enabled then
- local lastfont, characters, found = nil, nil, nil
- for n in traverse_id(glyph,head) do
- local font, char = n.font, n.char
- if font ~= lastfont then
- characters = fontdata[font].characters
+ local lastfont, characters, found = nil, nil, nil
+ for n in traverse_id(glyph,head) do
+ local font = n.font
+ local char = n.char
+ if font ~= lastfont then
+ characters = fontcharacters[font]
+ end
+ if not characters[char] and is_character[chardata[char].category] then
+ if cleanup then
+ onetimemessage(font,char,"missing (will be deleted)")
+ else
+ onetimemessage(font,char,"missing")
end
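+
+-- a quick sketch of what the splitters return (hypothetical name):
+--
+--   file.splitname("a/b/c.txt")    -- "a/b/", "c", "txt"
+--   file.nametotable("a/b/c.txt")  -- { path = "a/b/", name = "c.txt", base = "c", suffix = "txt" }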
- if not characters[char] and is_character[chardata[char].category] then
- if checkers.delete then
- onetimemessage(font,char,"missing (will be deleted)")
- else
- onetimemessage(font,char,"missing")
- end
- if not found then
- found = { n }
- else
- found[#found+1] = n
- end
+ if not found then
+ found = { n }
+ else
+ found[#found+1] = n
end
end
- if found and checkers.delete then
- for i=1,#found do
- head = remove_node(head,found[i],true)
- end
+ end
+ if found and cleanup then
+ for i=1,#found do
+ head = remove_node(head,found[i],true)
end
end
return head, false
end
trackers.register("fonts.missing", function(v)
- tasks.enableaction("processors", "fonts.checkers.missing") -- always on then
- checkers.enabled = v
+ if v then
+ enableaction("processors","fonts.checkers.missing")
+ else
+ disableaction("processors","fonts.checkers.missing")
+ end
+ cleanup = v == "remove"
end)
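+
+-- typically enabled from the tex end, something like:
+--
+--   \enabletrackers[fonts.missing]         % report missing glyphs
+--   \enabletrackers[fonts.missing=remove]  % report and delete them
+--
+-- (a sketch of the intended usage; the key=value form follows from the
+-- "remove" check above)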
-function checkers.enable(delete)
- tasks.enableaction("processors", "fonts.checkers.missing") -- always on then
- if delete ~= nil then
- checkers.delete = delete
- end
- checkers.enabled = true
+function commands.checkcharactersinfont()
+ enableaction("processors","fonts.checkers.missing")
end
+function commands.removemissingcharacters()
+ enableaction("processors","fonts.checkers.missing")
+ cleanup = true
+end
diff --git a/tex/context/base/font-col.lua b/tex/context/base/font-col.lua
index 95e390ae2..b5437e249 100644
--- a/tex/context/base/font-col.lua
+++ b/tex/context/base/font-col.lua
@@ -88,7 +88,9 @@ function collections.define(name,font,ranges,details)
end
end
-function collections.stage_one(name)
+-- todo: provide a lua variant (like with definefont)
+
+function collections.clone_one(name)
local last = font.current()
if trace_collecting then
report_fonts("def: registering font %s with name %s",last,name)
@@ -96,7 +98,7 @@ function collections.stage_one(name)
list[#list+1] = last
end
-function collections.stage_two(name)
+function collections.clone_two(name)
statistics.starttiming(fonts)
local d = definitions[name]
local t = { }
@@ -221,3 +223,12 @@ function collections.process(head) -- this way we keep feature processing
return head, false
end
end
+
+-- interface
+
+commands.definefontcollection = collections.define
+commands.resetfontcollection = collections.reset
+commands.preparefontcollection = collections.prepare
+commands.fontcollectionmessage = collections.message
+commands.clonefontcollection_one = collections.clone_one
+commands.clonefontcollection_two = collections.clone_two
diff --git a/tex/context/base/font-col.mkiv b/tex/context/base/font-col.mkiv
index 038a618b7..cbe183868 100644
--- a/tex/context/base/font-col.mkiv
+++ b/tex/context/base/font-col.mkiv
@@ -33,18 +33,18 @@
{\doquadrupleempty\dodefinefontfallback}
\def\dodefinefontfallback[#1][#2][#3][#4]%
- {\ctxlua{fonts.collections.define("#1","#2",\!!bs#3\!!es,\!!bs#4\!!es)}}
+ {\ctxcommand{definefontcollection("#1","#2",\!!bs#3\!!es,\!!bs#4\!!es)}}
\def\resetfontfallback
{\dodoubleempty\doresetfontfallback}
\def\doresetfontfallback[#1][#2]%
- {\ctxlua{fonts.collections.reset("#1","#2")}}
+ {\ctxcommand{resetfontcollection("#1","#2")}}
% add fallbacks to last font
\def\dodefinefontfallbacks#1%
- {\ctxlua{fonts.collections.prepare("#1")}}
+ {\ctxcommand{preparefontcollection("#1")}}
% we might as well move the handling to lua but then we need to pass the
% fallbacks, skewchar etc.
@@ -79,7 +79,7 @@
{\egroup}
\def\doclonefonta#1#2% kind of dododefinefont
- {\ctxlua{fonts.collections.message("defining #1 (relative scale: #2)")}% brrr
+ {\ctxcommand{fontcollectionmessage("defining #1 (relative scale: #2)")}% brrr
\autofontsizefalse
\let\lastfontidentifier\s!dummy
\def\localrelativefontsize{#2}%
@@ -93,11 +93,8 @@
\def\doclonefontb#1% #2
{\doclonefonta{#1 \savedfontspec}}
-\def\doclonefontstageone#1%
- {\ctxlua{fonts.collections.stage_one("#1")}}
-
-\def\doclonefontstagetwo#1%
- {\ctxlua{fonts.collections.stage_two("#1")}}
+\def\doclonefontstageone#1{\ctxcommand{clonefontcollection_one("#1")}}
+\def\doclonefontstagetwo#1{\ctxcommand{clonefontcollection_two("#1")}}
% check : only replace when present in replacement font (default: no)
% force : force replacent even when basefont has glyph (default: yes)
diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua
index d036c042c..1d7580d84 100644
--- a/tex/context/base/font-ctx.lua
+++ b/tex/context/base/font-ctx.lua
@@ -102,6 +102,8 @@ function definers.resetnullfont()
definers.resetnullfont = function() end
end
+commands.resetnullfont = definers.resetnullfont
+
setmetatableindex(fontdata, function(t,k) return nulldata end)
local chardata = allocate() -- chardata
@@ -152,13 +154,44 @@ local needsnodemode = {
gpos_mark2ligature = true,
}
+fonts.handlers.otf.tables.scripts.auto = "automatic fallback to latn when no dflt present"
+
+local privatefeatures = {
+ tlig = true,
+ trep = true,
+ anum = true,
+}
+
local function modechecker(tfmdata,features,mode) -- we cannot adapt features as they are shared!
if trace_features then
report_features(serialize(features,"used"))
end
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata and rawdata.resources
+ local script = features.script
+ if script == "auto" then
+ local latn = false
+ for g, list in next, resources.features do
+ for f, scripts in next, list do
+ if privatefeatures[f] then
+ -- skip
+ elseif scripts.dflt then
+ script = "dflt"
+ break
+ elseif scripts.latn then
+ latn = true
+ end
+ end
+ end
+ if script == "auto" then
+ script = latn and "latn" or "dflt"
+ end
+ features.script = script
+ if trace_automode then
+ report_defining("auto script mode: using script '%s' in font '%s'",script,file.basename(tfmdata.properties.name))
+ end
+ end
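+ -- so script=auto picks dflt when some real (non private) feature provides
+ -- a dflt script, latn when only latn is present, and otherwise falls back
+ -- to dflt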
if mode == "auto" then
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata and rawdata.resources
local sequences = resources.sequences
if sequences and #sequences > 0 then
local script = features.script or "dflt"
@@ -599,7 +632,7 @@ local setsomefontname = context.fntsetsomename
local setemptyfontsize = context.fntsetnopsize
local setsomefontsize = context.fntsetsomesize
-function definers.stage_one(str)
+function commands.definefont_one(str)
statistics.starttiming(fonts)
if trace_defining then
report_defining("memory usage before: %s",statistics.memused())
@@ -644,7 +677,7 @@ local n = 0
-- we can also move rscale to here (more consistent)
-- the argument list will become a table
-function definers.stage_two(global,cs,str,size,inheritancemode,classfeatures,fontfeatures,classfallbacks,fontfallbacks,
+function commands.definefont_two(global,cs,str,size,inheritancemode,classfeatures,fontfeatures,classfallbacks,fontfallbacks,
mathsize,textsize,relativeid,classgoodies,goodies)
if trace_defining then
report_defining("start stage two: %s (%s)",str,size)
@@ -793,6 +826,10 @@ function definers.define(specification)
specification.method = specification.method or (method ~= "" and method) or "*"
specification.detail = specification.detail or (detail ~= "" and detail) or ""
--
+ if type(specification.size) == "string" then
+ specification.size = tex.sp(specification.size) or 655360 -- 10pt
+ end
+ --
specification.specification = "" -- not used
specification.resolved = ""
specification.forced = ""
@@ -1079,7 +1116,7 @@ function commands.nbfs(amount,precision)
end
function commands.featureattribute(tag)
- tex.write(contextnumber(tag))
+ context(contextnumber(tag))
end
function commands.setfontfeature(tag)
diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua
index a5c60d252..84e98fb3e 100644
--- a/tex/context/base/font-gds.lua
+++ b/tex/context/base/font-gds.lua
@@ -51,7 +51,7 @@ function fontgoodies.report(what,trace,goodies)
end
end
-local function getgoodies(filename) -- maybe a merge is better
+local function loadgoodies(filename) -- maybe a merge is better
local goodies = data[filename] -- we assume no suffix is given
if goodies ~= nil then
-- found or tagged unfound
@@ -86,7 +86,7 @@ function fontgoodies.register(name,fnc) -- will be a proper sequencer
list[name] = fnc
end
-fontgoodies.get = getgoodies
+fontgoodies.load = loadgoodies
-- register goodies file
@@ -98,7 +98,7 @@ local function setgoodies(tfmdata,value)
end
for filename in gmatch(value,"[^, ]+") do
-- we need to check for duplicates
- local ok = getgoodies(filename)
+ local ok = loadgoodies(filename)
if ok then
goodies[#goodies+1] = ok
end
@@ -507,3 +507,8 @@ fontgoodies.register("compositions", initialize)
-- tex/fonts/data/foundry/collection
--
-- see lfg files in distribution
+
+-- interface
+
+commands.loadfontgoodies = fontgoodies.load
+commands.enablefontcolorschemes = colorschemes.enable
diff --git a/tex/context/base/font-gds.mkiv b/tex/context/base/font-gds.mkiv
index 7932a85ca..6e56c7f0a 100644
--- a/tex/context/base/font-gds.mkiv
+++ b/tex/context/base/font-gds.mkiv
@@ -17,8 +17,7 @@
\unprotect
-\def\loadfontgoodies[#1]%
- {\ctxlua{fonts.goodies.get("#1")}}
+\def\loadfontgoodies[#1]{\ctxcommand{loadfontgoodies("#1")}}
% this will become colorgroups and move to font-col or so
@@ -40,8 +39,8 @@
\definesystemattribute[colorscheme][public]
-\def\setfontcolorscheme
- {\ctxlua{fonts.goodies.colorschemes.enable()}%
+\def\setfontcolorscheme % will move to the lua end
+ {\ctxcommand{enablefontcolorschemes()}%
\xdef\setfontcolorscheme[##1]{\attribute\colorschemeattribute##1\relax}%
\setfontcolorscheme}
diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv
index b70988348..066e2b7f0 100644
--- a/tex/context/base/font-ini.mkiv
+++ b/tex/context/base/font-ini.mkiv
@@ -579,13 +579,22 @@
\let\fontbody\savedfontbody
\autofontsizefalse}
-\def\dosavefontclassmathfamily#1#2%
+% tricky: we cannot reset in \everybeforedefinetypeface as we don't know all sizes
+% so we postpone the optimization to the first starttext
+
+\def\dosavefontclassmathfamilyindeed#1#2%
{\expandafter\xdef\csname\??ff:\fontclass:\textface:\c!mm:#2:\fontsize\endcsname{%
\scriptscriptfont#1\csname*\fontclass\scriptscriptface\c!mm#23\fontsize3*\endcsname
\scriptfont #1\csname*\fontclass\scriptface \c!mm#22\fontsize2*\endcsname
\textfont #1\csname*\fontclass\textface \c!mm#21\fontsize1*\endcsname
}}
+\let\dosavefontclassmathfamily\gobbletwoarguments
+
+\appendtoks
+ \glet\dosavefontclassmathfamily\dosavefontclassmathfamilyindeed
+\to \everystarttext
+
% It would be nice if characters could be defined in a neutral way (say fam 255) and
% be mapped to a real family during noad list construction. However, this changes
% tex in critical places so for the moment we simulate this using manipulation.
@@ -926,7 +935,7 @@
\unexpanded\def\lowleveldefinefont#1#2% #2 = cs
{% we can now set more at the lua end
- \ctxlua{fonts.definers.stage_one("\luaescapestring{#1}")}% the escapestring catches at \somedimen
+ \ctxcommand{definefont_one(\!!bs\luaescapestring{#1}\!!es)}% the escapestring catches at \somedimen
% sets \scaledfontmode and \somefontname and \somefontsize
\ifcase\scaledfontmode\relax
% none, avoid the designsize if possible
@@ -964,10 +973,10 @@
\fi
\updatefontparameters
\updatefontclassparameters
- \ctxlua{fonts.definers.stage_two(
+ \ctxcommand{definefont_two(
\ifx\fontclass\empty false\else true\fi,
"#2", % cs, trailing % is gone
- "\somefontfile",
+ \!!bs\somefontfile\!!es,
\number\scaledfontsize,
\number\featureinheritancemode,
"\@@fontclassfeatures",
@@ -2335,8 +2344,8 @@
\fetchruntimecommand \showfontparameters {\f!fontprefix\s!run}
\def\resetnullfont % this is needed because some macro packages (tikz) misuse \nullfont
- {\dorecurse7{\fontdimen\recurselevel\nullfont\zeropoint}%
- \ctxlua{fonts.definers.resetnullfont()}% in luatex 0.70 this will also do the previous
+ {\dorecurse7{\fontdimen\recurselevel\nullfont\zeropoint}% keep an eye on this as:
+ \ctxcommand{resetnullfont()}% in luatex 0.70 this will also do the previous
\globallet\resetnullfont\relax}
\def\preloaddefaultfonts
@@ -3068,11 +3077,12 @@
\def\dodefinefontfeature[#1][#2][#3]%
{\global\expandafter\chardef\csname\??fq=#1\endcsname % beware () needed as we get two values returned
- \ctxsprint{((fonts.specifiers.presetcontext("#1","#2","#3")))}\relax}
+ \cldcontext{((fonts.specifiers.presetcontext("#1","#2","#3")))}\relax}
\definefontfeature % experiment, this might move to the lua code
[always]
[mode=auto,
+ script=auto,
kern=yes,
mark=yes,
mkmk=yes,
@@ -3224,7 +3234,7 @@
{\dodoubleargument\dofontfeatureslist}
\def\dofontfeatureslist[#1][#2]% todo: arg voor type
- {\ctxsprint{fonts.specifiers.contexttostring("#1","otf","\luaescapestring{#2}","yes","no",true,{"number"})}}
+ {\cldcontext{fonts.specifiers.contexttostring("#1","otf","\luaescapestring{#2}","yes","no",true,{"number"})}}
\attribute\zerocount\zerocount % first in list, so fast match
@@ -3245,15 +3255,6 @@
%
% \typebuffer \getbuffer
-% \unexpanded\def\featureattribute#1{\ctxsprint{fonts.specifiers.contextnumber("#1"))}}
-% \unexpanded\def\setfontfeature #1{\edef\currentfeature{#1}\attribute\zerocount\featureattribute{#1}\relax}
-% \unexpanded\def\resetfontfeature#1{\let\currentfeature\empty\attribute\zerocount\zerocount} % initial value
-
-% \def\addfontfeaturetoset #1{\ctxlua{fonts.withset("#1", 1)}} % merge
-% \def\subtractfontfeaturefromset #1{\ctxlua{fonts.withset("#1",-1)}} % merge
-% \def\addfontfeaturetofont #1{\ctxlua{fonts.withfnt("#1", 2)}} % overload
-% \def\subtractfontfeaturefromfont#1{\ctxlua{fonts.withfnt("#1",-2)}} % overload
-
\unexpanded\def\featureattribute#1{\ctxcommand{featureattribute("#1")}}
\unexpanded\def\setfontfeature #1{\ctxcommand{setfontfeature("#1")}\edef\currentfeature{#1}}
\unexpanded\def\resetfontfeature#1{\ctxcommand{resetfontfeature()}\let\currentfeature\empty} % initial value
@@ -4335,13 +4336,6 @@
\definealternativestyle [\v!boldslanted,\v!slantedbold] [\bs] []
\definealternativestyle [\v!bolditalic,\v!italicbold] [\bi] []
-% \definealternativestyle [\v!small,\v!smallnormal] [\tfx] []
-% \definealternativestyle [\v!smallbold] [\bfx] []
-% \definealternativestyle [\v!smalltype] [\ttx] []
-% \definealternativestyle [\v!smallslanted] [\slx] []
-% \definealternativestyle [\v!smallboldslanted,\v!smallslantedbold] [\bsx] []
-% \definealternativestyle [\v!smallbolditalic,\v!smallitalicbold] [\bix] []
-
\definealternativestyle [\v!small,\v!smallnormal] [\setsmallbodyfont\tf] []
\definealternativestyle [\v!smallbold] [\setsmallbodyfont\bf] []
\definealternativestyle [\v!smalltype] [\setsmallbodyfont\tt] []
@@ -4355,6 +4349,36 @@
\definealternativestyle [\v!sans,\v!sansserif] [\ss] []
\definealternativestyle [\v!sansbold] [\ss\bf] []
+% % maybe we need interface neutral as well (for use in cld):
+%
+% \letcscsname\mediaeval \csname\v!mediaeval \endcsname
+% \letcscsname\normal \csname\v!normal \endcsname
+% \letcscsname\bold \csname\v!bold \endcsname
+% \letcscsname\mono \csname\v!mono \endcsname
+% \letcscsname\slanted \csname\v!slanted \endcsname
+% \letcscsname\italic \csname\v!italic \endcsname
+% \letcscsname\boldslanted \csname\v!boldslanted \endcsname
+% \letcscsname\slantedbold \csname\v!slantedbold \endcsname
+% \letcscsname\bolditalic \csname\v!bolditalic \endcsname
+% \letcscsname\italicbold \csname\v!italicbold \endcsname
+%
+% \letcscsname\small \csname\v!small \endcsname
+% \letcscsname\smallnormal \csname\v!smallnormal \endcsname
+% \letcscsname\smallbold \csname\v!smallbold \endcsname
+% \letcscsname\smalltype \csname\v!smalltype \endcsname
+% \letcscsname\smallslanted \csname\v!smallslanted \endcsname
+% \letcscsname\smallboldslanted\csname\v!smallboldslanted\endcsname
+% \letcscsname\smallslantedbold\csname\v!smallslantedbold\endcsname
+% \letcscsname\smallbolditalic \csname\v!smallbolditalic \endcsname
+% \letcscsname\smallitalicbold \csname\v!smallitalicbold \endcsname
+%
+% \letcscsname\bigger \csname\v!bigger \endcsname
+% \letcscsname\smaller \csname\v!smaller \endcsname
+%
+% \letcscsname\sans \csname\v!sans \endcsname
+% \letcscsname\sansserif \csname\v!sansserif \endcsname
+% \letcscsname\sansbold \csname\v!sansbold \endcsname
+
%D We can go on and on and on:
%D
%D \starttyping
@@ -4420,8 +4444,8 @@
%
% \setupfonts[check=...]
-\def\checkcharactersinfont {\ctxlua{fonts.checkers.enable()}}
-\def\removemissingcharacters{\ctxlua{fonts.checkers.enable(true)}}
+\def\checkcharactersinfont {\ctxcommand{checkcharactersinfont }}
+\def\removemissingcharacters{\ctxcommand{removemissingcharacters}}
%D New commands (not yet interfaced):
@@ -4601,5 +4625,13 @@
\def\doifelsecurrentfonthasfeature#1%
{\ctxcommand{doifelsecurrentfonthasfeature("#1")}}
+% variant selectors
+%
+% \mathematics {\vsone{\utfchar{"2229}}}
+% \mathematics {\utfchar{"2229}\vsone{}}
+
+\unexpanded\edef\vsone#1{#1\utfchar{"FE00}} % used
+\unexpanded\edef\vstwo#1{#1\utfchar{"FE01}} % not used but handy for testing
+
\protect \endinput
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index 06ec1efe4..b3147ede3 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -22,7 +22,7 @@ local handlers = fonts.handlers
handlers.otf = handlers.otf or { }
local otf = handlers.otf
-otf.version = otf.version or 2.732
+otf.version = otf.version or 2.733
otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
function otf.loadcached(filename,format,sub)
diff --git a/tex/context/base/font-otc.lua b/tex/context/base/font-otc.lua
index 1b4983ce0..fd3d7a761 100644
--- a/tex/context/base/font-otc.lua
+++ b/tex/context/base/font-otc.lua
@@ -59,86 +59,91 @@ local function addfeature(data,feature,specifications)
-- subtables are tables themselves but we also accept flattened singular subtables
for s=1,#specifications do
local specification = specifications[s]
- local askedfeatures = specification.features or everywhere
- local subtables = specification.subtables or { specification.data } or { }
- local featuretype = types[specification.type or "substitution"]
- local featureflags = specification.flags or noflags
- local added = false
- local featurename = format("ctx_%s_%s",feature,s)
- local st = { }
- for t=1,#subtables do
- local list = subtables[t]
- local full = format("%s_%s",featurename,t)
- st[t] = full
- if featuretype == "gsub_ligature" then
- lookuptypes[full] = "ligature"
- for code, ligature in next, list do
- local unicode = tonumber(code) or unicodes[code]
- local description = descriptions[unicode]
- if description then
- local slookups = description.slookups
- if type(ligature) == "string" then
- ligature = { lpegmatch(splitter,ligature) }
+ local valid = specification.valid
+ if not valid or valid(data,specification,feature) then
+ local askedfeatures = specification.features or everywhere
+ local subtables = specification.subtables or { specification.data } or { }
+ local featuretype = types[specification.type or "substitution"]
+ local featureflags = specification.flags or noflags
+ local added = false
+ local featurename = format("ctx_%s_%s",feature,s)
+ local st = { }
+ for t=1,#subtables do
+ local list = subtables[t]
+ local full = format("%s_%s",featurename,t)
+ st[t] = full
+ if featuretype == "gsub_ligature" then
+ lookuptypes[full] = "ligature"
+ for code, ligature in next, list do
+ local unicode = tonumber(code) or unicodes[code]
+ local description = descriptions[unicode]
+ if description then
+ local slookups = description.slookups
+ if type(ligature) == "string" then
+ ligature = { lpegmatch(splitter,ligature) }
+ end
+ if slookups then
+ slookups[full] = ligature
+ else
+ description.slookups = { [full] = ligature }
+ end
+ done, added = done + 1, true
end
- if slookups then
- slookups[full] = ligature
- else
- description.slookups = { [full] = ligature }
- end
- done, added = done + 1, true
end
- end
- elseif featuretype == "gsub_single" then
- lookuptypes[full] = "substitution"
- for code, replacement in next, list do
- local unicode = tonumber(code) or unicodes[code]
- local description = descriptions[unicode]
- if description then
- local slookups = description.slookups
- replacement = tonumber(replacement) or unicodes[replacement]
- if slookups then
- slookups[full] = replacement
- else
- description.slookups = { [full] = replacement }
+ elseif featuretype == "gsub_single" then
+ lookuptypes[full] = "substitution"
+ for code, replacement in next, list do
+ local unicode = tonumber(code) or unicodes[code]
+ local description = descriptions[unicode]
+ if description then
+ local slookups = description.slookups
+ replacement = tonumber(replacement) or unicodes[replacement]
+ if descriptions[replacement] then
+ if slookups then
+ slookups[full] = replacement
+ else
+ description.slookups = { [full] = replacement }
+ end
+ done, added = done + 1, true
+ end
end
- done, added = done + 1, true
end
end
end
- end
- if added then
- -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... }
- for k, v in next, askedfeatures do
- if v[1] then
- askedfeatures[k] = table.tohash(v)
+ if added then
+ -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... }
+ for k, v in next, askedfeatures do
+ if v[1] then
+ askedfeatures[k] = table.tohash(v)
+ end
end
- end
- sequences[#sequences+1] = {
- chain = 0,
- features = { [feature] = askedfeatures },
- flags = featureflags,
- name = featurename,
- subtables = st,
- type = featuretype,
- }
- -- register in metadata (merge as there can be a few)
- if not gsubfeatures then
- gsubfeatures = { }
- fontfeatures.gsub = gsubfeatures
- end
- local k = gsubfeatures[feature]
- if not k then
- k = { }
- gsubfeatures[feature] = k
- end
- for script, languages in next, askedfeatures do
- local kk = k[script]
- if not kk then
- kk = { }
- k[script] = kk
+ sequences[#sequences+1] = {
+ chain = 0,
+ features = { [feature] = askedfeatures },
+ flags = featureflags,
+ name = featurename,
+ subtables = st,
+ type = featuretype,
+ }
+ -- register in metadata (merge as there can be a few)
+ if not gsubfeatures then
+ gsubfeatures = { }
+ fontfeatures.gsub = gsubfeatures
end
- for language, value in next, languages do
- kk[language] = value
+ local k = gsubfeatures[feature]
+ if not k then
+ k = { }
+ gsubfeatures[feature] = k
+ end
+ for script, languages in next, askedfeatures do
+ local kk = k[script]
+ if not kk then
+ kk = { }
+ k[script] = kk
+ end
+ for language, value in next, languages do
+ kk[language] = value
+ end
end
end
end
@@ -241,22 +246,37 @@ local anum_persian = {
[0x0039] = 0x06F9,
}
+local function valid(data)
+ local features = data.resources.features
+ if features then
+ for k, v in next, features do
+ for k, v in next, v do
+ if v.arab then
+ return true
+ end
+ end
+ end
+ end
+end
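+
+-- so the anum specifications below are only applied to fonts that actually
+-- provide an arab script in one of their (non private) features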
+
local anum_specification = {
{
type = "substitution",
features = { arab = { URD = true, dflt = true } },
data = anum_arabic,
flags = noflags, -- { },
+ valid = valid,
},
{
type = "substitution",
features = { arab = { URD = true } },
data = anum_persian,
flags = noflags, -- { },
+ valid = valid,
},
}
-otf.addfeature("anum",anum_specification)
+otf.addfeature("anum",anum_specification) -- todo: only when there is already an arab script feature
registerotffeature {
name = 'anum',
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index 8155a3f1d..fe81f8bb1 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -47,7 +47,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.732 -- beware: also sync font-mis.lua
+otf.version = 2.733 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
local fontdata = fonts.hashes.identifiers
@@ -291,11 +291,14 @@ end
-- patches.register("before","migrate metadata","cambria",function() end)
function patches.register(what,where,pattern,action)
- local ww = what[where]
- if ww then
- ww[pattern] = action
- else
- ww = { [pattern] = action}
+ local pw = patches[what]
+ if pw then
+ local ww = pw[where]
+ if ww then
+ ww[pattern] = action
+ else
+ pw[where] = { [pattern] = action}
+ end
end
end
@@ -420,6 +423,9 @@ function otf.load(filename,format,sub,featurefile)
duplicates = {
-- alternative unicodes
},
+ variants = {
+ -- alternative unicodes (variants)
+ },
lookuptypes = {
},
},
@@ -444,6 +450,7 @@ function otf.load(filename,format,sub,featurefile)
if packdata then
if cleanup > 0 then
collectgarbage("collect")
+--~ lua.collectgarbage()
end
enhance("pack",data,filename,nil)
end
@@ -451,6 +458,7 @@ function otf.load(filename,format,sub,featurefile)
data = containers.write(otf.cache, hash, data)
if cleanup > 1 then
collectgarbage("collect")
+--~ lua.collectgarbage()
end
stoptiming(data)
if elapsedtime then -- not in generic
@@ -459,10 +467,12 @@ function otf.load(filename,format,sub,featurefile)
fontloader.close(fontdata) -- free memory
if cleanup > 3 then
collectgarbage("collect")
+--~ lua.collectgarbage()
end
data = containers.read(otf.cache, hash) -- this frees the old table and load the sparse one
if cleanup > 2 then
collectgarbage("collect")
+--~ lua.collectgarbage()
end
else
data = nil
@@ -600,6 +610,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
local unicodes = resources.unicodes -- name to unicode
local indices = resources.indices -- index to unicode
local duplicates = resources.duplicates
+ local variants = resources.variants
if rawsubfonts then
@@ -699,11 +710,28 @@ actions["prepare glyphs"] = function(data,filename,raw)
}
local altuni = glyph.altuni
if altuni then
- local d = { }
+ local d
for i=1,#altuni do
- d[#d+1] = altuni[i].unicode
+ local a = altuni[i]
+ local u = a.unicode
+ local v = a.variant
+ if v then
+ local vv = variants[v]
+ if vv then
+ vv[u] = unicode
+ else -- xits-math has some:
+ vv = { [u] = unicode }
+ variants[v] = vv
+ end
+ elseif d then
+ d[#d+1] = u
+ else
+ d = { u }
+ end
+ end
+ if d then
+ duplicates[unicode] = d
end
- duplicates[unicode] = d
end
else
report_otf("potential problem: glyph 0x%04X is used but empty",index)
@@ -725,9 +753,8 @@ actions["check encoding"] = function(data,filename,raw)
local properties = data.properties
local unicodes = resources.unicodes -- name to unicode
local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
- -- begin of messy (not needed whwn cidmap)
+ -- begin of messy (not needed when cidmap)
local mapdata = raw.map or { }
local unicodetoindex = mapdata and mapdata.map or { }
@@ -801,7 +828,6 @@ actions["add duplicates"] = function(data,filename,raw)
end
end
end
-
end
-- class : nil base mark ligature component (maybe we don't need it in description)
@@ -1978,15 +2004,15 @@ local function check_otf(forced,specification,suffix,what)
if forced then
name = file.addsuffix(name,suffix,true)
end
- local fullname, tfmdata = findbinfile(name,suffix) or "", nil -- one shot
+ local fullname = findbinfile(name,suffix) or ""
if fullname == "" then
- fullname = fonts.names.getfilename(name,suffix)
+ fullname = fonts.names.getfilename(name,suffix) or ""
end
if fullname ~= "" then
- specification.filename, specification.format = fullname, what -- hm, so we do set the filename, then
- tfmdata = read_from_otf(specification) -- we need to do it for all matches / todo
+ specification.filename = fullname
+ specification.format = what
+ return read_from_otf(specification)
end
- return tfmdata
end
local function opentypereader(specification,suffix,what)
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index 2483f887c..c4f0e948b 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -32,8 +32,6 @@ local report_names = logs.reporter("fonts","names")
using a table that has keys filtered from the font related files.</p>
--ldx]]--
-local texsprint = (tex and tex.sprint) or print
-
fonts = fonts or { } -- also used elsewhere
local names = { }
@@ -317,7 +315,7 @@ local function cleanname(name)
end
local function cleanfilename(fullname,defaultsuffix)
- local _, _, name, suffix = file.splitname(fullname)
+ local path, name, suffix = file.splitname(fullname)
name = gsub(lower(name),"[^%a%d]","")
if suffix and suffix ~= "" then
return name .. ".".. suffix
@@ -920,7 +918,9 @@ local function is_reloaded()
local c_status = serialize(resolvers.datastate())
local f_status = serialize(data.datastate)
if c_status == f_status then
- report_names("font database has matching configuration and file hashes")
+ if trace_names then
+ report_names("font database has matching configuration and file hashes")
+ end
return
else
report_names("font database has mismatching configuration and file hashes")
@@ -1025,14 +1025,28 @@ function names.resolve(askedname,sub)
end
end
+-- function names.getfilename(askedname,suffix) -- last resort, strip funny chars
+-- names.load()
+-- local files = names.data.files
+-- askedname = files and files[cleanfilename(askedname,suffix)] or ""
+-- if askedname == "" then
+-- return ""
+-- else -- never entered
+-- return resolvers.findbinfile(askedname,suffix) or ""
+-- end
+-- end
+
function names.getfilename(askedname,suffix) -- last resort, strip funny chars
names.load()
local files = names.data.files
- askedname = files and files[cleanfilename(askedname,suffix)] or ""
- if askedname == "" then
- return ""
- else
- return resolvers.findbinfile(askedname,suffix) or ""
+ local cleanname = cleanfilename(askedname,suffix)
+ local found = files and files[cleanname] or ""
+ if found == "" and is_reloaded() then
+ files = names.data.files
+ found = files and files[cleanname] or ""
+ end
+ if found and found ~= "" then
+ return resolvers.findbinfile(found,suffix) or "" -- we still need to locate it
end
end
diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua
index d7c8fe314..354d77b68 100644
--- a/tex/context/base/font-tfm.lua
+++ b/tex/context/base/font-tfm.lua
@@ -117,7 +117,7 @@ local function check_tfm(specification,fullname) -- we could split up like afm/o
foundname = findbinfile(fullname, 'ofm') or "" -- not needed in context
end
if foundname == "" then
- foundname = fonts.names.getfilename(fullname,"tfm")
+ foundname = fonts.names.getfilename(fullname,"tfm") or ""
end
if foundname ~= "" then
specification.filename = foundname
diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua
index 4d3ba713d..3fe2270e3 100644
--- a/tex/context/base/grph-inc.lua
+++ b/tex/context/base/grph-inc.lua
@@ -20,7 +20,7 @@ if not modules then modules = { } end modules ['grph-inc'] = {
--[[
The ConTeXt figure inclusion mechanisms are among the oldest code
-in ConTeXt and evolve dinto a complex whole. One reason is that we
+in ConTeXt and evolved into a complex whole. One reason is that we
deal with backend in an abstract way. What complicates matters is
that we deal with internal graphics as well: TeX code, MetaPost code,
etc. Later on figure databases were introduced, which resulted in
@@ -413,7 +413,9 @@ local function register(askedname,specification)
report_inclusion("checking conversion of '%s' (%s): old format '%s', new format '%s', conversion '%s', resolution '%s'",
askedname,specification.fullname,format,newformat,conversion or "default",resolution or "default")
end
- local converter = (newformat ~= format) and converters[format]
+ -- quick hack
+ -- local converter = (newformat ~= format) and converters[format]
+ local converter = (newformat ~= format or resolution) and converters[format]
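+ -- forcing a pass through the converter when a resolution is given, even
+ -- if the format stays the same (e.g. to resample), hence the 'quick hack'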
if converter then
if converter[newformat] then
converter = converter[newformat]
@@ -1307,3 +1309,13 @@ function figures.applyratio(width,height,w,h) -- width and height are strings an
end
end
end
+
+-- example of a simple plugin:
+--
+-- figures.converters.png = {
+-- png = function(oldname,newname,resolution)
+-- local command = string.format('gm convert -depth 1 "%s" "%s"',oldname,newname)
+-- logs.report(string.format("running command %s",command))
+-- os.execute(command)
+-- end,
+-- }
diff --git a/tex/context/base/grph-inc.mkiv b/tex/context/base/grph-inc.mkiv
index f63848587..17af235b9 100644
--- a/tex/context/base/grph-inc.mkiv
+++ b/tex/context/base/grph-inc.mkiv
@@ -98,9 +98,9 @@
\def\figurefullname {\ctxlua{figures.tprint("used","fullname")}}
\def\noffigurepages {\ctxlua{figures.tprint("used","pages",0)}}
-\def\figurefilepath {\ctxsprint{file.dirname (figures.get("used","fullname"))}}
-\def\figurefilename {\ctxsprint{file.nameonly(figures.get("used","fullname"))}}
-\def\figurefiletype {\ctxsprint{file.extname (figures.get("used","fullname"))}}
+\def\figurefilepath {\cldcontext{file.dirname (figures.get("used","fullname"))}}
+\def\figurefilename {\cldcontext{file.nameonly(figures.get("used","fullname"))}}
+\def\figurefiletype {\cldcontext{file.extname (figures.get("used","fullname"))}}
\let\naturalfigurewidth \figurenaturalwidth
\let\naturalfigureheight \figurenaturalheight
diff --git a/tex/context/base/java-ini.lua b/tex/context/base/java-ini.lua
index 55b60c14f..6f629cec0 100644
--- a/tex/context/base/java-ini.lua
+++ b/tex/context/base/java-ini.lua
@@ -187,18 +187,27 @@ end
local patterns = { "java-imp-%s.mkiv", "java-imp-%s.tex", "java-%s.mkiv", "java-%s.tex" }
+local function action(name,foundname)
+ context.startnointerference()
+ context.startreadingfile()
+ context.input(foundname)
+ status_javascripts("loaded: library '%s'",name)
+ context.stopreadingfile()
+ context.stopnointerference()
+end
+
+local function failure(name)
+ report_javascripts("unknown: library '%s'",name)
+end
+
function javascripts.usescripts(name)
- -- this will become pure lua, no context
if name ~= variables.reset then -- reset is obsolete
- commands.uselibrary(name,patterns,function(name,foundname)
- context.startnointerference()
- context.startreadingfile()
- context.input(foundname)
- status_javascripts("loaded: library '%s'",name)
- context.stopreadingfile()
- context.stopnointerference()
- end, function(name)
- report_javascripts("unknown: library '%s'",name)
- end)
+ commands.uselibrary {
+ name = name,
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
end
end
diff --git a/tex/context/base/l-file.lua b/tex/context/base/l-file.lua
index 10ae1cb84..bf05e2c64 100644
--- a/tex/context/base/l-file.lua
+++ b/tex/context/base/l-file.lua
@@ -428,10 +428,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
+
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
index 0f89ba0c2..4b1a378ec 100644
--- a/tex/context/base/l-lpeg.lua
+++ b/tex/context/base/l-lpeg.lua
@@ -618,7 +618,7 @@ end
local sort, fastcopy, sortedpairs = table.sort, table.fastcopy, table.sortedpairs -- dependency!
-function lpeg.append(list,pp)
+function lpeg.append(list,pp,delayed)
local p = pp
if #list > 0 then
list = fastcopy(list)
@@ -630,6 +630,14 @@ function lpeg.append(list,pp)
p = P(list[l])
end
end
+ elseif delayed then
+ for k, v in sortedpairs(list) do
+ if p then
+ p = P(k)/list + p
+ else
+ p = P(k)/list
+ end
+ end
else
for k, v in sortedpairs(list) do
if p then
diff --git a/tex/context/base/l-unicode.lua b/tex/context/base/l-unicode.lua
index 0c7b24bd4..f30c32b9a 100644
--- a/tex/context/base/l-unicode.lua
+++ b/tex/context/base/l-unicode.lua
@@ -263,14 +263,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -285,17 +285,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -310,7 +310,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -403,6 +403,7 @@ patterns.toentities = toentities
function utf.toentities(str)
return lpegmatch(toentities,str)
end
+
--~ local utfchr = { } -- 60K -> 2.638 M extra mem but currently not called that often (on latin)
--~
--~ setmetatable(utfchr, { __index = function(t,k) local v = utfchar(k) t[k] = v return v end } )
diff --git a/tex/context/base/lang-ini.lua b/tex/context/base/lang-ini.lua
index 0022c8a41..97528097f 100644
--- a/tex/context/base/lang-ini.lua
+++ b/tex/context/base/lang-ini.lua
@@ -22,7 +22,6 @@ local utfbyte = utf.byte
local format, gsub = string.format, string.gsub
local concat, sortedkeys, sortedpairs = table.concat, table.sortedkeys, table.sortedpairs
local lpegmatch = lpeg.match
-local texwrite = tex.write
local settings_to_array = utilities.parsers.settings_to_array
@@ -219,7 +218,7 @@ if environment.initex then
end
function commands.languagenumber()
- texwrite(0)
+ context(0)
end
else
@@ -268,7 +267,7 @@ else
languages.getnumber = getnumber
function commands.languagenumber(tag,default,patterns)
- texwrite(getnumber(tag,default,patterns))
+ context(getnumber(tag,default,patterns))
end
end
diff --git a/tex/context/base/lang-ini.mkiv b/tex/context/base/lang-ini.mkiv
index 2959028bf..5cd7eaba6 100644
--- a/tex/context/base/lang-ini.mkiv
+++ b/tex/context/base/lang-ini.mkiv
@@ -13,7 +13,7 @@
%D This module needs a further cleanup (real split between ii/iv).
-% \ctxlua{tex.sprint(languages.numbers[tex.count.mainlanguagenumber])}
+% \cldcontext{languages.numbers[tex.count.mainlanguagenumber]}
%D This module implements the (for the moment still simple)
%D multi||language support of \CONTEXT, which should not be
@@ -347,7 +347,7 @@
\newtoks \everylanguage
\def\docomplexlanguage% assumes that \currentlanguage is set % % % use different name as complex
- {\normallanguage\ctxlua{commands.languagenumber(%
+ {\normallanguage\ctxcommand{languagenumber(%
"\currentlanguage",%
"\defaultlanguage\currentlanguage",%
"\languageparameter\s!patterns"%
diff --git a/tex/context/base/lang-lab.lua b/tex/context/base/lang-lab.lua
index 1947616a9..d113922a1 100644
--- a/tex/context/base/lang-lab.lua
+++ b/tex/context/base/lang-lab.lua
@@ -61,7 +61,6 @@ if not modules then modules = { } end modules ['lang-lab'] = {
local format, find = string.format, string.find
local next, rawget, type = next, rawget, type
-local texsprint = tex.sprint
local prtcatcodes = tex.prtcatcodes
languages.labels = languages.labels or { }
@@ -75,6 +74,7 @@ function languages.labels.define()
local data = languages.data.labels
local function define(command,list,prefixed)
if list then
+ context.pushcatcodes(prtcatcodes) -- context.unprotect
for tag, data in next, list do
if data.hidden then
-- skip
@@ -84,15 +84,15 @@ function languages.labels.define()
-- skip
elseif prefixed and rawget(variables,tag) then
if type(text) == "table" then
- texsprint(prtcatcodes,format("\\%s[%s][\\v!%s={{%s},{%s}}]",command,language,tag,text[1],text[2]))
+ context("\\%s[%s][\\v!%s={{%s},{%s}}]",command,language,tag,text[1],text[2])
else
- texsprint(prtcatcodes,format("\\%s[%s][\\v!%s={{%s},}]",command,language,tag,text))
+ context("\\%s[%s][\\v!%s={{%s},}]",command,language,tag,text)
end
else
if type(text) == "table" then
- texsprint(prtcatcodes,format("\\%s[%s][%s={{%s},{%s}}]",command,language,tag,text[1],text[2]))
+ context("\\%s[%s][%s={{%s},{%s}}]",command,language,tag,text[1],text[2])
else
- texsprint(prtcatcodes,format("\\%s[%s][%s={{%s},}]",command,language,tag,text))
+ context("\\%s[%s][%s={{%s},}]",command,language,tag,text)
end
end
if trace_labels then
@@ -105,12 +105,13 @@ function languages.labels.define()
end
end
end
+ context.popcatcodes() -- context.protect
end
end
- define("setupheadtext", data.titles, true)
- define("setuplabeltext", data.texts, true)
+ define("setupheadtext", data.titles, true)
+ define("setuplabeltext", data.texts, true)
define("setupmathlabeltext", data.functions)
- define("setuptaglabeltext", data.tags)
+ define("setuptaglabeltext", data.tags)
end
--~ function languages.labels.check()
diff --git a/tex/context/base/lang-lab.mkiv b/tex/context/base/lang-lab.mkiv
index ea845450e..db15308e6 100644
--- a/tex/context/base/lang-lab.mkiv
+++ b/tex/context/base/lang-lab.mkiv
@@ -273,10 +273,10 @@
%D Now we load the labels:
-\ifdefined\sixperemspace \else \def\sixperemspace{ } \fi % we could embed 0x2006 but it does not show up in a editor
+\ifdefined\sixperemspace \else \def\sixperemspace{ } \fi % \utfchar{"2006} % we could embed 0x2006 but it does not show up in an editor
%D Now we can load the labels:
-\ctxlua{languages.labels.define()}
+\ctxlua{languages.labels.define()} % no user command
\protect \endinput
diff --git a/tex/context/base/lang-url.lua b/tex/context/base/lang-url.lua
index e7422a38c..9643f8e0b 100644
--- a/tex/context/base/lang-url.lua
+++ b/tex/context/base/lang-url.lua
@@ -103,7 +103,7 @@ table.setmetatablecall(hyphenatedurl,action)
function hyphenatedurl.setcharacters(str,value) -- 1, 2 == before, after
for s in utfcharacters(str) do
- chars[s] = value or 1
+ characters[s] = value or 1
end
end
diff --git a/tex/context/base/lpdf-ini.lua b/tex/context/base/lpdf-ini.lua
index e22e33b4d..cb40edf89 100644
--- a/tex/context/base/lpdf-ini.lua
+++ b/tex/context/base/lpdf-ini.lua
@@ -735,7 +735,7 @@ function lpdf.id()
end
function lpdf.checkedkey(t,key,variant)
- local pn = t[key]
+ local pn = t and t[key]
if pn then
local tn = type(pn)
if tn == variant then
@@ -788,7 +788,7 @@ end
-- lpdf.addtoinfo("ConTeXt.Version", tex.contextversiontoks)
-- lpdf.addtoinfo("ConTeXt.Time", os.date("%Y.%m.%d %H:%M")) -- :%S
--- lpdf.addtoinfo("ConTeXt.Jobname", tex.jobname)
+-- lpdf.addtoinfo("ConTeXt.Jobname", environment.jobname)
-- lpdf.addtoinfo("ConTeXt.Url", "www.pragma-ade.com")
if not pdfreferenceobject then
diff --git a/tex/context/base/lpdf-mov.lua b/tex/context/base/lpdf-mov.lua
index 47e2fbb2a..41db97e0c 100644
--- a/tex/context/base/lpdf-mov.lua
+++ b/tex/context/base/lpdf-mov.lua
@@ -38,7 +38,7 @@ function nodeinjections.insertmovie(specification)
Movie = moviedict,
A = controldict,
}
- write_node(pdfannotation_node(width,height,0,action()))
+ write_node(pdfannotation_node(width,height,0,action())) -- test: context(...)
end
function nodeinjections.insertsound(specification)
@@ -58,6 +58,6 @@ function nodeinjections.insertsound(specification)
Movie = sounddict,
A = controldict,
}
- write_node(pdfannotation_node(0,0,0,action()))
+ write_node(pdfannotation_node(0,0,0,action())) -- test: context(...)
end
end
diff --git a/tex/context/base/lpdf-swf.lua b/tex/context/base/lpdf-swf.lua
index 3825bd44e..aadbbd639 100644
--- a/tex/context/base/lpdf-swf.lua
+++ b/tex/context/base/lpdf-swf.lua
@@ -56,7 +56,7 @@ local function insertswf(spec)
local resources = resources and parametersets[resources]
local display = display and parametersets[display]
- local controls = controls and parametersets[controls] -- not yet used
+ local controls = controls and parametersets[controls] -- not yet used
local preview = checkedkey(display,"preview","string")
local toolbar = checkedkey(display,"toolbar","boolean")
@@ -112,11 +112,14 @@ local function insertswf(spec)
end
end
+ local opendisplay = display and display.open or false
+ local closedisplay = display and display.close or false
+
local configurationreference = pdfreference(pdfflushobject(configuration))
local activation = pdfdictionary {
Type = pdfconstant("RichMediaActivation"),
- Condition = pdfconstant(activations[display.open]),
+ Condition = pdfconstant(activations[opendisplay]),
Configuration = flashreference,
Animation = pdfdictionary {
Subtype = pdfconstant("Linear"),
@@ -156,7 +159,7 @@ local function insertswf(spec)
local deactivation = pdfdictionary {
Type = pdfconstant("RichMediaDeactivation"),
- Condition = pdfconstant(deactivations[display.close]),
+ Condition = pdfconstant(deactivations[closedisplay]),
}
local richmediasettings = pdfdictionary {
@@ -199,5 +202,5 @@ function backends.pdf.nodeinjections.insertswf(spec)
-- factor = spec.factor,
-- label = spec.label,
}
- node.write(pdfannotation_node(spec.width,spec.height,0,annotation()))
+ context(pdfannotation_node(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere
end
diff --git a/tex/context/base/lpdf-wid.lua b/tex/context/base/lpdf-wid.lua
index 026845698..2b1a241e6 100644
--- a/tex/context/base/lpdf-wid.lua
+++ b/tex/context/base/lpdf-wid.lua
@@ -52,7 +52,7 @@ local nodepool = nodes.pool
local pdfannotation_node = nodepool.pdfannotation
local hpack_node = node.hpack
-local write_node = node.write
+local write_node = node.write -- test context(...) instead
local pdf_border = pdfarray { 0, 0, 0 } -- can be shared
diff --git a/tex/context/base/luat-cod.mkiv b/tex/context/base/luat-cod.mkiv
index d3512cc67..fb659f2bb 100644
--- a/tex/context/base/luat-cod.mkiv
+++ b/tex/context/base/luat-cod.mkiv
@@ -43,6 +43,8 @@
%D new functionality. We no longer support the hooks for initializing
%D code as this can be done at the \LUA\ end.
+% instead of \ctxwrite and \ctxprint, use \cldcontext
+
\def\ctxdirectlua{\directlua\zerocount}
\def\ctxlatelua {\latelua \zerocount}
\def\ctxsprint #1{\directlua\zerocount{tex.sprint(tex.ctxcatcodes,#1)}} % saves tokens
@@ -59,7 +61,7 @@
%D Reporting the version of \LUA\ that we use is done as follows:
-\edef\luaversion{\ctxlua{tex.print(_VERSION)}}
+\edef\luaversion{\ctxwrite{_VERSION}} % no context loaded yet

\def\registerctxluafile#1#2{\ctxlua{lua.registercode("#1","#2")}}
\def\ctxloadluafile #1{\ctxlua{lua.registercode("#1")}}
diff --git a/tex/context/base/luat-fio.lua b/tex/context/base/luat-fio.lua
index 2e6beb89c..8e7988c4e 100644
--- a/tex/context/base/luat-fio.lua
+++ b/tex/context/base/luat-fio.lua
@@ -10,6 +10,7 @@ local texiowrite_nl = (texio and texio.write_nl) or print
local texiowrite = (texio and texio.write) or print
local format = string.format
+local concat = table.concat
local sequenced = table.sequenced
texconfig.kpse_init = false
@@ -84,28 +85,34 @@ if not resolvers.instance then
end
--- statistics.register("resource resolver", function()
--- if resolvers.scantime then
--- return format("loadtime %s seconds, scantime %s seconds", resolvers.loadtime(), resolvers.scantime())
--- else
--- return format("loadtime %s seconds", resolvers.loadtime())
--- end
--- end)
-
+local report_system = logs.reporter("system","files")
+local report_files = logs.reporter("used files")
luatex.registerstopactions(function()
local foundintrees = resolvers.instance.foundintrees
- texiowrite_nl("log","\n")
- for i=1,#foundintrees do
- texiowrite_nl("log",format("used file %4i > %s",i,sequenced(foundintrees[i])))
+ if #foundintrees > 0 then
+ logs.pushtarget("logfile")
+ logs.newline()
+ report_system("start used files")
+ logs.newline()
+ for i=1,#foundintrees do
+ report_files("%4i: %s",i,sequenced(foundintrees[i]))
+ end
+ logs.newline()
+ report_system("stop used files")
+ logs.newline()
+ logs.poptarget()
end
- texiowrite_nl("log","")
end)
statistics.register("resource resolver", function()
- return format("loadtime %s seconds, scantime %s seconds, %s found files",
+ local scandata = resolvers.scandata()
+ return format("loadtime %s seconds, %s scans with scantime %s seconds, %s shared scans, %s found files, scanned paths: %s",
resolvers.loadtime(),
- resolvers.scantime and resolvers.scantime() or 0,
- #resolvers.instance.foundintrees
+ scandata.n,
+ scandata.time,
+ scandata.shared,
+ #resolvers.instance.foundintrees,
+ concat(scandata.paths," ")
)
end)
diff --git a/tex/context/base/luat-ini.lua b/tex/context/base/luat-ini.lua
index 9a8651a9c..204cc7bd1 100644
--- a/tex/context/base/luat-ini.lua
+++ b/tex/context/base/luat-ini.lua
@@ -8,12 +8,9 @@ if not modules then modules = { } end modules ['luat-ini'] = {
-- rather experimental down here ... will change with lua 5.2 --
---~ local ctxcatcodes = tex.ctxcatcodes
-
local debug = require "debug"
local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system
local next, setfenv = next, setfenv or debug.setfenv
-local format = string.format
local mark = utilities.storage.mark
@@ -144,7 +141,7 @@ function lua.registername(name,message)
lua.numbers[name] = lnn
end
lua.name[lnn] = message
- tex.write(lnn)
+ context(lnn)
-- initialize once
if name ~= "isolateddata" then
protect_full(name or "shareddata")
@@ -180,7 +177,7 @@ function document.getargument(key,default)
v = (v and "yes") or "no"
document.arguments[key] = v
end
- tex.sprint(tex.ctxcatcodes,v or default or "")
+ context(v or default or "")
end
function document.setfilename(i,name)
@@ -188,5 +185,5 @@ function document.setfilename(i,name)
end
function document.getfilename(i)
- tex.sprint(tex.ctxcatcodes,document.files[i] or "")
+ context(document.files[i] or "")
end
diff --git a/tex/context/base/luat-ini.mkiv b/tex/context/base/luat-ini.mkiv
index a4ac23420..774762ee7 100644
--- a/tex/context/base/luat-ini.mkiv
+++ b/tex/context/base/luat-ini.mkiv
@@ -131,16 +131,16 @@
%D
%D \startbuffer
%D \startluacode
-%D tex.print("LUA")
+%D context("LUA")
%D \stopluacode
%D
%D \startusercode
-%D global.tex.print("USER 1")
-%D tex.print("USER 2")
+%D global.context("USER 1")
+%D context("USER 2")
%D if characters then
-%D tex.print("ACCESS")
+%D context("ACCESS")
%D else
-%D tex.print("NO ACCESS")
+%D context("NO ACCESS")
%D end
%D \stopusercode
%D \stopbuffer
@@ -198,7 +198,7 @@
{\ctxlua{parametersets["#1"]={#2}}%
\endgroup}
-\def\luaparameterset#1#2{\ctxlua{parametersets["#1"]={#2} tex.sprint("#1")}}
+\def\luaparameterset#1#2{\ctxlua{parametersets["#1"]={#2} context("#1")}}
% todo: \mergeparameterset
diff --git a/tex/context/base/luat-lua.lua b/tex/context/base/luat-lua.lua
index 50492540d..d319508f0 100644
--- a/tex/context/base/luat-lua.lua
+++ b/tex/context/base/luat-lua.lua
@@ -6,11 +6,13 @@ if not modules then modules = { } end modules ['luat-lua'] = {
license = "see context related readme files"
}
+local concat = table.concat
+
if lua then do
local delayed = { }
- local function flushdelayed(...)
+ function lua.flushdelayed(...)
local t = delayed
delayed = { }
for i=1, #t do
@@ -23,7 +25,7 @@ if lua then do
end
function lua.flush(...)
- tex.sprint("\\directlua{flushdelayed(",table.concat({...},','),")}")
+ context.directlua("lua.flushdelayed(%s)",concat({...},','))
end
end end
@@ -32,12 +34,14 @@ end end
--~
--~ function test(n)
--~ lua.delay(function(...)
---~ tex.sprint(string.format("pi: %s %s %s\\par",...))
+--~ context("pi: %s %s %s",...)
+--~ context.par()
--~ end)
--~ lua.delay(function(...)
---~ tex.sprint(string.format("more pi: %s %s %s\\par",...))
+--~ context("more pi: %s %s %s",...)
+--~ context.par()
--~ end)
---~ tex.sprint(string.format("\\setbox0=\\hbox{%s}",math.pi*n))
+--~ context("\\setbox0=\\hbox{%s}",math.pi*n)
--~ local box = tex.box[0]
--~ lua.flush(box.width,box.height,box.depth)
--~ end
diff --git a/tex/context/base/luat-mac.lua b/tex/context/base/luat-mac.lua
index 0dc6593c6..4bc028f5c 100644
--- a/tex/context/base/luat-mac.lua
+++ b/tex/context/base/luat-mac.lua
@@ -269,3 +269,43 @@ end
--~ {\normalexpanded{\def\noexpand\next#content\expandafter\noexpand\csname stop#name\endcsname}{#name : #content}%
--~ \next}
--~ ]]))
+
+-- Just an experiment:
+--
+-- \catcode\numexpr"10FF25=\commentcatcode %% > 110000 is invalid
+--
+-- We could have a push/pop mechanism but binding to txtcatcodes
+-- is okay too.
+
+local txtcatcodes = false -- false also serves as an 'as yet unknown' signal
+
+local commentsignal = utf.char(0x10FF25)
+
+local encodecomment = P("%%") / commentsignal --
+----- encodepattern = Cs(((1-encodecomment)^0 * encodecomment)) -- strips but not nice for verbatim
+local encodepattern = Cs((encodecomment + 1)^0)
+local decodecomment = P(commentsignal) / "%%%%" -- why doubles here?
+local decodepattern = Cs((decodecomment + 1)^0)
+
+function resolvers.macros.encodecomment(str)
+ if txtcatcodes and tex.catcodetable == txtcatcodes then
+ return lpegmatch(encodepattern,str) or str
+ else
+ return str
+ end
+end
+
+function resolvers.macros.decodecomment(str) -- normally not needed
+ return txtcatcodes and lpegmatch(decodepattern,str) or str
+end
+
+-- resolvers.macros.commentsignal = commentsignal
+-- resolvers.macros.encodecommentpattern = encodepattern
+-- resolvers.macros.decodecommentpattern = decodepattern
+
+function resolvers.macros.enablecomment(thecatcodes)
+ if not txtcatcodes then
+ txtcatcodes = thecatcodes or catcodes.numbers.txtcatcodes
+ utilities.sequencers.appendaction(resolvers.openers.helpers.textlineactions,"system","resolvers.macros.encodecomment")
+ end
+end
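A standalone round trip of the encode/decode patterns above, for Lua 5.3+ with an installed lpeg module (in ConTeXt both lpeg and the utf helpers are built in); the variable names mirror the ones in the patch but everything here is local to the sketch.

    local lpeg = require "lpeg"
    local P, Cs = lpeg.P, lpeg.Cs

    local commentsignal = utf8.char(0x10FF25)                    -- private plane character
    local encodepattern = Cs((P("%%") / commentsignal + 1)^0)    -- %% -> signal
    local decodepattern = Cs((P(commentsignal) / "%%%%" + 1)^0)  -- signal -> %%

    local line    = "some text %% a trailing comment"
    local encoded = lpeg.match(encodepattern,line)
    local decoded = lpeg.match(decodepattern,encoded)

    assert(encoded ~= line)
    assert(decoded == line)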
diff --git a/tex/context/base/luat-run.lua b/tex/context/base/luat-run.lua
index 51856640d..ce25d1f55 100644
--- a/tex/context/base/luat-run.lua
+++ b/tex/context/base/luat-run.lua
@@ -138,13 +138,14 @@ directives.register("system.synctex", function(v)
else
report_system("synctex functionality is disabled!")
end
- -- current this is bugged:
+ -- currently this is bugged:
tex.synctex = synctex and 1 or 0
-- so for the moment we need:
+ context.normalsynctex()
if synctex then
- tex.print("\\normalsynctex\\plusone")
+ context.plusone()
else
- tex.print("\\normalsynctex\\zerocount")
+ context.zerocount()
end
end)
diff --git a/tex/context/base/luat-sto.lua b/tex/context/base/luat-sto.lua
index 461bd52ae..2c7a25aaa 100644
--- a/tex/context/base/luat-sto.lua
+++ b/tex/context/base/luat-sto.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['luat-sto'] = {
license = "see context related readme files"
}
-local type, next, setmetatable, getmetatable = type, next, setmetatable, getmetatable
+local type, next, setmetatable, getmetatable, collectgarbage = type, next, setmetatable, getmetatable, collectgarbage
local gmatch, format, write_nl = string.gmatch, string.format, texio.write_nl
local serialize, concat, sortedhash = table.serialize, table.concat, table.sortedhash
local bytecode = lua.bytecode
@@ -72,6 +72,22 @@ end
lua.registerfinalizer(dump,"dump storage")
+-- to be tested with otf caching:
+
+function lua.collectgarbage(threshold)
+ local current = collectgarbage("count")
+ local threshold = threshold or 256 * 1024
+ while true do
+ collectgarbage("collect")
+ local previous = collectgarbage("count")
+ if current - previous < threshold then
+ break
+ else
+ current = previous
+ end
+ end
+end
+
-- we also need to count at generation time (nicer for message)
--~ if lua.bytecode then -- from 0 upwards
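A usage sketch for the collector above. Since collectgarbage("count") reports kilobytes, the default threshold of 256 * 1024 means "stop as soon as a full pass frees less than 256 MB", which in practice is a single pass for most runs. The call sites are hypothetical; the function is the one defined in the patch.

    -- after loading a big (otf) cache, reclaim memory in one or more passes
    lua.collectgarbage()            -- default threshold: 256 * 1024 KB
    lua.collectgarbage(8 * 1024)    -- keep collecting while a pass frees at least 8 MB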
diff --git a/tex/context/base/lxml-ctx.mkiv b/tex/context/base/lxml-ctx.mkiv
index ab4e2546c..9a5428a7b 100644
--- a/tex/context/base/lxml-ctx.mkiv
+++ b/tex/context/base/lxml-ctx.mkiv
@@ -20,45 +20,45 @@
\unprotect
-% the letterbar is a messy hack and is needed for the tabulate
+% The \let|=letterbar is a messy hack and is needed for the tabulate. We now use
+% \detokenize.
\settrue \xmllshowbuffer
\setfalse\xmllshowtitle
\settrue \xmllshowwarning
-\definehead[lshowtitle][subsubsubsubsubject]
-\setuphead[lshowtitle][style=\tta]
+\definehead
+ [lshowtitle]
+ [subsubsubsubsubject]
+
+\setuphead
+ [lshowtitle]
+ [\c!style=\tta]
% \unexpanded\def\setuplxmlshow[#1]%
% {\dodoubleargument\getparameters[\??xl]}
-\def\xmllshow#1%
- {\begingroup
- \let|=\letterbar
- \ctxlua{xml.ctx.tshow {
- pattern = \!!bs#1\!!es,
- \ifconditional\xmllshowtitle
- title = "lshowtitle",
- \fi
- \ifconditional\xmllshowwarning
- warning = true,
- \fi
- } }%
- \endgroup}
+\unexpanded\def\xmllshow#1%
+ {\ctxlua{xml.ctx.tshow {
+ pattern = \!!bs\detokenize{#1}\!!es,
+ \ifconditional\xmllshowtitle
+ title = "lshowtitle",
+ \fi
+ \ifconditional\xmllshowwarning
+ warning = true,
+ \fi
+ } }}
-\def\xmllshowbuffer#1#2#3%
- {\begingroup
- \let|=\letterbar
- \ctxlua{xml.ctx.tshow {
- pattern = \!!bs#2\!!es,
- \ifconditional\xmllshowbuffer
- xmlroot = "#1",
- attribute = "#3",
- \fi
- \ifconditional\xmllshowwarning
- warning = true,
- \fi
- } }%
- \endgroup}
+\unexpanded\def\xmllshowbuffer#1#2#3%
+ {\ctxlua{xml.ctx.tshow {
+ pattern = \!!bs\detokenize{#2}\!!es,
+ \ifconditional\xmllshowbuffer
+ xmlroot = "#1",
+ attribute = "#3",
+ \fi
+ \ifconditional\xmllshowwarning
+ warning = true,
+ \fi
+ } }}
\protect
diff --git a/tex/context/base/lxml-dir.lua b/tex/context/base/lxml-dir.lua
index 0924931c1..00375193f 100644
--- a/tex/context/base/lxml-dir.lua
+++ b/tex/context/base/lxml-dir.lua
@@ -7,8 +7,6 @@ if not modules then modules = { } end modules ['lxml-dir'] = {
}
local format, gsub = string.format, string.gsub
-local getid = lxml.getid
-local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
--~ <?xml version="1.0" standalone="yes"?>
--~ <!-- demo.cdx -->
@@ -25,7 +23,9 @@ local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
--~ <directive attribute='cdx' value="*" element="cals:table" setup="cdx:cals:table:*"/>
--~ </directives>
-local lxml = lxml
+local lxml, context = lxml, context
+
+local getid = lxml.getid
lxml.directives = lxml.directives or { }
local directives = lxml.directives
@@ -84,14 +84,15 @@ local function handle_setup(category,root,attribute,element)
setup = setup[category]
end
if setup then
- texsprint(ctxcatcodes,"\\directsetup{",setup,"}")
+ context.directsetup(setup)
else
setup = data[format("%s::%s::*",element,attribute)]
if setup then
setup = setup[category]
end
if setup then
- texsprint(ctxcatcodes,"\\directsetup{",gsub(setup,'%*',value),"}")
+ setup = gsub(setup,'%*',value)
+ context.directsetup(setup)
end
end
end
diff --git a/tex/context/base/lxml-ent.lua b/tex/context/base/lxml-ent.lua
index d47b44c2f..c7811c2a3 100644
--- a/tex/context/base/lxml-ent.lua
+++ b/tex/context/base/lxml-ent.lua
@@ -7,7 +7,6 @@ if not modules then modules = { } end modules ['lxml-ent'] = {
}
local type, next, tonumber = type, next, tonumber
-local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
local utf = unicode.utf8
local byte, format = string.byte, string.format
local utfupper, utfchar = utf.upper, utf.char
@@ -41,10 +40,6 @@ function xml.registerentity(key,value)
end
end
---~ entities.amp = function() tex.write("&") end
---~ entities.lt = function() tex.write("<") end
---~ entities.gt = function() tex.write(">") end
-
if characters and characters.entities then
function characters.registerentities(forcecopy)
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index d114bb83d..5c5220cb8 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -35,10 +35,11 @@ local xml = xml
--~ local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -151,9 +152,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -319,7 +333,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -330,6 +344,57 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = {
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";"
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -371,7 +436,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -399,12 +464,12 @@ local function handle_any_entity(str)
a = entities[str]
end
if a then
-if type(a) == "function" then
- if trace_entities then
- report_xml("expanding entity &%s; (function)",str)
- end
- a = a(str) or ""
-end
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
a = lpegmatch(parsedentity,a) or a
if trace_entities then
report_xml("resolved entity &%s; -> %s (internal)",str,a)
@@ -440,18 +505,25 @@ end
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -591,17 +663,29 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -653,7 +737,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -661,6 +745,11 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+unify_predefined, cleanup, entities = nil, nil, nil
+stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+acache, hcache, dcache = nil, nil, nil
+reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
@@ -807,7 +896,7 @@ local function verbose_element(e,handlers)
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -823,7 +912,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -844,11 +933,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e))
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -883,7 +972,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -1011,20 +1100,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -1033,28 +1135,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
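A standalone sketch (Lua 5.3+) of the private-token round trip introduced in this file: an unknown entity &name; is mapped to a character in a private plane while the tree is built, and mapped back when text is flushed. The names below are local stand-ins for privates_n, privates_p, unescaped and unprivatized above.

    local utf8char, charpattern = utf8.char, utf8.charpattern

    local nofprivates = 0xF0000
    local privates_n  = { }   -- entity name      -> private character
    local privates_p  = { }   -- private character -> "&name;"

    local function privatetoken(name)                -- cf. unescaped() above
        local p = privates_n[name]
        if not p then
            nofprivates = nofprivates + 1
            p = utf8char(nofprivates)
            privates_n[name] = p
            privates_p[p] = "&" .. name .. ";"
        end
        return p
    end

    local function unprivatized(s)                   -- cf. unprivatized() above
        return (s:gsub(charpattern,privates_p))      -- table lookup, misses are kept as-is
    end

    local stored = "x " .. privatetoken("foo") .. " y"
    assert(unprivatized(stored) == "x &foo; y")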
diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua
index 1afccbfcb..1195f3be2 100644
--- a/tex/context/base/lxml-tex.lua
+++ b/tex/context/base/lxml-tex.lua
@@ -6,6 +6,10 @@ if not modules then modules = { } end modules ['lxml-tst'] = {
license = "see context related readme files"
}
+-- Because we split and resolve entities we use the direct printing
+-- interface and not the context one. If we ever do that there will
+-- be a cldf-xml helper library.
+
local utf = unicode.utf8
local utfchar, utfupper = utf.char, utf.upper
@@ -18,16 +22,18 @@ local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
local tex, xml = tex, xml
local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered
-lxml = lxml or { }
+lxml = lxml or { }
local lxml = lxml
-local texsprint, texprint, texwrite = tex.sprint, tex.print, tex.write
-local texcatcodes, ctxcatcodes, vrbcatcodes, notcatcodes = tex.texcatcodes, tex.ctxcatcodes, tex.vrbcatcodes, tex.notcatcodes
+local ctxcatcodes, notcatcodes = tex.ctxcatcodes, tex.notcatcodes
+
+local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing)
local xmlelements, xmlcollected, xmlsetproperty = xml.elements, xml.collected, xml.setproperty
local xmlwithelements = xml.withelements
local xmlserialize, xmlcollect, xmltext, xmltostring = xml.serialize, xml.collect, xml.text, xml.tostring
local xmlapplylpath = xml.applylpath
+local xmlunprivatized, xmlprivatetoken = xml.unprivatized, xml.privatetoken
local variables = (interfaces and interfaces.variables) or { }
@@ -43,6 +49,8 @@ local trace_comments = false trackers.register("lxml.comments", function(v) tra
local report_lxml = logs.reporter("xml","tex")
-- tex entities
+--
+-- todo: unprivatize attributes
lxml.entities = lxml.entities or { }
@@ -79,7 +87,7 @@ function lxml.resolvedentity(str)
e = e(str)
end
if e then
- texsprint(notcatcodes,e)
+ contextsprint(notcatcodes,e)
end
return
end
@@ -110,112 +118,53 @@ local finalizers = xml.finalizers
finalizers.xml = finalizers.xml or { }
finalizers.tex = finalizers.tex or { }
--- this might look inefficient but it's actually rather efficient
--- because we avoid tokenization of leading spaces and xml can be
--- rather verbose (indented)
+-- serialization with entity handling
-local newline = lpeg.patterns.newline
-local space = lpeg.patterns.spacer
local ampersand = P("&")
local semicolon = P(";")
-local spacing = newline * space^0
-local content = C((1-spacing-ampersand)^1)
-local verbose = C((1-(space+newline))^1)
-local entity = ampersand * C((1-semicolon)^1) * semicolon
-
-local xmltextcapture = (
- space^0 * newline^2 * Cc("") / texprint + -- better ^-2 ?
- space^0 * newline * space^0 * Cc(" ") / texsprint +
- content / function(str) return texsprint(notcatcodes,str) end + -- was just texsprint, current catcodes regime is notcatcodes
- entity / lxml.resolvedentity
-)^0
-
-local ctxtextcapture = (
- space^0 * newline^2 * Cc("") / texprint + -- better ^-2 ?
- space^0 * newline * space^0 * Cc(" ") / texsprint +
- content / function(str) return texsprint(ctxcatcodes,str) end + -- was just texsprint, current catcodes regime is notcatcodes
- entity / lxml.resolvedentity
-)^0
-
-local forceraw, rawroot = false, nil
-
-function lxml.startraw()
- forceraw = true
-end
+local entity = ampersand * C((1-semicolon)^1) * semicolon / lxml.resolvedentity -- context.bold
-function lxml.stopraw()
- forceraw = false
-end
-
-function lxml.rawroot()
- return rawroot
-end
+local _, xmltextcapture = context.newtexthandler {
+ exception = entity,
+ catcodes = notcatcodes
+}
---~ function lxml.rawpath(rootid)
---~ if rawroot and type(rawroot) == "table" then
---~ local text, path, rp
---~ if not rawroot.dt then
---~ text, path, rp = "text", "", rawroot[0]
---~ else
---~ path, rp = "tree", "", rawroot.__p__
---~ end
---~ while rp do
---~ local rptg = rp.tg
---~ if rptg then
---~ path = rptg .. "/" .. path
---~ end
---~ rp = rp.__p__
---~ end
---~ return { rootid, "/" .. path, text }
---~ end
---~ end
+local _, ctxtextcapture = context.newtexthandler {
+ exception = entity,
+ catcodes = ctxcatcodes
+}
-- cdata
-local linecommand = "\\obeyedline"
-local spacecommand = "\\obeyedspace" -- "\\strut\\obeyedspace"
-local beforecommand = ""
-local aftercommand = ""
-
-local xmlverbosecapture = (
- newline / function( ) texsprint(texcatcodes,linecommand,"{}") end +
- verbose / function(s) texsprint(vrbcatcodes,s) end +
- space / function( ) texsprint(texcatcodes,spacecommand,"{}") end
-)^0
+local toverbatim = context.newverbosehandler {
+ line = context.xmlcdataobeyedline,
+ space = context.xmlcdataobeyedspace,
+ before = context.xmlcdatabefore,
+ after = context.xmlcdataafter,
+}
-local function toverbatim(str)
- if beforecommand then texsprint(texcatcodes,beforecommand,"{}") end
- lpegmatch(xmlverbosecapture,str)
- if aftercommand then texsprint(texcatcodes,aftercommand,"{}") end
-end
+lxml.toverbatim = context.newverbosehandler {
+ line = context.xmlcdataobeyedline,
+ space = context.xmlcdataobeyedspace,
+ before = context.xmlcdatabefore,
+ after = context.xmlcdataafter,
+ strip = true,
+}
-function lxml.setverbatim(before,after,obeyedline,obeyedspace)
- beforecommand, aftercommand, linecommand, spacecommand = before, after, obeyedline, obeyedspace
-end
+-- raw flushing
-local obeycdata = true
+local forceraw, rawroot = false, nil
-function lxml.setcdata()
- obeycdata = true
+function lxml.startraw()
+ forceraw = true
end
-function lxml.resetcdata()
- obeycdata = false
+function lxml.stopraw()
+ forceraw = false
end
--- cdata and verbatim
-
-lxml.setverbatim("\\xmlcdatabefore", "\\xmlcdataafter", "\\xmlcdataobeyedline", "\\xmlcdataobeyedspace")
-
--- local capture = (space^0*newline)^0 * capture * (space+newline)^0 * -1
-
-function lxml.toverbatim(str)
- if beforecommand then texsprint(texcatcodes,beforecommand,"{}") end
- -- todo: add this to capture
- str = gsub(str,"^[ \t]+[\n\r]+","")
- str = gsub(str,"[ \t\n\r]+$","")
- lpegmatch(xmlverbosecapture,str)
- if aftercommand then texsprint(texcatcodes,aftercommand,"{}") end
+function lxml.rawroot()
+ return rawroot
end
-- storage
@@ -358,22 +307,23 @@ end
function lxml.withindex(name,n,command) -- will change as name is always there now
local i, p = lpegmatch(splitter,n)
if p then
- texsprint(ctxcatcodes,"\\xmlw{",command,"}{",n,"}")
+ contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",n,"}")
else
- texsprint(ctxcatcodes,"\\xmlw{",command,"}{",name,"::",n,"}")
+ contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",name,"::",n,"}")
end
end
function lxml.getindex(name,n) -- will change as name is always there now
local i, p = lpegmatch(splitter,n)
if p then
- texsprint(ctxcatcodes,n)
+ contextsprint(ctxcatcodes,n)
else
- texsprint(ctxcatcodes,name,"::",n)
+ contextsprint(ctxcatcodes,name,"::",n)
end
end
--- loading (to be redone, no overload)
+-- loading (to be redone, no overload) .. best use different methods and
+-- keep raw xml (at least as option)
xml.originalload = xml.originalload or xml.load
@@ -391,21 +341,23 @@ end
local entities = xml.entities
local function entityconverter(id,str)
- return entities[str] or "" -- -and "&"..str..";" -- feed back into tex end later
+ return entities[str] or xmlprivatetoken(str) or "" -- roundtrip handler
end
function lxml.convert(id,data,entities,compress)
- local settings = {
- unify_predefined_entities = true,
---~ resolve_predefined_entities = true,
+ local settings = { -- we're now roundtrip anyway
+ unify_predefined_entities = true,
+ utfize_entities = true,
+ resolve_predefined_entities = true,
+ resolve_entities = function(str) return entityconverter(id,str) end, -- needed for mathml
}
if compress and compress == variables.yes then
settings.strip_cm_and_dt = true
end
- if entities and entities == variables.yes then
- settings.utfize_entities = true
- -- settings.resolve_entities = function (str) return entityconverter(id,str) end
- end
+ -- if entities and entities == variables.yes then
+ -- settings.utfize_entities = true
+ -- -- settings.resolve_entities = function (str) return entityconverter(id,str) end
+ -- end
return xml.convert(data,settings)
end
@@ -525,10 +477,10 @@ local function tex_element(e,handlers)
addindex(rootname,false,true)
ix = e.ix
end
- texsprint(ctxcatcodes,"\\xmlw{",command,"}{",rootname,"::",ix,"}")
+ contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",rootname,"::",ix,"}")
else
report_lxml( "fatal error: no index for '%s'",command)
- texsprint(ctxcatcodes,"\\xmlw{",command,"}{",ix or 0,"}")
+ contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",ix or 0,"}")
end
elseif tc == "function" then
command(e)
@@ -551,7 +503,7 @@ pihandlers[#pihandlers+1] = function(str)
if str then
local a, b, c, d = lpegmatch(parser,str)
if d then
- texsprint(ctxcatcodes,"\\xmlcontextdirective{",a,"}{",b,"}{",c,"}{",d,"}")
+ contextsprint(ctxcatcodes,"\\xmlcontextdirective{",a,"}{",b,"}{",c,"}{",d,"}")
end
end
end
@@ -563,6 +515,16 @@ local function tex_pi(e,handlers)
end
end
+local obeycdata = true
+
+function lxml.setcdata()
+ obeycdata = true
+end
+
+function lxml.resetcdata()
+ obeycdata = false
+end
+
local function tex_cdata(e,handlers)
if obeycdata then
toverbatim(e.dt[1])
@@ -570,16 +532,17 @@ local function tex_cdata(e,handlers)
end
local function tex_text(e)
+ e = xmlunprivatized(e)
lpegmatch(xmltextcapture,e)
end
-local function ctx_text(e)
+local function ctx_text(e) -- can be just context(e) as we split there
lpegmatch(ctxtextcapture,e)
end
local function tex_handle(...)
-- report_lxml( "error while flushing: %s", concat { ... })
- texsprint(...) -- notcatcodes is active anyway
+ contextsprint(ctxcatcodes,...) -- notcatcodes is active anyway
end
local xmltexhandler = xml.newhandlers {
@@ -617,11 +580,12 @@ local function sprint(root)
local tr = type(root)
if tr == "string" then -- can also be result of lpath
-- rawroot = false
+ root = xmlunprivatized(root)
lpegmatch(xmltextcapture,root)
elseif tr == "table" then
if forceraw then
rawroot = root
- texwrite(xmltostring(root))
+ contextsprint(ctxcatcodes,xmltostring(root))
else
xmlserialize(root,xmltexhandler)
end
@@ -641,6 +605,7 @@ local function tprint(root) -- we can move sprint inline
end
end
elseif tr == "string" then
+ root = xmlunprivatized(root)
lpegmatch(xmltextcapture,root)
end
end
@@ -651,12 +616,13 @@ local function cprint(root) -- content
-- quit
elseif type(root) == 'string' then
-- rawroot = false
+ root = xmlunprivatized(root)
lpegmatch(xmltextcapture,root)
else
local rootdt = root.dt
if forceraw then
rawroot = root
- texwrite(xmltostring(rootdt or root))
+ contextsprint(ctxcatcodes,xmltostring(rootdt or root))
else
xmlserialize(rootdt or root,xmltexhandler)
end
@@ -776,7 +742,7 @@ function lxml.flushsetups(id,...)
if trace_loading then
report_lxml("applying setup %02i = %s to %s",k,v,document)
end
- texsprint(ctxcatcodes,"\\xmlsetup{",id,"}{",v,"}")
+ contextsprint(ctxcatcodes,"\\xmlsetup{",id,"}{",v,"}")
done[v] = true
end
end
@@ -956,7 +922,7 @@ local function reverse(collected)
end
local function count(collected)
- texwrite((collected and #collected) or 0)
+ contextsprint(ctxcatcodes,(collected and #collected) or 0)
end
local function position(collected,n)
@@ -974,7 +940,7 @@ end
local function match(collected) -- is match in preceding collected, never change, see bibxml
local m = collected and collected[1]
- texwrite(m and m.mi or 0)
+ contextsprint(ctxcatcodes,m and m.mi or 0)
end
local function index(collected,n)
@@ -984,7 +950,7 @@ local function index(collected,n)
n = #collected + n + 1 -- brrr
end
if n > 0 then
- texwrite(collected[n].ni or 0)
+ contextsprint(ctxcatcodes,collected[n].ni or 0)
end
end
end
@@ -999,10 +965,10 @@ local function command(collected,cmd,otherwise)
lxml.addindex(e.name,false,true)
ix = e.ix
end
- texsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",e.name,"::",ix,"}")
+ contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",e.name,"::",ix,"}")
end
elseif otherwise then
- texsprint(ctxcatcodes,"\\xmlw{",otherwise,"}{#1}")
+ contextsprint(ctxcatcodes,"\\xmlw{",otherwise,"}{#1}")
end
end
@@ -1011,10 +977,10 @@ local function attribute(collected,a,default)
local at = collected[1].at
local str = (at and at[a]) or default
if str and str ~= "" then
- texsprint(notcatcodes,str)
+ contextsprint(notcatcodes,str)
end
elseif default then
- texsprint(notcatcodes,default)
+ contextsprint(notcatcodes,default)
end
end
@@ -1026,7 +992,7 @@ local function chainattribute(collected,arguments) -- todo: optional levels
if at then
local a = at[arguments]
if a then
- texsprint(notcatcodes,a)
+ contextsprint(notcatcodes,a)
end
else
break -- error
@@ -1050,7 +1016,7 @@ end
local function ctxtext(collected)
if collected then
for c=1,#collected do
- texsprint(ctxcatcodes,collected[1].dt)
+ contextsprint(ctxcatcodes,collected[1].dt)
end
end
end
@@ -1072,7 +1038,7 @@ end
local function lower(collected)
if collected then
for c=1,#collected do
- texsprint(ctxcatcodes,lowerchars(collected[1].dt[1]))
+ contextsprint(ctxcatcodes,lowerchars(collected[1].dt[1]))
end
end
end
@@ -1080,7 +1046,7 @@ end
local function upper(collected)
if collected then
for c=1,#collected do
- texsprint(ctxcatcodes,upperchars(collected[1].dt[1]))
+ contextsprint(ctxcatcodes,upperchars(collected[1].dt[1]))
end
end
end
@@ -1091,7 +1057,7 @@ local function number(collected)
for c=1,#collected do
n = n + tonumber(collected[c].dt[1] or 0)
end
- texwrite(n)
+ contextsprint(ctxcatcodes,n)
end
end
@@ -1111,9 +1077,9 @@ local function concatrange(collected,start,stop,separator,lastseparator,textonly
if i == nofcollected then
-- nothing
elseif i == nofcollected-1 and lastseparator ~= "" then
- texsprint(ctxcatcodes,lastseparator)
+ contextsprint(ctxcatcodes,lastseparator)
elseif separator ~= "" then
- texsprint(ctxcatcodes,separator)
+ contextsprint(ctxcatcodes,separator)
end
end
end
@@ -1157,7 +1123,7 @@ function finalizers.tag(collected)
c = collected[#collected-n+1]
end
if c then
- texsprint(c.tg)
+ contextsprint(ctxcatcodes,c.tg)
end
end
end
@@ -1174,9 +1140,9 @@ function finalizers.name(collected)
end
if c then
if c.ns == "" then
- texsprint(c.tg)
+ contextsprint(ctxcatcodes,c.tg)
else
- texsprint(c.ns,":",c.tg)
+ contextsprint(ctxcatcodes,c.ns,":",c.tg)
end
end
end
@@ -1188,9 +1154,9 @@ function finalizers.tags(collected,nonamespace)
local e = collected[c]
local ns, tg = e.ns, e.tg
if nonamespace or ns == "" then
- texsprint(tg)
+ contextsprint(ctxcatcodes,tg)
else
- texsprint(ns,":",tg)
+ contextsprint(ctxcatcodes,ns,":",tg)
end
end
end
@@ -1201,14 +1167,17 @@ end
local function verbatim(id,before,after)
local root = getid(id)
if root then
- if before then texsprint(ctxcatcodes,before,"[",root.tg or "?","]") end
+ if before then contextsprint(ctxcatcodes,before,"[",root.tg or "?","]") end
lxml.toverbatim(xmltostring(root.dt))
- if after then texsprint(ctxcatcodes,after) end
+--~ lxml.toverbatim(xml.totext(root.dt))
+ if after then contextsprint(ctxcatcodes,after) end
end
end
+
function lxml.inlineverbatim(id)
verbatim(id,"\\startxmlinlineverbatim","\\stopxmlinlineverbatim")
end
+
function lxml.displayverbatim(id)
verbatim(id,"\\startxmldisplayverbatim","\\stopxmldisplayverbatim")
end
@@ -1253,19 +1222,19 @@ end
function lxml.raw(id,pattern) -- the content, untouched by commands
local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id)
if collected then
- texsprint(xmltostring(collected[1].dt))
+ contextsprint(notcatcodes,xmltostring(collected[1].dt))
end
end
function lxml.context(id,pattern) -- the content, untouched by commands
if not pattern then
local collected = getid(id)
- -- texsprint(ctxcatcodes,collected.dt[1])
+ -- contextsprint(ctxcatcodes,collected.dt[1])
ctx_text(collected.dt[1])
else
local collected = xmlapplylpath(getid(id),pattern) or getid(id)
if collected and #collected > 0 then
- texsprint(ctxcatcodes,collected[1].dt)
+ contextsprint(ctxcatcodes,collected[1].dt)
end
end
end
@@ -1309,7 +1278,7 @@ lxml.index = lxml.position
function lxml.pos(id)
local root = getid(id)
- texwrite((root and root.ni) or 0)
+ contextsprint(ctxcatcodes,(root and root.ni) or 0)
end
function lxml.att(id,a,default)
@@ -1318,10 +1287,10 @@ function lxml.att(id,a,default)
local at = root.at
local str = (at and at[a]) or default
if str and str ~= "" then
- texsprint(notcatcodes,str)
+ contextsprint(notcatcodes,str)
end
elseif default then
- texsprint(notcatcodes,default)
+ contextsprint(notcatcodes,default)
end
end
@@ -1329,23 +1298,23 @@ function lxml.name(id) -- or remapped name? -> lxml.info, combine
local r = getid(id)
local ns = r.rn or r.ns or ""
if ns ~= "" then
- texsprint(ns,":",r.tg)
+ contextsprint(ctxcatcodes,ns,":",r.tg)
else
- texsprint(r.tg)
+ contextsprint(ctxcatcodes,r.tg)
end
end
function lxml.match(id) -- or remapped name? -> lxml.info, combine
- texsprint(getid(id).mi or 0)
+ contextsprint(ctxcatcodes,getid(id).mi or 0)
end
function lxml.tag(id) -- tag vs name -> also in l-xml tag->name
- texsprint(getid(id).tg or "")
+ contextsprint(ctxcatcodes,getid(id).tg or "")
end
function lxml.namespace(id) -- or remapped name?
local root = getid(id)
- texsprint(root.rn or root.ns or "")
+ contextsprint(ctxcatcodes,root.rn or root.ns or "")
end
function lxml.flush(id)
@@ -1382,7 +1351,7 @@ function lxml.command(id,pattern,cmd)
addindex(rootname,false,true)
ix = e.ix
end
- texsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",rootname,"::",ix,"}")
+ contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",rootname,"::",ix,"}")
end
end
end
@@ -1491,7 +1460,7 @@ lxml.get_id = getid lxml.obsolete.get_id = getid
function xml.finalizers.tex.lettered(collected)
if collected then
for c=1,#collected do
- texsprint(ctxcatcodes,lettered(collected[1].dt[1]))
+ contextsprint(ctxcatcodes,lettered(collected[1].dt[1]))
end
end
end
@@ -1499,7 +1468,7 @@ end
--~ function xml.finalizers.tex.apply(collected,what) -- to be tested
--~ if collected then
--~ for c=1,#collected do
---~ texsprint(ctxcatcodes,what(collected[1].dt[1]))
+--~ contextsprint(ctxcatcodes,what(collected[1].dt[1]))
--~ end
--~ end
--~ end
diff --git a/tex/context/base/lxml-xml.lua b/tex/context/base/lxml-xml.lua
index 89fcba871..5012f69e5 100644
--- a/tex/context/base/lxml-xml.lua
+++ b/tex/context/base/lxml-xml.lua
@@ -6,15 +6,16 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -119,10 +120,39 @@ local function raw(collected) -- hybrid
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
if collected then
local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ return (e and xmltotext(e.dt)) or ""
else
return ""
end
@@ -270,10 +300,10 @@ function xml.text(id,pattern)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return (collected and xmltotext(collected[1].dt)) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id.dt) or ""
else
return ""
end
@@ -281,6 +311,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
diff --git a/tex/context/base/m-barcodes.mkiv b/tex/context/base/m-barcodes.mkiv
index 89e089522..16b553fa2 100644
--- a/tex/context/base/m-barcodes.mkiv
+++ b/tex/context/base/m-barcodes.mkiv
@@ -65,7 +65,7 @@ function moduledata.barcodes.isbn_1(original)
code= code .. c
end
end
- tex.sprint(code)
+ context(code)
end
function moduledata.barcodes.isbn_2(original)
@@ -74,7 +74,7 @@ function moduledata.barcodes.isbn_2(original)
if t and #t == 12 then
original = original .. "-" .. c
end
- tex.sprint(original)
+ context(original)
end
\stopluacode
diff --git a/tex/context/base/m-database.lua b/tex/context/base/m-database.lua
index 8dde9690e..b9ec3aa36 100644
--- a/tex/context/base/m-database.lua
+++ b/tex/context/base/m-database.lua
@@ -10,7 +10,9 @@ local sub, gmatch, format = string.sub, string.gmatch, string.format
local concat = table.concat
local lpegpatterns, lpegmatch, lpegsplitat = lpeg.patterns, lpeg.match, lpeg.splitat
local lpegP, lpegC, lpegS, lpegCt = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct
-local sprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
+
+-- One also needs to enable context.trace; here we only plug in some code (maybe
+-- some day this tracker will also toggle the main context tracer).
local trace_flush = false trackers.register("module.database.flush", function(v) trace_flush = v end)
@@ -26,14 +28,8 @@ local separators = { -- not interfaced
spaces = lpegpatterns.space^1,
}
-local function tracedsprint(c,str)
- report_database("snippet: %s",str)
- sprint(c,str)
-end
-
function buffers.database.process(settings)
local data
- local sprint = trace_flush and tracedsprint or sprint
if settings.type == "file" then
local filename = resolvers.finders.byscheme("any",settings.database)
data = filename ~= "" and io.loaddata(filename)
@@ -42,6 +38,9 @@ function buffers.database.process(settings)
data = buffers.getlines(settings.database)
end
if data and #data > 0 then
+ if trace_flush then
+ context.pushlogger(report_database)
+ end
local separatorchar, quotechar, commentchar = settings.separator, settings.quotechar, settings.commentchar
local before, after = settings.before or "", settings.after or ""
local first, last = settings.first or "", settings.last or ""
@@ -70,38 +69,57 @@ function buffers.database.process(settings)
for i=1,#data do
local line = data[i]
if line ~= "" and (not checker or not lpegmatch(checker,line)) then
- local result, r = { }, 0 -- we collect as this is nicer in tracing
local list = lpegmatch(splitter,line)
if not found then
if setups ~= "" then
- sprint(ctxcatcodes,format("\\begingroup\\setups[%s]",setups))
+ context.begingroup()
+ context.setups { setups }
end
- sprint(ctxcatcodes,before)
+ context(before)
found = true
end
- r = r + 1 ; result[r] = first
- for j=1,#list do
- r = r + 1 ; result[r] = left
- if command == "" then
- r = r + 1 ; result[r] = list[j]
- else
- r = r + 1 ; result[r] = command
- r = r + 1 ; result[r] = "{"
- r = r + 1 ; result[r] = list[j]
- r = r + 1 ; result[r] = "}"
+ if trace_flush then
+ local result, r = { }, 0
+ r = r + 1 ; result[r] = first
+ for j=1,#list do
+ r = r + 1 ; result[r] = left
+ if command == "" then
+ r = r + 1 ; result[r] = list[j]
+ else
+ r = r + 1 ; result[r] = command
+ r = r + 1 ; result[r] = "{"
+ r = r + 1 ; result[r] = list[j]
+ r = r + 1 ; result[r] = "}"
+ end
+ r = r + 1 ; result[r] = right
end
- r = r + 1 ; result[r] = right
+ r = r + 1 ; result[r] = last
+ context(concat(result))
+ else
+ context(first)
+ for j=1,#list do
+ context(left)
+ if command == "" then
+ context(list[j])
+ else
+ context(command)
+ context(false,list[j])
+ end
+ context(right)
+ end
+ context(last)
end
- r = r + 1 ; result[r] = last
- sprint(ctxcatcodes,concat(result))
end
end
if found then
- sprint(ctxcatcodes,after)
+ context(after)
if setups ~= "" then
- sprint(ctxcatcodes,"\\endgroup")
+ context.endgroup()
end
end
+ if trace_flush then
+ context.poplogger()
+ end
else
-- message
end
diff --git a/tex/context/base/m-graph.mkiv b/tex/context/base/m-graph.mkiv
index 53ca3962e..0fd22229f 100644
--- a/tex/context/base/m-graph.mkiv
+++ b/tex/context/base/m-graph.mkiv
@@ -31,7 +31,7 @@
str = str:gsub("e(.-)$",strip)
str = ("\\mathematics{%s}"):format(str)
end
- tex.sprint(str)
+ context(str)
end
\stopluacode
diff --git a/tex/context/base/m-timing.mkiv b/tex/context/base/m-timing.mkiv
index 409dde6ad..e5413ddb1 100644
--- a/tex/context/base/m-timing.mkiv
+++ b/tex/context/base/m-timing.mkiv
@@ -58,8 +58,8 @@ end
{\setbox\scratchbox\vbox\bgroup\startMPcode
begingroup ; save p, q, b, h, w ;
path p, q, b ; numeric h, w ;
- p := \ctxlua{tex.sprint(moduledata.progress.path("#1","#2"))} ;
-% p := p shifted -llcorner p ;
+ p := \cldcontext{moduledata.progress.path("#1","#2")} ;
+ % p := p shifted -llcorner p ;
if bbwidth(p) > 1 :
h := 100 ; w := 2 * h ;
w := \the\textwidth-3pt ; % correct for pen
@@ -69,8 +69,8 @@ end
draw b withcolor \MPcolor{usage:frame} ;
draw p withcolor \MPcolor{usage:line} ;
if ("#3" <> "") and ("#3" <> "#2") :
- q := \ctxlua{tex.sprint(moduledata.progress.path("#1","#3"))} ;
-% q := q shifted -llcorner q ;
+ q := \cldcontext{moduledata.progress.path("#1","#3")} ;
+ % q := q shifted -llcorner q ;
if bbwidth(q) > 1 :
q := q xstretched w ;
pickup pencircle scaled 1.5pt ; linecap := butt ;
@@ -85,9 +85,9 @@ end
\startlinecorrection
\box\scratchbox \endgraf
\hbox to \scratchdimen{\tttf\strut\detokenize{#2}\hss
- min:\ctxlua{tex.sprint(moduledata.progress.bot("#1","\detokenize{#2}"))}, %
- max:\ctxlua{tex.sprint(moduledata.progress.top("#1","\detokenize{#2}"))}, %
- pages:\ctxlua{tex.sprint(moduledata.progress.pages("#1"))}%
+ min:\cldcontext{moduledata.progress.bot("#1","\detokenize{#2}")}, %
+ max:\cldcontext{moduledata.progress.top("#1","\detokenize{#2}")}, %
+ pages:\cldcontext{moduledata.progress.pages("#1")}%
}%
\stoplinecorrection
\fi}
diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua
index 1516c16bc..6595dca3a 100644
--- a/tex/context/base/math-ini.lua
+++ b/tex/context/base/math-ini.lua
@@ -9,11 +9,14 @@ if not modules then modules = { } end modules ['math-ext'] = {
-- if needed we can use the info here to set up xetex definition files
-- the "8000 hackery influences direct characters (utf) as indirect \char's
-local texsprint, format, utfchar, utfbyte = tex.sprint, string.format, utf.char, utf.byte
+local format, utfchar, utfbyte = string.format, utf.char, utf.byte
local setmathcode, setdelcode = tex.setmathcode, tex.setdelcode
local texattribute = tex.attribute
local floor = math.floor
+local contextsprint = context.sprint
+local contextfprint = context.fprint -- a bit inefficient
+
local allocate = utilities.storage.allocate
local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
@@ -146,26 +149,26 @@ if setmathcode then
setmathsymbol = function(name,class,family,slot) -- hex is nicer for tracing
if class == classes.accent then
- texsprint(format([[\unexpanded\gdef\%s{\Umathaccent 0 "%X "%X }]],name,family,slot))
+ contextsprint(format([[\unexpanded\gdef\%s{\Umathaccent 0 "%X "%X }]],name,family,slot))
elseif class == classes.topaccent then
- texsprint(format([[\unexpanded\gdef\%s{\Umathaccent 0 "%X "%X }]],name,family,slot))
+ contextsprint(format([[\unexpanded\gdef\%s{\Umathaccent 0 "%X "%X }]],name,family,slot))
elseif class == classes.botaccent then
- texsprint(format([[\unexpanded\gdef\%s{\Umathbotaccent 0 "%X "%X }]],name,family,slot))
+ contextsprint(format([[\unexpanded\gdef\%s{\Umathbotaccent 0 "%X "%X }]],name,family,slot))
elseif class == classes.over then
- texsprint(format([[\unexpanded\gdef\%s{\Udelimiterover "%X "%X }]],name,family,slot))
+ contextsprint(format([[\unexpanded\gdef\%s{\Udelimiterover "%X "%X }]],name,family,slot))
elseif class == classes.under then
- texsprint(format([[\unexpanded\gdef\%s{\Udelimiterunder "%X "%X }]],name,family,slot))
+ contextsprint(format([[\unexpanded\gdef\%s{\Udelimiterunder "%X "%X }]],name,family,slot))
elseif class == classes.open or class == classes.close then
setdelcode(slot,{family,slot,0,0})
- texsprint(format([[\unexpanded\gdef\%s{\Udelimiter "%X "%X "%X }]],name,class,family,slot))
+ contextsprint(format([[\unexpanded\gdef\%s{\Udelimiter "%X "%X "%X }]],name,class,family,slot))
elseif class == classes.delimiter then
setdelcode(slot,{family,slot,0,0})
- texsprint(format([[\unexpanded\gdef\%s{\Udelimiter 0 "%X "%X }]],name,family,slot))
+ contextsprint(format([[\unexpanded\gdef\%s{\Udelimiter 0 "%X "%X }]],name,family,slot))
elseif class == classes.radical then
- texsprint(format([[\unexpanded\gdef\%s{\Uradical "%X "%X }]],name,family,slot))
+ contextsprint(format([[\unexpanded\gdef\%s{\Uradical "%X "%X }]],name,family,slot))
else
-- beware, open/close and other specials should not end up here
- texsprint(format([[\unexpanded\gdef\%s{\Umathchar "%X "%X "%X }]],name,class,family,slot))
+ contextsprint(format([[\unexpanded\gdef\%s{\Umathchar "%X "%X "%X }]],name,class,family,slot))
end
end
@@ -174,41 +177,41 @@ else
setmathcharacter = function(class,family,slot,unicode,firsttime)
if not firsttime and class <= 7 then
- texsprint(mathcode(slot,class,family,unicode or slot))
+ contextsprint(mathcode(slot,class,family,unicode or slot))
end
end
setmathsynonym = function(class,family,slot,unicode,firsttime)
if not firsttime and class <= 7 then
- texsprint(mathcode(slot,class,family,unicode))
+ contextsprint(mathcode(slot,class,family,unicode))
end
if class == classes.open or class == classes.close then
- texsprint(delcode(slot,family,unicode))
+ contextsprint(delcode(slot,family,unicode))
end
end
setmathsymbol = function(name,class,family,slot)
if class == classes.accent then
- texsprint(format("\\unexpanded\\xdef\\%s{%s}",name,mathaccent(class,family,slot)))
+ contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathaccent(class,family,slot)))
elseif class == classes.topaccent then
- texsprint(format("\\unexpanded\\xdef\\%s{%s}",name,mathtopaccent(class,family,slot)))
+ contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathtopaccent(class,family,slot)))
elseif class == classes.botaccent then
- texsprint(format("\\unexpanded\\xdef\\%s{%s}",name,mathbotaccent(class,family,slot)))
+ contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathbotaccent(class,family,slot)))
elseif class == classes.over then
- texsprint(format("\\unexpanded\\xdef\\%s{%s}",name,mathtopdelimiter(class,family,slot)))
+ contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathtopdelimiter(class,family,slot)))
elseif class == classes.under then
- texsprint(format("\\unexpanded\\xdef\\%s{%s}",name,mathbotdelimiter(class,family,slot)))
+ contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathbotdelimiter(class,family,slot)))
elseif class == classes.open or class == classes.close then
- texsprint(delcode(slot,family,slot))
- texsprint(format("\\unexpanded\\xdef\\%s{%s}",name,delimiter(class,family,slot)))
+ contextsprint(delcode(slot,family,slot))
+ contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,delimiter(class,family,slot)))
elseif class == classes.delimiter then
- texsprint(delcode(slot,family,slot))
- texsprint(format("\\unexpanded\\xdef\\%s{%s}",name,delimiter(0,family,slot)))
+ contextsprint(delcode(slot,family,slot))
+ contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,delimiter(0,family,slot)))
elseif class == classes.radical then
- texsprint(format("\\unexpanded\\xdef\\%s{%s}",name,radical(family,slot)))
+ contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,radical(family,slot)))
else
-- beware, open/close and other specials should not end up here
- texsprint(format("\\unexpanded\\xdef\\%s{%s}",name,mathchar(class,family,slot)))
+ contextsprint(format([[\unexpanded\xdef\%s{%s}]],name,mathchar(class,family,slot)))
end
end
diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv
index c7e44e2a7..54490b0a2 100644
--- a/tex/context/base/math-ini.mkiv
+++ b/tex/context/base/math-ini.mkiv
@@ -29,8 +29,8 @@
%D We move these definitions into the format:
-% test [[\char948 \ctxlua{tex.sprint(utf.char(948))}]]
-% test $[[\char948 \ctxlua{tex.sprint(utf.char(948))}]]$
+% test [[\char948 \cldcontext{utf.char(948)}]]
+% test $[[\char948 \cldcontext{utf.char(948)}]]$
\registerctxluafile{math-ini}{1.001}
\registerctxluafile{math-dim}{1.001}
@@ -226,7 +226,7 @@
\def\utfmathfiller #1{\cldcontext{mathematics.utfmathfiller (\!!bs#1\!!es)}}
% \def\utfmathclassdefault #1#2{\ctxlua{
-% tex.sprint(mathematics.utfmathclass("#1","#2"))
+% cldcontext(mathematics.utfmathclass("#1","#2"))
% }}
%
% \def\utfmathcommanddefault#1#2#3{\ctxlua{
@@ -527,29 +527,14 @@
% plain tex legacy:
\bgroup
+
\catcode\primeasciicode\activecatcode
+
\doglobal\appendtoks
- \let'\domathprime
+ \let'\domathprime % todo: do this at the lua end
\to \everymathematics
-\egroup
-
-\newtoks\everydonknuthmode
-\newtoks\everynonknuthmode
-
-\newconditional \knuthmode
-
-\def\nonknuthmode
- {\pushcatcodetable
- \setcatcodetable\ctxcatcodes
- \the\everynonknuthmode
- \let\nonknuthmode\relax
- \popcatcodetable}
-\def\donknuthmode
- {\pushcatcodetable
- \setcatcodetable\ctxcatcodes
- \the\everydonknuthmode
- \popcatcodetable}
+\egroup
\bgroup
@@ -557,63 +542,115 @@
\catcode\circumflexasciicode\activecatcode
\catcode\ampersandasciicode \activecatcode
- \global \everynonknuthmode {\appendtoks
+ \doglobal \appendtoks
\let_\normalsubscript
\let^\normalsuperscript
\let&\normalmathaligntab % use \def when it's \aligntab
- \to \everymathematics}
+ \to \everymathematics
\egroup
-\appendtoks
- \setfalse\knuthmode
- \catcode\underscoreasciicode\othercatcode
- \catcode\circumflexasciicode\othercatcode
- \catcode\ampersandasciicode \othercatcode
-\to \everynonknuthmode
-
-\appendtoks
- \settrue\knuthmode
- \catcode\underscoreasciicode\subscriptcatcode
- \catcode\circumflexasciicode\superscriptcatcode
- \catcode\ampersandasciicode \alignmentcatcode
-\to \everydonknuthmode
+\newtoks\everydonknuthmode
+\newtoks\everynonknuthmode
-\appendtoks
- \startextendcatcodetable\ctxcatcodes
- \catcode\underscoreasciicode\othercatcode
- \catcode\circumflexasciicode\othercatcode
- \catcode\ampersandasciicode \othercatcode
- \stopextendcatcodetable
-\to \everynonknuthmode
+\newconditional \knuthmode
-\appendtoks
- \startextendcatcodetable\ctxcatcodes
- \catcode\underscoreasciicode\subscriptcatcode
- \catcode\circumflexasciicode\superscriptcatcode
- \catcode\ampersandasciicode \alignmentcatcode
- \stopextendcatcodetable
-\to \everydonknuthmode
+\let\nonknuthmode\relax
+\let\donknuthmode\relax
-% \def\prefermathcatcodes
-% {\catcode\underscoreasciicode\subscriptcatcode
-% \catcode\circumflexasciicode\superscriptcatcode
-% \catcode\ampersandasciicode \alignmentcatcode}
+% \def\nonknuthmode
+% {\pushcatcodetable
+% \setcatcodetable\ctxcatcodes
+% \the\everynonknuthmode
+% \let\nonknuthmode\relax
+% \popcatcodetable}
+%
+% \def\donknuthmode
+% {\pushcatcodetable
+% \setcatcodetable\ctxcatcodes
+% \the\everydonknuthmode
+% \popcatcodetable}
+%
+% \bgroup
+%
+% \catcode\underscoreasciicode\activecatcode
+% \catcode\circumflexasciicode\activecatcode
+% \catcode\ampersandasciicode \activecatcode
+%
+% \global \everynonknuthmode {\appendtoks
+% \let_\normalsubscript
+% \let^\normalsuperscript
+% \let&\normalmathaligntab % use \def when it's \aligntab
+% \to \everymathematics}
+%
+% \egroup
+%
+% \appendtoks
+% \setfalse\knuthmode
+% \catcode\underscoreasciicode\othercatcode
+% \catcode\circumflexasciicode\othercatcode
+% \catcode\ampersandasciicode \othercatcode
+% \to \everynonknuthmode
+%
+% \appendtoks
+% \settrue\knuthmode
+% \catcode\underscoreasciicode\subscriptcatcode
+% \catcode\circumflexasciicode\superscriptcatcode
+% \catcode\ampersandasciicode \alignmentcatcode
+% \to \everydonknuthmode
+%
+% \appendtoks
+% \startextendcatcodetable\ctxcatcodes
+% \catcode\underscoreasciicode\othercatcode
+% \catcode\circumflexasciicode\othercatcode
+% \catcode\ampersandasciicode \othercatcode
+% \stopextendcatcodetable
+% \to \everynonknuthmode
+%
+% \appendtoks
+% \startextendcatcodetable\ctxcatcodes
+% \catcode\underscoreasciicode\subscriptcatcode
+% \catcode\circumflexasciicode\superscriptcatcode
+% \catcode\ampersandasciicode \alignmentcatcode
+% \stopextendcatcodetable
+% \to \everydonknuthmode
+
+%D Even more drastic (this code will move as nonknuthmode is default now)
+
+% \unexpanded\def\enableasciimode
+% {\ctxlua{resolvers.macros.enablecomment()}%
+% \glet\enableasciimode\relax}
+%
+% \unexpanded\def\asciimode
+% {\catcodetable\txtcatcodes
+% \enableasciimode
+% \nonknuthmode}
+%
+% \unexpanded\def\startasciimode
+% {\pushcatcodetable
+% \catcodetable\txtcatcodes
+% \enableasciimode
+% \nonknuthmode}
+%
+% \unexpanded\def\stopasciimode
+% {\popcatcodetable
+% \ifconditional\knuthmode\else\donknuthmode\fi}
-%D Even more drastic:
+\unexpanded\def\enableasciimode
+ {\ctxlua{resolvers.macros.enablecomment()}%
+ \glet\enableasciimode\relax}
\unexpanded\def\asciimode
{\catcodetable\txtcatcodes
- \nonknuthmode}
+ \enableasciimode}
\unexpanded\def\startasciimode
{\pushcatcodetable
\catcodetable\txtcatcodes
- \nonknuthmode}
+ \enableasciimode}
\unexpanded\def\stopasciimode
- {\popcatcodetable
- \ifconditional\knuthmode\else\donknuthmode\fi}
+ {\popcatcodetable}
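+
+% A usage sketch, not part of this patch (content is just an example): in the
+% ascii regime characters like $, &, #, _ and ^ are typeset as ordinary text:
+%
+% \startasciimode
+%   a $ sign, an & and some_underscores need no escaping here
+% \stopasciimode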
%D Needed for unicode:
diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua
index e7258b1ca..e57f013ca 100644
--- a/tex/context/base/math-noa.lua
+++ b/tex/context/base/math-noa.lua
@@ -35,6 +35,7 @@ local trace_processing = false trackers.register("math.processing", functio
local trace_analyzing = false trackers.register("math.analyzing", function(v) trace_analyzing = v end)
local trace_normalizing = false trackers.register("math.normalizing", function(v) trace_normalizing = v end)
local trace_goodies = false trackers.register("math.goodies", function(v) trace_goodies = v end)
+local trace_variants = false trackers.register("math.variants", function(v) trace_variants = v end)
local check_coverage = true directives.register("math.checkcoverage", function(v) check_coverage = v end)
@@ -42,6 +43,7 @@ local report_processing = logs.reporter("mathematics","processing")
local report_remapping = logs.reporter("mathematics","remapping")
local report_normalizing = logs.reporter("mathematics","normalizing")
local report_goodies = logs.reporter("mathematics","goodies")
+local report_variants = logs.reporter("mathematics","variants")
local set_attribute = node.set_attribute
local has_attribute = node.has_attribute
@@ -684,6 +686,66 @@ function handlers.families(head,style,penalties)
return true
end
+-- variants
+
+local variants = { }
+
+local validvariants = { -- fast check on valid
+ [0x2229] = 0xFE00, [0x222A] = 0xFE00,
+ [0x2268] = 0xFE00, [0x2269] = 0xFE00,
+ [0x2272] = 0xFE00, [0x2273] = 0xFE00,
+ [0x228A] = 0xFE00, [0x228B] = 0xFE00,
+ [0x2293] = 0xFE00, [0x2294] = 0xFE00,
+ [0x2295] = 0xFE00,
+ [0x2297] = 0xFE00,
+ [0x229C] = 0xFE00,
+ [0x22DA] = 0xFE00, [0x22DB] = 0xFE00,
+ [0x2A3C] = 0xFE00, [0x2A3D] = 0xFE00,
+ [0x2A9D] = 0xFE00, [0x2A9E] = 0xFE00,
+ [0x2AAC] = 0xFE00, [0x2AAD] = 0xFE00,
+ [0x2ACB] = 0xFE00, [0x2ACC] = 0xFE00,
+}
+
+variants[math_char] = function(pointer,what,n,parent) -- also set export value
+ local char = pointer.char
+ local selector = validvariants[char]
+ if selector then
+ local next = parent.next
+ if next and next.id == math_noad then
+ local nucleus = next.nucleus
+ if nucleus and nucleus.id == math_char and nucleus.char == selector then
+ local variant
+ local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
+ local mathvariants = tfmdata.resources.variants -- and variantdata
+ if mathvariants then
+ mathvariants = mathvariants[selector]
+ if mathvariants then
+ variant = mathvariants[char]
+ end
+ end
+ if variant then
+ pointer.char = variant
+ set_attribute(pointer,exportstatus,char) -- we don't export the variant as it's visual markup
+ if trace_variants then
+ report_variants("variant (U+%05X,U+%05X) replaced by U+%05X",char,selector,variant)
+ end
+ else
+ if trace_variants then
+ report_variants("no variant (U+%05X,U+%05X)",char,selector)
+ end
+ end
+ next.prev = pointer
+ parent.next = next.next
+ node.free(next)
+ end
+ end
+ end
+end
+
+function handlers.variants(head,style,penalties)
+ processnoads(head,variants,"unicode variant")
+ return true
+end
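+
+-- For illustration only (not part of this patch): for a font that provides
+-- variant data, the lookup above boils down to
+--
+--   local variant = tfmdata.resources.variants[0xFE00][0x2268]
+--   if variant then pointer.char = variant end
+--
+-- after which the trailing selector noad is removed from the list.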
-- the normal builder
diff --git a/tex/context/base/math-vfu.lua b/tex/context/base/math-vfu.lua
index 8acf12db2..e78a6f895 100644
--- a/tex/context/base/math-vfu.lua
+++ b/tex/context/base/math-vfu.lua
@@ -8,13 +8,15 @@ if not modules then modules = { } end modules ['math-vfu'] = {
-- All these math vectors .. thanks to Aditya and Mojca they become
-- better and better. If you have problems with math fonts or miss
--- characters report it to the ConTeXt mailing list.
+-- characters report it to the ConTeXt mailing list. Also thanks to
+-- Boguslaw for finding a couple of errors.
-- 20D6 -> 2190
-- 20D7 -> 2192
local type, next = type, next
local max = math.max
+local format = string.format
local fonts, nodes, mathematics = fonts, nodes, mathematics
@@ -139,7 +141,7 @@ local function make(main,characters,id,size,n,m)
end
end
-local function minus(main,characters,id,size,unicode)
+local function minus(main,characters,id,size,unicode) -- push/pop needed?
local minus = characters[0x002D]
if minus then
local mu = size/18
@@ -151,6 +153,25 @@ local function minus(main,characters,id,size,unicode)
end
end
+-- pdf:page pdf:direct don't work here
+
+local scale_factor = 0.7
+local scale_down = { "special", format("pdf: %s 0 0 %s 0 0 cm", scale_factor, scale_factor) } -- we need a scale
+local scale_up = { "special", format("pdf: %s 0 0 %s 0 0 cm",1/scale_factor,1/scale_factor) }
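+
+-- For illustration only: in pdf the "cm" operator multiplies the current
+-- transformation matrix, so "0.7 0 0 0.7 0 0 cm" maps a point (x,y) to
+-- (0.7x,0.7y) and the inverse factors in scale_up restore the previous scale.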
+
+local function raise(main,characters,id,size,unicode,private) -- this is a real fake mess
+ local raised = characters[private]
+ if raised then
+ local up = .85 * main.parameters.x_height
+ characters[unicode] = {
+ width = .7 * raised.width,
+ height = .7 * (raised.height + up),
+ depth = .7 * (raised.depth - up),
+ commands = { push, { "down", -up }, scale_down, { "slot", id, private }, scale_up, pop }
+ }
+ end
+end
+
local function dots(main,characters,id,size,unicode)
local c = characters[0x002E]
if c then
@@ -345,11 +366,15 @@ function vfmath.addmissing(main,id,size)
jointhree(main,characters,id,size,0x27FC,0xFE321,0,0x0002D,joinrelfactor,0x02192) -- \mapstochar\relbar\joinrel\rightarrow
jointwo (main,characters,id,size,0x2254,0x03A,0,0x03D) -- := (≔)
+-- raise (main,characters,id,size,0x02032,0xFE325) -- prime
+
-- there are more (needs discussion first):
-- characters[0x20D6] = characters[0x2190]
-- characters[0x20D7] = characters[0x2192]
+ characters[0x02B9] = characters[0x2032] -- we're nice
+
end
local unique = 0 -- testcase: \startTEXpage \math{!\text{-}\text{-}\text{-}} \stopTEXpage
@@ -612,6 +637,7 @@ function vfmath.define(specification,set,goodies)
commands = ref,
}
end
+--~ report_virtual("%05X %s %s",unicode,fci.height or "NO HEIGHT",fci.depth or "NO DEPTH")
end
end
if ss.extension then
@@ -675,6 +701,7 @@ function vfmath.define(specification,set,goodies)
if not fci then
-- do nothing
else
+ -- probably never entered
local ref = si[index]
if not ref then
ref = { { 'slot', s, index } }
@@ -794,20 +821,26 @@ mathencodings["large-to-small"] = {
[0x02044] = 0x0E, -- /
}
+-- Beware: these are (in cm/lm) below the baseline due to limitations
+-- in the tfm format but the engine (combined with the mathclass) takes
+-- care of it. If we need them in textmode, we should make them virtual
+-- and move them up but we're in no hurry with that.
+
mathencodings["tex-ex"] = {
[0x0220F] = 0x51, -- prod
- [0x0222B] = 0x52, -- intop
[0x02210] = 0x60, -- coprod
[0x02211] = 0x50, -- sum
+ [0x0222B] = 0x52, -- intop
+ [0x0222E] = 0x48, -- ointop
[0x022C0] = 0x56, -- bigwedge
[0x022C1] = 0x57, -- bigvee
[0x022C2] = 0x54, -- bigcap
[0x022C3] = 0x53, -- bigcup
- [0x02A04] = 0x55, -- biguplus
- [0x02A02] = 0x4E, -- bigotimes
+ [0x02A00] = 0x4A, -- bigodot -- fixed BJ
[0x02A01] = 0x4C, -- bigoplus
- [0x02A03] = 0x4A, -- bigodot
- [0x0222E] = 0x48, -- ointop
+ [0x02A02] = 0x4E, -- bigotimes
+ -- [0x02A03] = , -- bigudot --
+ [0x02A04] = 0x55, -- biguplus
[0x02A06] = 0x46, -- bigsqcup
}
@@ -922,8 +955,10 @@ mathencodings["tex-mi"] = {
[0x021C1] = 0x2B, -- rightharpoondown
[0xFE322] = 0x2C, -- lhook (hook for combining arrows)
[0xFE323] = 0x2D, -- rhook (hook for combining arrows)
- [0x022B3] = 0x2E, -- triangleright (TODO: which one is right?)
- [0x022B2] = 0x2F, -- triangleleft (TODO: which one is right?)
+ [0x025B7] = 0x2E, -- triangleright : cf lmmath / BJ
+ [0x025C1] = 0x2F, -- triangleleft : cf lmmath / BJ
+ [0x022B3] = 0x2E, -- triangleright : cf lmmath, these are cramped triangles / BJ / see *
+ [0x022B2] = 0x2F, -- triangleleft : cf lmmath, these are cramped triangles / BJ / see *
-- [0x00041] = 0x30, -- 0
-- [0x00041] = 0x31, -- 1
-- [0x00041] = 0x32, -- 2
@@ -1055,7 +1090,7 @@ mathencodings["tex-sy"] = {
-- [0x02201] = 0x00, -- complement
-- [0x02206] = 0x00, -- increment
-- [0x02204] = 0x00, -- not exists
---~ [0x000B7] = 0x01, -- cdot
+-- [0x000B7] = 0x01, -- cdot
[0x022C5] = 0x01, -- cdot
[0x000D7] = 0x02, -- times
[0x0002A] = 0x03, -- *
@@ -1192,6 +1227,8 @@ mathencodings["tex-sy"] = {
[0x02661] = 0x7E, -- heartsuit
[0x02660] = 0x7F, -- spadesuit
[0xFE321] = 0x37, -- mapstochar
+
+ [0xFE325] = 0x30, -- prime 0x02032
}
-- The names in masm10.enc can be trusted best and are shown in the first
@@ -1204,9 +1241,9 @@ mathencodings["tex-ma"] = {
[0x022A0] = 0x02, -- squaremultiply \boxtimes
[0x025A1] = 0x03, -- square \square \Box
[0x025A0] = 0x04, -- squaresolid \blacksquare
- [0x000B7] = 0x05, -- squaresmallsolid \centerdot
+ [0x025AA] = 0x05, -- squaresmallsolid \centerdot
[0x022C4] = 0x06, -- diamond \Diamond \lozenge
- [0x029EB] = 0x07, -- diamondsolid \blacklozenge
+ [0x02666] = 0x07, -- diamondsolid \blacklozenge
[0x021BA] = 0x08, -- clockwise \circlearrowright
[0x021BB] = 0x09, -- anticlockwise \circlearrowleft
[0x021CC] = 0x0A, -- harpoonleftright \rightleftharpoons
@@ -1266,9 +1303,10 @@ mathencodings["tex-ma"] = {
[0x02277] = 0x3F, -- greaterorless \gtrless
[0x0228F] = 0x40, -- squareimage \sqsubset
[0x02290] = 0x41, -- squareoriginal \sqsupset
- -- wrong:
- [0x022B3] = 0x42, -- triangleright \rhd \vartriangleright
- [0x022B2] = 0x43, -- triangleleft \lhd \vartriangleleft
+ -- wrong: see **
+ -- [0x022B3] = 0x42, -- triangleright \rhd \vartriangleright
+ -- [0x022B2] = 0x43, -- triangleleft \lhd \vartriangleleft
+ -- cf lm
[0x022B5] = 0x44, -- trianglerightequal \unrhd \trianglerighteq
[0x022B4] = 0x45, -- triangleleftequal \unlhd \trianglelefteq
--
@@ -1303,7 +1341,7 @@ mathencodings["tex-ma"] = {
[0x022D0] = 0x62, -- subsetdbl \Subset
[0x022D1] = 0x63, -- supersetdbl \Supset
[0x022D3] = 0x64, -- uniondbl \doublecup \Cup
- [0x00100] = 0x65, -- intersectiondbl \doublecap \Cap
+ [0x022D2] = 0x65, -- intersectiondbl \doublecap \Cap
[0x022CF] = 0x66, -- uprise \curlywedge
[0x022CE] = 0x67, -- downfall \curlyvee
[0x022CB] = 0x68, -- multiopenleft \leftthreetimes
@@ -1319,7 +1357,7 @@ mathencodings["tex-ma"] = {
[0x024C7] = 0x72, -- circleR \circledR
[0x024C8] = 0x73, -- circleS \circledS
[0x022D4] = 0x74, -- fork \pitchfork
- [0x02245] = 0x75, -- dotplus \dotplus
+ [0x02214] = 0x75, -- dotplus \dotplus
[0x0223D] = 0x76, -- revsimilar \backsim
[0x022CD] = 0x77, -- revasymptequal \backsimeq -- AM: Check this! I mapped it to simeq.
[0x0231E] = 0x78, -- rightanglesw \llcorner
diff --git a/tex/context/base/meta-fun.lua b/tex/context/base/meta-fun.lua
index 9a6d971ba..7594d0c78 100644
--- a/tex/context/base/meta-fun.lua
+++ b/tex/context/base/meta-fun.lua
@@ -9,7 +9,6 @@ if not modules then modules = { } end modules ['meta-fun'] = {
-- very experimental, actually a joke ... see metafun manual for usage
local format, loadstring, type = string.format, loadstring, type
-local texwrite = tex.write
local metapost = metapost
@@ -17,42 +16,42 @@ metapost.metafun = metapost.metafun or { }
local metafun = metapost.metafun
function metafun.topath(t,connector)
- texwrite("(")
+ context("(")
if #t > 0 then
for i=1,#t do
if i > 1 then
- texwrite(connector or "..")
+ context(connector or "..")
end
local ti = t[i]
if type(ti) == "string" then
- texwrite(ti)
+ context(ti)
else
- texwrite(format("(%s,%s)",ti.x or ti[1] or 0,ti.y or ti[2] or 0))
+ context("(%s,%s)",ti.x or ti[1] or 0,ti.y or ti[2] or 0)
end
end
else
- texwrite("origin")
+ context("origin")
end
- texwrite(")")
+ context(")")
end
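
-- For illustration only (made-up data, not part of this patch):
--
--   metafun.topath { { 0, 0 }, { 1, 1 }, "cycle" }
--
-- pipes "((0,0)..(1,1)..cycle)" back to the TeX end.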
function metafun.interpolate(f,b,e,s,c)
local done = false
- texwrite("(")
+ context("(")
for i=b,e,(e-b)/s do
local d = loadstring(format("return function(x) return %s end",f))
if d then
d = d()
if done then
- texwrite(c or "...")
+ context(c or "...")
else
done = true
end
- texwrite(format("(%s,%s)",i,d(i)))
+ context("(%s,%s)",i,d(i))
end
end
if not done then
- texwrite("origin")
+ context("origin")
end
- texwrite(")")
+ context(")")
end
diff --git a/tex/context/base/meta-ini.lua b/tex/context/base/meta-ini.lua
index 6e7053667..5b1f89463 100644
--- a/tex/context/base/meta-ini.lua
+++ b/tex/context/base/meta-ini.lua
@@ -13,20 +13,30 @@ metapost = metapost or { }
-- for the moment downward compatible
-local report_metapost = logs.reporter ("metapost")
+local report_metapost = logs.reporter ("metapost")
local status_metapost = logs.messenger("metapost")
local patterns = { "meta-imp-%s.mkiv", "meta-imp-%s.tex", "meta-%s.mkiv", "meta-%s.tex" } -- we are compatible
-function metapost.uselibrary(name)
- commands.uselibrary(name,patterns,function(name,foundname)
- context.startreadingfile()
- status_metapost("loaded: library '%s'",name)
- context.input(foundname)
- context.stopreadingfile()
- end, function(name)
- report_metapost("unknown: library '%s'",name)
- end)
+local function action(name,foundname)
+ status_metapost("loaded: library '%s'",name)
+ context.startreadingfile()
+ context.input(foundname)
+ context.stopreadingfile()
+end
+
+local function failure(name)
+ report_metapost("unknown: library '%s'",name)
+end
+
+function commands.useMPlibrary(name)
+ commands.uselibrary {
+ name = name,
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
end
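
-- A sketch of the flow (library name "txt" is just an example): \useMPlibrary[txt]
-- now calls commands.useMPlibrary("txt"), which resolves the first match among
-- meta-imp-txt.mkiv, meta-imp-txt.tex, meta-txt.mkiv and meta-txt.tex and
-- inputs it only once.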
-- experimental
diff --git a/tex/context/base/meta-ini.mkiv b/tex/context/base/meta-ini.mkiv
index 8dbbfca8a..e24908f9e 100644
--- a/tex/context/base/meta-ini.mkiv
+++ b/tex/context/base/meta-ini.mkiv
@@ -410,7 +410,9 @@
\newconstant\MPboxmode
\def\doobeyMPboxdepth % mode = 1
- {\setbox\MPgraphicbox\hbox{\hskip\MPllx\onebasepoint\raise\MPlly\onebasepoint\box\MPgraphicbox}}
+ {\setbox\MPgraphicbox\hbox\bgroup
+ \raise\MPlly\onebasepoint\box\MPgraphicbox
+ \egroup}
\def\doignoreMPboxdepth % mode = 2
{\normalexpanded
@@ -419,8 +421,15 @@
\ht\MPgraphicbox\the\ht\MPgraphicbox
\dp\MPgraphicbox\the\dp\MPgraphicbox}}
+\def\doobeyMPboxorigin % mode = 3
+ {\setbox\MPgraphicbox\hbox\bgroup
+ \hskip\MPllx\onebasepoint
+ \raise\MPlly\onebasepoint\box\MPgraphicbox
+ \egroup}
+
\def\obeyMPboxdepth {\MPboxmode\plusone}
\def\ignoreMPboxdepth{\MPboxmode\plustwo}
+\def\obeyMPboxorigin {\MPboxmode\plusthree}
\def\normalMPboxdepth{\MPboxmode\zerocount}
% compatibility hack:
@@ -434,10 +443,12 @@
\doobeyMPboxdepth
\or % 2
\doignoreMPboxdepth
+ \or % 3
+ \doobeyMPboxorigin
\fi
\box\MPgraphicbox}
-\def\reuseMPbox#1#2#3#4#5% space delimiting would save some tokens
+\unexpanded\def\reuseMPbox#1#2#3#4#5% space delimiting would save some tokens
{\xdef\MPllx{#2}% but it's not worth the effort and looks
\xdef\MPlly{#3}% ugly as well
\xdef\MPurx{#4}%
@@ -452,7 +463,7 @@
\enableincludeMPgraphics % redundant
\global\advance\MPobjectcounter\plusone
\setobject{MP}{\number\MPobjectcounter}\hbox{\processMPgraphic{#3}}% was vbox, graphic must end up as hbox
- \setxvalue{\@@MPG\overlaystamp:#1}{\noexpand\reuseMPbox{\number\MPobjectcounter}{\MPllx}{\MPlly}{\MPurx}{\MPury}}%
+ \setxvalue{\@@MPG\overlaystamp:#1}{\reuseMPbox{\number\MPobjectcounter}{\MPllx}{\MPlly}{\MPurx}{\MPury}}%
\fi
\getvalue{\@@MPG\overlaystamp:#1}%
\endgroup}
@@ -648,31 +659,18 @@
% \stopnointerference
\stopreadingfile}
-%D For the moment, the next one is a private macro:
+% We need this trick because we need to make sure that the tex scanner
+% sees newlines and does not quit. Also, we do need to flush the buffer
+% under a normal catcode regime in order to expand embedded tex macros.
+% As usual with buffers, \type {#1} can be a list.
\def\processMPbuffer
{\dosingleempty\doprocessMPbuffer}
\def\doprocessMPbuffer[#1]%
- {\doifelsenothing{#1}
- {\dodoprocessMPbuffer{\jobname}}
- {\dodoprocessMPbuffer{#1}}}
-
-% we need to go via a toks because we have no multiline print in
-% luatex (i.e. tex.sprint does not interpret lines) and therefore
-% omits all after a comment token
-
-\newtoks\mpbuffertoks
-
-\def\doprocessMPbuffer[#1]%
- {\doifelsenothing{#1}
- {\doprocessMPbuffer[\jobname]}
- {\beginMPgraphicgroup{#1}%
- % we need this trick because tex.sprint does not interprets newlines and the scanner
- % stops at a newline; also, we do need to flush the buffer under a normal catcode
- % regime in order to expand embedded tex macros; #1 can be a list
- \processMPgraphic{\ctxcommand{feedback("\currentMPgraphicname")}}%
- \endMPgraphicgroup}}
+ {\beginMPgraphicgroup{#1}%
+ \processMPgraphic{\ctxcommand{feedback("\currentMPgraphicname")}}%
+ \endMPgraphicgroup}
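+
+% A usage sketch (buffer name picked for illustration only):
+%
+% \startbuffer[dummy]
+%   draw fullcircle scaled 3cm withpen pencircle scaled 1mm ;
+% \stopbuffer
+%
+% \processMPbuffer[dummy]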
\def\runMPbuffer
{\dosingleempty\dorunMPbuffer}
@@ -830,7 +828,7 @@
%D Loading specific \METAPOST\ related definitions is
%D accomplished by:
-\unexpanded\def\useMPlibrary[#1]{\ctxlua{metapost.uselibrary(\!!bs#1\!!es)}}
+\unexpanded\def\useMPlibrary[#1]{\ctxcommand{useMPlibrary(\!!bs#1\!!es)}}
%D \macros
%D {setMPtext, MPtext, MPstring, MPbetex}
diff --git a/tex/context/base/meta-pdh.lua b/tex/context/base/meta-pdh.lua
index 117300f80..d8eb32793 100644
--- a/tex/context/base/meta-pdh.lua
+++ b/tex/context/base/meta-pdh.lua
@@ -25,7 +25,6 @@ os.exit()
-- only needed for mp output on disk
local concat, format, find, gsub, gmatch = table.concat, string.format, string.find, string.gsub, string.gmatch
-local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
local tostring, tonumber, select = tostring, tonumber, select
local lpegmatch = lpeg.match
@@ -155,11 +154,11 @@ end
-- from lua to tex
function mptopdf.pdfcode(str)
- texsprint(ctxcatcodes,"\\pdfliteral{" .. str .. "}") -- \\MPScode
+ context.pdfliteral(str) -- \\MPScode
end
function mptopdf.texcode(str)
- texsprint(ctxcatcodes,str)
+ context(str)
end
-- auxiliary functions
@@ -229,7 +228,7 @@ function mptopdf.convertmpstopdf(name)
mptopdf.reset()
statistics.stoptiming(mptopdf)
else
- tex.print("file " .. name .. " not found")
+ context("file " .. name .. " not found")
end
end
diff --git a/tex/context/base/mlib-ctx.lua b/tex/context/base/mlib-ctx.lua
index 1a5ce2a24..493a45248 100644
--- a/tex/context/base/mlib-ctx.lua
+++ b/tex/context/base/mlib-ctx.lua
@@ -9,7 +9,6 @@ if not modules then modules = { } end modules ['mlib-ctx'] = {
-- todo
local format, concat = string.format, table.concat
-local sprint = tex.sprint
local report_metapost = logs.reporter("metapost")
@@ -69,7 +68,7 @@ function metapost.theclippath(...)
local result = metapost.getclippath(...)
if result then -- we could just print the table
result = concat(metapost.flushnormalpath(result),"\n")
- sprint(result)
+ context(result)
end
end
diff --git a/tex/context/base/mult-chk.lua b/tex/context/base/mult-chk.lua
index 4b1ba12ee..ccac708b4 100644
--- a/tex/context/base/mult-chk.lua
+++ b/tex/context/base/mult-chk.lua
@@ -9,7 +9,6 @@ if not modules then modules = { } end modules ['mult-chk'] = {
local format = string.format
local lpegmatch = lpeg.match
local type = type
-local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
local make_settings_to_hash_pattern, settings_to_set = utilities.parsers.make_settings_to_hash_pattern, utilities.parsers.settings_to_set
local allocate = utilities.storage.allocate
diff --git a/tex/context/base/mult-de.mkii b/tex/context/base/mult-de.mkii
index bbcf11355..0f78b568d 100644
--- a/tex/context/base/mult-de.mkii
+++ b/tex/context/base/mult-de.mkii
@@ -729,6 +729,7 @@
\setinterfaceconstant{label}{label}
\setinterfaceconstant{lastnamesep}{lastnamesep}
\setinterfaceconstant{lastpubsep}{lastpubsep}
+\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{links}
\setinterfaceconstant{leftcolor}{linkerfarbe}
\setinterfaceconstant{leftcompoundhyphen}{leftcompoundhyphen}
diff --git a/tex/context/base/mult-def.lua b/tex/context/base/mult-def.lua
index 775eb662d..1144dba22 100644
--- a/tex/context/base/mult-def.lua
+++ b/tex/context/base/mult-def.lua
@@ -6590,6 +6590,10 @@ return {
["en"]="values",
["nl"]="waarden",
},
+ ["layout"]={
+ ["en"]="layout",
+ ["nl"]="layout",
+ },
["action"]={
["cs"]="akce",
["de"]="aktion",
diff --git a/tex/context/base/mult-en.mkii b/tex/context/base/mult-en.mkii
index 90beed8ed..f36227e25 100644
--- a/tex/context/base/mult-en.mkii
+++ b/tex/context/base/mult-en.mkii
@@ -729,6 +729,7 @@
\setinterfaceconstant{label}{label}
\setinterfaceconstant{lastnamesep}{lastnamesep}
\setinterfaceconstant{lastpubsep}{lastpubsep}
+\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{left}
\setinterfaceconstant{leftcolor}{leftcolor}
\setinterfaceconstant{leftcompoundhyphen}{leftcompoundhyphen}
diff --git a/tex/context/base/mult-fr.mkii b/tex/context/base/mult-fr.mkii
index 49186ce4f..2c583bbf3 100644
--- a/tex/context/base/mult-fr.mkii
+++ b/tex/context/base/mult-fr.mkii
@@ -729,6 +729,7 @@
\setinterfaceconstant{label}{etiquette}
\setinterfaceconstant{lastnamesep}{lastnamesep}
\setinterfaceconstant{lastpubsep}{lastpubsep}
+\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{gauche}
\setinterfaceconstant{leftcolor}{couleurgauche}
\setinterfaceconstant{leftcompoundhyphen}{leftcompoundhyphen}
diff --git a/tex/context/base/mult-ini.lua b/tex/context/base/mult-ini.lua
index 4156e813d..e073130db 100644
--- a/tex/context/base/mult-ini.lua
+++ b/tex/context/base/mult-ini.lua
@@ -12,7 +12,7 @@ local serialize = table.serialize
local allocate = utilities.storage.allocate
local mark = utilities.storage.mark
-local texsprint = tex.sprint
+local contextsprint = context.sprint
local setmetatableindex = table.setmetatableindex
local report_interface = logs.reporter("interface","initialization")
@@ -176,9 +176,9 @@ logs.setmessenger(context.verbatim.ctxreport)
-- status
-function commands.writestatus(category,message)
+function commands.writestatus(category,message,...)
local r = reporters[category]
- r(message)
+ r(message,...)
end
-- initialization
@@ -191,28 +191,28 @@ function interfaces.setuserinterface(interface,response)
for given, constant in next, complete.constants do
constant = constant[interface] or constant.en or given
constants[constant] = given -- breedte -> width
- texsprint("\\do@sicon{",given,"}{",constant,"}")
+ contextsprint("\\do@sicon{",given,"}{",constant,"}")
nofconstants = nofconstants + 1
end
local nofvariables = 0
for given, variable in next, complete.variables do
variable = variable[interface] or variable.en or given
variables[given] = variable -- ja -> yes
- texsprint("\\do@sivar{",given,"}{",variable,"}")
+ contextsprint("\\do@sivar{",given,"}{",variable,"}")
nofvariables = nofvariables + 1
end
local nofelements = 0
for given, element in next, complete.elements do
element = element[interface] or element.en or given
elements[element] = given
- texsprint("\\do@siele{",given,"}{",element,"}")
+ contextsprint("\\do@siele{",given,"}{",element,"}")
nofelements = nofelements + 1
end
local nofcommands = 0
for given, command in next, complete.commands do
command = command[interface] or command.en or given
if command ~= given then
- texsprint("\\do@sicom{",given,"}{",command,"}")
+ contextsprint("\\do@sicom{",given,"}{",command,"}")
end
nofcommands = nofcommands + 1
end
diff --git a/tex/context/base/mult-ini.mkiv b/tex/context/base/mult-ini.mkiv
index 5d45353f6..bb7e5b514 100644
--- a/tex/context/base/mult-ini.mkiv
+++ b/tex/context/base/mult-ini.mkiv
@@ -377,8 +377,8 @@
{\ifcsname\m!prefix!#1\endcsname\else\setgvalue{\m!prefix!#1}{#1}\fi
\ctxlua{interfaces.setmessage("#1","#2",\!!bs#3\!!es)}}
-\unexpanded\def\setmessagetext #1#2{\edef\currentmessagetext{\ctxsprint{interfaces.getmessage("#1","#2")}}}
-\unexpanded\def\getmessage #1#2{\ctxsprint{interfaces.getmessage("#1","#2")}}
+\unexpanded\def\setmessagetext #1#2{\edef\currentmessagetext{\cldcontext{interfaces.getmessage("#1","#2")}}}
+\unexpanded\def\getmessage #1#2{\cldcontext{interfaces.getmessage("#1","#2")}}
%D Till here.
diff --git a/tex/context/base/mult-it.mkii b/tex/context/base/mult-it.mkii
index f79030df1..2a971812f 100644
--- a/tex/context/base/mult-it.mkii
+++ b/tex/context/base/mult-it.mkii
@@ -729,6 +729,7 @@
\setinterfaceconstant{label}{etichetta}
\setinterfaceconstant{lastnamesep}{lastnamesep}
\setinterfaceconstant{lastpubsep}{lastpubsep}
+\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{sinistra}
\setinterfaceconstant{leftcolor}{coloresinistra}
\setinterfaceconstant{leftcompoundhyphen}{leftcompoundhyphen}
diff --git a/tex/context/base/mult-nl.mkii b/tex/context/base/mult-nl.mkii
index 848be382b..3cae35da7 100644
--- a/tex/context/base/mult-nl.mkii
+++ b/tex/context/base/mult-nl.mkii
@@ -729,6 +729,7 @@
\setinterfaceconstant{label}{label}
\setinterfaceconstant{lastnamesep}{lastnamesep}
\setinterfaceconstant{lastpubsep}{lastpubsep}
+\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{links}
\setinterfaceconstant{leftcolor}{linkerkleur}
\setinterfaceconstant{leftcompoundhyphen}{linkerkoppelteken}
diff --git a/tex/context/base/mult-pe.mkii b/tex/context/base/mult-pe.mkii
index 26e068e30..43ad6b122 100644
--- a/tex/context/base/mult-pe.mkii
+++ b/tex/context/base/mult-pe.mkii
@@ -729,6 +729,7 @@
\setinterfaceconstant{label}{برچسب}
\setinterfaceconstant{lastnamesep}{lastnamesep}
\setinterfaceconstant{lastpubsep}{lastpubsep}
+\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{چپ}
\setinterfaceconstant{leftcolor}{رنگ‌چپ}
\setinterfaceconstant{leftcompoundhyphen}{leftcompoundhyphen}
diff --git a/tex/context/base/mult-ro.mkii b/tex/context/base/mult-ro.mkii
index 299c21446..50274c1d3 100644
--- a/tex/context/base/mult-ro.mkii
+++ b/tex/context/base/mult-ro.mkii
@@ -729,6 +729,7 @@
\setinterfaceconstant{label}{eticheta}
\setinterfaceconstant{lastnamesep}{lastnamesep}
\setinterfaceconstant{lastpubsep}{lastpubsep}
+\setinterfaceconstant{layout}{layout}
\setinterfaceconstant{left}{stanga}
\setinterfaceconstant{leftcolor}{culoarestanga}
\setinterfaceconstant{leftcompoundhyphen}{leftcompoundhyphen}
diff --git a/tex/context/base/node-rul.lua b/tex/context/base/node-rul.lua
index 4cbd1ad0c..7f49edffc 100644
--- a/tex/context/base/node-rul.lua
+++ b/tex/context/base/node-rul.lua
@@ -74,8 +74,6 @@ local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
-local texwrite = tex.write
-
local insert_before = node.insert_before
local insert_after = node.insert_after
local striprange = nodes.striprange
@@ -234,7 +232,7 @@ local data = nodes.rules.data
function nodes.rules.define(settings)
data[#data+1] = settings
- texwrite(#data)
+ context(#data)
end
local a_viewerlayer = attributes.private("viewerlayer")
@@ -340,7 +338,7 @@ local data = nodes.shifts.data
function nodes.shifts.define(settings)
data[#data+1] = settings
- texwrite(#data)
+ context(#data)
end
local function flush_shifted(head,first,last,data,level,parent,strip) -- not that fast but acceptable for this purpose
diff --git a/tex/context/base/node-ser.lua b/tex/context/base/node-ser.lua
index 17d222633..63690d00a 100644
--- a/tex/context/base/node-ser.lua
+++ b/tex/context/base/node-ser.lua
@@ -235,14 +235,6 @@ function nodes.serializebox(n,flat,verbose,name)
return nodes.serialize(nodes.totable(tex.box[n],flat,verbose),name)
end
--- keep:
---
--- function nodes.visualizebox(...)
--- tex.print(ctxcatcodes,"\\starttyping")
--- tex.print(nodes.serializebox(...))
--- tex.print("\\stoptyping")
--- end
-
function nodes.visualizebox(...) -- to be checked .. will move to module anyway
context.starttyping()
context.pushcatcodes("verbatim")
@@ -257,7 +249,7 @@ function nodes.list(head,n) -- name might change to nodes.type -- to be checked
end
while head do
local id = head.id
- tex.write(rep(" ",n or 0) .. tostring(head) .. "\n")
+ context(rep(" ",n or 0) .. tostring(head) .. "\n")
if id == hlist_code or id == vlist_code then
nodes.list(head.list,(n or 0)+1)
end
diff --git a/tex/context/base/node-spl.lua b/tex/context/base/node-spl.lua
index 0fe3f759d..3b208e0e7 100644
--- a/tex/context/base/node-spl.lua
+++ b/tex/context/base/node-spl.lua
@@ -170,7 +170,7 @@ function splitters.define(name,parameters)
local l = less and settings_to_array(less)
local m = more and settings_to_array(more)
if goodies then
- goodies = fonts.goodies.get(goodies) -- also in tfmdata
+ goodies = fonts.goodies.load(goodies) -- also in tfmdata
if goodies then
local featuresets = goodies.featuresets
local solution = solution and goodies.solutions[solution]
@@ -217,7 +217,7 @@ function splitters.define(name,parameters)
more = more_set or { },
settings = settings, -- for tracing
}
- tex.write(nofsolutions)
+ context(nofsolutions)
end
local nofwords, noftries, nofadapted, nofkept, nofparagraphs = 0, 0, 0, 0, 0
diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua
index 38855f659..5012095b3 100644
--- a/tex/context/base/node-tra.lua
+++ b/tex/context/base/node-tra.lua
@@ -218,7 +218,7 @@ function step_tracers.reset()
end
function step_tracers.nofsteps()
- return tex.write(#collection)
+ return context(#collection)
end
function step_tracers.glyphs(n,i)
diff --git a/tex/context/base/pack-obj.lua b/tex/context/base/pack-obj.lua
index 2dabfa784..c580aaa62 100644
--- a/tex/context/base/pack-obj.lua
+++ b/tex/context/base/pack-obj.lua
@@ -11,7 +11,7 @@ if not modules then modules = { } end modules ['pack-obj'] = {
reusable components.</p>
--ldx]]--
-local texsprint, texcount = tex.sprint, tex.count
+local texcount = tex.count
local allocate = utilities.storage.allocate
local collected = allocate()
@@ -46,12 +46,12 @@ end
function jobobjects.number(tag,default)
local o = collected[tag] or tobesaved[tag]
- texsprint((o and o[1]) or default)
+ context((o and o[1]) or default)
end
function jobobjects.page(tag,default)
local o = collected[tag] or tobesaved[tag]
- texsprint((o and o[2]) or default)
+ context((o and o[2]) or default)
end
function jobobjects.doifelse(tag)
diff --git a/tex/context/base/pack-obj.mkiv b/tex/context/base/pack-obj.mkiv
index 5adfeae86..266f34e49 100644
--- a/tex/context/base/pack-obj.mkiv
+++ b/tex/context/base/pack-obj.mkiv
@@ -288,9 +288,13 @@
\endgroup}
\def\getobject#1#2%
- {\begingroup
- \let\dohandleobject\dogetobject
- \csname\r!object#1::#2\endcsname}
+ {\ifcsname\r!object#1::#2\endcsname
+ \begingroup
+ \let\dohandleobject\dogetobject
+ \csname\r!object#1::#2\expandafter\endcsname
+ \else
+ {\infofont[object #1::#2]}%
+ \fi}
\def\dogetobject#1#2#3#4#5#6#7% don't change this, should work for dvi & pdf
{% \initializepaper
diff --git a/tex/context/base/page-flt.lua b/tex/context/base/page-flt.lua
index b691dbd45..67dd60f7f 100644
--- a/tex/context/base/page-flt.lua
+++ b/tex/context/base/page-flt.lua
@@ -7,6 +7,7 @@ if not modules then modules = { } end modules ['page-flt'] = {
}
-- floats -> managers.floats
+-- some functions are a tex/lua mix so we need a separation
local insert, remove = table.insert, table.remove
local find = string.find
@@ -46,10 +47,6 @@ function floats.stacked(which) -- floats.thenofstacked
return #stacks[which or default]
end
-function floats.thestacked(which)
- return context(#stacks[which or default])
-end
-
function floats.push()
insert(pushed,stacks)
stacks = initialize()
@@ -164,13 +161,6 @@ function floats.flush(which,n,bylabel)
end
end
-function floats.thevar(name,default)
- local value = last and last.data[name] or default
- if value and value ~= "" then
- context(value)
- end
-end
-
function floats.consult(which,n)
which = which or default
local stack = stacks[which]
@@ -213,12 +203,12 @@ function floats.collect(which,maxwidth,distance)
setcount("global","nofcollectedfloats",m)
end
-function commands.doifsavedfloatelse(which)
- local stack = stacks[which or default]
- commands.doifelse(#stack>0)
+function floats.getvariable(name,default)
+ local value = last and last.data[name] or default
+ return value ~= "" and value
end
-function floats.thecheckedpagefloat(packed)
+function floats.checkedpagefloat(packed)
local result = ""
if structures.pages.is_odd() then
if #stacks.rightpage > 0 then
@@ -245,28 +235,56 @@ function floats.thecheckedpagefloat(packed)
end
end
end
- context(result)
+ return result
+end
+
+function floats.nofstacked(which)
+ return #stacks[which or default] or 0
end
local method = C((1-S(", :"))^1)
local position = P(":") * C((1-S("*,"))^1) * P("*") * C((1-S(","))^1)
local label = P(":") * C((1-S(",*: "))^0)
-local pattern = method * (label * position + C("") * position + label + C("") * C("") * C(""))
+local pattern = method * (
+ label * position
+ + C("") * position
+ + label
+ + C("") * C("") * C("")
+) + C("") * C("") * C("") * C("")
+
--- table.print { lpeg.match(pattern,"somewhere:blabla,crap") }
--- table.print { lpeg.match(pattern,"somewhere:1*2") }
--- table.print { lpeg.match(pattern,"somewhere:blabla:1*2") }
--- table.print { lpeg.match(pattern,"somewhere::1*2") }
--- table.print { lpeg.match(pattern,"somewhere,") }
--- table.print { lpeg.match(pattern,"somewhere") }
+-- inspect { lpegmatch(pattern,"somewhere:blabla,crap") }
+-- inspect { lpegmatch(pattern,"somewhere:1*2") }
+-- inspect { lpegmatch(pattern,"somewhere:blabla:1*2") }
+-- inspect { lpegmatch(pattern,"somewhere::1*2") }
+-- inspect { lpegmatch(pattern,"somewhere,") }
+-- inspect { lpegmatch(pattern,"somewhere") }
+-- inspect { lpegmatch(pattern,"") }
function floats.analysemethod(str)
- if str ~= "" then -- extra check, already done at the tex end
- local method, label, row, column = lpegmatch(pattern,str)
- context.setvalue("floatmethod",method or "")
- context.setvalue("floatlabel", label or "")
- context.setvalue("floatrow", row or "")
- context.setvalue("floatcolumn",column or "")
- end
+ return lpegmatch(pattern,str or "")
+end
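+
+-- For illustration only: "somewhere:here:1*2" comes back as method "somewhere",
+-- label "here", row "1" and column "2"; absent parts yield empty strings.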
+
+-- interface
+
+commands.flushfloat = floats.flush
+commands.savefloat = floats.save
+commands.resavefloat = floats.resave
+commands.pushfloat = floats.push
+commands.popfloat = floats.pop
+commands.consultfloat = floats.consult
+commands.collectfloat = floats.collect
+
+function commands.nofstackedfloats (...) context(floats.nofstacked(...)) end
+function commands.getfloatvariable (...) context(floats.getvariable(...) or "") end
+function commands.doifelsesavedfloat(...) commands.doifelse(floats.nofstacked(...)>0) end
+function commands.checkedpagefloat (...) context(floats.checkedpagefloat(...)) end
+
+function commands.analysefloatmethod(str)
+ local method, label, row, column = floats.analysemethod(str)
+ context.setvalue("floatmethod",method)
+ context.setvalue("floatlabel", label )
+ context.setvalue("floatrow", row )
+ context.setvalue("floatcolumn",column)
end
diff --git a/tex/context/base/page-flt.mkiv b/tex/context/base/page-flt.mkiv
index 910e8261a..e182c1695 100644
--- a/tex/context/base/page-flt.mkiv
+++ b/tex/context/base/page-flt.mkiv
@@ -72,52 +72,49 @@
\to \everyfloatscheck
\def\dofloatsflush#1#2%
- {\ctxlua{floats.flush("#1",\number#2)}%
+ {\ctxcommand{flushfloat("#1",\number#2)}%
\the\everyfloatscheck}
\def\dofloatsflushbylabel#1#2%
- {\ctxlua{floats.flush("#1","#2",true)}%
+ {\ctxcommand{flushfloat("#1","#2",true)}%
\the\everyfloatscheck}
\def\dofloatssave#1%
- {\ctxlua{floats.save("#1")}%
+ {\ctxcommand{savefloat("#1")}%
\the\everyfloatscheck}
\def\dofloatsresave#1%
- {\ctxlua{floats.resave("#1")}%
+ {\ctxcommand{resavefloat("#1")}%
\the\everyfloatscheck}
\def\dopushsavedfloats
- {\ctxlua{floats.push()}%
+ {\ctxcommand{pushfloat()}%
\the\everyfloatscheck}
\def\dopopsavedfloats
- {\ctxlua{floats.pop()}%
+ {\ctxcommand{popfloat()}%
\the\everyfloatscheck}
\def\dofloatsgetinfo#1%
- {\ctxlua{floats.consult("#1")}}
+ {\ctxcommand{consultfloat("#1")}}
-\def\doifsavedfloatelse#1%
- {\ctxcommand{doifsavedfloatelse("#1")}}
+\def\doifelsesavedfloat#1%
+ {\ctxcommand{doifelsesavedfloat("#1")}}
\def\dofloatscollect#1#2#3%
- {\ctxlua{floats.collect("#1",\number\dimexpr#2,\number\dimexpr#3)}}
+ {\ctxcommand{collectfloat("#1",\number\dimexpr#2,\number\dimexpr#3)}}
-\def\dofloatsnofstacked#1%
- {\ctxlua{floats.thestacked("#1")}}
-
-\def\dofloatsgetvariable#1%
- {\ctxlua{floats.thevar("specification")}}
+\def\nofstackedfloatsincategory#1%
+ {\ctxcommand{nofstackedfloats("#1")}}
\let\dopushcolumnfloats\dopushsavedfloats
\let\dopopcolumnfloats \dopopsavedfloats
\def\dofloatssavepagefloat#1#2%
- {\ctxlua{floats.save("#1", { specification = "#2" })}}
+ {\ctxcommand{savefloat("#1", { specification = "#2" })}}
\def\dofloatssavesomewherefloat#1#2% #1=method
- {\ctxlua{floats.save("#1", { specification = "#2", label = "\floatlabel" })}}
+ {\ctxcommand{savefloat("#1", { specification = "#2", label = "\floatlabel" })}}
%D This is an experimental new feature (for Alan Braslau), a prelude to more:
%D
@@ -206,7 +203,7 @@
\def\doflushsomepagefloat#1% future releases can do more clever things
{\dofloatsflush{#1}{1}%
- \edef\floatspecification{\dofloatsgetvariable{specification}}%
+ \edef\floatspecification{\ctxcommand{getfloatvariable("specification")}}% Is this okay?
\the\everybeforeflushedpagefloat
\vbox to \textheight
{\doifnotinset\v!high\floatspecification\vfill
@@ -216,25 +213,25 @@
% \def\doflushpagefloats
% {\doifoddpageelse
-% {\ifnum\dofloatsnofstacked\s!rightpage>\zerocount
+% {\ifnum\nofstackedfloatsincategory\s!rightpage>\zerocount
% \doflushsomepagefloat\s!rightpage
-% \else\ifnum\dofloatsnofstacked\s!page>\zerocount
+% \else\ifnum\nofstackedfloatsincategory\s!page>\zerocount
% \doflushsomepagefloat\s!page
-% \else\ifnum\dofloatsnofstacked\s!leftpage>\zerocount
+% \else\ifnum\nofstackedfloatsincategory\s!leftpage>\zerocount
% \emptyhbox\vfill\eject
% %\doflushsomepagefloat\s!leftpage
% \fi\fi\fi}
-% {\ifnum\dofloatsnofstacked\s!leftpage>\zerocount
+% {\ifnum\nofstackedfloatsincategory\s!leftpage>\zerocount
% \doflushsomepagefloat\s!leftpage
-% \else\ifnum\dofloatsnofstacked\s!page>\zerocount
+% \else\ifnum\nofstackedfloatsincategory\s!page>\zerocount
% \doflushsomepagefloat\s!page
-% \else\ifnum\dofloatsnofstacked\s!rightpage>\zerocount
+% \else\ifnum\nofstackedfloatsincategory\s!rightpage>\zerocount
% \emptyhbox\vfill\eject
% %\doflushsomepagefloat\s!rightpage
% \fi\fi\fi}}
\def\doflushpagefloats
- {\edef\checkedpagefloat{\ctxlua{floats.thecheckedpagefloat()}}% (true) for packed
+ {\edef\checkedpagefloat{\ctxcommand{checkedpagefloat()}}% (true) for packed
\ifx\checkedpagefloat\empty
% nothing
\else\ifx\checkedpagefloat\v!empty
diff --git a/tex/context/base/page-ini.mkiv b/tex/context/base/page-ini.mkiv
index 56d647205..8abb67000 100644
--- a/tex/context/base/page-ini.mkiv
+++ b/tex/context/base/page-ini.mkiv
@@ -475,15 +475,17 @@
\def\ejectinsert
{%\flushnotes already done
\bgroup
+\ifsomefloatwaiting
\noftopfloats\plusthousand
\nofbotfloats\zerocount
% this is needed in case a float that has been stored
% ends up at the current page; this border case occurs when
% the calculated room is 'eps' smaller than the room available
% when just flushing; so now we have (maybe optional):
- \pagebaselinecorrection
+ \pagebaselinecorrection % hm, needs checking, not needed when no floats
% alas, this is tricky but needed (first surfaced in prikkels)
\doflushfloats
+\fi
\egroup}
\def\ejectdummypage
@@ -795,13 +797,26 @@
\def\docolumnpagebodymarks#1#2#3#4% first last column box
{\ifnum#3=#1\relax
- \dosynchronizemarking[\number#3,\v!column:\number#3,\v!first,\v!column:\v!first][#4]%
+ \dosynchronizemarking[\number#3,\v!column:\number#3,\v!first,\v!column:\v!first][#4][]%
\else\ifnum#3=#2\relax
- \dosynchronizemarking[\number#3,\v!column:\number#3,\v!last, \v!column:\v!last ][#4]%
+ \dosynchronizemarking[\number#3,\v!column:\number#3,\v!last, \v!column:\v!last ][#4][]%
\else
- \dosynchronizemarking[\number#3,\v!column:\number#3 ][#4]%
+ \dosynchronizemarking[\number#3,\v!column:\number#3 ][#4][]%
\fi\fi}
+% tricky ... we need to retain the last mark
+
+\newconditional\buildingsuccessivepages \settrue\buildingsuccessivepages
+
+% \starttext
+% \dorecurse{3} {
+% \startchapter[title=Chapter #1]
+% \input tufte
+% \dorecurse{4}{\placefigure{Number ##1}{\externalfigure[cow][height=9cm]}}
+% \stopchapter
+% }
+% \stoptext
+
\def\buildpagebody#1#2%
{\ifsavepagebody\global\setbox\savedpagebody\fi
\vbox
@@ -809,7 +824,7 @@
\boxmaxdepth\maxdimen % new
\dontcomplain
% marks get done here
- \dosynchronizemarking[\v!page][#2]% #2 box
+ \dosynchronizemarking[\v!page][#2][\ifconditional\buildingsuccessivepages\v!keep\fi]% #2 box
% the following plugin uses and sets pagebox; beware: this
% will change and is for my (hh) personal experiments .. takes two
% arguments
diff --git a/tex/context/base/page-lay.mkiv b/tex/context/base/page-lay.mkiv
index 81070a8e9..b04008f9a 100644
--- a/tex/context/base/page-lay.mkiv
+++ b/tex/context/base/page-lay.mkiv
@@ -124,7 +124,7 @@
\appendtoks
\ifx\currentlayout\v!reset
\let\currentlayout\empty
- \letlayoutparameter\v!normal % global ?
+ \letlayoutparameter\c!state\v!normal % global ?
\fi
\globallet\currentlayout\currentlayout
\to \everysetuplayout
@@ -831,6 +831,16 @@
\def\adaptlayout
{\dodoubleempty\doadaptlayout}
+\unexpanded\def\startlayout[#1]%
+ {\page
+ \pushmacro\currentlayout
+ \setuplayout[#1]}
+
+\unexpanded\def\stoplayout
+ {\page
+ \popmacro\currentlayout
+ \setuplayout[\currentlayout]}
+
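+% A usage sketch (layout name and keys are just an example):
+%
+% \definelayout[wide][backspace=1cm,width=middle]
+%
+% \startlayout[wide]
+%   ... pages typeset with the adapted layout ...
+% \stoplayout
+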
% describe interface
%D Centering the paper area on the print area is determined
diff --git a/tex/context/base/page-lin.mkiv b/tex/context/base/page-lin.mkiv
index b5d5e9ca0..a643d670a 100644
--- a/tex/context/base/page-lin.mkiv
+++ b/tex/context/base/page-lin.mkiv
@@ -93,7 +93,7 @@
{continue = "\linenumberparameter\c!continue"}
\def\mkdefinetextlinenumbering
- {\setxvalue{ln:c:\currentlinenumbering}{\number\ctxlua{tex.sprint(nodes.lines.boxed.register({\mklinenumberparameters}))}}}
+ {\setxvalue{ln:c:\currentlinenumbering}{\number\cldcontext{nodes.lines.boxed.register({\mklinenumberparameters})}}}
\def\mkupdatetextlinenumbering
{\ctxlua{nodes.lines.boxed.setup(\getvalue{ln:c:\currentlinenumbering},{\mklinenumberupdateparameters})}}
diff --git a/tex/context/base/page-mrk.mkiv b/tex/context/base/page-mrk.mkiv
index 9923632ac..de559fdee 100644
--- a/tex/context/base/page-mrk.mkiv
+++ b/tex/context/base/page-mrk.mkiv
@@ -52,7 +52,7 @@
\setbox\scratchbox\hbox \layoutcomponentboxattribute\bgroup
\uniqueMPgraphic
{print:color}%
- {w=\the\scratchwidth ,h=\the\scratchheight,%
+ {w=\the\scratchwidth,h=\the\scratchheight,%
l=\pagecutmarklength,o=\pagecutmarkoffset}%
\egroup
\wd\scratchbox\scratchwidth\ht\scratchbox\scratchheight\dp\scratchbox\scratchdepth
@@ -63,7 +63,7 @@
\setbox\scratchbox\hbox \layoutcomponentboxattribute\bgroup
\uniqueMPgraphic
{print:marking}%
- {w=\the\scratchwidth ,h=\the\scratchheight,%
+ {w=\the\scratchwidth,h=\the\scratchheight,%
l=\pagecutmarklength,o=\pagecutmarkoffset}%
\egroup
\wd\scratchbox\scratchwidth\ht\scratchbox\scratchheight\dp\scratchbox\scratchdepth
@@ -74,7 +74,7 @@
\setbox\scratchbox\hbox \layoutcomponentboxattribute\bgroup
\uniqueMPgraphic
{print:lines}%
- {w=\the\scratchwidth ,h=\the\scratchheight,%
+ {w=\the\scratchwidth,h=\the\scratchheight,%
l=\pagecutmarklength,o=\pagecutmarkoffset}%
\egroup
\wd\scratchbox\scratchwidth\ht\scratchbox\scratchheight\dp\scratchbox\scratchdepth
diff --git a/tex/context/base/page-run.mkiv b/tex/context/base/page-run.mkiv
index 1f8d75a1c..9d14ff523 100644
--- a/tex/context/base/page-run.mkiv
+++ b/tex/context/base/page-run.mkiv
@@ -15,79 +15,60 @@
\unprotect
-\gdef\doshowprint[#1][#2][#3]%
- {\framed
- [\c!offset=\v!overlay,
- \c!strut=\v!no]
- {\forgetall
- \dontcomplain
- \globaldefs\minusone
- \dimen\zerocount\pagegoal
- \definepapersize[X][\c!width=4em, \c!height=6em]%
- \definepapersize[Y][\c!width=12em,\c!height=14em]%
- \setuppapersize[#1,X][#2,Y]%
- \setuplayout[#3]%
- \setbox\zerocount\vbox
- {\framed
+\gdef\doshowprint[#1][#2][#3]% only english
+ {\setbuffer[crap]%
+ \unprotect
+ \definepapersize[X][\c!width=4em, \c!height=6em]
+ \definepapersize[Y][\c!width=12em,\c!height=14em]
+ \setuppapersize[X,#1][Y,#2]
+ \setuplayout[#3]
+ \setuplayout[\v!page]
+ \framed
[\c!offset=\v!overlay,\c!strut=\v!no,
\c!width=\paperwidth,\c!height=\paperheight]
- {\ss ABC\par DEF}}%
- \doublesidedfalse
- \def\cutmarklength{.5em}%
- \addpagecutmarks \zerocount
- \replicatepagebox\zerocount
- \scalepagebox \zerocount
- \mirrorpaperbox \zerocount
- \orientpaperbox \zerocount
- \centerpagebox \zerocount
- \mirrorprintbox \zerocount
- \orientprintbox \zerocount
- \offsetprintbox \zerocount
- \pagegoal\dimen\zerocount
- \box0}}
+ {\ss ABC\par DEF}%
+ \protect
+ \endbuffer
+ \framed[\c!offset=\v!overlay,\c!strut=\v!no]{\typesetbuffer[crap]}}
\gdef\showprint
{\dotripleempty\doshowprint}
-% \switchnaarkorps[8pt]
+% \switchtobodyfont[8pt]
%
-% \startcombinatie[4*4]
-% {\toonprint} {\strut}
-% {\toonprint[][][plaats=midden]} {\type{plaats=midden}}
-% {\toonprint[][][plaats=midden,markering=aan]} {\type{markering=aan}\break
-% \type{plaats=midden}}
-% {\toonprint[][][plaats=midden,markering=aan,nx=2]} {\type{markering=aan}\break
-% \type{plaats=midden}\break
-% \type{nx=2}}
-% {\toonprint[][][plaats=links]} {\type{plaats=links}}
-% {\toonprint[][][plaats=rechts]} {\type{plaats=rechts}}
-% {\toonprint[][][plaats={links,onder}]} {\type{plaats={links,onder}}}
-% {\toonprint[][][plaats={rechts,onder}]} {\type{plaats={rechts,onder}}}
-% {\toonprint[][][nx=2,ny=1]} {\type{nx=2,ny=1}}
-% {\toonprint[][][nx=1,ny=2]} {\type{nx=1,ny=2}}
-% {\toonprint[][][nx=2,ny=2]} {\type{nx=2,ny=2}}
-% {\toonprint[][][nx=2,ny=2,plaats=midden]} {\type{nx=2,ny=2}\break
-% \type{plaats=midden}}
-% {\toonprint[][][rugoffset=3pt]} {\type{rugoffset=.5cm}}
-% {\toonprint[][][kopoffset=3pt]} {\type{kopoffset=.5cm}}
-% {\toonprint[][][schaal=1.5]} {\type{schaal=1.5}}
-% {\toonprint[][][schaal=0.8]} {\type{schaal=0.8}}
-% \stopcombinatie
+% \startcombination[4*4]
+% {\showprint} {\strut}
+% {\showprint[][][location=middle]} {\type{location=middle}}
+% {\showprint[][][location=middle,marking=on]} {\type{marking=on}\break\type{location=middle}}
+% {\showprint[][][location=middle,marking=on,nx=2]} {\type{marking=on}\break\type{location=middle}\break\type{nx=2}}
+% {\showprint[][][location=left]} {\type{location=left}}
+% {\showprint[][][location=right]} {\type{location=right}}
+% {\showprint[][][location={left,bottom}]} {\type{location={left,bottom}}}
+% {\showprint[][][location={right,bottom}]} {\type{location={right,bottom}}}
+% {\showprint[][][nx=2,ny=1]} {\type{nx=2,ny=1}}
+% {\showprint[][][nx=1,ny=2]} {\type{nx=1,ny=2}}
+% {\showprint[][][nx=2,ny=2]} {\type{nx=2,ny=2}}
+% {\showprint[][][nx=2,ny=2,location=middle]} {\type{nx=2,ny=2}\break\type{location=middle}}
+% {\showprint[][][backoffset=3pt]} {\type{backoffset=.5cm}}
+% {\showprint[][][topoffset=3pt]} {\type{topoffset=.5cm}}
+% {\showprint[][][scale=1.5]} {\type{scale=1.5}}
+% {\showprint[][][scale=0.8]} {\type{scale=0.8}}
+% \stopcombination
%
-% \startcombinatie[3*4]
-% {\toonprint[liggend][][plaats=midden]} {\type{liggend}}
-% {\toonprint[][liggend][plaats=midden]} {\strut\break\type{liggend}}
-% {\toonprint[liggend][liggend][plaats=midden]} {\type{liggend}\break\type{liggend}}
-% {\toonprint[90][][plaats=midden]} {\type{90}}
-% {\toonprint[][90][plaats=midden]} {\strut\break\type{90}}
-% {\toonprint[90][90][plaats=midden]} {\type{90}\break\type{90}}
-% {\toonprint[180][][plaats=midden]} {\type{180}}
-% {\toonprint[][180][plaats=midden]} {\strut\break\type{180}}
-% {\toonprint[180][180][plaats=midden]} {\type{180}\break\type{180}}
-% {\toonprint[gespiegeld][][plaats=midden]} {\type{gespiegeld}}
-% {\toonprint[][gespiegeld][plaats=midden]} {\strut\break\type{gespiegeld}}
-% {\toonprint[gespiegeld][gespiegeld][plaats=midden]} {\type{gespiegeld}\break\type{gespiegeld}}
-% \stopcombinatie
+% \startcombination[3*4]
+% {\showprint[landscape][] [location=middle]} {\type{landscape}}
+% {\showprint[] [landscape][location=middle]} {\strut\break\type{landscape}}
+% {\showprint[landscape][landscape][location=middle]} {\type{landscape}\break\type{landscape}}
+% {\showprint[90] [] [location=middle]} {\type{90}}
+% {\showprint[] [90] [location=middle]} {\strut\break\type{90}}
+% {\showprint[90] [90] [location=middle]} {\type{90}\break\type{90}}
+% {\showprint[180] [] [location=middle]} {\type{180}}
+% {\showprint[] [180] [location=middle]} {\strut\break\type{180}}
+% {\showprint[180] [180] [location=middle]} {\type{180}\break\type{180}}
+% {\showprint[mirrored] [] [location=middle]} {\type{mirrored}}
+% {\showprint[] [mirrored] [location=middle]} {\strut\break\type{mirrored}}
+% {\showprint[mirrored] [mirrored] [location=middle]} {\type{mirrored}\break\type{mirrored}}
+% \stopcombination
% maybe we will have page-run.lua
diff --git a/tex/context/base/phys-dim.lua b/tex/context/base/phys-dim.lua
index 534e9a45a..c52359341 100644
--- a/tex/context/base/phys-dim.lua
+++ b/tex/context/base/phys-dim.lua
@@ -13,12 +13,14 @@ if not modules then modules = { } end modules ['phys-dim'] = {
-- todo: maybe also a sciunit command that converts to si units (1 inch -> 0.0254 m)
-- etc .. typically something to do when listening to a news show or b-movie
-local P, S, R, C, Cc, Cs, matchlpeg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.match
-local format = string.format
+local V, P, S, R, C, Cc, Cs, matchlpeg, Carg = lpeg.V, lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.match, lpeg.Carg
+local format, lower = string.format, string.lower
local appendlpeg = lpeg.append
local mergetable, mergedtable, keys, loweredkeys = table.merge, table.merged, table.keys, table.loweredkeys
+local allocate = utilities.storage.allocate
+
physics = physics or { }
physics.patterns = physics.patterns or { }
@@ -132,8 +134,8 @@ local long_units = {
Newton = [[N]],
Pascal = [[Pa]],
Atom = [[u]],
- Joule = [[W]],
- Watt = [[J]],
+ Joule = [[J]],
+ Watt = [[W]],
Celsius = [[C]], -- no SI
Kelvin = [[K]],
Fahrenheit = [[F]], -- no SI
@@ -204,49 +206,54 @@ local long_suffixes = {
long_prefixes.Micro = [[\textmu]]
long_units .Ohm = [[\textohm]]
-mergetable(long_prefixes, loweredkeys(long_prefixes))
-mergetable(long_units, loweredkeys(long_units))
-mergetable(long_operators, loweredkeys(long_operators))
-mergetable(long_suffixes, loweredkeys(long_suffixes))
-
-local short_prefixes = {
- y = long_prefixes.Yocto,
- z = long_prefixes.Zetto,
- a = long_prefixes.Atto,
- f = long_prefixes.Femto,
- p = long_prefixes.Pico,
- n = long_prefixes.Nano,
- u = long_prefixes.Micro,
- m = long_prefixes.Milli,
- c = long_prefixes.Centi,
- d = long_prefixes.Deci,
- da = long_prefixes.Deca,
- h = long_prefixes.Hecto,
- k = long_prefixes.Kilo,
- M = long_prefixes.Mega,
- G = long_prefixes.Giga,
- T = long_prefixes.Tera,
- P = long_prefixes.Peta,
- E = long_prefixes.Exa,
- Z = long_prefixes.Zetta,
- Y = long_prefixes.Yotta,
+mergetable(long_suffixes,loweredkeys(long_suffixes))
+
+local long_prefixes_to_long = { } for k, v in next, long_prefixes do long_prefixes_to_long [lower(k)] = k end
+local long_units_to_long = { } for k, v in next, long_units do long_units_to_long [lower(k)] = k end
+local long_operators_to_long = { } for k, v in next, long_operators do long_operators_to_long[lower(k)] = k end
+
+local short_prefixes_to_long = {
+ y = "Yocto",
+ z = "Zetto",
+ a = "Atto",
+ f = "Femto",
+ p = "Pico",
+ n = "Nano",
+ u = "Micro",
+ m = "Milli",
+ c = "Centi",
+ d = "Deci",
+ da = "Deca",
+ h = "Hecto",
+ k = "Kilo",
+ M = "Mega",
+ G = "Giga",
+ T = "Tera",
+ P = "Peta",
+ E = "Exa",
+ Z = "Zetta",
+ Y = "Yotta",
}
-local short_units = {
- m = long_units.Meter,
- hz = long_units.Hertz,
- u = long_units.Hour,
- h = long_units.Hour,
- s = long_units.Second,
+local short_units_to_long = {
+ m = "Meter",
+ hz = "Hertz",
+ u = "Hour",
+ h = "Hour",
+ s = "Second",
}
-local short_operators = {
- ["."] = long_operators.Times,
- ["*"] = long_operators.Times,
- ["/"] = long_operators.Solidus,
- [":"] = long_operators.OutOf,
+local short_operators_to_long = {
+ ["."] = "Times",
+ ["*"] = "Times",
+ ["/"] = "Solidus",
+ [":"] = "OutOf",
}
+short_prefixes = { } for k, v in next, short_prefixes_to_long do short_prefixes [k] = long_prefixes [v] end
+short_units = { } for k, v in next, short_units_to_long do short_units [k] = long_units [v] end
+short_operators = { } for k, v in next, short_operators_to_long do short_operators[k] = long_operators[v] end
+
local short_suffixes = { -- maybe just raw digit match
["1"] = long_suffixes.Linear,
["2"] = long_suffixes.Square,
@@ -270,10 +277,10 @@ local short_suffixes = { -- maybe just raw digit match
["^-3"] = long_suffixes.ICubic,
}
-local prefixes = mergedtable(long_prefixes,short_prefixes)
-local units = mergedtable(long_units,short_units)
-local operators = mergedtable(long_operators,short_operators)
-local suffixes = mergedtable(long_suffixes,short_suffixes)
+local prefixes = long_prefixes
+local units = long_units
+local operators = long_operators
+local suffixes = long_suffixes
local somespace = P(" ")^0/""
@@ -282,22 +289,34 @@ local l_unit = appendlpeg(keys(long_units))
local l_operator = appendlpeg(keys(long_operators))
local l_suffix = appendlpeg(keys(long_suffixes))
-local s_prefix = appendlpeg(keys(short_prefixes))
-local s_unit = appendlpeg(keys(short_units))
-local s_operator = appendlpeg(keys(short_operators))
+local l_prefix = appendlpeg(long_prefixes_to_long,l_prefix)
+local l_unit = appendlpeg(long_units_to_long,l_unit)
+local l_operator = appendlpeg(long_operators_to_long,l_operator)
+
+local s_prefix = appendlpeg(short_prefixes_to_long)
+local s_unit = appendlpeg(short_units_to_long)
+local s_operator = appendlpeg(short_operators_to_long)
+
local s_suffix = appendlpeg(keys(short_suffixes))
-- space inside Cs else funny captures and args to function
-- square centi meter per square kilo seconds
-local l_suffix = Cs(somespace * l_suffix)
-local s_suffix = Cs(somespace * s_suffix) + Cc("")
-local l_operator = Cs(somespace * l_operator)
-local l_combination = (Cs(somespace * l_prefix) + Cc("")) * Cs(somespace * l_unit)
-local s_combination = Cs(somespace * s_prefix) * Cs(somespace * s_unit) + Cc("") * Cs(somespace * s_unit)
-
-local combination = l_combination + s_combination
+local l_suffix = Cs(somespace * l_suffix)
+local s_suffix = Cs(somespace * s_suffix) + Cc("")
+local l_operator = Cs(somespace * l_operator)
+
+local combination = P { "start",
+ l_prefix = Cs(somespace * l_prefix) + Cc(""),
+ s_prefix = Cs(somespace * s_prefix) + Cc(""),
+ l_unit = Cs(somespace * l_unit),
+ s_unit = Cs(somespace * s_unit),
+ start = V("l_prefix") * V("l_unit")
+ + V("s_prefix") * V("s_unit")
+ + V("l_prefix") * V("s_unit")
+ + V("s_prefix") * V("l_unit"),
+}
-- square kilo meter
-- square km
@@ -314,10 +333,33 @@ local unitsN = context.unitsN
local unitsNstart = context.unitsNstart
local unitsNstop = context.unitsNstop
-local function dimpus(p,u,s)
- p = prefixes[p] or p
- u = units[u] or u
+local l_prefixes = allocate()
+local l_units = allocate()
+local l_operators = allocate()
+
+local labels = languages.data.labels or { }
+
+labels.prefixes = l_prefixes
+labels.units = l_units
+labels.operators = l_operators
+
+l_prefixes .test = { Kilo = "kilo" }
+l_units .test = { Meter = "meter", Second = "second" }
+l_operators.test = { Solidus = " per " }
+
+local function dimpus(p,u,s,wherefrom)
+--~ print(p,u,s,wherefrom)
+ if wherefrom == "" then
+ p = prefixes[p] or p
+ u = units [u] or u
+ else
+ local lp = l_prefixes[wherefrom]
+ local lu = l_units [wherefrom]
+ p = lp and lp[p] or p
+ u = lu and lu[u] or u
+ end
s = suffixes[s] or s
+ --
if p ~= "" then
if u ~= "" then
if s ~= "" then
@@ -345,21 +387,28 @@ local function dimpus(p,u,s)
end
end
-local function dimspu(s,p,u)
- return dimpus(p,u,s)
+local function dimspu(s,p,u,wherefrom)
+ return dimpus(p,u,s,wherefrom)
end
-local function dimop(o)
- o = operators[o] or o
+local function dimop(o,wherefrom)
+--~ print(o,wherefrom)
+ if wherefrom == "" then
+ o = operators[o] or o
+ else
+ local lo = l_operators[wherefrom]
+ o = lo and lo[o] or o
+ end
if o then
unitsO(o)
end
end
-local dimension = (l_suffix * combination) / dimspu + (combination * s_suffix) / dimpus
-local number = lpeg.patterns.number / unitsN
-local operator = (l_operator + s_operator) / dimop
-local whatever = (P(1)^0) / unitsU
+local dimension = ((l_suffix * combination) * Carg(1)) / dimspu
+ + ((combination * s_suffix) * Carg(1)) / dimpus
+local number = lpeg.patterns.number / unitsN
+local operator = C((l_operator + s_operator) * Carg(1)) / dimop -- weird, why is the extra C needed here
+local whatever = (P(1)^0) / unitsU
dimension = somespace * dimension * somespace
number = somespace * number * somespace
@@ -368,12 +417,13 @@ operator = somespace * operator * somespace
----- unitparser = dimension * dimension^0 * (operator * dimension^1)^-1 + whatever
local unitparser = dimension^1 * (operator * dimension^1)^-1 + whatever
-local unitdigitparser = (P(true)/unitsNstart) * digitparser * (P(true)/unitsNstop)
+----- unitdigitparser = (P(true)/unitsNstart) * digitparser * (P(true)/unitsNstop) -- true forces { }
+local unitdigitparser = (Cc(nil)/unitsNstart) * digitparser * (Cc(nil)/unitsNstop) --
local combinedparser = (unitdigitparser + number)^-1 * unitparser
physics.patterns.unitparser = unitparser
physics.patterns.combinedparser = combinedparser
-function commands.unit(str)
- matchlpeg(combinedparser,str)
+function commands.unit(str,wherefrom)
+ matchlpeg(combinedparser,str,1,wherefrom or "")
end
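
The reworked unit parser above threads the label ("wherefrom") through the match with lpeg.Carg(1) and resolves prefix/unit combinations with an ordered-choice grammar. A standalone sketch of both ideas in plain lpeg, outside ConTeXt; the labels table and the resolve function are illustrative names, not the module's own:

    local lpeg = require "lpeg"
    local P, V, C, Cc, Carg = lpeg.P, lpeg.V, lpeg.C, lpeg.Cc, lpeg.Carg

    local labels = { test = { Kilo = "kilo", Meter = "meter" } }

    local function resolve(prefix, unit, wherefrom)
        local t = labels[wherefrom] or { }
        return (t[prefix] or prefix) .. " " .. (t[unit] or unit)
    end

    local grammar = P { "start",
        l_prefix = C(P("Kilo")) + Cc(""),     -- optional prefix, empty capture otherwise
        l_unit   = C(P("Meter")),
        start    = V("l_prefix") * V("l_unit"),
    }

    -- Carg(1) picks up the extra argument passed to lpeg.match, here "test"
    print(lpeg.match(grammar * Carg(1) / resolve, "KiloMeter", 1, "test"))
    -- kilo meter
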
diff --git a/tex/context/base/phys-dim.mkiv b/tex/context/base/phys-dim.mkiv
index 7631316cb..4218d9dd0 100644
--- a/tex/context/base/phys-dim.mkiv
+++ b/tex/context/base/phys-dim.mkiv
@@ -288,11 +288,13 @@
\installcommandhandler \??un {units} \??un
\setupunits
- [alternative=, % done: text
- %grid=yes, % (maybe)
- %style=..., % done
- %color=..., % done
- %space=..., % (maybe) small medium big
+ [\c!alternative=, % done: text
+ \c!separator=\v!normal, % done: cdot|big|medium|space
+ \c!label=, % done: (no interface yet)
+ %\c!grid=\v!yes, % (maybe)
+ %\c!style=..., % done
+ %\c!color=..., % done
+ %\c!space=..., % (maybe) small medium big
]
\newconstant \c_units_mode % 0=text 1=math 2=textinmath 3=mathintext
@@ -307,9 +309,25 @@
% [ \unit {micro ohm}]\par % space before unit
% [\unit{10 micro ohm}]\par % space before unit
-\def\unitshalfspace{\thinspace}
-\def\unitsfullspace{\thickspace}
-\def\unitsbackspace{\negthinspace}
+\def\unitssmallspace {\thinspace}
+\def\unitsmediumspace{\medspace}
+\def\unitsbigspace {\thickspace}
+\def\unitsbackspace {\negthinspace}
+
+\def\installunitsseparator#1#2%
+ {\setvalue{\??un::#1}{#2}}
+
+\unexpanded\def\dounitsseparator
+ {\edef\currentunitsseparator{\unitsparameter\c!separator}%
+ \csname\??un::%
+ \ifcsname\??un::\currentunitsseparator\endcsname\currentunitsseparator\else\v!normal\fi
+ \endcsname}
+
+\installunitsseparator\v!normal {\cdot}
+\installunitsseparator\v!big {\unitsbigspace}
+\installunitsseparator\v!medium {\unitsmediumspace}
+\installunitsseparator\v!small {\unitssmallspace}
+\installunitsseparator\v!none {}
\newtoks \everyunits % we keep the old \units command so we need a longer one
@@ -381,7 +399,7 @@
\let\units_direct\units_direct_nested
\to \everyunits
-\unexpanded\def\units_indeed#1{\ctxcommand{unit(\!!bs\detokenize{#1}\!!es)}}
+\unexpanded\def\units_indeed#1{\ctxcommand{unit(\!!bs\detokenize{#1}\!!es,"\unitsparameter\c!label")}}
\unexpanded\def\unitsPUS#1#2#3{\units_next#1#2\unitsraise{#3}\c_units_state\plusone} % suffix
\unexpanded\def\unitsPU #1#2{\units_next#1#2\c_units_state \plusthree} % unit
@@ -466,19 +484,19 @@
{\ifcase\c_units_state % start
\ifconditional\c_units_dospace
% \ifdim\lastskip=\zeropoint
- \unitsfullspace
+ \unitsbigspace
% \else
% % too tricky ... we could remove and add
% \fi
\fi
\or % suffix
- {\cdot}% \unitshalfspace
+ {\dounitsseparator}%
\or % operator
\or % unit
- {\cdot}% \unitshalfspace
+ {\dounitsseparator}%
\or % prefix
\or % number
- \unitsfullspace
+ \unitsbigspace
\fi
\setfalse\c_units_dospace
\units_start}
@@ -486,14 +504,14 @@
\unexpanded\def\unitsTIMES
{\ifnum\c_units_state=\plusone % suffix
\else
- \unitshalfspace
+ \unitssmallspace
\fi
\cdot} % or \times
\unexpanded\def\unitsOUTOF
{\ifnum\c_units_state=\plusone % suffix
\else
- \unitshalfspace
+ \unitssmallspace
\fi
:}
@@ -509,4 +527,45 @@
\defineunits
[unit]
+%D Example:
+%D
+%D \startbuffer[definitions]
+%D \startluacode
+%D languages.data.labels.prefixes.whatever = {
+%D Kilo = "olik"
+%D }
+%D
+%D languages.data.labels.units.whatever = {
+%D Meter = "retem",
+%D Second = "dnoces",
+%D }
+%D
+%D languages.data.labels.operators.whatever = {
+%D Solidus = " rep "
+%D }
+%D \stopluacode
+%D \stopbuffer
+%D
+%D \startbuffer[sample]
+%D \startlines
+%D \lunit{10 km/s}
+%D \lunit{10 Kilo Meter/s}
+%D \lunit{10 kilo Meter/s}
+%D \lunit{10 Kilo m/s}
+%D \lunit{10 k Meter/s}
+%D \stoplines
+%D \stopbuffer
+%D
+%D \typebuffer[definitions] \getbuffer[definitions]
+%D
+%D \startbuffer
+%D \typebuffer[sample]
+%D
+%D \defineunits[lunit] \getbuffer[sample]
+%D \defineunits[lunit][label=test] \getbuffer[sample]
+%D \defineunits[lunit][label=whatever] \getbuffer[sample]
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+
\protect \endinput
diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua
index d94c7b0dc..b5e928af0 100644
--- a/tex/context/base/regi-ini.lua
+++ b/tex/context/base/regi-ini.lua
@@ -213,6 +213,8 @@ end
-- return regime and synonyms[regime] or regime or currentregime
-- end
--
+-- commands.setregimesynonym = regimes.setsynonym
+--
-- function commands.trueregimename(regime)
-- context(regimes.truename(regime))
-- end
diff --git a/tex/context/base/regi-ini.mkiv b/tex/context/base/regi-ini.mkiv
index c7fbd3f50..651e2f13c 100644
--- a/tex/context/base/regi-ini.mkiv
+++ b/tex/context/base/regi-ini.mkiv
@@ -34,7 +34,7 @@
% {\dodoubleargument\dodefineregimesynonym}
%
% \def\dodefineregimesynonym[#1][#2]%
-% {\ctxlua{regimes.setsynonym("#1","#2")}}
+% {\ctxcommand{setregimesynonym("#1","#2")}}
%
% \def\trueregimename#1%
% {\ctxcommand{trueregimename("#1")}}
diff --git a/tex/context/base/s-fnt-23.mkiv b/tex/context/base/s-fnt-23.mkiv
index e068e97a5..5be554d21 100644
--- a/tex/context/base/s-fnt-23.mkiv
+++ b/tex/context/base/s-fnt-23.mkiv
@@ -11,18 +11,17 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% last_data was written wrong so it needs checking
-
\startluacode
- local fontdata = fonts.hashes.identifiers
- local otfhandler = fonts.handlers.otf --- will be moduledata
+ moduledata.fonts = moduledata.fonts or { }
- local last_data = nil
+ local fontdata = fonts.hashes.identifiers
- local format = string.format
+ local last_data = nil -- still relevant
+ local format = string.format
- function otfhandler.show_shape(n)
+ function moduledata.fonts.show_shape(n)
local tfmdata = fontdata[font.current()]
+ -- local _, tfmdata = fonts.definers.define { name = fontname, size = fontsize }
last_data = tfmdata
local charnum = tonumber(n)
if not charnum then
@@ -200,21 +199,23 @@
context("no such shape: %s",n)
end
end
- function otfhandler.show_all_shapes(start,stop)
+
+ function moduledata.fonts.show_all_shapes()
local tfmdata = fontdata[font.current()]
+ -- local _, tfmdata = fonts.definers.define { name = fontname, size = fontsize }
last_data = tfmdata
- start, stop = start or "\\startTEXpage\\gobbleoneargument", stop or "\\stopTEXpage"
local unicodes, descriptions = tfmdata.unicodes, tfmdata.descriptions
for unicode, description in fonts.iterators.descriptions(tfmdata) do
local name = description.name
- context("%s{%s}%%",start,unicode)
- context("\\writestatus{glyph}{U+%04X -> %s}%%",unicode,name)
- otfhandler.show_shape(unicode)
- context(stop)
+ context.StartShowGlyphShape(unicode)
+ moduledata.fonts.show_shape(unicode)
+ context.StopShowGlyphShape()
end
end
- function otfhandler.show_shape_field(unicode,name)
+
+ function moduledata.fonts.show_shape_field(unicode,name)
local tfmdata = last_data or fontdata[font.current()]
+ -- local _, tfmdata = fonts.definers.define { name = fontname, size = fontsize }
local d = tfmdata.descriptions[unicode]
if d then
if name == "unicode" then
@@ -229,15 +230,14 @@
end
\stopluacode
-\setupcolors
- [state=start]
+% we can move all to lua (cld)
\def\GetGlyphField#1#2%
- {\ctxlua{fonts.handlers.otf.show_shape_field(#1,"#2")}}
+ {\ctxlua{moduledata.fonts.show_shape_field(#1,"#2")}}
\def\StartShowGlyphShape#1%
{\startTEXpage
- \nonknuthmode
+ \nonknuthmode % default anyway
\def\GlyphUnicode{#1}}
\def\StopShowGlyphShape
@@ -249,19 +249,20 @@
{\begingroup
\definedfont[#1 at #2]%
\obeyMPboxdepth
- \ctxlua{fonts.handlers.otf.show_shape("#3")}%
+ \ctxlua{moduledata.fonts.show_shape("#3")}%
\endgroup}
\def\ShowAllGlyphShapes#1#2% name size
{\begingroup
- \nonknuthmode
+ \nonknuthmode % default anyway
\definedfont[#1 at #2]%
- \ctxlua{fonts.handlers.otf.show_all_shapes("\\StartShowGlyphShape","\\StopShowGlyphShape")}%
+ \ctxlua{moduledata.fonts.show_all_shapes()}%
\endgroup}
\setupcolors
[state=start]
+% \continueifinputfile{s-fnt-23.mkiv}
\doifnotmode{demo}{\endinput}
\starttext
@@ -276,15 +277,15 @@
\switchtobodyfont[cambria,10pt]
-\startTEXpage[offset=10pt] \ShowGlyphShape{name:cambria-math} {40bp}{0x00066} \stopTEXpage
-\startTEXpage[offset=10pt] \ShowGlyphShape{name:cambria-math} {40bp}{0x1D453} \stopTEXpage
-\startTEXpage[offset=10pt] \ShowGlyphShape{name:cambria-math} {40bp}{0x1D43F} \stopTEXpage
-\startTEXpage[offset=10pt] \ShowGlyphShape{name:cambria-math}{100bp}{0x1D444} \stopTEXpage
-\startTEXpage[offset=10pt] \ShowGlyphShape{name:cambria-math}{100bp}{0x1D447} \stopTEXpage
-\startTEXpage[offset=10pt] \ShowGlyphShape{name:cambria-math}{100bp}{0x02112} \stopTEXpage
-\startTEXpage[offset=10pt] \ShowGlyphShape{name:cambria-math}{100bp}{0x1D432} \stopTEXpage
-\startTEXpage[offset=10pt] \ShowGlyphShape{name:cambria-math}{100bp}{0x1D43D} \stopTEXpage
-\startTEXpage[offset=10pt] \ShowGlyphShape{name:cambria-math}{100bp}{0x1D44A} \stopTEXpage
-\startTEXpage[offset=10pt] \ShowGlyphShape{name:cambria-math}{100bp}{0x1D45D} \stopTEXpage
+\startTEXpage[offset=0pt]\ShowGlyphShape{name:cambria-math} {40bp}{0x00066}\stopTEXpage
+\startTEXpage[offset=0pt]\ShowGlyphShape{name:cambria-math} {40bp}{0x1D453}\stopTEXpage
+\startTEXpage[offset=0pt]\ShowGlyphShape{name:cambria-math} {40bp}{0x1D43F}\stopTEXpage
+\startTEXpage[offset=0pt]\ShowGlyphShape{name:cambria-math}{100bp}{0x1D444}\stopTEXpage
+\startTEXpage[offset=0pt]\ShowGlyphShape{name:cambria-math}{100bp}{0x1D447}\stopTEXpage
+\startTEXpage[offset=0pt]\ShowGlyphShape{name:cambria-math}{100bp}{0x02112}\stopTEXpage
+\startTEXpage[offset=0pt]\ShowGlyphShape{name:cambria-math}{100bp}{0x1D432}\stopTEXpage
+\startTEXpage[offset=0pt]\ShowGlyphShape{name:cambria-math}{100bp}{0x1D43D}\stopTEXpage
+\startTEXpage[offset=0pt]\ShowGlyphShape{name:cambria-math}{100bp}{0x1D44A}\stopTEXpage
+\startTEXpage[offset=0pt]\ShowGlyphShape{name:cambria-math}{100bp}{0x1D45D}\stopTEXpage
\stoptext
diff --git a/tex/context/base/s-fnt-26.mkiv b/tex/context/base/s-fnt-26.mkiv
index 5edf80a57..b8da09533 100644
--- a/tex/context/base/s-fnt-26.mkiv
+++ b/tex/context/base/s-fnt-26.mkiv
@@ -14,7 +14,7 @@
\startluacode
function document.show_goodies_stylistics(name)
- local goodies = fonts.goodies.get(name)
+ local goodies = fonts.goodies.load(name)
local stylistics = goodies and goodies.stylistics
if stylistics then
local col, row, type = context.NC, context.NR, context.type
@@ -28,7 +28,7 @@
end
function document.show_goodies_featuresets(name)
- local goodies = fonts.goodies.get(name)
+ local goodies = fonts.goodies.load(name)
local featuresets = goodies and goodies.featuresets
if featuresets then
local col, row, type = context.NC, context.NR, context.type
@@ -47,7 +47,7 @@
end
function document.show_goodies_colorschemes(name)
- local goodies = fonts.goodies.get(name)
+ local goodies = fonts.goodies.load(name)
local colorschemes = goodies and goodies.colorschemes
if colorschemes then
local col, row, type = context.NC, context.NR, context.type
diff --git a/tex/context/base/s-fnt-28.mkiv b/tex/context/base/s-fnt-28.mkiv
index 00a7b1437..039cc6ca8 100644
--- a/tex/context/base/s-fnt-28.mkiv
+++ b/tex/context/base/s-fnt-28.mkiv
@@ -13,7 +13,7 @@
\startluacode
function fonts.tracers.files(goodyfile)
- local goodies = fonts.goodies.get(goodyfile)
+ local goodies = fonts.goodies.load(goodyfile)
if goodies then
local files = goodies.files
if files and files.list then
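
In the s-fnt-2x modules the goodie tables are now obtained with fonts.goodies.load (get was the old name); the returned table carries optional subtables such as featuresets, stylistics and colorschemes. A hedged usage sketch for a \startluacode block; the goodie file name "dejavu" is only an example:

    local goodies     = fonts.goodies.load("dejavu")    -- loads dejavu.lfg when present
    local featuresets = goodies and goodies.featuresets
    if featuresets then
        for name, specification in table.sortedhash(featuresets) do
            context.type(name) context.par()
        end
    end
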
diff --git a/tex/context/base/s-mag-01.tex b/tex/context/base/s-mag-01.tex
index 8da6d8c02..6591fc23a 100644
--- a/tex/context/base/s-mag-01.tex
+++ b/tex/context/base/s-mag-01.tex
@@ -178,7 +178,7 @@
[palatino,10pt]
\setuptolerance
- [verytolerant]
+ [verytolerant,stretch]
\appendtoks\setups[papershift]\to\beforeeverypage
@@ -248,6 +248,8 @@
\startstandardmakeup[doublesided=no]
+ \dontcomplain
+
\definelayer
[makeup]
[width=\textwidth,
@@ -307,7 +309,11 @@
\setupheadertexts[][] \setupheadertexts[source code of this document]
\setupfootertexts[][] \setupfootertexts[]
- \typefile[TEX]{\inputfilename} % \jobname
+ \start \dontcomplain
+
+ \typefile[TEX]{\inputfilename}
+
+ \stop
\stopsetups
diff --git a/tex/context/base/s-mod-02.mkiv b/tex/context/base/s-mod-02.mkiv
index fa43c302e..067599668 100644
--- a/tex/context/base/s-mod-02.mkiv
+++ b/tex/context/base/s-mod-02.mkiv
@@ -11,10 +11,12 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+\endinput
+
%D This module looks like crap, is not documented, will
%D change, and used to be called modu-*.tex.
-% now split in mkii/mkiv so we will cleanup
+% now split in mkii/mkiv so we will cleanup (needed)
% Macro's
@@ -24,16 +26,13 @@
% todo: internationalize + setups
-\setuphead[paragraaf][expansion=command]
-\setuphead[section][expansion=command]
+\setuphead
+ [section]
+ [expansion=command]
\def\complexmodule[#1]% redefined
- {\startglobal % i.v.m. \bgroup in \startdocumentation
- \getparameters[Module][#1]
- \stopglobal % i.v.m. \bgroup in \startdocumentation
- %%\section{\Modulesubtitle}
- \xdef\Temp{\Modulesubtitle}%%Modulesubtitle:\framed{BEGIN \Modulesubtitle END} :#1 !}
- \@EA\section\@EA{\Temp}
+ {\getgparameters[Module][#1]
+ \normalexpanded{\section{\Modulesubtitle}}
\WriteLists}
\def\stopmodule % redefined
@@ -58,8 +57,6 @@
\def\WriteBatchFile
{\doglobal\increment\ModuleNumber
-% \immediate\write\BatchFile{call modu-run \FileName\space \ModuleNumber}}
-% \immediate\write\BatchFile{texmfstart texutil --modu \FileName}}
\immediate\write\BatchFile{texmfstart texexec --pdf --modu --batch \FileName }}
\newif\ifProcessingPublic
diff --git a/tex/context/base/s-pre-30.mkiv b/tex/context/base/s-pre-30.mkiv
index c1dbd9b93..1be85d02b 100644
--- a/tex/context/base/s-pre-30.mkiv
+++ b/tex/context/base/s-pre-30.mkiv
@@ -242,13 +242,13 @@
\StartSample{Basics}
\startbuffer
-\lua{a = 1.5 ; b = 1.8 ; c = a*b ; tex.print(c) ;}
+\lua{a = 1.5 ; b = 1.8 ; c = a*b ; context(c) ;}
\startlua
a = 1
b = 2
c = a*b
- tex.print(c)
+ context(c)
\stoplua
\stopbuffer
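
The sample now calls context() instead of tex.print. Inside a \startluacode or \startlua block the difference is that context() goes through the cld interface, which takes care of catcodes and formatting, instead of pushing raw token input. A minimal sketch:

    a, b = 1.5, 1.8
    context(a * b)                   -- typesets 2.7 via the cld interface
    -- tex.print(tostring(a * b))    -- old style: raw input under the current catcodes
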
diff --git a/tex/context/base/s-pre-69.mkiv b/tex/context/base/s-pre-69.mkiv
index 0df68c9e7..c87bcd537 100644
--- a/tex/context/base/s-pre-69.mkiv
+++ b/tex/context/base/s-pre-69.mkiv
@@ -231,7 +231,8 @@
shape[#shape+1] = string.format("%sbp %sbp",left,hsize)
end
-- print(table.serialize(shape))
- tex.sprint(tex.ctxcatcodes,string.format("\\parshape %s %s",#shape,table.concat(shape," ")))
+ -- context.parshape(string.format("%s %s ",#shape,table.concat(shape," ")))
+ context("\\parshape %s %s ",#shape,table.concat(shape," "))
end
\stopluacode
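
The rewritten line relies on context() accepting a format string plus arguments, which replaces the explicit tex.sprint(tex.ctxcatcodes, string.format(...)) call. A small sketch, assuming it runs inside \startluacode:

    local shape = { "0bp 300bp", "10bp 290bp", "20bp 280bp" }
    -- same output as the old string.format/tex.sprint pair
    context("\\parshape %s %s ", #shape, table.concat(shape, " "))
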
diff --git a/tex/context/base/scrn-fld.mkvi b/tex/context/base/scrn-fld.mkvi
index 9c51d22b1..506bb4a07 100644
--- a/tex/context/base/scrn-fld.mkvi
+++ b/tex/context/base/scrn-fld.mkvi
@@ -806,18 +806,30 @@
\egroup
\fi}
-\protect \endinput
+% \protect \endinput % THE FOLLOWING CODE IS NOT CHECKED
%D I will redo these when I need them.
+% \setupinteraction[state=start]
+%
% \definepushbutton [reset]
%
-% \definepushsymbol [reset] [n] [\uniqueMPgraphic{whatever}{color=green}]
-% \definepushsymbol [reset] [r] [\uniqueMPgraphic{whatever}{color=white}]
+% \startuniqueMPgraphic{whatever}{color}
+% fill fullcircle xysized (OverlayWidth,OverlayHeight) withcolor \MPvar{color} ;
+% \stopuniqueMPgraphic
%
-% \startinteractionmenu[bottom]
-% \psh [reset] [JS(reset_something)] \\
-% \stopinteractionmenu
+% \definepushsymbol [reset] [n] [\uniqueMPgraphic{whatever}{color=red}]
+% \definepushsymbol [reset] [r] [\uniqueMPgraphic{whatever}{color=green}]
+% \definepushsymbol [reset] [d] [\uniqueMPgraphic{whatever}{color=blue}]
+%
+% \starttext
+% \startTEXpage
+% \pushbutton [reset] [page(2)]
+% \stopTEXpage
+% \startTEXpage
+% \pushbutton [reset] [page(1)]
+% \stopTEXpage
+% \stoptext
\newcount\scrn_pushbutton_n
@@ -967,6 +979,6 @@
\let\startrob\scrn_menu_rob_start
\let\stoprob \relax
\let\rob \scrn_menu_rob_direct
-\everysetmenucommands
+\to \everysetmenucommands
\protect \endinput
diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua
index 6e33bac11..6e85a2c5e 100644
--- a/tex/context/base/scrp-ini.lua
+++ b/tex/context/base/scrp-ini.lua
@@ -10,8 +10,6 @@ if not modules then modules = { } end modules ['scrp-ini'] = {
local attributes, nodes, node = attributes, nodes, node
-local texwrite = tex.write
-
local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
diff --git a/tex/context/base/sort-ini.lua b/tex/context/base/sort-ini.lua
index 7a91910ca..d9a6bb64d 100644
--- a/tex/context/base/sort-ini.lua
+++ b/tex/context/base/sort-ini.lua
@@ -317,7 +317,7 @@ local function setlanguage(l,m,d,u)
method = predefinedmethods[variables[method]] or method
data.method = method
--
- data.digits = digite
+ data.digits = digits
--
local seq = utilities.parsers.settings_to_array(method or "") -- check the list
sequence = { }
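
The sort-ini change is a one-character typo fix, but it matters because Lua reads an undeclared global as nil instead of raising an error, so data.digits was silently lost. A minimal illustration:

    local data   = { }
    local digits = true
    data.digits = digite      -- typo: unset global, evaluates to nil
    print(data.digits)        -- nil
    data.digits = digits      -- corrected lookup
    print(data.digits)        -- true
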
diff --git a/tex/context/base/spac-hor.mkiv b/tex/context/base/spac-hor.mkiv
index e121a1d52..4fd50b398 100644
--- a/tex/context/base/spac-hor.mkiv
+++ b/tex/context/base/spac-hor.mkiv
@@ -347,8 +347,8 @@
\ifdefined\softhyphen \else \let\softhyphen\- \fi
-\ctxsprint{"\string\\unexpanded\string\\def\string\\\string\n{\string\\space}"}
-%ctxsprint{"\string\\let\string\\\string\n=\string\\space"}
+\cldcontext{"\string\\unexpanded\string\\def\string\\\string\n{\string\\space}"}
+%cldcontext{"\string\\let\string\\\string\n=\string\\space"}
% in tables we need:
%
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index 6796c8206..bb5a3f7f8 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -170,7 +170,7 @@ function vspacing.definesnapmethod(name,method)
local t = listtohash(method)
snapmethods[n] = t
t.name, t.specification = name, method
- tex.write(n)
+ context(n)
end
--~ local rule_id = nodecodes.rule
@@ -488,7 +488,8 @@ h, d = ch, cd
local lines = (ch+cd)/snaphtdp
if t then
local original = (h+d)/snaphtdp
- t[#t+1] = format("final lines: %s -> %s",original,lines)
+ local whatever = (ch+cd)/(texdimen.globalbodyfontstrutheight + texdimen.globalbodyfontstrutdepth)
+ t[#t+1] = format("final lines: %s -> %s (%s)",original,lines,whatever)
t[#t+1] = format("final height: %s -> %s",points(h),points(ch))
t[#t+1] = format("final depth: %s -> %s",points(d),points(cd))
end
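
The extra trace value divides the snapped height plus depth by the global bodyfont strut height and depth, that is, it reports the box size expressed in grid lines. A hedged sketch of that computation, only meaningful inside a ConTeXt run where these dimen registers exist:

    local texdimen = tex.dimen

    local function gridlines(ht, dp)   -- ht, dp in scaled points
        local strut = texdimen.globalbodyfontstrutheight
                    + texdimen.globalbodyfontstrutdepth
        return (ht + dp) / strut
    end
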
diff --git a/tex/context/base/spac-ver.mkiv b/tex/context/base/spac-ver.mkiv
index 77c014228..da82ebbb0 100644
--- a/tex/context/base/spac-ver.mkiv
+++ b/tex/context/base/spac-ver.mkiv
@@ -1279,7 +1279,9 @@
% minheight round height down
% maxheight round height up
% local use local interline space
-% shift:-3tp vertical shift within box
+% offset:-3tp vertical shift within box
+% bottom:lines
+% top:lines
%D We're not downward compatible with \MKII !
@@ -1300,6 +1302,7 @@
\definegridsnapping[\v!first] [\v!first]
\definegridsnapping[\v!last] [\v!last]
\definegridsnapping[\v!high] [\v!minheight,\v!maxdepth,\v!none]
+\definegridsnapping[\v!one] [\v!minheight,\v!mindepth]
\definegridsnapping[\v!low] [\v!maxheight,\v!mindepth,\v!none]
\definegridsnapping[\v!line] [\v!line]
\definegridsnapping[\v!strut] [\v!strut]
@@ -1309,6 +1312,7 @@
\definegridsnapping[\v!middle] [\v!maxheight,\v!maxdepth] % used in placement
+
\newtoks\everysetupgridsnapping % this only happens at the setuplayout level
\def\dosetupgridsnapping{\the\everysetupgridsnapping} % not used !
@@ -1343,7 +1347,7 @@
\unexpanded\def\placeongrid{\dosingleempty\doplaceongrid}
\def\domoveongrid[#1]%
- {} % gone, unless we set an attribute
+ {[obsolete]} % gone, unless we set an attribute
\def\doplaceongrid[#1]%
{\snaptogrid[#1]\vbox} % mark as done
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 07c9912c0..656810cc2 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 192b0aed8..552e2405f 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/strc-bkm.lua b/tex/context/base/strc-bkm.lua
index 6a2cc3827..4eb9551a9 100644
--- a/tex/context/base/strc-bkm.lua
+++ b/tex/context/base/strc-bkm.lua
@@ -188,3 +188,9 @@ function bookmarks.finalize(levels)
-- that we will support when the main loop has become a coroutine.
codeinjections.addbookmarks(levels,bookmarks.method)
end
+
+-- interface
+
+commands.overloadbookmark = bookmarks.overload
+commands.registerbookmark = bookmarks.register
+commands.setupbookmarks = bookmarks.setup
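
A recurring pattern in this commit: Lua functions are exposed through the commands namespace so the TeX end can call them with \ctxcommand{...} instead of spelling out the full \ctxlua{structures....} path. Roughly speaking, \ctxcommand{foo(...)} ends up as \directlua{commands.foo(...)}, so a plain alias is all the Lua side needs (a sketch, with the TeX call shown as a comment):

    -- Lua side: a plain alias, no wrapper function required
    commands.setupbookmarks = structures.bookmarks.setup
    -- TeX side: \ctxcommand{setupbookmarks{ separatorset = "...", ... }}
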
diff --git a/tex/context/base/strc-bkm.mkiv b/tex/context/base/strc-bkm.mkiv
index f5f0bcf59..8ddf0a6df 100644
--- a/tex/context/base/strc-bkm.mkiv
+++ b/tex/context/base/strc-bkm.mkiv
@@ -50,7 +50,7 @@
\def\dobookmark[#1]#2%
{\begingroup
\simplifycommands
- \ctxlua{structures.bookmarks.overload("#1",\!!bs\detokenize\expandafter{\normalexpanded{#2}}\!!es)}%
+ \ctxcommand{overloadbookmark("#1",\!!bs\detokenize\expandafter{\normalexpanded{#2}}\!!es)}%
\endgroup}
%D Placement \unknown\ look how simple compared to \MKII:
@@ -86,7 +86,7 @@
\else\ifsecondargument
\doifassignmentelse{#2}{\let\askedopened\empty\getparameters[\??bm][#2]}\donothing
\fi\fi
- \ctxlua{structures.bookmarks.register {
+ \ctxcommand{registerbookmark {
names = "\askednames",
opened = "\askedopened",
force = "\bookmarkparameter\c!force",
@@ -100,7 +100,7 @@
\c!number=\v!yes] % might become v!no
\appendtoks
- \ctxlua{structures.bookmarks.setup {
+ \ctxcommand{setupbookmarks {
separatorset = "\bookmarkparameter\c!numberseparatorset",
conversionset = "\bookmarkparameter\c!numberconversionset",
starter = \!!bs\bookmarkparameter\c!numberstarter\!!es,
@@ -109,9 +109,6 @@
}}%
\to \everysetupbookmarks
-% \prependtoks\ctxlua{structures.bookmarks.place()}\to\everystoptext % too late
-% \prependtoks\ctxlua{structures.bookmarks.place()}\to\everylastbackendshipout % okay but not nice
-
\protect \endinput
% \starttext
diff --git a/tex/context/base/strc-blk.lua b/tex/context/base/strc-blk.lua
index cbdc8c6ea..d2f25dfb3 100644
--- a/tex/context/base/strc-blk.lua
+++ b/tex/context/base/strc-blk.lua
@@ -37,7 +37,6 @@ end
job.register('structures.blocks.collected', tobesaved, initializer)
-local printer = (lpeg.patterns.textline/tex.print)^0 -- can be shared
local listitem = utilities.parsers.listitem
function blocks.print(name,data,hide)
@@ -144,3 +143,11 @@ function blocks.save(name,tag,buffer) -- wrong, not yet adapted
end
buffers.erase(buffer)
end
+
+-- interface
+
+
+commands.definestructureblock = blocks.define
+commands.savestructureblock = blocks.save
+commands.selectstructureblock = blocks.select
+commands.setstructureblockstate = blocks.setstate
diff --git a/tex/context/base/strc-blk.mkiv b/tex/context/base/strc-blk.mkiv
index c3fe0eb0b..b8f8c6b3a 100644
--- a/tex/context/base/strc-blk.mkiv
+++ b/tex/context/base/strc-blk.mkiv
@@ -41,14 +41,14 @@
\c!inner=,
\c!style=,
\c!file=]% todo
- \ctxlua{structures.blocks.define("#1")}%
+ \ctxcommand{definestructureblock("#1")}%
\setuvalue{\e!begin#1}{\dodoubleempty\dobeginofblock[#1]}%
\letvalue{\e!end#1}\relax}
\long\def\dobeginofblock[#1][#2]%
{\normalexpanded{\noexpand\dodowithbuffer{@block@}{\e!begin#1}{\e!end#1}}
{}% before
- {\ctxlua{structures.blocks.save("#1","#2","@block@")}}}% after
+ {\ctxcommand{savestructureblock("#1","#2","@block@")}}}% after
\def\dostarthiddenblock
{\startnointerference
@@ -83,17 +83,17 @@
\egroup}
\def\dosetblockstate[#1][#2][#3]% state name tag
- {\ctxlua{structures.blocks.setstate("#1","#2","#3")}}
+ {\ctxcommand{setstructureblockstate("#1","#2","#3")}}
\def\doselectblocks[#1][#2][#3][#4]% state name tag setups
{\bgroup
\doifassignmentelse{#3}
{\getparameters[\??tb\??tb][\c!criterium=\v!text,#3]%
\def\doblocksetups##1{\getparameters[\??tb##1][#3]}%
- \ctxlua{structures.blocks.select("#1","#2","","\@@tb@@tbcriterium")}}
+ \ctxcommand{selectstructureblock("#1","#2","","\@@tb@@tbcriterium")}}
{\getparameters[\??tb\??tb][\c!criterium=\v!text,#4]%
\def\doblocksetups##1{\getparameters[\??tb##1][#4]}%
- \ctxlua{structures.blocks.select("#1","#2","#3","\@@tb@@tbcriterium")}}%
+ \ctxcommand{selectstructureblock("#1","#2","#3","\@@tb@@tbcriterium")}}%
\egroup}
% hide: save, if [+] also hidden execute
diff --git a/tex/context/base/strc-doc.lua b/tex/context/base/strc-doc.lua
index 36c2578c8..024baa1c5 100644
--- a/tex/context/base/strc-doc.lua
+++ b/tex/context/base/strc-doc.lua
@@ -7,18 +7,18 @@ if not modules then modules = { } end modules ['strc-doc'] = {
}
-- todo: associate counter with head
-
+-- we need to better split the lua/tex end
-- we need to freeze and document this module
local next, type = next, type
local format, gsub, find, concat, gmatch, match = string.format, string.gsub, string.find, table.concat, string.gmatch, string.match
-local texsprint, texwrite = tex.sprint, tex.write
local concat = table.concat
local max, min = math.max, math.min
local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-local ctxcatcodes = tex.ctxcatcodes
-local variables = interfaces.variables
+local catcodenumbers = catcodes.numbers
+local ctxcatcodes = tex.ctxcatcodes
+local variables = interfaces.variables
--~ if not trackers then trackers = { register = function() end } end
@@ -29,19 +29,21 @@ local report_structure = logs.reporter("structure","sectioning")
local structures, context = structures, context
-local helpers = structures.helpers
-local documents = structures.documents
-local sections = structures.sections
-local lists = structures.lists
-local counters = structures.counters
-local sets = structures.sets
-local tags = structures.tags
-local processors = structures.processors
+local helpers = structures.helpers
+local documents = structures.documents
+local sections = structures.sections
+local lists = structures.lists
+local counters = structures.counters
+local sets = structures.sets
+local tags = structures.tags
+local processors = structures.processors
-local sprintprocessor = processors.sprint
-local ignoreprocessor = processors.ignore
+local applyprocessor = processors.apply
+local startapplyprocessor = processors.startapply
+local stopapplyprocessor = processors.stopapply
+local strippedprocessor = processors.stripped
-local a_internal = attributes.private('internal')
+local a_internal = attributes.private('internal')
-- -- -- document -- -- --
@@ -173,7 +175,7 @@ end
function sections.setblock(name)
local block = name or data.block or "unknown" -- can be used to set the default
data.block = block
- texwrite(block)
+ context(block)
end
function sections.pushblock(name)
@@ -182,7 +184,7 @@ function sections.pushblock(name)
data.blocks[#data.blocks+1] = block
data.block = block
documents.reset()
- texwrite(block)
+ context(block)
end
function sections.popblock()
@@ -190,7 +192,7 @@ function sections.popblock()
local block = data.blocks[#data.blocks] or data.block
data.block = block
documents.reset()
- texwrite(block)
+ context(block)
end
function sections.currentblock()
@@ -202,7 +204,7 @@ function sections.currentlevel()
end
function sections.getcurrentlevel()
- texwrite(data.depth)
+ context(data.depth)
end
function sections.somelevel(given)
@@ -390,7 +392,7 @@ function sections.matchingtilldepth(depth,numbers,parentnumbers)
end
function sections.getnumber(depth) -- redefined later ...
- texwrite(data.numbers[depth] or 0)
+ context(data.numbers[depth] or 0)
end
function sections.set(key,value)
@@ -402,6 +404,8 @@ function sections.cct()
context(metadata and metadata.catcodes or ctxcatcodes)
end
+-- this one will become: return catcode, d (etc)
+
function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: spec table and then also depth
if not depth or depth == 0 then depth = data.depth end
local data = data.status[depth]
@@ -417,16 +421,21 @@ function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: sp
if type(d) == "string" then
if honorcatcodetable == true or honorcatcodetable == variables.auto then
local metadata = data.metadata
- texsprint((metadata and metadata.catcodes) or ctxcatcodes,d)
- elseif not honorcatcodetable then
+ local catcodes = metadata and metadata.catcodes
+ if catcodes then
+ context.sprint(catcodes,d)
+ else
+ context(d)
+ end
+ elseif not honorcatcodetable or honorcatcodetable == "" then
context(d)
- elseif type(honorcatcodetable) == "number" then
- texsprint(honorcatcodetable,d)
- elseif type(honorcatcodetable) == "string" and honorcatcodetable ~= "" then
- honorcatcodetable = tex[honorcatcodetable] or ctxcatcodes-- we should move ctxcatcodes to another table, ctx or so
- texsprint(honorcatcodetable,d)
else
- context(d)
+ local catcodes = catcodenumbers[honorcatcodetable]
+ if catcodes then
+ context.sprint(catcodes,d)
+ else
+ context(d)
+ end
end
return
end
@@ -463,7 +472,7 @@ function sections.depthnumber(n)
elseif n < 0 then
n = depth + n
end
- return texwrite(data.numbers[n] or 0)
+ return context(data.numbers[n] or 0)
end
function sections.autodepth(numbers)
@@ -504,9 +513,9 @@ local function process(index,numbers,ownnumbers,criterium,separatorset,conversio
local separator = sets.get("structure:separators",block,separatorset,preceding,".")
if separator then
if result then
- result[#result+1] = ignoreprocessor(separator)
+ result[#result+1] = strippedprocessor(separator)
else
- sprintprocessor(ctxcatcodes,separator)
+ applyprocessor(separator)
end
end
preceding = false
@@ -522,14 +531,14 @@ local function process(index,numbers,ownnumbers,criterium,separatorset,conversio
end
else
if ownnumber ~= "" then
- sprintprocessor(ctxcatcodes,ownnumber)
+ applyprocessor(ownnumber)
elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
context.convertnumber(conversion,number)
else
local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
- sprintprocessor(ctxcatcodes,theconversion,function(str)
- return format("\\convertnumber{%s}{%s}",str or "numbers",number)
- end)
+ local data = startapplyprocessor(theconversion)
+ context.convertnumber(data or "numbers",number)
+ stopapplyprocessor()
end
end
return index, true
@@ -611,9 +620,9 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
local prefixlist = set and sets.getall("structure:prefixes","",set) -- "" == block
if starter then
if result then
- result[#result+1] = ignoreprocessor(starter)
+ result[#result+1] = strippedprocessor(starter)
else
- sprintprocessor(ctxcatcodes,starter)
+ applyprocessor(starter)
end
end
if prefixlist and (kind == 'section' or kind == 'prefix' or kind == 'direct') then
@@ -676,13 +685,13 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
if result then
-- can't happen as we're in 'direct'
else
- sprintprocessor(ctxcatcodes,connector)
+ applyprocessor(connector)
end
elseif done and stopper then
if result then
- result[#result+1] = ignoreprocessor(stopper)
+ result[#result+1] = strippedprocessor(stopper)
else
- sprintprocessor(ctxcatcodes,stopper)
+ applyprocessor(stopper)
end
end
return result -- a table !
@@ -804,5 +813,16 @@ end
function sections.getnumber(depth,what) -- redefined here
local sectiondata = sections.findnumber(depth,what)
- texwrite((sectiondata and sectiondata.numbers[depth]) or 0)
+ context((sectiondata and sectiondata.numbers[depth]) or 0)
end
+
+-- interface (some are actually already commands, like sections.fullnumber)
+
+commands.structurenumber = function() sections.fullnumber() end
+commands.structuretitle = function() sections.title () end
+
+commands.structurevariable = function(name) sections.structuredata(nil,name) end
+commands.structureuservariable = function(name) sections.userdata (nil,name) end
+commands.structurecatcodedget = function(name) sections.structuredata(nil,name,nil,true) end
+commands.structuregivencatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end
+commands.structureautocatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end
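
sections.structuredata now distinguishes three cases when flushing a stored string: honor the catcodes recorded with the data, look a named regime up in catcodes.numbers, or fall back to plain context(). The same dispatch, reduced to a helper (a sketch, not the module code):

    local function flush(str, catcodename)
        local c = catcodename and catcodes.numbers[catcodename]
        if c then
            context.sprint(c, str)    -- typeset under the requested regime
        else
            context(str)              -- default ctx catcodes
        end
    end
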
diff --git a/tex/context/base/strc-doc.mkiv b/tex/context/base/strc-doc.mkiv
index eaba066e2..a677c4d2d 100644
--- a/tex/context/base/strc-doc.mkiv
+++ b/tex/context/base/strc-doc.mkiv
@@ -221,12 +221,12 @@
% \stopchapter
% \stoptext
-\def\structurevariable #1{\ctxlua{structures.sections.structuredata(nil,"#1")}}
-\def\structureuservariable #1{\ctxlua{structures.sections.userdata(nil,"#1")}}
-\def\structurenumber {\ctxlua{structures.sections.fullnumber()}}
-\def\structuretitle {\ctxlua{structures.sections.title()}}
-\def\structurecatcodedget #1{\ctxlua{structures.sections.structuredata(nil,"#1",nil,true)}} % bad name
-\def\structuregivencatcodedget#1#2{\ctxlua{structures.sections.structuredata(nil,"#1",nil,\number#2)}} % bad name
-\def\structureautocatcodedget #1#2{\ctxlua{structures.sections.structuredata(nil,"#1",nil,"#2")}}
+\def\structurenumber {\ctxcommand{structurenumber()}}
+\def\structuretitle {\ctxcommand{structuretitle()}}
+\def\structurevariable #1{\ctxcommand{structurevariable("#1")}}
+\def\structureuservariable #1{\ctxcommand{structureuservariable("#1")}}
+\def\structurecatcodedget #1{\ctxcommand{structurecatcodedget("#1")}} % bad name
+\def\structuregivencatcodedget#1#2{\ctxcommand{structuregivencatcodedget("#1",\number#2)}} % bad name
+\def\structureautocatcodedget #1#2{\ctxcommand{structureautocatcodedget ("#1","#2")}}
\protect \endinput
diff --git a/tex/context/base/strc-flt.mkiv b/tex/context/base/strc-flt.mkiv
index d9486c569..c3366b855 100644
--- a/tex/context/base/strc-flt.mkiv
+++ b/tex/context/base/strc-flt.mkiv
@@ -272,6 +272,8 @@
{\setuvalue {\e!place\e!listof#2}{\dodoubleempty\doplacelist[#1]}%
\setuvalue {\e!complete\e!listof#2}{\dotripleempty\dodocompletelist[#1][#2]}%
\setuvalue {\e!place#1}{\dotripleempty\docomplexplacefloat[#1]}%
+ \setuvalue {\e!start\e!place#1}{\dodoubleempty\dostartplacefloat[#1]}%
+ \setuvalue {\e!stop\e!place#1}{\dostopplacefloat}%
\setuvalue {\e!reserve#1}{\doquadrupleempty\docomplexreserveblock[#1]}%
\setuvalue {\e!start#1\e!text}{\dotripleempty\docomplexstarttextblock[#1]}%
\setuvalue {\e!stop#1\e!text}{\dostoptextfloat}%
@@ -588,10 +590,10 @@
% A complication is that we may have to handle a pagebreak
% first, which in turn may issue a (postponed) float.
% Therefore we may not trust on variable assignments before
-% we're realy dealing with the float. Some day I'll root out
+% we're really dealing with the float. Some day I'll root out
% the global settings.
-\def\docomplexplacefloat[#1][#2]% [#3]#4%
+\unexpanded\def\docomplexplacefloat[#1][#2]% [#3]#4%
{\edef\currentfloat{#1}%
\doifnothing\currentfloat{\let\currentfloat\v!figure}%
\doifelsenothing{#2}
@@ -601,9 +603,21 @@
{\normalexpanded{\noexpand\dodocomplexsplitfloat[\currentfloat][\floatlocation]}}
{\normalexpanded{\noexpand\dodocomplexplacefloat[\currentfloat][\floatlocation]}}}
-\long\def\dodocomplexsplitfloat[#1][#2][#3]#4%
+\unexpanded\def\dodocomplexsplitfloat[#1][#2][#3]#4%
{\splitfloat{\dodocomplexplacefloat[#1][#2][#3]{#4}}}
+\unexpanded\def\dostartplacefloat[#1][#2]% will be done the other way around but okay for now
+ {\begingroup
+ \getparameters[\??fl\??fl][\c!location=,\c!reference=,\c!title=,#2]%
+ \normalexpanded{\docomplexplacefloat[#1][\@@fl@@fllocation][\@@fl@@flreference]{\@@fl@@fltitle}}%
+ \bgroup
+ \ignorespaces}
+
+\unexpanded\def\dostopplacefloat
+ {\removeunwantedspaces
+ \egroup
+ \endgroup}
+
\def\flushfloatslist
{\v!left,\v!right,\v!inner,\v!outer,%
\v!backspace,\v!cutspace,%
@@ -840,7 +854,7 @@
\hsize\localhsize
\fi}
-\newevery \everyinsidefloat \relax
+\ifdefined\everyinsidefloat \else \newevery \everyinsidefloat \relax \fi
\appendtoks
\everyinsidefloat\emptytoks % in case it's called earlier
@@ -910,12 +924,12 @@
\def\docommand##1%
{\processaction
[##1]%
- [ \v!hang=>\dodocommand+,%
- +\v!hang=>\dodocommand+,%
- -\v!hang=>\dodocommand-]}%
+ [ \v!hang=>\dodocommand\plusone,%
+ +\v!hang=>\dodocommand\plusone,%
+ -\v!hang=>\dodocommand\minusone]}%
\def\dodocommand##1% inefficient but who cares
- {\ifdone\else\global\sidefloatsidelines\zeropoint\donetrue\fi
- \global\advance\sidefloatsidelines\plusone\relax}%
+ {\ifdone\else\global\sidefloatsidelines\zerocount\donetrue\fi
+ \global\advance\sidefloatsidelines##1\relax}%
\donefalse\normalexpanded{\noexpand\dorepeatwithcommand[#1]}\docommand}%
\egroup}
@@ -1907,7 +1921,7 @@
% \@EA\beforesplitstring\floatcolumn\at*\to\floatcolumn}
\def\setfloatmethodvariables#1% \floatmethod \floatlabel \floatrow \floatcolumn
- {\ctxlua{floats.analysemethod("#1")}}
+ {\ctxcommand{analysefloatmethod("#1")}}
\def\dogetfloatbox#1#2%
{\ifvisible
diff --git a/tex/context/base/strc-ini.lua b/tex/context/base/strc-ini.lua
index a9013c641..784518048 100644
--- a/tex/context/base/strc-ini.lua
+++ b/tex/context/base/strc-ini.lua
@@ -21,28 +21,21 @@ but it does not make sense to store all processdata.
]]--
local format, concat, match = string.format, table.concat, string.match
-local count, texwrite, texprint, texsprint = tex.count, tex.write, tex.print, tex.sprint
+local count = tex.count
local type, next, tonumber, tostring = type, next, tonumber, tostring
local lpegmatch = lpeg.match
local settings_to_array, settings_to_hash = utilities.parsers.settings_to_array, utilities.parsers.settings_to_hash
local allocate = utilities.storage.allocate
-local ctxcatcodes, xmlcatcodes, notcatcodes = tex.ctxcatcodes, tex.xmlcatcodes, tex.notcatcodes -- tricky as we're in notcatcodes
+local ctxcatcodes = tex.ctxcatcodes
+local xmlcatcodes = tex.xmlcatcodes
+local notcatcodes = tex.notcatcodes
+local txtcatcodes = tex.txtcatcodes
local trace_processors = false trackers.register("structures.processors", function(v) trace_processors = v end)
local report_processors = logs.reporter("structure","processors")
--- move this
-
-commands = commands or { }
-local commands = commands
-
-function commands.firstinlist(str)
- local first = match(str,"^([^,]+),")
- texsprint(ctxcatcodes,first or str)
-end
-
-- -- -- namespace -- -- --
-- This is tricky: we have stored and initialized already some of
@@ -104,9 +97,9 @@ function specials.store(class,data)
tobesaved[class] = s
end
s[#s+1] = data
- texwrite(#s)
+ context(#s)
else
- texwrite(0)
+ context(0)
end
end
@@ -207,11 +200,11 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
if trace_processors then
report_processors("cct: %s, txt: %s",catcodes,title)
end
- texsprint(catcodes,title)
+ context.sprint(catcodes,title) -- was: texsprint(catcodes,title)
end
end
else
- texsprint(title) -- no catcode switch
+ context(title) -- no catcode switch, was: texsprint(title)
end
end
end
@@ -241,21 +234,65 @@ function processors.split(str)
end
end
-function processors.sprint(catcodes,str,fnc,...) -- not ok: mixed
+--~ function processors.sprint(catcodes,str,fnc,...) -- not ok: mixed
+--~ local p, s = lpegmatch(splitter,str)
+--~ local code
+--~ if registered[p] then
+--~ code = format("\\applyprocessor{%s}{%s}",p,(fnc and fnc(s,...)) or s)
+--~ else
+--~ code = (fnc and fnc(str,...)) or str
+--~ end
+--~ if trace_processors then
+--~ report_processors("cct: %s, seq: %s",catcodes,code)
+--~ end
+--~ context.sprint(catcodes,code) -- was: texsprint(catcodes,code)
+--~ end
+
+function processors.apply(str)
local p, s = lpegmatch(splitter,str)
- local code
- if registered[p] then
- code = format("\\applyprocessor{%s}{%s}",p,(fnc and fnc(s,...)) or s)
+ if p and registered[p] then
+ if trace_processors then
+ report_processors("known: %s, argument: %s",p,s or "")
+ end
+ context.applyprocessor(p,s)
+ elseif s then
+ if trace_processors then
+ report_processors("unknown: %s, argument: %s",p or "?",s)
+ end
+ context(s)
+ elseif str then
+ if trace_processors then
+ report_processors("direct: %s",str)
+ end
+ context(str)
+ end
+end
+
+function processors.startapply(str)
+ local p, s = lpegmatch(splitter,str)
+ if p and registered[p] then
+ if trace_processors then
+ report_processors("start: %s",p or "?")
+ end
+ context.applyprocessor(p)
else
- code = (fnc and fnc(str,...)) or str
+ if trace_processors then
+ report_processors("start: %s (unknown)",p or "?")
+ end
+ context.firstofoneargument()
end
+ context("{")
+ return s -- not: or str
+end
+
+function processors.stopapply()
+ context("}")
if trace_processors then
- report_processors("cct: %s, seq: %s",catcodes,code)
+ report_processors("stop")
end
- texsprint(catcodes,code)
end
-function processors.apply(str)
+function processors.tostring(str)
local p, s = lpegmatch(splitter,str)
if registered[p] then
return format("\\applyprocessor{%s}{%s}",p,s)
@@ -264,7 +301,7 @@ function processors.apply(str)
end
end
-function processors.ignore(str)
+function processors.stripped(str)
local p, s = lpegmatch(splitter,str)
return s or str
end
@@ -339,3 +376,10 @@ function sets.get(namespace,block,name,level,default) -- check if name is passed
local dl = dn[1][level]
return dl or dn[2] or default
end
+
+-- interface
+
+commands.definestructureset = sets.define
+
+commands.registerstructureprocessor = processors.register
+commands.resetstructureprocessor = processors.reset
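
processors.startapply and stopapply replace the old sprint-with-callback interface: startapply opens \applyprocessor{<name>}{ (or \firstofoneargument{ for an unknown processor) and returns the remainder of the "name->value" string, and stopapply closes the brace group. Usage as in sections.typesetnumber, with a hypothetical processor name:

    -- "uppercase" stands in for whatever processor was registered
    local theconversion = "uppercase->romannumerals"
    local data = processors.startapply(theconversion)   -- returns "romannumerals"
    context.convertnumber(data or "numbers", 12)
    processors.stopapply()
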
diff --git a/tex/context/base/strc-ini.mkiv b/tex/context/base/strc-ini.mkiv
index 5a78d301d..29560be3f 100644
--- a/tex/context/base/strc-ini.mkiv
+++ b/tex/context/base/strc-ini.mkiv
@@ -70,10 +70,10 @@
\unexpanded\def\definestructureconversionset{\dotripleempty\dodefinestructureconversionset}
\unexpanded\def\definestructureprefixset {\dotripleempty\dodefinestructureprefixset}
-\def\dodefinestructureresetset [#1][#2][#3]{\ctxlua{structures.sets.define("structure:resets", "#1","\luaescapestring{\detokenize{#2}}","\luaescapestring{\detokenize{#3}}",true)}}
-\def\dodefinestructureseparatorset [#1][#2][#3]{\ctxlua{structures.sets.define("structure:separators", "#1","\luaescapestring{\detokenize{#2}}","\luaescapestring{\detokenize{#3}}")}}
-\def\dodefinestructureconversionset[#1][#2][#3]{\ctxlua{structures.sets.define("structure:conversions","#1","\luaescapestring{\detokenize{#2}}","\luaescapestring{\detokenize{#3}}")}}
-\def\dodefinestructureprefixset [#1][#2][#3]{\ctxlua{structures.sets.define("structure:prefixes", "#1","\luaescapestring{\detokenize{#2}}","\luaescapestring{\detokenize{#3}}")}}
+\def\dodefinestructureresetset [#1][#2][#3]{\ctxcommand{definestructureset("structure:resets", "#1","\luaescapestring{\detokenize{#2}}","\luaescapestring{\detokenize{#3}}",true)}}
+\def\dodefinestructureseparatorset [#1][#2][#3]{\ctxcommand{definestructureset("structure:separators", "#1","\luaescapestring{\detokenize{#2}}","\luaescapestring{\detokenize{#3}}")}}
+\def\dodefinestructureconversionset[#1][#2][#3]{\ctxcommand{definestructureset("structure:conversions","#1","\luaescapestring{\detokenize{#2}}","\luaescapestring{\detokenize{#3}}")}}
+\def\dodefinestructureprefixset [#1][#2][#3]{\ctxcommand{definestructureset("structure:prefixes", "#1","\luaescapestring{\detokenize{#2}}","\luaescapestring{\detokenize{#3}}")}}
% \definestructureseparatorset [weird][!,?,*][:] % tex content
% \definestructureconversionset[weird][numbers,characters,romannumerals][numbers] % symbolic names
diff --git a/tex/context/base/strc-lst.lua b/tex/context/base/strc-lst.lua
index 930ff2d0a..edb036a5c 100644
--- a/tex/context/base/strc-lst.lua
+++ b/tex/context/base/strc-lst.lua
@@ -13,18 +13,16 @@ if not modules then modules = { } end modules ['strc-lst'] = {
local format, gmatch, gsub = string.format, string.gmatch, string.gsub
local tonumber = tonumber
-local texsprint, texprint, texwrite, texcount = tex.sprint, tex.print, tex.write, tex.count
+local texcount = tex.count
local concat, insert, remove = table.concat, table.insert, table.remove
local lpegmatch = lpeg.match
local simple_hash_to_string, settings_to_hash = utilities.parsers.simple_hash_to_string, utilities.parsers.settings_to_hash
local allocate, checked = utilities.storage.allocate, utilities.storage.checked
-local trace_lists = false trackers.register("structures.lists", function(v) trace_lists = v end)
+local trace_lists = false trackers.register("structures.lists", function(v) trace_lists = v end)
local report_lists = logs.reporter("structure","lists")
-local ctxcatcodes = tex.ctxcatcodes
-
local structures = structures
local lists = structures.lists
local sections = structures.sections
@@ -123,7 +121,7 @@ function lists.push(t)
pushed[i] = p
r.listindex = p
end
- texwrite(p)
+ context(p)
end
function lists.doifstoredelse(n)
@@ -477,7 +475,12 @@ function lists.userdata(name,r,tag) -- to tex (todo: xml)
local userdata, metadata = result.userdata, result.metadata
local str = userdata and userdata[tag]
if str then
- texsprint(metadata and metadata.catcodes or ctxcatcodes,str)
+ local catcodes = metadata and metadata.catcodes
+ if catcodes then
+ context.sprint(catcodes,str)
+ else
+ context(str)
+ end
end
end
end
@@ -490,19 +493,19 @@ function lists.uservalue(name,r,tag,default) -- to lua
end
function lists.size()
- texprint(#lists.result)
+ context(#lists.result)
end
function lists.location(n)
local l = lists.result[n]
- texsprint(l.references.internal or n)
+ context(l.references.internal or n)
end
function lists.label(n,default)
local l = lists.result[n]
local t = l.titledata
if t then
- texsprint(t.label or default or "")
+ context(t.label or default or "")
end
end
@@ -572,9 +575,9 @@ function lists.realpage(name,n)
local data = lists.result[n]
if data then
local references = data.references
- texsprint(references and references.realpage or 0)
+ context(references and references.realpage or 0)
else
- texsprint(0)
+ context(0)
end
end
diff --git a/tex/context/base/strc-lst.mkiv b/tex/context/base/strc-lst.mkiv
index 149b1a734..c532f0ae7 100644
--- a/tex/context/base/strc-lst.mkiv
+++ b/tex/context/base/strc-lst.mkiv
@@ -961,40 +961,52 @@
\unexpanded\def\determinelistcharacteristics
{\dodoubleempty\dodeterminelistcharacteristics}
-\def\combinedlistparameter#1{\csname\??ih\currentcombinedlist#1\endcsname}
-
-\unexpanded\def\setupcombinedlist
- {\dodoubleargument\dosetupcombinedlist}
+% todo: make simple parameter handler
-\def\dosetupcombinedlist[#1][#2]%
- {\getparameters[\??ih#1][#2]%
- \edef\currentcombinedlist{#1}%
- \normalexpanded{\noexpand\setuplist[\combinedlistparameter\c!list]}[#2]}
+\def\combinedlistparameter#1{\csname\??ih\currentcombinedlist#1\endcsname}
+\def\combinedlisttoks {\csname\??ih::\currentcombinedlist\endcsname}
\unexpanded\def\definecombinedlist
{\dotripleempty\dodefinecombinedlist}
\def\dodefinecombinedlist[#1][#2][#3]%
- {\getparameters
+ {\edef\currentcombinedlist{#1}%
+ \getparameters
[\??ih#1]
[\c!criterium=\v!local,\c!number=0,\c!list={#2},#3]%
+\expandafter\newtoks\csname\??ih::\currentcombinedlist\endcsname
+\combinedlisttoks{#3}%
\setvalue{\e!setup#1\e!endsetup}{\dodoubleempty\dosetupcombinedlist[#1]}%
\setvalue{\e!place#1}{\dodoubleempty\doplacecombinedlist[#1]}%
\setvalue{\e!complete#1}{\dodoubleempty\docompletecombinedlist[#1]}}
+\unexpanded\def\setupcombinedlist
+ {\dodoubleargument\dosetupcombinedlist}
+
+% \def\dosetupcombinedlist[#1][#2]% this will change, as we now adapt all list elements
+% {\getparameters[\??ih#1][#2]% % maybe some parent magic or store in a toks
+% \edef\currentcombinedlist{#1}%
+% \normalexpanded{\setuplist[\combinedlistparameter\c!list]}[#2]}
+
+\def\dosetupcombinedlist[#1][#2]%
+ {\edef\currentcombinedlist{#1}%
+ \getparameters[\??ih#1][#2]%
+ \combinedlisttoks\expandafter{\the\combinedlisttoks,#2}}% can accumulate
+
\unexpanded\def\placecombinedlist
{\dodoubleempty\doplacecombinedlist}
\def\doplacecombinedlist[#1][#2]% we can move much of the analysis to lua
{\begingroup
% level is no longer supported
- \def\currentcombinedlist{#1}%
+ \edef\currentcombinedlist{#1}%
\getparameters[\??ih#1][#2]%
\edef\combinedlist{\combinedlistparameter\c!list}%
\the\everystructurelist
\doif{\combinedlistparameter\c!coupling}\v!on{\startlistreferences{#1}}%
\dobeginoflist
- \normalexpanded{\setuplist[\combinedlist][#2]}%
+% \normalexpanded{\setuplist[\combinedlist][#2]}%
+\normalexpanded{\setuplist[\combinedlist][\the\combinedlisttoks,#2]}% or ,}#2]%
\doplacestructurelist
{\combinedlist}%
{\combinedlistparameter\c!criterium}%
diff --git a/tex/context/base/strc-mar.lua b/tex/context/base/strc-mar.lua
index 5bb40fa94..50c3562e0 100644
--- a/tex/context/base/strc-mar.lua
+++ b/tex/context/base/strc-mar.lua
@@ -7,10 +7,12 @@ if not modules then modules = { } end modules ['strc-mar'] = {
}
-- todo: cleanup stack (structures.marks.reset(v_all) also does the job)
+-- todo: only commands.* print to tex, native marks return values
local insert, concat = table.insert, table.concat
local tostring, next, rawget = tostring, next, rawget
local lpegmatch = lpeg.match
+local match = string.match
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -45,6 +47,7 @@ local v_current = variables.current
local v_default = variables.default
local v_page = variables.page
local v_all = variables.all
+local v_keep = variables.keep
local v_nocheck_suffix = ":" .. variables.nocheck
@@ -130,21 +133,33 @@ local classes = { }
setmetatableindex(classes, function(t,k) local s = settings_to_array(k) t[k] = s return s end)
-function marks.synchronize(class,n)
+local lasts = { }
+
+function marks.synchronize(class,n,option)
local box = texbox[n]
if box then
local first, last = sweep(box.list,0,0)
- local classlist = classes[class]
- for i=1,#classlist do
- local class = classlist[i]
- local range = ranges[class]
- if not range then
- range = { }
- ranges[class] = range
- end
- range.first, range.last = first, last
+ if option == v_keep and first == 0 and last == 0 then
if trace_marks_get or trace_marks_set then
- report_marks("synchronize: class=%s, first=%s, last=%s",class,range.first,range.last)
+ report_marks("synchronize: class=%s, box=%s, retaining",class,n)
+ end
+ -- todo: check if still valid first/last in range
+ first = lasts[class] or 0
+ last = first
+ else
+ lasts[class] = last
+ local classlist = classes[class]
+ for i=1,#classlist do
+ local class = classlist[i]
+ local range = ranges[class]
+ if not range then
+ range = { }
+ ranges[class] = range
+ end
+ range.first, range.last = first, last
+ if trace_marks_get or trace_marks_set then
+ report_marks("synchronize: class=%s, first=%s, last=%s",class,range.first,range.last)
+ end
end
end
elseif trace_marks_get or trace_marks_set then
@@ -274,7 +289,7 @@ function marks.set(name,value)
report_marks("set: parent=%s, child=%s, index=%s, value=%s",parent,child,topofstack,value)
end
end
- tex.setattribute("global",a_marks,topofstack)
+ texsetattribute("global",a_marks,topofstack)
end
end
@@ -479,6 +494,15 @@ local function doresolve(name,rangename,swap,df,dl,strict)
return value, index, found
end
+-- previous : last before sync
+-- next : first after sync
+
+-- top : first in sync
+-- bottom : last in sync
+
+-- first : first not top in sync
+-- last : last not bottom in sync
+
methods[v_previous] = function(name,range) return doresolve(name,range,false,-1,0,true ) end -- strict
methods[v_top] = function(name,range) return doresolve(name,range,false, 0,0,true ) end -- strict
methods[v_bottom] = function(name,range) return doresolve(name,range,true , 0,0,true ) end -- strict
@@ -489,12 +513,12 @@ methods[v_top_nocheck] = function(name,range) return doresolve(name,range,f
methods[v_bottom_nocheck] = function(name,range) return doresolve(name,range,true , 0,0,false) end
methods[v_next_nocheck] = function(name,range) return doresolve(name,range,true , 0,1,false) end
-local function resolve(name,range,f_swap,l_swap,step,strict) -- we can have an offset
- local f_value, f_index, f_found = doresolve(name,range,f_swap,0,0,strict)
- local l_value, l_index, l_found = doresolve(name,range,l_swap,0,0,strict)
+local function do_first(name,range,check)
+ local f_value, f_index, f_found = doresolve(name,range,false,0,0,check)
+ local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check)
if f_found and l_found and l_index > f_index then
local name = parentname(name)
- for i=f_index,l_index,step do
+ for i=f_index,l_index,1 do
local si = stack[i]
local sn = si[name]
if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= f_value then
@@ -505,11 +529,26 @@ local function resolve(name,range,f_swap,l_swap,step,strict) -- we can have an o
return f_value, f_index, f_found
end
-methods[v_first ] = function(name,range) return resolve(name,range,false,true, 1,true ) end -- strict
-methods[v_last ] = function(name,range) return resolve(name,range,true,false,-1,true ) end -- strict
+local function do_last(name,range,check)
+ local f_value, f_index, f_found = doresolve(name,range,false,0,0,check)
+ local l_value, l_index, l_found = doresolve(name,range,true ,0,0,check)
+ if f_found and l_found and l_index > f_index then
+ local name = parentname(name)
+ for i=l_index,f_index,-1 do
+ local si = stack[i]
+ local sn = si[name]
+ if sn and sn ~= false and sn ~= true and sn ~= "" and sn ~= l_value then
+ return sn, i, si
+ end
+ end
+ end
+ return l_value, l_index, l_found
+end
-methods[v_first_nocheck] = function(name,range) return resolve(name,range,false,true, 1,false) end
-methods[v_last_nocheck ] = function(name,range) return resolve(name,range,true,false,-1,false) end
+methods[v_first ] = function(name,range) return do_first(name,range,true ) end
+methods[v_last ] = function(name,range) return do_last (name,range,true ) end
+methods[v_first_nocheck] = function(name,range) return do_first(name,range,false) end
+methods[v_last_nocheck ] = function(name,range) return do_last (name,range,false) end
methods[v_current] = function(name,range) -- range is ignored here
local top = stack[topofstack]
@@ -595,12 +634,22 @@ function marks.fetchallmarks(name,range) fetchallmarks(name,range )
-- here we have a few helpers
function marks.title(tag,n)
- lists.savedtitle(tag,n,"marking")
+ local listindex = match(n,"^li::(.-)$")
+ if listindex then
+ lists.savedtitle(tag,listindex,"marking")
+ else
+ context(n)
+ end
end
function marks.number(tag,n) -- no spec
- -- no prefix (as it is the prefix)
- lists.savednumber(tag,n)
+ local listindex = match(n,"^li::(.-)$")
+ if listindex then
+ lists.savednumber(tag,listindex)
+ else
+ -- no prefix (as it is the prefix)
+ context(n)
+ end
end
-- interface
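The li:: convention used in marks.title and marks.number above is easy to check in isolation (the values below are made up): a payload carrying the prefix is treated as a list index, anything else is typeset as-is.

local match = string.match
print(match("li::42","^li::(.-)$"))           -- 42   -> handed to lists.savedtitle / lists.savednumber
print(match("Taco needed this","^li::(.-)$")) -- nil  -> passed on to context() unchanged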
diff --git a/tex/context/base/strc-mar.mkiv b/tex/context/base/strc-mar.mkiv
index 612492019..a1160c73a 100644
--- a/tex/context/base/strc-mar.mkiv
+++ b/tex/context/base/strc-mar.mkiv
@@ -81,7 +81,7 @@
\unexpanded\def\relatemarking {\dodoubleempty \dorelatemarking}
\unexpanded\def\setmarking {\dosingleargument\dosetmarking } \let\marking\setmarking
\unexpanded\def\resetmarking {\dosingleargument\doresetmarking }
-\unexpanded\def\synchronizemarking{\dodoubleargument\dosynchronizemarking }
+\unexpanded\def\synchronizemarking{\dotripleargument\dosynchronizemarking }
\def\dodefinemarking[#1][#2]% marking parent
{\doifelsenothing{#2}
@@ -108,11 +108,11 @@
\def\doifelsemarking#1%
{\ctxcommand{doifelsemarking("#1")}}
-\def\dosynchronizemarking[#1][#2]% class boxnumber (some day also name), maybe second argument table
- {\ifvoid#2\else\ctxcommand{synchronizemarking("#1",\number#2)}\fi}
+\def\dosynchronizemarking[#1][#2][#3]% #1=class #2=boxnumber (some day also name) #3=options, maybe second argument table
+ {\ifvoid#2\else\ctxcommand{synchronizemarking("#1",\number#2,"#3")}\fi}
% \appendtoks
-% \dosynchronizemarking[\v!page][\normalpagebox]%
+% \dosynchronizemarking[\v!page][\normalpagebox][\v!keep]% keep if no marks
% \to \everybeforepagebody
% defaults
diff --git a/tex/context/base/strc-mat.lua b/tex/context/base/strc-mat.lua
index d9cca2717..98b1e996c 100644
--- a/tex/context/base/strc-mat.lua
+++ b/tex/context/base/strc-mat.lua
@@ -22,7 +22,7 @@ local formuladata = { }
function formulas.store(data)
formuladata[#formuladata+1] = data
- tex.write(#formuladata)
+ context(#formuladata)
end
function formulas.current()
diff --git a/tex/context/base/strc-mat.mkiv b/tex/context/base/strc-mat.mkiv
index 60fb0be31..d9adee964 100644
--- a/tex/context/base/strc-mat.mkiv
+++ b/tex/context/base/strc-mat.mkiv
@@ -605,7 +605,7 @@
\unexpanded\def\stopsubformulas
{\nonoindentation
- \checknextindentation[\formulaparameter\c!indentnext]%
+ \checknextindentation[\subformulaparameter\c!indentnext]%
\the\everyresetformulas % to be checked
\global\setfalse\insidesubformulas
\dorechecknextindentation} % here ?
diff --git a/tex/context/base/strc-num.lua b/tex/context/base/strc-num.lua
index 95cf6d941..0af5f6421 100644
--- a/tex/context/base/strc-num.lua
+++ b/tex/context/base/strc-num.lua
@@ -11,7 +11,7 @@ if not modules then modules = { } end modules ['strc-num'] = {
local format = string.format
local next, type = next, type
local min, max = math.min, math.max
-local texsprint, texcount = tex.sprint, tex.count
+local texcount = tex.count
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -181,26 +181,31 @@ local function savevalue(name,i)
end
end
-function counters.define(name, start, counter, method) -- todo: step
- local d = allocate(name,1)
- d.start = start
- d.state = variables.start or ""
- if counter ~= "" then
- d.counter = counter -- only for special purposes, cannot be false
- d.method = method -- frozen at define time
+function counters.define(specification)
+ local name = specification.name
+ if name and name ~= "" then
+ -- todo: step
+ local d = allocate(name,1)
+ d.start = specification.start
+ d.state = variables.start or ""
+ local counter = specification.counter
+ if counter and counter ~= "" then
+ d.counter = counter -- only for special purposes, cannot be false
+ d.method = specification.method -- frozen at define time
+ end
end
end
-function counters.trace(name)
+function counters.show(name)
local cd = counterdata[name]
if cd then
- texsprint(format("[%s:",name))
+ context("[%s:",name)
local data = cd.data
for i=1,#data do
local d = data[i]
- texsprint(format(" (%s: %s,%s,%s s:%s r:%s)",i,(d.start or 0),d.number or 0,d.last,d.step or 0,d.range or 0))
+ context(" (%s: %s,%s,%s s:%s r:%s)",i,(d.start or 0),d.number or 0,d.last,d.step or 0,d.range or 0)
end
- texsprint("]")
+ context("]")
end
end
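A minimal sketch of how the new specification-style definition might be called from the Lua end (the counter name and field values are illustrative only); the strc-num.mkiv hunk further down passes the same fields through \ctxcommand:

structures.counters.define {
    name    = "mycounter", -- illustrative name
    start   = 0,
    counter = "",          -- only for special purposes
    method  = "",
}
structures.counters.show("mycounter") -- typesets something like "[mycounter: ...]"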
@@ -228,34 +233,30 @@ end
-- depends on when incremented, before or after (driven by d.offset)
-function counters.doifelse(name)
- commands.doifelse(counterdata[name])
-end
-
function counters.previous(name,n)
- texsprint(allocate(name,n).previous)
+ context(allocate(name,n).previous)
end
function counters.next(name,n)
- texsprint(allocate(name,n).next)
+ context(allocate(name,n).next)
end
counters.prev = counters.previous
function counters.current(name,n)
- texsprint(allocate(name,n).number)
+ context(allocate(name,n).number)
end
function counters.first(name,n)
- texsprint(allocate(name,n).first)
+ context(allocate(name,n).first)
end
function counters.last(name,n)
- texsprint(allocate(name,n).last)
+ context(allocate(name,n).last)
end
function counters.subs(name,n)
- texsprint(counterdata[name].data[n].subs or 0)
+ context(counterdata[name].data[n].subs or 0)
end
function counters.setvalue(name,tag,value)
@@ -426,7 +427,7 @@ function counters.get(name,n,key)
end
function counters.value(name,n) -- what to do with own
- tex.write(counters.get(name,n or 1,'number') or 0)
+ context(counters.get(name,n or 1,'number') or 0)
end
function counters.converted(name,spec) -- name can be number and reference to storage
@@ -481,6 +482,30 @@ function counters.converted(name,spec) -- name can be number and reference to st
end
end
+-- interfacing
+
+commands.definestructurecounter = counters.define
+commands.setstructurecounter = counters.set
+commands.setownstructurecounter = counters.setown
+commands.resetstructurecounter = counters.reset
+commands.restartstructurecounter = counters.restart
+commands.savestructurecounter = counters.save
+commands.restorestructurecounter = counters.restore
+commands.addstructurecounter = counters.add
+commands.structurecountervalue = counters.value
+commands.laststructurecounter = counters.last
+commands.firststructurecounter = counters.first
+commands.nextstructurecounter = counters.next
+commands.prevstructurecounter = counters.prev
+commands.structurecountersubs = counters.subs
+commands.showstructurecounter = counters.show
+
+function commands.doifelsestructurecounter(name) commands.doifelse(counterdata[name]) end
+function commands.doifstructurecounter (name) commands.doif (counterdata[name]) end
+function commands.doifnotstructurecounter (name) commands.doifnot (counterdata[name]) end
+
+function commands.incrementedstructurecounter(...) context(counters.add(...)) end
+
--~ -- move to strc-pag.lua
--~ function counters.analyze(name,counterspecification)
diff --git a/tex/context/base/strc-num.mkiv b/tex/context/base/strc-num.mkiv
index 909851656..2d70f00ce 100644
--- a/tex/context/base/strc-num.mkiv
+++ b/tex/context/base/strc-num.mkiv
@@ -105,10 +105,12 @@
\def\dododefinestructurecounter[#1][#2]%
{\getparameters[\??nn#1][\s!counter=,#2]% counter is for internal purposes
- \ctxlua{structures.counters.define("#1", % this will be a table
- tonumber("\structurecounterparameter{#1}\c!start") or 0,
- "\structurecounterparameter{#1}\s!counter",
- "\structurecounterparameter{#1}\c!method")}%
+ \ctxcommand{definestructurecounter {
+ name = "#1",
+ start = tonumber("\structurecounterparameter{#1}\c!start") or 0,
+ counter = "\structurecounterparameter{#1}\s!counter",
+ method = "\structurecounterparameter{#1}\c!method",
+ }}%
\docheckstructurecountersetup{#1}}
\def\donodefinestructurecounter[#1][#2]% inherit
@@ -128,7 +130,7 @@
\def\thenamedstructurecounterlevel#1%
{\thenamedheadlevel{\structurecounterway{#1}}}
-\def\docheckstructurecountersetup#1%
+\def\docheckstructurecountersetup#1% does it have to happen here?
{% this can be done at the lua end / a bit messy here ... todo ...
\ifcsname\??nn#1\c!number\endcsname
\doifelsevalue {\??nn#1\c!number}{#1} {\letbeundefined{\??nn#1\c!number}}%
@@ -138,7 +140,7 @@
% it's a clone
\else
\edef\currentstructurecounterlevel{\thenamedstructurecounterlevel{#1}}%
- \ctxlua{
+ \ctxlua{% will be a command (depends on sections)
structures.counters.restart("#1",1,"\structurecounterparameter{#1}\c!start")
structures.counters.setstate("#1","\structurecounterparameter{#1}\c!state")
structures.counters.setlevel("#1",\currentstructurecounterlevel)
@@ -146,29 +148,28 @@
}%
\fi}
-\def\doifstructurecounterelse#1{\ctxlua{structures.counters.doifelse("\@@thestructurecounter{#1}")}}
-\def\doifstructurecounter #1{\ctxlua{structures.counters.doif ("\@@thestructurecounter{#1}")}}
-\def\doifnotstructurecounter #1{\ctxlua{structures.counters.doifnot ("\@@thestructurecounter{#1}")}}
-
-\def\setstructurecounter [#1]#2{\ctxlua{structures.counters.set ("\@@thestructurecounter{#1}",1,\number#2)}}
-\def\setstructurecounterown [#1]#2{\ctxlua{structures.counters.setown ("\@@thestructurecounter{#1}",1,"#2")}}
-\def\resetstructurecounter [#1]{\ctxlua{structures.counters.reset ("\@@thestructurecounter{#1}",1)}}
-\def\restartstructurecounter [#1]#2{\ctxlua{structures.counters.restart("\@@thestructurecounter{#1}",1,#2)}}
-\def\savestructurecounter [#1]{\ctxlua{structures.counters.save ("\@@thestructurecounter{#1}")}}
-\def\restorestructurecounter [#1]{\ctxlua{structures.counters.restore("\@@thestructurecounter{#1}")}}
-\def\incrementstructurecounter [#1]{\ctxlua{structures.counters.add ("\@@thestructurecounter{#1}",1,1)}}
-\def\decrementstructurecounter [#1]{\ctxlua{structures.counters.add ("\@@thestructurecounter{#1}",1,-1)}}
-\def\rawstructurecounter [#1]{\ctxlua{structures.counters.value ("\@@thestructurecounter{#1}",1)}}
-\def\laststructurecounter [#1]{\ctxlua{structures.counters.last ("\@@thestructurecounter{#1}",1)}}
-\def\firststructurecounter [#1]{\ctxlua{structures.counters.first ("\@@thestructurecounter{#1}",1)}}
-\def\nextstructurecounter [#1]{\ctxlua{structures.counters.next ("\@@thestructurecounter{#1}",1)}}
-\def\prevstructurecounter [#1]{\ctxlua{structures.counters.prev ("\@@thestructurecounter{#1}",1)}}
-\def\structurecountersubs [#1]{\ctxlua{structures.counters.subs ("\@@thestructurecounter{#1}",1)}}
-
-\def\tracestructurecounter [#1]{\ctxlua{structures.counters.trace ("\@@thestructurecounter{#1}")}}
-
-\def\incrementedstructurecounter[#1]{\ctxlua{tex.write(structures.counters.add("\@@thestructurecounter{#1}",1,1))}}
-\def\decrementedstructurecounter[#1]{\ctxlua{tex.write(structures.counters.add("\@@thestructurecounter{#1}",1,-1))}}
+\def\doifstructurecounterelse#1{\ctxcommand{doifelsestructurecounter("\@@thestructurecounter{#1}")}}
+\def\doifstructurecounter #1{\ctxcommand{doifstructurecounter ("\@@thestructurecounter{#1}")}}
+\def\doifnotstructurecounter #1{\ctxcommand{doifnotstructurecounter ("\@@thestructurecounter{#1}")}}
+
+\def\setstructurecounter [#1]#2{\ctxcommand{setstructurecounter ("\@@thestructurecounter{#1}",1,\number#2)}}
+\def\setstructurecounterown [#1]#2{\ctxcommand{setownstructurecounter ("\@@thestructurecounter{#1}",1,"#2")}}
+\def\resetstructurecounter [#1]{\ctxcommand{resetstructurecounter ("\@@thestructurecounter{#1}",1)}}
+\def\restartstructurecounter [#1]#2{\ctxcommand{restartstructurecounter("\@@thestructurecounter{#1}",1,#2)}}
+\def\savestructurecounter [#1]{\ctxcommand{savestructurecounter ("\@@thestructurecounter{#1}")}}
+\def\restorestructurecounter [#1]{\ctxcommand{restorestructurecounter("\@@thestructurecounter{#1}")}}
+\def\incrementstructurecounter [#1]{\ctxcommand{addstructurecounter ("\@@thestructurecounter{#1}",1,1)}}
+\def\decrementstructurecounter [#1]{\ctxcommand{addstructurecounter ("\@@thestructurecounter{#1}",1,-1)}}
+\def\rawstructurecounter [#1]{\ctxcommand{structurecountervalue ("\@@thestructurecounter{#1}",1)}}
+\def\laststructurecounter [#1]{\ctxcommand{laststructurecounter ("\@@thestructurecounter{#1}",1)}}
+\def\firststructurecounter [#1]{\ctxcommand{firststructurecounter ("\@@thestructurecounter{#1}",1)}}
+\def\nextstructurecounter [#1]{\ctxcommand{nextstructurecounter ("\@@thestructurecounter{#1}",1)}}
+\def\prevstructurecounter [#1]{\ctxcommand{prevstructurecounter ("\@@thestructurecounter{#1}",1)}}
+\def\structurecountersubs          [#1]{\ctxcommand{structurecountersubs    ("\@@thestructurecounter{#1}",1)}}
+\def\showstructurecounter          [#1]{\ctxcommand{showstructurecounter    ("\@@thestructurecounter{#1}")}}
+
+\def\incrementedstructurecounter[#1]{\ctxcommand{incrementedstructurecounter("\@@thestructurecounter{#1}",1, 1)}}
+\def\decrementedstructurecounter[#1]{\ctxcommand{incrementedstructurecounter("\@@thestructurecounter{#1}",1,-1)}}
\def\setsubstructurecounter {\dodoubleargument\dosetsubstructurecounter}
\def\setsubstructurecounterown {\dodoubleargument\dosetsubstructurecounterown}
@@ -178,16 +179,16 @@
\def\decrementsubstructurecounter {\dodoubleargument\dodecrementsubstructurecounter}
\def\rawsubstructurecounter {\dodoubleargument\dorawsubstructurecounter}
-\def\dosetsubstructurecounter [#1][#2]#3{\ctxlua{structures.counters.set ("\@@thestructurecounter{#1}",#2,\number#3)}}
-\def\dosetsubstructurecounterown [#1][#2]#3{\ctxlua{structures.counters.setown ("\@@thestructurecounter{#1}",#2,"#3")}}
-\def\doresetsubstructurecounter [#1][#2]{\ctxlua{structures.counters.reset ("\@@thestructurecounter{#1}",#2)}}
-\def\dorestartsubstructurecounter [#1][#2]#3{\ctxlua{structures.counters.restart("\@@thestructurecounter{#1}",#2,#3)}}
-\def\doincrementsubstructurecounter [#1][#2]{\ctxlua{structures.counters.add ("\@@thestructurecounter{#1}",#2,1)}}
-\def\dodecrementsubstructurecounter [#1][#2]{\ctxlua{structures.counters.add ("\@@thestructurecounter{#1}",#2,-1)}}
-\def\dorawsubstructurecounter [#1][#2]{\ctxlua{structures.counters.value ("\@@thestructurecounter{#1}",#2)}}
-\def\dolastsubstructurecounter [#1][#2]{\ctxlua{structures.counters.last ("\@@thestructurecounter{#1}",#2)}}
-\def\dofirstsubstructurecounter [#1][#2]{\ctxlua{structures.counters.first ("\@@thestructurecounter{#1}",#2)}}
-\def\dosubstructurecountersubs [#1][#2]{\ctxlua{structures.counters.subs ("\@@thestructurecounter{#1}",#2)}}
+\def\dosetsubstructurecounter [#1][#2]#3{\ctxcommand{setstructurecounter ("\@@thestructurecounter{#1}",#2,\number#3)}}
+\def\dosetsubstructurecounterown [#1][#2]#3{\ctxcommand{setownstructurecounter ("\@@thestructurecounter{#1}",#2,"#3")}}
+\def\doresetsubstructurecounter [#1][#2]{\ctxcommand{resetstructurecounter ("\@@thestructurecounter{#1}",#2)}}
+\def\dorestartsubstructurecounter [#1][#2]#3{\ctxcommand{restartstructurecounter ("\@@thestructurecounter{#1}",#2,#3)}}
+\def\doincrementsubstructurecounter   [#1][#2]{\ctxcommand{addstructurecounter        ("\@@thestructurecounter{#1}",#2,1)}}
+\def\dodecrementsubstructurecounter   [#1][#2]{\ctxcommand{addstructurecounter        ("\@@thestructurecounter{#1}",#2,-1)}}
+\def\dorawsubstructurecounter         [#1][#2]{\ctxcommand{structurecountervalue      ("\@@thestructurecounter{#1}",#2)}}
+\def\dolastsubstructurecounter [#1][#2]{\ctxcommand{laststructurecounter ("\@@thestructurecounter{#1}",#2)}}
+\def\dofirstsubstructurecounter [#1][#2]{\ctxcommand{firststructurecounter ("\@@thestructurecounter{#1}",#2)}}
+\def\dosubstructurecountersubs [#1][#2]{\ctxcommand{structurecountersubs ("\@@thestructurecounter{#1}",#2)}}
% The bypage check needs a multipass reference and therefore
% we only check for it when we increment and know that some
@@ -200,11 +201,11 @@
\def\incrementstructurecounter[#1]%
{\docheckstructurecounterbypage{#1}%
- \ctxlua{structures.counters.add("\@@thestructurecounter{#1}",1,1)}}
+ \ctxcommand{addstructurecounter("\@@thestructurecounter{#1}",1,1)}}
\def\doincrementsubstructurecounter[#1][#2]%
{\docheckstructurecounterbypage{#1}%
- \ctxlua{structures.counters.add("\@@thestructurecounter{#1}",#2,1)}}
+ \ctxcommand{addstructurecounter("\@@thestructurecounter{#1}",#2,1)}}
\def\convertedstructurecounter
{\dodoubleempty\doconvertedstructurecounter}
@@ -212,7 +213,7 @@
\def\doconvertedstructurecounter[#1][#2]%
{\begingroup
\ifsecondargument\getparameters[\??nn#1][#2]\fi
- \ctxlua{structures.counters.prefixedconverted(
+ \ctxlua{structures.sections.prefixedconverted(
"\@@thestructurecounter{#1}",
{
prefix = "\structurecounterparameter{#1}\c!prefix",
@@ -241,7 +242,7 @@
\def\directconvertedstructurecounter#1#2% name, type
{\begingroup
- \ctxlua{structures.counters.prefixedconverted(
+ \ctxlua{structures.sections.prefixedconverted(
"\@@thestructurecounter{#1}",
{
prefix = "\structurecounterparameter{#1}\c!prefix",
@@ -290,14 +291,9 @@
%D What follows is a compatibility layer. This will be phased out (at
%D least from core usage).
-\def\reset
- {\dosingleargument\doreset}
-
-\def\doreset[#1]%
- {\processcommalist[#1]\dodoreset}
-
-\def\dodoreset#1%
- {\csname\s!reset#1\endcsname}%
+% \def\reset {\dosingleargument\doreset}
+% \def\doreset[#1]{\processcommalist[#1]\dodoreset}
+% \def\dodoreset#1{\csname\s!reset#1\endcsname}%
\let \numberparameter \structurecounterparameter % {name}\c!key
@@ -318,19 +314,20 @@
\let \doifundefinednumber \doifnotstructurecounter % {number}{true}
\let \doifdefinednumberelse \doifstructurecounterelse % {number}{true}{false}
-% weird one
-
-\def\accumulatednumber[#1]{}
-
-% funny, here, todo: these are the defaults
+\let \setupnumbering \setupstructurecountering
-\unexpanded\def\setupnumbering
- {\dodoubleempty\getparameters[\??nr]}
+% \unexpanded\def\resetnumber {\dosingleargument\doresetnumber}
+% \def\doresetnumber [#1]{\processcommalistwithparameters[#1]\resetstructurecounter}
-\setupnumbering
- [\c!way=\v!by\v!chapter,
- \c!blockway=,
- \c!state=\v!start]
+% \def\accumulatednumber[#1]{}
+%
+% \unexpanded\def\setupnumbering
+% {\dodoubleempty\getparameters[\??nr]}
+%
+% \setupnumbering
+% [\c!way=\v!by\v!chapter,
+% \c!blockway=,
+% \c!state=\v!start]
%D Helpers:
diff --git a/tex/context/base/strc-pag.lua b/tex/context/base/strc-pag.lua
index bce8a2c3a..9382fc2b5 100644
--- a/tex/context/base/strc-pag.lua
+++ b/tex/context/base/strc-pag.lua
@@ -8,26 +8,29 @@ if not modules then modules = { } end modules ['strc-pag'] = {
local texcount, format = tex.count, string.format
-local ctxcatcodes = tex.ctxcatcodes
local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-local trace_pages = false trackers.register("structures.pages", function(v) trace_pages = v end)
+local trace_pages = false trackers.register("structures.pages", function(v) trace_pages = v end)
-local report_pages = logs.reporter("structure","pages")
+local report_pages = logs.reporter("structure","pages")
-local structures = structures
+local structures = structures
-local helpers = structures.helpers
-local sections = structures.sections
-local pages = structures.pages
-local processors = structures.processors
-local sets = structures.sets
-local counters = structures.counters
+local helpers = structures.helpers
+local sections = structures.sections
+local pages = structures.pages
+local processors = structures.processors
+local sets = structures.sets
+local counters = structures.counters
-local counterdata = counters.data
+local counterdata = counters.data
-local variables = interfaces.variables
-local context = context
+local variables = interfaces.variables
+local context = context
+
+local applyprocessor = processors.apply
+local startapplyprocessor = processors.startapply
+local stopapplyprocessor = processors.stopapply
-- storage
@@ -91,17 +94,19 @@ function pages.number(realdata,pagespec)
local starter = (pagespec and pagespec.starter ~= "" and pagespec.starter ) or (numberspec and numberspec.starter ~= "" and numberspec.starter ) or ""
local stopper = (pagespec and pagespec.stopper ~= "" and pagespec.stopper ) or (numberspec and numberspec.stopper ~= "" and numberspec.stopper ) or ""
if starter ~= "" then
- processors.sprint(ctxcatcodes,starter)
+ applyprocessor(starter)
end
if conversion ~= "" then
context.convertnumber(conversion,userpage)
else
if conversionset == "" then conversionset = "default" end
local theconversion = sets.get("structure:conversions",block,conversionset,1,"numbers") -- to be checked: 1
- processors.sprint(ctxcatcodes,theconversion,convertnumber,userpage)
+ local data = startapplyprocessor(theconversion)
+ context.convertnumber(data or "number",userpage)
+ stopapplyprocessor()
end
if stopper ~= "" then
- processors.sprint(ctxcatcodes,stopper)
+ applyprocessor(stopper)
end
end
@@ -271,7 +276,7 @@ function counters.analyze(name,counterspecification)
return cd, sectiondata, "okay"
end
-function counters.prefixedconverted(name,prefixspec,numberspec)
+function sections.prefixedconverted(name,prefixspec,numberspec)
local cd, prefixdata, result = counters.analyze(name,prefixspec)
if cd then
if prefixdata then
diff --git a/tex/context/base/strc-prc.mkiv b/tex/context/base/strc-prc.mkiv
index c3f1de4dd..a9bab1c18 100644
--- a/tex/context/base/strc-prc.mkiv
+++ b/tex/context/base/strc-prc.mkiv
@@ -50,11 +50,11 @@
\def\dodefineprocessor[#1][#2]%
{\ifsecondargument
\letbeundefined{\??po#1\c!command}%
- \ctxlua{structures.processors.register("#1")}%
+ \ctxcommand{registerstructureprocessor("#1")}%
\getparameters[\??po#1][\c!style=,\c!color=,\c!left=,\c!right=,#2]%
\else
\letbeundefined{\??po#1\c!style}%
- \ctxlua{structures.processors.reset("#1")}%
+ \ctxcommand{resetstructureprocessor("#1")}%
\fi}
%D The following command can be used by users but normally it will be
diff --git a/tex/context/base/strc-ref.mkiv b/tex/context/base/strc-ref.mkiv
index e7728a7b9..71de715ba 100644
--- a/tex/context/base/strc-ref.mkiv
+++ b/tex/context/base/strc-ref.mkiv
@@ -1147,8 +1147,8 @@
\def\extrareferencearguments{\luaconditional\highlighthyperlinks,\luaconditional\gotonewwindow,"\currentviewerlayer"}
-\unexpanded\def\directgoto{\ifconditional\uselocationstrut\expandafter\dodirectgoto\else\expandafter\directgotohtdp\fi}
-\unexpanded\def\goto {\ifconditional\uselocationstrut\expandafter\dogoto \else\expandafter\gotohtdp \fi}
+\unexpanded\def\directgoto{\ifconditional\uselocationstrut\expandafter\dodirectgoto\else\expandafter\dodirectgotohtdp\fi}
+\unexpanded\def\goto {\ifconditional\uselocationstrut\expandafter\dogoto \else\expandafter\dogotohtdp \fi}
% The unbox trick is needed in order to permit \par inside a reference. Otherwise
% the reference attribute migrates to the outer boxes.
diff --git a/tex/context/base/strc-reg.lua b/tex/context/base/strc-reg.lua
index 8ba01682b..523309a5f 100644
--- a/tex/context/base/strc-reg.lua
+++ b/tex/context/base/strc-reg.lua
@@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['strc-reg'] = {
}
local next, type = next, type
-local texwrite, texcount = tex.write, tex.count
+local texcount = tex.count
local format, gmatch = string.format, string.gmatch
local equal, concat, remove = table.are_equal, table.concat, table.remove
local utfchar = utf.char
@@ -31,7 +31,7 @@ local mappings = sorters.mappings
local entries = sorters.entries
local replacements = sorters.replacements
-local processor_split = processors.split
+local splitprocessor = processors.split
local variables = interfaces.variables
local context = context
@@ -240,13 +240,13 @@ local function preprocessentries(rawdata)
if type(e) == "table" then
et = e
else
- entryproc, e = processor_split(e)
+ entryproc, e = splitprocessor(e)
et = lpegmatch(entrysplitter,e)
end
if type(k) == "table" then
kt = k
else
- pageproc, k = processor_split(k)
+ pageproc, k = splitprocessor(k)
kt = lpegmatch(entrysplitter,k)
end
entries = { }
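The processor splitting used here presumably follows the usual name->value notation; a toy stand-in (an illustration only, not the real processors.split) shows the intended outcome:

local function toysplit(str)
    -- mimic splitting "processorname->value" into its two parts
    local processor, rest = string.match(str,"^([^>]-)%->(.*)$")
    if processor and processor ~= "" then
        return processor, rest
    end
    return nil, str
end

print(toysplit("bold->Alpha")) -- bold   Alpha
print(toysplit("Alpha"))       -- nil    Alpha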
@@ -276,7 +276,7 @@ function registers.store(rawdata) -- metadata, references, entries
data[#data+1] = rawdata
local label = references.label
if label and label ~= "" then tagged[label] = #data end
- texwrite(#data)
+ context(#data)
end
function registers.enhance(name,n)
@@ -847,7 +847,7 @@ function registers.flush(data,options,prefixspec,pagespec)
end
function registers.analyze(class,options)
- texwrite(registers.analyzed(class,options))
+ context(registers.analyzed(class,options))
end
function registers.process(class,...)
diff --git a/tex/context/base/strc-ren.mkiv b/tex/context/base/strc-ren.mkiv
index 3a160ee9e..be6758a21 100644
--- a/tex/context/base/strc-ren.mkiv
+++ b/tex/context/base/strc-ren.mkiv
@@ -296,19 +296,19 @@
\let\headlastlinewidth\!!zeropoint
% kind of special, we want to snap heads also according to local specs local
\ifgridsnapping
- \begingroup
- \edef\currentheadgridsnapping{\headparameter\c!grid}%
- \ifconditional\headisdisplay
- \ifx\currentheadgridsnapping\empty\else
- \dosetheadattributes\c!style\c!color
- \setupinterlinespace
- \dosetheadattributes\c!textstyle\c!textcolor
- \setupinterlinespace
+ \hbox\bgroup % extra hbox will trigger global snapper on top of local
+ \edef\currentheadgridsnapping{\headparameter\c!grid}%
+ \ifconditional\headisdisplay
+ \ifx\currentheadgridsnapping\empty\else
+ \dosetheadattributes\c!style\c!color
+ \setupinterlinespace
+ \dosetheadattributes\c!textstyle\c!textcolor
+ \setupinterlinespace
+ \fi
\fi
- \fi
- \snaptogrid[\currentheadgridsnapping]\hbox
- {\hskip\localheadskip\hskip\headparameter\c!margin\box\sectionheadbox}%
- \endgroup
+ \snaptogrid[\currentheadgridsnapping]\hbox
+ {\hskip\localheadskip\hskip\headparameter\c!margin\box\sectionheadbox}%
+ \egroup
\else
\hbox
{\hskip\localheadskip\hskip\headparameter\c!margin\box\sectionheadbox}%
diff --git a/tex/context/base/strc-sec.mkiv b/tex/context/base/strc-sec.mkiv
index 2cf02561b..be1cf3b2f 100644
--- a/tex/context/base/strc-sec.mkiv
+++ b/tex/context/base/strc-sec.mkiv
@@ -365,8 +365,14 @@
{\pagetype[\currentheadcoupling]% hm also number
\currentstructuresynchronize}
-\unexpanded\def\setheadmarking
- {\normalexpanded{\noexpand\setmarking[\currenthead]{\currentstructurelistnumber}}}
+% BEWARE: \marking[section]{my text} does not work as expected, because we use list indices
+% instead, so we need a 'keep track of raw set' option (or maybe a funny internal prefix)
+
+% \unexpanded\def\setheadmarking
+% {\normalexpanded{\noexpand\setmarking[\currenthead]{\currentstructurelistnumber}}}
+
+\unexpanded\def\setheadmarking % li:: so that we can use \marking[section]{Taco needed this}
+ {\normalexpanded{\noexpand\setmarking[\currenthead]{li::\currentstructurelistnumber}}}
\let\deepstructurenumbercommand\relax
\let\deepstructuretitlecommand \relax
diff --git a/tex/context/base/strc-syn.lua b/tex/context/base/strc-syn.lua
index f3ba97ffc..c2b5251d0 100644
--- a/tex/context/base/strc-syn.lua
+++ b/tex/context/base/strc-syn.lua
@@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['str-syn'] = {
}
local next, type = next, type
-local texwrite, format = tex.write, string.format
+local format = string.format
local allocate = utilities.storage.allocate
-- interface to tex end
diff --git a/tex/context/base/strc-syn.mkiv b/tex/context/base/strc-syn.mkiv
index 5efa48f7c..571eccd8e 100644
--- a/tex/context/base/strc-syn.mkiv
+++ b/tex/context/base/strc-syn.mkiv
@@ -158,10 +158,10 @@
\fi
\endgroup}
-\def\registersynonym
+\unexpanded\def\registersynonym
{\dodoubleargument\doregistersynonym}
-\def\registersynonym[#1][#2]%
+\def\doregistersynonym[#1][#2]%
{\ctxlua{structures.synonyms.registerused("#1","#2")}}
\unexpanded\def\doinsertsynonymmeaning#1#2% name tag
@@ -343,10 +343,10 @@
\dostoptagged
\normalexpanded{\endgroup\sortingparameter\c!next}}
-\def\registersort
+\unexpanded\def\registersort
{\dodoubleargument\doregistersort}
-\def\registersort[#1][#2]%
+\def\doregistersort[#1][#2]%
{\ctxlua{structures.synonyms.registerused("#1","#2")}}
% before after
@@ -373,7 +373,8 @@
{\dodoubleempty\docompletelistofsorts}
\def\docompletelistofsorts[#1][#2]%
- {\normalexpanded{\systemsuppliedchapter[#1]{\noexpand\headtext{#2}}}%
+ {\edef\currentsorting{#1}%
+ \normalexpanded{\systemsuppliedchapter[#1]{\noexpand\headtext{\sortingparameter\s!multi}}}%
\doplacelistofsorts[#1][#2]%
\page[\v!yes]}
diff --git a/tex/context/base/supp-fil.lua b/tex/context/base/supp-fil.lua
deleted file mode 100644
index 449fa4894..000000000
--- a/tex/context/base/supp-fil.lua
+++ /dev/null
@@ -1,336 +0,0 @@
-if not modules then modules = { } end modules ['supp-fil'] = {
- version = 1.001,
- comment = "companion to supp-fil.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This module will be redone !
-
--- context is not defined yet! todo! (we need to load tupp-fil after cld)
--- todo: move startreadingfile to lua and push regime there
-
---[[ldx--
-<p>It's more convenient to manipulate filenames (paths) in
-<l n='lua'/> than in <l n='tex'/>. These methods have counterparts
-at the <l n='tex'/> side.</p>
---ldx]]--
-
-local find, gsub, match, format, concat = string.find, string.gsub, string.match, string.format, table.concat
-local texcount = tex.count
-local isfile = lfs.isfile
-
-local trace_modules = false trackers.register("modules.loading", function(v) trace_modules = v end)
-local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end)
-
-local report_modules = logs.reporter("resolvers","modules")
-local report_files = logs.reporter("files","readfile")
-
-commands = commands or { }
-local commands = commands
-environment = environment or { }
-local environment = environment
-
-local findbyscheme = resolvers.finders.byscheme
-
--- needs a cleanup:
-
-function commands.checkfilename(str) -- "/whatever..." "c:..." "http://..."
- texcount.kindoffile = (find(str,"^/") or find(str,"[%a]:") and 1) or 0
-end
-
-function commands.thesanitizedfilename(str)
- context((gsub(str,"\\","/")))
-end
-
-local testcase = commands.testcase
-
-function commands.splitfilename(fullname)
- local path, name, base, suffix = '', fullname, fullname, ''
- local p, n = match(fullname,"^(.+)/(.-)$")
- if p and n then
- path, name, base = p, n, n
- end
- local b, s = match(base,"^(.+)%.(.-)$")
- if b and s then
- name, suffix = b, s
- end
- texcount.splitoffkind = (path == "" and 0) or (path == '.' and 1) or 2
- local setvalue = context.setvalue
- setvalue("splitofffull", fullname)
- setvalue("splitoffpath", path)
- setvalue("splitoffbase", base)
- setvalue("splitoffname", name)
- setvalue("splitofftype", suffix)
-end
-
-function commands.splitfiletype(fullname)
- local name, suffix = fullname, ''
- local n, s = match(fullname,"^(.+)%.(.-)$")
- if n and s then
- name, suffix = n, s
- end
- local setvalue = context.setvalue
- setvalue("splitofffull", fullname)
- setvalue("splitoffpath", "")
- setvalue("splitoffname", name)
- setvalue("splitofftype", suffix)
-end
-
-function commands.doifparentfileelse(n)
- testcase(n == environment.jobname or n == environment.jobname .. '.tex' or n == environment.outputfilename)
-end
-
--- saves some .15 sec on 12 sec format generation
-
-local lastexistingfile = ""
-
-function commands.doiffileexistelse(name)
- if not name or name == "" then
- lastexistingfile = ""
- else
- lastexistingfile = resolvers.findtexfile(name) or ""
- end
- return testcase(lastexistingfile ~= "")
-end
-
-function commands.lastexistingfile()
- context(lastexistingfile)
-end
-
--- more, we can cache matches
-
-local finders, loaders, openers = resolvers.finders, resolvers.loaders, resolvers.openers
-
-local found = { } -- can best be done in the resolver itself
-
--- todo: tracing
-
-local function readfilename(specification,backtrack,treetoo)
- local name = specification.filename
- local fnd = found[name]
- if not fnd then
- if isfile(name) then
- if trace_files then
- report_files("found local: %s",name)
- end
- fnd = name
- end
- if not fnd and backtrack then
- local fname = name
- for i=1,backtrack,1 do
- fname = "../" .. fname
- if isfile(fname) then
- if trace_files then
- report_files("found by backtracking: %s",fname)
- end
- fnd = fname
- break
- elseif trace_files then
- report_files("not found by backtracking: %s",fname)
- end
- end
- end
- if not fnd and treetoo then
- fnd = resolvers.findtexfile(name) or ""
- if trace_files then
- if fnd ~= "" then
- report_files("found by tree lookup: %s",fnd)
- else
- report_files("not found by tree lookup: %s",name)
- end
- end
- end
- found[name] = fnd
- elseif trace_files then
- if fnd ~= "" then
- report_files("already found: %s",fnd)
- else
- report_files("already not found: %s",name)
- end
- end
- return fnd or ""
-end
-
-function commands.readfilename(filename)
- return findbyscheme("any",filename)
-end
-
-function finders.job(specification) return readfilename(specification,false,false) end -- current path, no backtracking
-function finders.loc(specification) return readfilename(specification,2, false) end -- current path, backtracking
-function finders.sys(specification) return readfilename(specification,false,true ) end -- current path, obeys tex search
-function finders.fix(specification) return readfilename(specification,2, false) end -- specified path, backtracking
-function finders.set(specification) return readfilename(specification,false,false) end -- specified path, no backtracking
-function finders.any(specification) return readfilename(specification,2, true ) end -- loc job sys
-
-openers.job = openers.file loaders.job = loaders.file -- default anyway
-openers.loc = openers.file loaders.loc = loaders.file
-openers.sys = openers.file loaders.sys = loaders.file
-openers.fix = openers.file loaders.fix = loaders.file
-openers.set = openers.file loaders.set = loaders.file
-openers.any = openers.file loaders.any = loaders.file
-
-function finders.doreadfile(scheme,path,name) -- better do a split and then pass table
- local fullname
- if url.hasscheme(name) then
- fullname = name
- else
- fullname = ((path == "") and format("%s:///%s",scheme,name)) or format("%s:///%s/%s",scheme,path,name)
- end
- return resolvers.findtexfile(fullname) or "" -- can be more direct
-end
-
-function commands.doreadfile(scheme,path,name)
- context(finders.doreadfile(scheme,path,name))
-end
-
--- modules can have a specific suffix or can specify one
-
-local prefixes = { "m", "p", "s", "x", "v", "t" }
-local suffixes = { "mkiv", "tex", "mkvi" } -- order might change and how about cld
-local modstatus = { }
-
-local function usemodule(name,hasscheme)
- local foundname
- if hasscheme then
- -- no auto suffix as http will return a home page or error page
- -- so we only add one if missing
- local fullname = file.addsuffix(name,"tex")
- if trace_modules then
- report_modules("checking url: '%s'",fullname)
- end
- foundname = resolvers.findtexfile(fullname) or ""
- elseif file.extname(name) ~= "" then
- if trace_modules then
- report_modules("checking file: '%s'",name)
- end
- foundname = findbyscheme("any",name) or ""
- else
- for i=1,#suffixes do
- local fullname = file.addsuffix(name,suffixes[i])
- if trace_modules then
- report_modules("checking file: '%s'",fullname)
- end
- foundname = findbyscheme("any",fullname) or ""
- if foundname ~= "" then
- break
- end
- end
- end
- if foundname ~= "" then
- if trace_modules then
- report_modules("loading: '%s'",foundname)
- end
- context.startreadingfile()
- context.input(foundname)
- context.stopreadingfile()
- return true
- else
- return false
- end
-end
-
-function commands.usemodules(prefix,askedname,truename)
- local hasprefix = prefix and prefix ~= ""
- local hashname = ((hasprefix and prefix) or "*") .. "-" .. truename
- local status = modstatus[hashname]
- if status == 0 then
- -- not found
- elseif status == 1 then
- status = status + 1
- else
- if trace_modules then
- report_modules("locating: prefix: '%s', askedname: '%s', truename: '%s'",prefix or "", askedname or "", truename or "")
- end
- local hasscheme = url.hasscheme(truename)
- if hasscheme then
- -- no prefix and suffix done
- if usemodule(truename,true) then
- status = 1
- else
- status = 0
- end
- elseif hasprefix then
- if usemodule(prefix .. "-" .. truename) then
- status = 1
- else
- status = 0
- end
- else
- for i=1,#prefixes do
- -- todo: reconstruct name i.e. basename
- local thename = prefixes[i] .. "-" .. truename
- if usemodule(thename) then
- status = 1
- break
- end
- end
- if status then
- -- ok, don't change
- elseif usemodule(truename) then
- status = 1
- else
- status = 0
- end
- end
- end
- if status == 0 then
- report_modules("not found: '%s'",askedname)
- elseif status == 1 then
- report_modules("loaded: '%s'",trace_modules and truename or askedname)
- else
- report_modules("already loaded: '%s'",trace_modules and truename or askedname)
- end
- modstatus[hashname] = status
-end
-
-local loaded = { }
-
-function commands.uselibrary(name,patterns,action,failure)
- local files = utilities.parsers.settings_to_array(name)
- local done = false
- for i=1,#files do
- local filename = files[i]
- if not loaded[filename] then
- loaded[filename] = true
- for i=1,#patterns do
- local filename = format(patterns[i],filename)
- -- local foundname = resolvers.findfile(filename) or ""
- local foundname = finders.doreadfile("any",".",filename)
- if foundname ~= "" then
- action(name,foundname)
- done = true
- break
- end
- end
- if done then
- break
- end
- end
- end
- if failure and not done then
- failure(name)
- end
-end
-
-statistics.register("loaded tex modules", function()
- if next(modstatus) then
- local t, f, nt, nf = { }, { }, 0, 0
- for k, v in table.sortedhash(modstatus) do
- k = file.basename(k)
- if v == 0 then
- nf = nf + 1
- f[nf] = k
- else
- nt = nt + 1
- t[nt] = k
- end
- end
- local ts = (nt>0 and format(" (%s)",concat(t," "))) or ""
- local fs = (nf>0 and format(" (%s)",concat(f," "))) or ""
- return format("%s requested, %s found%s, %s missing%s",nt+nf,nt,ts,nf,fs)
- else
- return nil
- end
-end)
diff --git a/tex/context/base/supp-fil.mkiv b/tex/context/base/supp-fil.mkiv
deleted file mode 100644
index 93d8d4e4a..000000000
--- a/tex/context/base/supp-fil.mkiv
+++ /dev/null
@@ -1,462 +0,0 @@
-%D \module
-%D [ file=supp-fil,
-%D version=1995.10.10,
-%D title=\CONTEXT\ Support Macros,
-%D subtitle=Files,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D \TEX\ operates on files, so one wouldn't wonder that there
-%D is a separate module for file commands. In \CONTEXT\ files
-%D are used for several purposes:
-%D
-%D \startitemize[packed]
-%D \item general textual input
-%D \item logging status information
-%D \item saving registers, lists and references
-%D \item buffering deferred textual input
-%D \stopitemize
-%D
-%D When dealing with files we can load them as a whole, using
-%D the \type{\input} primitive or load them on a line||by||line
-%D basis, using \type{\read}. Writing is always done line by
-%D line, using \type{\write}.
-
-\writestatus{loading}{ConTeXt Support Macros / Files}
-
-\registerctxluafile{supp-fil}{1.001}
-
-\unprotect
-
-\ifx\undefined\f!pathseparator
- \def\f!pathseparator{/}
- \def\f!currentpath {.}
- \def\f!parentpath {..}
-\fi
-
-\def\openinputfile #1#2{\immediate\openin #1={#2}\relax} \def\closeinputfile #1{\immediate\closein #1\relax}
-\def\openoutputfile#1#2{\immediate\openout#1={#2}\relax} \def\closeoutputfile#1{\immediate\closeout#1\relax}
-
-%D \macros
-%D {pushendofline,popendofline}
-%D
-%D When we are loading files in the middle of the typesetting
-%D process, for instance when we load references, we have to be
-%D sure that the reading process does not generate so called
-%D 'spurious spaces'. This can be prevented by assigning the
-%D line ending character the \CATCODE\ comment. This is
-%D accomplished by
-%D
-%D \starttyping
-%D \pushendofline
-%D ... reading ...
-%D \popendofline
-%D \stoptyping
-
-\newcount \endoflinelevel
-
-\def\pushendofline
- {\advance\endoflinelevel\plusone
- \expandafter\chardef\csname :eol:\number\endoflinelevel\endcsname\catcode\endoflineasciicode
- \catcode\endoflineasciicode\commentcatcode\relax}
-
-\def\popendofline
- {\catcode\endoflineasciicode\csname :eol:\number\endoflinelevel\endcsname
- \advance\endoflinelevel\minusone}
-
-\def\restoreendofline
- {\catcode\endoflineasciicode\endoflinecatcode}
-
-%D \macros
-%D {scratchread, scratchwrite}
-%D
-%D We define a scratch file for reading. Keep in mind that
-%D the number of files is limited to~16, so use this one when
-%D possible. We also define a scratch output file.
-
-\ifx\undefined\scratchread \newread \scratchread \fi
-\ifx\undefined\scratchwrite \newwrite\scratchwrite \fi
-
-%D \macros
-%D {unlinkfile}
-%D
-%D Sometimes we want to make sure a file is deleted, so here
-%D is a macro that does the job. It's named after the \PERL\
-%D one.
-
-\def\unlinkfile#1{\ctxlua{os.remove([[#1]])}}
-
-%D \macros
-%D {writeln}
-%D
-%D This saves a few tokens:
-
-\def\writeln#1{\write#1{}}
-
-\def\doiffileexistselse #1{\ctxcommand{doiffileexistelse([[#1]])}}
-\def\lastfoundexistingfile {\ctxcommand{lastexistingfile()}}
-
-%D \macros
-%D {doprocessfile,fileline,fileprocessedtrue,dofinishfile}
-%D
-%D The next macro offers a framework for processing files on a
-%D line by line basis.
-%D
-%D \starttyping
-%D \doprocessfile \identifier {name} \action
-%D \stoptyping
-%D
-%D The first argument can for instance be \type{\scratchread}.
-%D The action must do something with \type{\fileline}, which
-%D holds the current line. One can halfway step out using
-%D \type{\dofinishfile} and use \type{\iffileprocessed} to
-%D see if indeed some content was found.
-
-\newif\iffileprocessed
-
-\let\fileline\empty
-
-\def\doprocessfile#1#2#3%
- {\openinputfile{#1}{#2}%
- \ifeof#1%
- \fileprocessedfalse
- \closeinputfile#1%
- \else
- \fileprocessedtrue
- \gdef\dofinishfile
- {\closeinputfile#1%
- \global\let\doprocessline\relax}%
- \gdef\doprocessline
- {\ifeof#1%
- \expandafter\dofinishfile
- \else
- \global\read#1 to \fileline
- #3\relax
- \expandafter\doprocessline
- \fi}%
- \expandafter\doprocessline
- \fi}
-
-%D \macros
-%D {pathplusfile,assignfullfilename,sanitizefilename}
-%D
-%D Use \type{\pathplusfile} to compose a full file name, like
-%D in:
-%D
-%D \starttyping
-%D \pathplusfile{path}{file}
-%D \stoptyping
-%D
-%D By default, this expands into {\tt \pathplusfile{path}{file}}.
-
-\def\pathplusfile#1#2{#1\f!pathseparator#2}
-
-%D This one constructs a filename from a (possibly empty)
-%D path and filename.
-
-\def\assignfullfilename#1#2\to#3%
- {\doifelsenothing{#1}
- {\edef#3{#2}}
- {\edef#3{#1\f!pathseparator#2}}}
-
-\def\sanitizefilename#1\to#2{\edef#2{\ctxcommand{thesanitizedfilename([[#1]])}}}
-
-%D NEW:
-
-\newconstant\kindoffile % 0=normal 1=full path spec (or http) / set at the lua end
-
-\def\checkfilename#1{\ctxcommand{checkfilename([[#1]])}}
-
-%D \macros
-%D {input, normalinput}
-%D
-%D Sometimes we run into troubles when \type {\input} wants to get
-%D expanded, e.g. in a \type {\write} (which happens in the metafun
-%D manual when we permit long MP lines). So, instead of fixing that,
-%D we go for a redefinition of \type {\input}. Of course it's better
-%D to use \type {\readfile} or \type {\processfile}.
-
-\unexpanded\def\input{\normalinput}
-
-\def\inputgivenfile#1{\normalinput{#1}}
-
-%D \macros
-%D {readfile,ReadFile}
-%D
-%D One cannot be sure if a file exists. When no file can be
-%D found, the \type{\input} primitive gives an error message
-%D and switches to interactive mode. The macro \type{\readfile}
-%D takes care of non||existing files. This macro has two faces.
-%D
-%D \starttyping
-%D \ReadFile {filename}
-%D \readfile {filename} {before loading} {not found}
-%D \stoptyping
-%D
-%D Many \TEX\ implementations have laid out some strategy for
-%D locating files. This can lead to unexpected results,
-%D especially when one loads files that are not found in the
-%D current directory. Let's give an example of this. In
-%D \CONTEXT\ illustrations can be defined in an external file.
-%D The resizing macro first looks if an illustration is defined
-%D in the local definitions file. When no such file is found,
-%D it searches for a global file and when this file is not
-%D found either, the illustration itself is scanned for
-%D dimensions. One can imagine what happens if an adapted,
-%D locally stored illustration is scaled according to
-%D dimensions stored somewhere else.
-%D
-%D When some \TEX\ implementation starts looking for a file, it
-%D normally first looks in the current directory. When no file
-%D is found, \TEX\ starts searching on the path where format
-%D and|/|or style files are stored. Depending on the implementation
-%D this can considerably slow down processing speed.
-%D
-%D In \CONTEXT, we support a project||wise ordering of files.
-%D In such an approach it seems feasible to store common files
-%D in a lower directory. When for instance searching for a
-%D general layout file, we therefore have to backtrack.
-%D
-%D These three considerations have lead to a more advanced
-%D approach for loading files.
-%D
-%D We first present an earlier implementation of
-%D \type{\readfile}. This command backtracks parent
-%D directories, upto a predefined level. Users can change this
-%D level (on the commandline using a directive); we default to~3.
-%D
-%D We use \type{\normalinput} instead of \type{\input}
-%D because we want to be able to redefine the original
-%D \type{\input} when needed, for instance when loading third
-%D party libraries.
-
-\newevery \everybeforereadfile \EveryBeforeReadFile
-\newevery \everyafterreadfile \EveryAfterReadFile
-
-\let \everyreadfile \everybeforereadfile
-
-\def\maxreadlevel{\ctxcommand{maxreadlevel()}}
-
-% We need to postpone loading, else we got frozen type-* files and so when
-% a format is generated on a source path.
-
-\def\doreadfile#1#2#3% protocol path filename true false
- {\edef\readfilename{\ctxcommand{doreadfile("#1","#2","#3")}}%
- \ifx\readfilename\empty
- \expandafter\doreadfilenop
- \else
- \expandafter\doreadfileyes
- \fi}
-
-\long\def\doreadfileyes#1#2%
- {#1\relax
- \the\everybeforereadfile
- \relax\inputgivenfile\readfilename\relax
- \the\everyafterreadfile}
-
-\long\def\doreadfilenop#1#2%
- {#2}
-
-%D \macros
-%D {readjobfile,readlocfile,readsysfile,
-%D readfixfile,readsetfile}
-%D
-%D This implementation honors the third situation, but we
-%D still can get unwanted files loaded and/or can get involved
-%D in extensive searching.
-%D
-%D Due to different needs, we decided to offer four alternative
-%D loading commands. With \type{\readjobfile} we load a local
-%D file and do no backtracking, while \type{\readlocfile}
-%D backtracks~\number\maxreadlevel\ directories, including the current
-%D one.
-%D
-%D System files can be anywhere and therefore
-%D \type{\readsysfile} is not bound to the current directory
-%D and obeys the \TEX\ implementation.
-%D
-%D Of the last two, \type{\readfixfile} searches on the
-%D directory specified and backtracks too, while
-%D \type{\readsetfile} does only search on the specified path.
-%D
-%D The most liberal is \type {\readfile}.
-
-\unexpanded\def\readjobfile #1{\doreadfile{job} {.}{#1}} % current path, no backtracking
-\unexpanded\def\readlocfile #1{\doreadfile{loc} {.}{#1}} % current path, backtracking
-\unexpanded\def\readsysfile #1{\doreadfile{sys} {.}{#1}} % current path, obeys tex search
-\unexpanded\def\readfixfile#1#2{\doreadfile{fix}{#1}{#2}} % specified path, backtracking
-\unexpanded\def\readsetfile#1#2{\doreadfile{set}{#1}{#2}} % specified path, no backtracking
-\unexpanded\def\readfile #1{\doreadfile{any} {.}{#1}}
-\unexpanded\def\ReadFile #1{\doreadfile{any} {.}{#1}\donothing\donothing}
-
-%D So now we've got ourselves five file loading commands:
-%D
-%D \starttyping
-%D \readfile {filename} {before loading} {not found}
-%D
-%D \readjobfile {filename} {before loading} {not found}
-%D \readlocfile {filename} {before loading} {not found}
-%D \readfixfile {filename} {before loading} {not found}
-%D \readsysfile {directory} {filename} {before loading} {not found}
-%D \stoptyping
-
-\def\readtexfile#1#2#3%
- {\pushcatcodetable \catcodetable \ctxcatcodes
- \readfile{#1}{#2}{#3}%
- \popcatcodetable}
-
-\ifdefined \xmlcatcodes
-
- \def\readxmlfile#1#2#3%
- {\pushcatcodetable \catcodetable \xmlcatcodes
- \readfile{#1}{#2}{#3}%
- \popcatcodetable}
-
-\fi
-
-%D \macros
-%D {doiffileelse,doiflocfileelse}
-%D
-%D The next alternative only looks if a file is present. No
-%D loading is done. This one obeys the standard \TEX\
-%D implementation method.
-%D
-%D \starttyping
-%D \doiffileelse {filename} {found} {not found}
-%D \stoptyping
-%D
-%D \starttyping
-%D \doiflocfileelse {filename} {before loading} {not found}
-%D \stoptyping
-
-\def\doiffileelse {\doiffileexistselse}
-\def\doiffile #1{\doiffileexistselse{#1}\firstofoneargument\gobbleoneargument}
-\def\doifnotfile #1{\doiffileexistselse{#1}\gobbleoneargument\firstofoneargument}
-
-\def\doiflocfileelse#1%
- {\makelocreadfilename{#1}%
- \doiffileelse\readfilename}
-
-\def\makelocreadfilename#1%
- {\sanitizefilename#1\to\readfilename
- \checkfilename\readfilename
- \ifcase\kindoffile
- \edef\readfilename{\pathplusfile\f!currentpath{#1}}%
- \fi}
-
-%D \macros
-%D {doonlyonce, doinputonce, doendinputonce}
-%D
-%D Especially macropackages need only be loaded once.
-%D Repetitive loading not only costs time, relocating registers
-%D often leads to abortion of the processing because \TEX's
-%D capacity is limited. One can prevent multiple execution and
-%D loading by using one of both:
-%D
-%D \starttyping
-%D \doonlyonce{actions}
-%D \doinputonce{filename}
-%D \doendinputonce{filename}
-%D \stoptyping
-%D
-%D This command obeys the standard method for locating files.
-
-\long\def\doonlyonce#1%
- {\doifundefinedelse{@@@#1@@@}
- {\letgvalue{@@@#1@@@}\empty
- \firstofoneargument}
- {\gobbleoneargument}}
-
-\def\doinputonce#1%
- {\doonlyonce{#1}{\doiffileelse{#1}{\inputgivenfile{#1}}\donothing}}
-
-\def\doendinputonce#1%
- {\doifdefined{@@@#1@@@}\endinput}
-
-\def\forgetdoingonce#1%
- {\global\letbeundefined{@@@#1@@@}}
-
-%D \macros
-%D {doifparentfileelse}
-%D
-%D The test \type{\doifelse{\jobname}{filename}} does not give
-%D the desired result, simply because \type{\jobname} expands
-%D to characters with \CATCODE~12, while the characters in
-%D \type{filename} have \CATCODE~11. So we can better use:
-%D
-%D \starttyping
-%D \doifparentfileelse{filename}{yes}{no}
-%D \stoptyping
-%D
-%D Since \TEXEXEC\ (and thereby \CONTEXT) supports renaming of
-%D the outputfile, we also need to check on that alternative
-%D name.
-
-\ifx\outputfilename\undefined \def\outputfilename{\jobname} \fi
-
-\def\doifparentfileelse#1{\ctxcommand{doifparentfileelse([[#1]])}}
-
-\newcount\readingfilelevel
-
-%D We need to redo this: catcode sets and such
-
-\newtoks \everystartreadingfile
-\newtoks \everystopreadingfile
-
-\unexpanded\def\startreadingfile% better: use an every and \setnormalcatcodes
- {\global\advance\readingfilelevel\plusone
- \the\everystartreadingfile
- \pushcatcodetable % safeguard
- \ctxlua{regimes.push()}% temporarily this way
- }% \setcatcodetable\prtcatcodes % no longer in mkiv, has to be done explicitly
-
-\unexpanded\def\stopreadingfile
- \popcatcodetable % safeguard
- \ctxlua{regimes.pop()}% temporarily this way
- \the\everystopreadingfile
- \global\advance\readingfilelevel\minusone}
-
-%D \macros
-%D {splitfilename}
-%D
-%D I should have made this one sooner. This macro was first needed when I
-%D ran into a graphic with a period in the path part.
-%D
-%D \startbuffer
-%D \def\showfilesplit
-%D {\bgroup \tttf
-%D \hbox{(full: \splitofffull)}\space
-%D \hbox{(path: \splitoffpath)}\space
-%D \hbox{(base: \splitoffbase)}\space
-%D \hbox{(name: \splitoffname)}\space
-%D \hbox{(type: \splitofftype)}\space
-%D \egroup}
-%D
-%D \splitfilename{c:/aa/bb/cc/dd.ee.ff} \showfilesplit \endgraf
-%D \splitfilename{c:/aa/bb/cc/dd.ee} \showfilesplit \endgraf
-%D \splitfilename{c:/aa/bb/cc/dd} \showfilesplit \endgraf
-%D
-%D \splitfilename{dd.ee.ff} \showfilesplit \endgraf
-%D \splitfilename{dd.ee} \showfilesplit \endgraf
-%D \splitfilename{dd} \showfilesplit \endgraf
-%D \stopbuffer
-%D
-%D \start \typebuffer \getbuffer \stop
-
-\def\splitoffroot{.} \newconstant\splitoffkind
-
-\let\splitofffull\empty
-\let\splitoffpath\empty
-\let\splitoffbase\empty
-\let\splitoffname\empty
-\let\splitofftype\empty
-
-\def\splitfilename#1{\ctxcommand{splitfilename([[#1]])}}
-\def\splitfiletype#1{\ctxcommand{splitfiletype([[#1]])}}
-
-\protect \endinput
diff --git a/tex/context/base/supp-ran.lua b/tex/context/base/supp-ran.lua
index accd5c94c..75202f696 100644
--- a/tex/context/base/supp-ran.lua
+++ b/tex/context/base/supp-ran.lua
@@ -16,7 +16,6 @@ local commands = commands
local math = math
local random, randomseed, round, seed, last = math.random, math.randomseed, math.round, false, 1
-local texwrite = tex.write
function math.setrandomseedi(n,comment)
if not n then
@@ -35,12 +34,12 @@ end
function commands.getrandomcounta(min,max)
last = random(min,max)
- texwrite(last)
+ context(last)
end
function commands.getrandomcountb(min,max)
last = random(min,max)/65536
- texwrite(last)
+ context(last)
end
function commands.setrandomseed(n)
@@ -49,7 +48,7 @@ function commands.setrandomseed(n)
end
function commands.getrandomseed(n)
- texwrite(last)
+ context(last)
end
-- maybe stack
diff --git a/tex/context/base/symb-ini.lua b/tex/context/base/symb-ini.lua
index 1bc8ee2aa..559b032e4 100644
--- a/tex/context/base/symb-ini.lua
+++ b/tex/context/base/symb-ini.lua
@@ -18,19 +18,29 @@ local status_symbols = logs.messenger("fonts","symbols")
local patterns = { "symb-imp-%s.mkiv", "symb-imp-%s.tex", "symb-%s.mkiv", "symb-%s.tex" }
local listitem = utilities.parsers.listitem
+local function action(name,foundname)
+ -- context.startnointerference()
+ context.startreadingfile()
+ context.input(foundname)
+ status_symbols("loaded: library '%s'",name)
+ context.stopreadingfile()
+ -- context.stopnointerference()
+end
+
+local function failure(name)
+ report_symbols("unknown: library '%s'",name)
+end
+
function symbols.uselibrary(name)
if name ~= variables.reset then
for name in listitem(name) do
- commands.uselibrary(name,patterns,function(name,foundname)
- -- context.startnointerference()
- context.startreadingfile()
- context.input(foundname)
- status_symbols("loaded: library '%s'",name)
- context.stopreadingfile()
- -- context.stopnointerference()
- end, function(name)
- report_symbols("unknown: library '%s'",name)
- end)
+ commands.uselibrary {
+ name = name,
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
end
end
end
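For comparison, a hypothetical call in the same style (the library name, patterns and reporter below are made up; only the specification-table interface of commands.uselibrary is taken from the code above):

local report = logs.reporter("demo","libraries") -- hypothetical reporter

commands.uselibrary {
    name     = "demo",
    patterns = { "demo-imp-%s.mkiv", "demo-%s.tex" }, -- hypothetical lookup patterns
    action   = function(name,foundname) report("loaded: library '%s'",name) end,
    failure  = function(name) report("unknown: library '%s'",name) end,
    onlyonce = true,
}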
diff --git a/tex/context/base/syst-aux.mkiv b/tex/context/base/syst-aux.mkiv
index 20674663c..7016228c9 100644
--- a/tex/context/base/syst-aux.mkiv
+++ b/tex/context/base/syst-aux.mkiv
@@ -53,23 +53,6 @@
\let\unexpanded\normalprotected
%D \macros
-%D {doifolderversionelse}
-%D
-%D We start with a macro specially for Aditya who wants to be able
-%D to use development versions of \MKIV\ for real documents.
-%D
-%D \starttyping
-%D \doifolderversionelse\contextversion{1010.10.10} {OLDER} {OKAY} => OLDER
-%D \doifolderversionelse\contextversion{2020.20.20} {OLDER} {OKAY} => OKAY
-%D \doifolderversionelse\contextversion{2020} {OLDER} {OKAY} => OKAY
-%D \stoptyping
-%D
-%D The version pattern is \type {yyyy.mm.dd} (with mm and dd being optional).
-
-\def\doifolderversionelse#1#2{\ctxcommand{doifolderversionelse("#1","#2")}}
-\def\doifoldercontextelse #1{\ctxcommand{doifolderversionelse("#1")}}
-
-%D \macros
%D {normalspace}
%D
%D There is already \type{\space} but just to be sure we also
@@ -3660,7 +3643,7 @@
\long\unexpanded\def\xdorecurse#1#2%
{\global\advance\outerrecurse \plusone
- \long\global\@EA\def\csname\@@arecurse\recursedepth\endcsname{#2}%
+ \long\@EA\gdef\csname\@@arecurse\recursedepth\endcsname{#2}%
\global\@EA\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
\@EA\dodorecurse\@EA1\@EA{\number#1}}
@@ -3718,7 +3701,7 @@
\unexpanded\long\def\doloop#1%
{\global\advance\outerrecurse \plusone
- \long\global\@EA\def\csname\@@arecurse\recursedepth\endcsname{#1}%
+ \long\@EA\gdef\csname\@@arecurse\recursedepth\endcsname{#1}%
\global\@EA\let\csname\@@irecurse\recursedepth\endcsname\recurselevel
\let\endofloop\dodoloop
\dodoloop1} % no \plusone else \recurselevel wrong
@@ -3925,24 +3908,57 @@
\everydump\expandafter{\the\everydump\resetrecurselevel}
-%D This alternative looks a bit different and uses a
-%D pseudo counter. When this macro is nested, we have to use
-%D different counters. This time we use keywords.
+% %
+
+%D \macros
+%D {doloopoverlist}
%D
%D \starttyping
-%D \def\alfa{2} \def\beta{100} \def\gamma{3}
-%D
-%D \for \n=55 \to 100 \step 1 \do {... \n ...}
-%D \for \n=\alfa \to \beta \step \gamma \do {... \n ...}
-%D \for \n=\n \to 120 \step 1 \do {... \n ...}
-%D \for \n=120 \to 100 \step -3 \do {... \n ...}
-%D \for \n=55 \to 100 \step 2 \do {... \n ...}
+%D \doloopoverlist {red,green,blue} {
+%D \setuppalet[\recursestring]
+%D \doloopoverlist {light,normal,dark} {
+%D \blackrule[color=\recursestring,width=20cm,height=2cm,depth=0cm]\par
+%D }
+%D }
%D \stoptyping
%D
-%D Only in the third example we need to predefine \type{\n}.
-%D The use of \type{\od} as a dilimiter would have made nested
-%D use more problematic.
+%D or:
+%D
+%D \starttyping
+%D \doloopoverlist {red,green,blue} {
+%D \setuppalet[#1]
+%D \doloopoverlist {light,normal,dark} {
+%D \blackrule[color=##1,width=20cm,height=2cm,depth=0cm]\par
+%D }
+%D }
+%D \stoptyping
+\unexpanded\def\doloopoverlist#1#2%
+ {\global\advance\outerrecurse\plusone
+ \@EA\gdef\csname\@@arecurse\recursedepth\endcsname##1{\edef\recursestring{##1}#2}%
+ \@EA\glet\csname\@@irecurse\recursedepth\endcsname\recursestring
+ \normalexpanded{\processcommalist[#1]{\expandafter\noexpand\csname\@@arecurse\recursedepth\endcsname}}%
+ \@EA\let\@EA\recursestring\csname\@@irecurse\recursedepth\endcsname
+ \global\advance\outerrecurse\minusone}
+
+% D This alternative looks a bit different and uses a
+% D pseudo counter. When this macro is nested, we have to use
+% D different counters. This time we use keywords.
+% D
+% D \starttyping
+% D \def\alfa{2} \def\beta{100} \def\gamma{3}
+% D
+% D \for \n=55 \to 100 \step 1 \do {... \n ...}
+% D \for \n=\alfa \to \beta \step \gamma \do {... \n ...}
+% D \for \n=\n \to 120 \step 1 \do {... \n ...}
+% D \for \n=120 \to 100 \step -3 \do {... \n ...}
+% D \for \n=55 \to 100 \step 2 \do {... \n ...}
+% D \stoptyping
+% D
+% D Only in the third example we need to predefine \type{\n}.
+% D The use of \type{\od} as a dilimiter would have made nested
+% D use more problematic.
+%
% obsolete:
%
% \def\for#1=#2\to#3\step#4\do#5%
@@ -4808,8 +4824,7 @@
%D \dotoks\tokenlist
%D \stoptyping
%D
-%D Er worden eerst enkele klad||registers gedefinieerd. These
-%D macros are clones of the ones implemented in page~378 of
+%D These macros are clones of the ones implemented in page~378 of
%D Knuth's \TeX book.
\newtoks\@@scratchtoks
diff --git a/tex/context/base/syst-con.lua b/tex/context/base/syst-con.lua
index f78d83011..2eaf98fd7 100644
--- a/tex/context/base/syst-con.lua
+++ b/tex/context/base/syst-con.lua
@@ -14,28 +14,28 @@ the top of <l n='luatex'/>'s char range but outside the unicode range.</p>
--ldx]]--
local tonumber = tonumber
-local char, texsprint = unicode.utf8.char, tex.sprint
+local utfchar = unicode.utf8.char
local gsub, format = string.gsub, string.format
function converters.hexstringtonumber(n) tonumber(n,16) end
function converters.octstringtonumber(n) tonumber(n, 8) end
-function converters.rawcharacter (n) char(0x110000+n) end
+function converters.rawcharacter (n) utfchar(0x110000+n) end
function converters.lchexnumber (n) format("%x" ,n) end
function converters.uchexnumber (n) format("%X" ,n) end
function converters.lchexnumbers (n) format("%02x",n) end
function converters.uchexnumbers (n) format("%02X",n) end
function converters.octnumber (n) format("%03o",n) end
-function commands.hexstringtonumber(n) context(tonumber(n,16)) end
-function commands.octstringtonumber(n) context(tonumber(n, 8)) end
-function commands.rawcharacter (n) context(char(0x110000+n)) end
-function commands.lchexnumber (n) context(format("%x" ,n)) end
-function commands.uchexnumber (n) context(format("%X" ,n)) end
-function commands.lchexnumbers (n) context(format("%02x",n)) end
-function commands.uchexnumbers (n) context(format("%02X",n)) end
-function commands.octnumber (n) context(format("%03o",n)) end
+function commands.hexstringtonumber(n) context(tonumber(n,16)) end
+function commands.octstringtonumber(n) context(tonumber(n, 8)) end
+function commands.rawcharacter (n) context(utfchar(0x110000+n)) end
+function commands.lchexnumber (n) context("%x" ,n) end
+function commands.uchexnumber (n) context("%X" ,n) end
+function commands.lchexnumbers (n) context("%02x",n) end
+function commands.uchexnumbers (n) context("%02X",n) end
+function commands.octnumber (n) context("%03o",n) end
-function commands.format(fmt,...)
+function commands.format(fmt,...) -- used ?
fmt = gsub(fmt,"@","%%")
context(fmt,...)
end
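
The rewritten commands rely on context() formatting its first argument when more arguments follow, so the explicit string.format step can be dropped. A minimal illustration of the equivalence used above:

local format = string.format
local n = 255 -- example value

context(format("%X",n)) -- old style: format first, then pipe the string
context("%X",n)         -- new style: let the writer do the formatting
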
diff --git a/tex/context/base/syst-con.mkiv b/tex/context/base/syst-con.mkiv
index 077e69007..6ef734c8f 100644
--- a/tex/context/base/syst-con.mkiv
+++ b/tex/context/base/syst-con.mkiv
@@ -134,16 +134,6 @@
% \let\calculatecos\gobbleoneargument
% \let\calculatetan\gobbleoneargument
-% \def\calculatedsin#1{\ctxsprint{math.sin(#1)}}
-% \def\calculatedcos#1{\ctxsprint{math.cos(#1)}}
-% \def\calculatedtan#1{\ctxsprint{math.tan(#1)}}
-
-% \def\setcalculatedsin#1#2{\edef#1{\ctxsprint{math.sind(#2)}}}
-% \def\setcalculatedcos#1#2{\edef#1{\ctxsprint{math.cosd(#2)}}}
-% \def\setcalculatedtan#1#2{\edef#1{\ctxsprint{math.tand(#2)}}}
-
-% this is actually 20% faster: some overhead in functions but less tokenization
-
\def\setcalculatedsin#1#2{\edef#1{\cldcontext{math.sind(#2)}}}
\def\setcalculatedcos#1#2{\edef#1{\cldcontext{math.cosd(#2)}}}
\def\setcalculatedtan#1#2{\edef#1{\cldcontext{math.tand(#2)}}}
diff --git a/tex/context/base/syst-lua.lua b/tex/context/base/syst-lua.lua
index 678842025..000b16963 100644
--- a/tex/context/base/syst-lua.lua
+++ b/tex/context/base/syst-lua.lua
@@ -6,52 +6,54 @@ if not modules then modules = { } end modules ['syst-lua'] = {
license = "see context related readme files"
}
-local texsprint, texprint, texwrite, texiowrite_nl = tex.sprint, tex.print, tex.write, texio.write_nl
-local format, find = string.format, string.find
+local format, find, match = string.format, string.find, string.match
local tonumber = tonumber
local S, lpegmatch, lpegtsplitat = lpeg.S, lpeg.match, lpeg.tsplitat
-local ctxcatcodes = tex.ctxcatcodes
-
-commands = commands or { } -- cs = commands -- shorter, maybe some day, not used now
+commands = commands or { }
function commands.writestatus(...) logs.status(...) end -- overloaded later
--- todo: use shorter names i.e. less tokenization
+-- todo: use shorter names i.e. less tokenization, like prtcatcodes + f_o_t_a
+
+local firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" )
+local secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments")
+local firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" )
+local gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" )
local function testcase(b)
- if b then -- looks faster with if than with expression
- texsprint(ctxcatcodes,"\\firstoftwoarguments")
+ if b then
+ firstoftwoarguments()
else
- texsprint(ctxcatcodes,"\\secondoftwoarguments")
+ secondoftwoarguments()
end
end
-commands.testcase = testcase
-commands.doifelse = testcase
-
function commands.doif(b)
if b then
- texsprint(ctxcatcodes,"\\firstofoneargument")
+ firstofoneargument()
else
- texsprint(ctxcatcodes,"\\gobbleoneargument")
+ gobbleoneargument()
end
end
function commands.doifnot(b)
if b then
- texsprint(ctxcatcodes,"\\gobbleoneargument")
+ gobbleoneargument()
else
- texsprint(ctxcatcodes,"\\firstofoneargument")
+ firstofoneargument()
end
end
+commands.testcase = testcase
+commands.doifelse = testcase
+
function commands.boolcase(b)
- if b then texwrite(1) else texwrite(0) end
+ context(b and 1 or 0)
end
function commands.doifelsespaces(str)
- return commands.doifelse(find(str,"^ +$"))
+ return testcase(find(str,"^ +$"))
end
local s = lpegtsplitat(",")
@@ -89,19 +91,7 @@ function commands.doifdimenstringelse(str)
testcase(lpegmatch(pattern,str))
end
-local splitter = lpegtsplitat(S(". "))
-
-function commands.doifolderversionelse(one,two) -- one >= two
- if not two then
- one, two = environment.version, one
- elseif one == "" then
- one = environment.version
- end
- local y_1, m_1, d_1 = lpegmatch(splitter,one)
- local y_2, m_2, d_2 = lpegmatch(splitter,two)
- commands.testcase (
- (tonumber(y_1) or 0) >= (tonumber(y_2) or 0) and
- (tonumber(m_1) or 0) >= (tonumber(m_2) or 0) and
- (tonumber(d_1) or 0) >= (tonumber(d_1) or 0)
- )
+function commands.firstinlist(str)
+ local first = match(str,"^([^,]+),")
+ context(first or str)
end
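
The new firstinlist helper pipes the first item of a comma separated list back to TeX, and boolcase now goes through context() as well. A hedged usage sketch with arbitrary input:

commands.firstinlist("red,green,blue") -- pipes: red
commands.firstinlist("red")            -- no comma, so the whole string: red
commands.boolcase(2 > 1)               -- pipes: 1 (0 when false)
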
diff --git a/tex/context/base/syst-lua.mkiv b/tex/context/base/syst-lua.mkiv
index 34e3631c3..a42634896 100644
--- a/tex/context/base/syst-lua.mkiv
+++ b/tex/context/base/syst-lua.mkiv
@@ -37,7 +37,7 @@
% a handy helper (we can probably omit the tex.ctxcatcodes here as nowadays we seldom
% change the regime at the tex end
-%def\luaexpr#1{\ctxlua {tex.sprint(tex.ctxcatcodes,tostring(#1))}}
-\def\luaexpr#1{\directlua\zerocount{tex.sprint(tex.ctxcatcodes,tostring(#1))}} % wrap in global function ?
+%def\luaexpr#1{\ctxlua {context(tostring(#1))}}
+\def\luaexpr#1{\directlua\zerocount{context(tostring(#1))}} % wrap in global function ?
\protect \endinput
diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua
index d5bc4cb56..c3122aee0 100644
--- a/tex/context/base/task-ini.lua
+++ b/tex/context/base/task-ini.lua
@@ -62,6 +62,7 @@ appendaction("shipouts", "finishers", "attributes.effects.handler")
appendaction("shipouts", "finishers", "attributes.viewerlayers.handler") -- disabled
appendaction("math", "normalizers", "noads.handlers.unscript", nil, "nohead") -- always on (maybe disabled)
+appendaction("math", "normalizers", "noads.handlers.variants", nil, "nohead") -- always on
appendaction("math", "normalizers", "noads.handlers.families", nil, "nohead") -- always on
appendaction("math", "normalizers", "noads.handlers.relocate", nil, "nohead") -- always on
appendaction("math", "normalizers", "noads.handlers.render", nil, "nohead") -- always on
diff --git a/tex/context/base/toks-ini.lua b/tex/context/base/toks-ini.lua
index f8b5b9439..1f4d48466 100644
--- a/tex/context/base/toks-ini.lua
+++ b/tex/context/base/toks-ini.lua
@@ -33,8 +33,6 @@ a module.</p>
local token, tex = token, tex
-local texsprint = tex.sprint
-
local createtoken = token.create
local csname_id = token.csname_id
local command_id = token.command_id
@@ -81,7 +79,7 @@ local function printlist(data)
end)
end
-tex.printlist = printlist
+tex.printlist = printlist -- will change to another namespace
function collectors.flush(tag)
printlist(collectordata[tag])
@@ -107,7 +105,7 @@ function collectors.install(tag,end_cs)
local t = get_next()
local a, b = t[1], t[3]
if b == endcs then
- texsprint('\\' ..end_cs) -- to be checked, can be context[end_cs]()
+            context[end_cs]()
return
elseif a == call and registered[b] then
expand()
diff --git a/tex/context/base/trac-inf.lua b/tex/context/base/trac-inf.lua
index 5d8ea3cf8..a5a87c78d 100644
--- a/tex/context/base/trac-inf.lua
+++ b/tex/context/base/trac-inf.lua
@@ -190,5 +190,5 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
diff --git a/tex/context/base/trac-log.lua b/tex/context/base/trac-log.lua
index 13bb18b06..c843236de 100644
--- a/tex/context/base/trac-log.lua
+++ b/tex/context/base/trac-log.lua
@@ -61,6 +61,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -166,6 +170,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -225,6 +233,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -585,3 +594,8 @@ else
print(format(...))
end
end
+
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
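
Together with the new logs.newline writer, the unbuffered stdout and stderr make progress lines show up as soon as they are written. A small sketch; the reporter channel name is made up:

local report = logs.reporter("demo") -- hypothetical channel

report("first pass done")
logs.newline()                       -- force a fresh line in the terminal and log
report("second pass done")
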
diff --git a/tex/context/base/type-ini.lua b/tex/context/base/type-ini.lua
index fd1282474..c2b274fb9 100644
--- a/tex/context/base/type-ini.lua
+++ b/tex/context/base/type-ini.lua
@@ -8,24 +8,26 @@ if not modules then modules = { } end modules ['type-ini'] = {
-- more code will move here
-local format, gsub = string.format, string.gsub
+local gsub = string.gsub
local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex", "type-%s.mkiv", "type-%s.tex" }
+local function action(name,foundname)
+ context.startreadingfile()
+ context.pushendofline()
+ context.unprotect()
+ context.input(foundname)
+ context.protect()
+ context.popendofline()
+ context.stopreadingfile()
+end
+
function commands.doprocesstypescriptfile(name)
- name = gsub(name,"^type%-","")
- for i=1,#patterns do
- local filename = format(patterns[i],name)
- local foundname = resolvers.finders.doreadfile("any",".",filename)
- if foundname ~= "" then
- context.startreadingfile()
- context.pushendofline()
- context.unprotect()
- context.input(foundname)
- context.protect()
- context.popendofline()
- context.stopreadingfile()
- return
- end
- end
+ commands.uselibrary {
+ name = gsub(name,"^type%-",""),
+ patterns = patterns,
+ action = action,
+ }
end
+
+
diff --git a/tex/context/base/type-ini.mkiv b/tex/context/base/type-ini.mkiv
index d901f2052..22a487831 100644
--- a/tex/context/base/type-ini.mkiv
+++ b/tex/context/base/type-ini.mkiv
@@ -20,6 +20,8 @@
%D apart from downward compatibility issues, it would make global, class
%D spanning definitions a pain. Some day we will introduce a default class.
+%D Todo: store them at the lua end
+
\unprotect
\unexpanded\def\starttypescriptcollection
@@ -36,8 +38,8 @@
\unexpanded\def\usetypescriptfile[#1]%
{\doifelse{#1}\v!reset
{\let\typescriptfiles\empty}
- {\splitfiletype{#1}%
- \addtocommalist\splitoffname\typescriptfiles}}
+ {\splitfilename{#1}%
+ \addtocommalist\splitoffbase\typescriptfiles}}
% SO FAR
@@ -370,16 +372,25 @@
\let\typefaceencoding\s!default % obsolete
+\newtoks \everybeforedefinetypeface
+\newtoks \everyafterdefinetypeface
+
+\let\fontclassstyle\empty
+
\def\dostarttypefacedefining#1#2#3%
{\geteparameters[\??ts][\s!rscale=\plusone,\s!features=,\s!fallbacks=,\s!goodies=,\s!direction=,#3]%
\pushmacro\fontclass
+ \pushmacro\fontclassstyle
\setcurrentfontclass{#1}%
\pushmacro\relativefontsize
\let\relativefontsize\@@tsrscale % still needed ?
- \savefontclassparameters{#2}\@@tsrscale\@@tsfeatures\@@tsfallbacks\@@tsgoodies\@@tsdirection}
+ \savefontclassparameters{#2}\@@tsrscale\@@tsfeatures\@@tsfallbacks\@@tsgoodies\@@tsdirection
+ \the\everybeforedefinetypeface}
\def\dostoptypefacedefining
- {\popmacro\relativefontsize
+ {\the\everyafterdefinetypeface
+ \popmacro\relativefontsize
+ \popmacro\fontclassstyle
\popmacro\fontclass}
\def\dofastdefinetypeface#1#2#3#4#5%
@@ -417,7 +428,7 @@
\setgvalue{\??tf#1\s!default}{#2}%
\fi
\ifcsname#1\endcsname \else
- \setugvalue{#1}{\switchtotypeface[#1][#2]}%
+ \setugvalue{#1}{\switchtotypeface[#1][#2]}% hm, what if #2 changes
\fi}}
\unexpanded\def\setuptypeface% [class] [settings]
diff --git a/tex/context/base/type-one.mkii b/tex/context/base/type-one.mkii
index c6cb5fd61..dfdbd2c17 100644
--- a/tex/context/base/type-one.mkii
+++ b/tex/context/base/type-one.mkii
@@ -1483,7 +1483,8 @@
\definefontsynonym [AntykwaPoltawskiego-Expanded-CapsBold] [\typescriptthree-antpb6-sc] [encoding=\typescriptthree]
\definefontsynonym [AntykwaPoltawskiego-Expanded-CapsBoldItalic] [\typescriptthree-antpbi6-sc] [encoding=\typescriptthree]
- \loadmapfile[poltawski-\typescriptthree.map]
+ % \loadmapfile[poltawski-\typescriptthree.map]
+ \loadmapfile[ap-\typescriptthree.map]
\stoptypescript
\starttypescript [serif] [antykwa-poltawskiego] [name]
diff --git a/tex/context/base/type-otf.mkiv b/tex/context/base/type-otf.mkiv
index dbd161438..5224a8012 100644
--- a/tex/context/base/type-otf.mkiv
+++ b/tex/context/base/type-otf.mkiv
@@ -1526,17 +1526,21 @@
\definefontsynonym [CambriaSerif] [\s!file:cambria.ttf]
\stoptypescript
+ % We load a goodies file that will apply a patch to the font. As a demonstration
+ % we apply the patch when caching as well as when scaling, which is why we also specify
+ % the goodies file with the name.
+
\starttypescript [math] [cambria,cambria-m,cambria-a] [name]
\loadfontgoodies[cambria-math]
- \definefontsynonym [MathRoman] [CambriaMath] [\s!features=\s!math\mathsizesuffix]
+ \definefontsynonym [MathRoman] [CambriaMath] [\s!features=\s!math\mathsizesuffix,\s!goodies=cambria-math]
\stoptypescript
\starttypescript [math] [cambria-x] [name]
\loadfontgoodies[cambria-math]
- \definefontsynonym [MathRoman] [CambriaMath] [\s!features=\s!math]
+ \definefontsynonym [MathRoman] [CambriaMath] [\s!features=\s!math,\s!goodies=cambria-math]
\stoptypescript
\starttypescript [math] [cambria-y] [name]
\loadfontgoodies[cambria-math]
- \definefontsynonym [MathRoman] [CambriaMath] [\s!features=\s!math-nostack\mathsizesuffix]
+ \definefontsynonym [MathRoman] [CambriaMath] [\s!features=\s!math-nostack\mathsizesuffix,\s!goodies=cambria-math]
\stoptypescript
\starttypescript [serif] [cambria,cambria-m,cambria-a] [name]
@@ -1847,6 +1851,7 @@
\starttypescriptcollection[xits]
\starttypescript [math] [xits,xitsbidi] [name]
+ \loadfontgoodies[xits-math]
\definefontsynonym[MathRoman] [\s!file:xits-math.otf][\s!features=\s!math\mathsizesuffix,\s!goodies=xits-math]
\definefontsynonym[MathRomanL2R][\s!file:xits-math.otf][\s!features=\s!math\mathsizesuffix-l2r,\s!goodies=xits-math]
\definefontsynonym[MathRomanR2L][\s!file:xits-math.otf][\s!features=\s!math\mathsizesuffix-r2l,\s!goodies=xits-math]
diff --git a/tex/context/base/typo-dir.mkiv b/tex/context/base/typo-dir.mkiv
index 9d6935962..33f11d9c8 100644
--- a/tex/context/base/typo-dir.mkiv
+++ b/tex/context/base/typo-dir.mkiv
@@ -91,7 +91,7 @@
\startluacode
function documentdata.split_tokens(str)
for s in str:bytes() do
- tex.sprint(tex.ctxcatcodes,string.format("\\hbox{\\char %s}",s))
+ context.sprint(tex.ctxcatcodes,string.format("\\hbox{\\char %s}",s))
end
end
\stopluacode
diff --git a/tex/context/base/typo-mar.mkiv b/tex/context/base/typo-mar.mkiv
index 5abe6f032..0a8e1b1d2 100644
--- a/tex/context/base/typo-mar.mkiv
+++ b/tex/context/base/typo-mar.mkiv
@@ -286,7 +286,7 @@
\definemargindata [inmargin] [\v!left] [\c!margin=\c!margin,\c!width=\leftmarginwidth, \c!align=\v!flushright]
\definemargindata [inother] [\v!right] [\c!margin=\c!margin,\c!width=\rightmarginwidth,\c!align=\v!flushleft]
-\definemargindata [margintext] [\v!left] [\c!margin=\c!margin,\c!width=\leftmarginwidth, \c!align=\v!flushright]
+\definemargindata [margintext] [\v!left] [\c!margin=\c!margin,\c!width=\leftmarginwidth, \c!align=\v!flushright,\c!stack=\v!yes]
\setupmarginframed [\v!left ] [\c!method=\v!first,\c!align=\v!flushright,\s!parent=\??mf] % we could autoparent when no define yet
\setupmarginframed [\v!right] [\c!method=\v!first,\c!align=\v!flushleft, \s!parent=\??mf]
@@ -300,8 +300,9 @@
\definemarginframed [inmargin] [\v!inleft]
\definemarginframed [inother] [\v!inright]
-\let\marginword \margintext
-\let\margintitle\margintext
+\let\marginword \margintext
+\let\margintitle \margintext
+\let\inothermargin\inother % for old times sake
%definemargindata [inouterextra] [\v!outer] [\c!margin=\c!edge,\c!location=\v!outer,\c!width=\outeredgewidth,\c!align=\v!outer,\c!category=\v!edge]
%definemargindata [ininnerextra] [\v!inner] [\c!margin=\c!edge,\c!location=\v!inner,\c!width=\inneredgewidth,\c!align=\v!inner,\c!category=\v!edge]
@@ -310,42 +311,26 @@
%definemarginframed [ininnerextra] [\v!inner]
%D As we have more control we are not backward compatible although in
-%D practice it won't hurt that much.
-%D So, from now on use:
+%D practice it won't hurt that much. So, from now on use:
%D
%D \starttyping
+%D \definemargindata
%D \setupmargindata
-%D \setupmargintext
+%D \definemarginframed
+%D \setupmarginframed
%D \stoptyping
-% The following sort of works okay:
-%
-% \let\definemarginline\definemargindata
-%
-% \unexpanded\def\defineinmargin
-% {\doquadrupleempty\dodefineinmargin}
-%
-% \def\dodefineinmargin[#name][#location][#align][#settings]% not completely compatible
-% {\definemargindata[#name][\c!location=#location,\c!align=#align,#settings]%
-% \definemarginframed[#name][#location][\c!align=#align,#settings]}
+% The following sort of works okay but is to be avoided:
-\let\setupinmargin\setupmargindata
+\let\definemarginline\definemargindata
-% The following is too dangerous:
-%
-% \unexpanded\def\setupinmargin
-% {\dodoubleempty\dosetupinmargin}
-%
-% \def\dosetupinmargin[#1][#2]%
-% {\ifsecondargument
-% \processcommalist[#1]{\dodosetupinmargin[#2]}%
-% \else
-% \setupmargindata [#1]% beware, here we can have clashes, so
-% \setupmarginframed[#1]% don't use setupinmargin any more
-% \fi}
-%
-% \def\dodosetupinmargin[#1]#2% [settings]{class}
-% {\setupmargindata[#2][#1]%
-% \setupmargintext[#2][#1]}
+\unexpanded\def\defineinmargin
+ {\doquadrupleempty\dodefineinmargin}
+
+\def\dodefineinmargin[#name][#location][#align][#settings]% not completely compatible
+ {\definemargindata[#name][\c!location=#location,\c!align=#align,#settings]%
+ \definemarginframed[#name][#location][\c!align=#align,#settings]}
+
+\let\setupinmargin\setupmargindata % only partial (no framed)
\protect \endinput
diff --git a/tex/context/base/util-sto.lua b/tex/context/base/util-sto.lua
index f4521c91f..8710f78af 100644
--- a/tex/context/base/util-sto.lua
+++ b/tex/context/base/util-sto.lua
@@ -98,8 +98,8 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
+local function f_empty() return "" end -- t,k
+local function f_self(t,k) t[k] = k return k end
local function f_ignore() end -- t,k,v
local t_empty = { __index = empty }
diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua
index 50f4b0391..5ef741ce3 100644
--- a/tex/context/base/x-asciimath.lua
+++ b/tex/context/base/x-asciimath.lua
@@ -19,9 +19,7 @@ moduledata.asciimath = asciimath
local report_asciimath = logs.reporter("mathematics","asciimath")
local format = string.format
-local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
local lpegmatch = lpeg.match
-
local S, P, R, C, V, Cc, Ct, Cs = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct, lpeg.Cs
local letter = lpeg.patterns.utf8
@@ -199,9 +197,9 @@ local function converted(original,totex)
end
if totex then
if ok then
- texsprint(ctxcatcodes,"\\mathematics{",result,"}")
+ context.mathematics(result)
else
- texsprint(ctxcatcodes,"{\\tt",result,"}")
+ context.type(result) -- some day monospaced
end
else
return result
diff --git a/tex/context/base/x-cals.lua b/tex/context/base/x-cals.lua
index ba6d705bf..4051dd157 100644
--- a/tex/context/base/x-cals.lua
+++ b/tex/context/base/x-cals.lua
@@ -7,8 +7,7 @@ if not modules then modules = { } end modules ['x-cals'] = {
}
local format, lower = string.format, string.lower
-local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
-local xmlsprint, xmlcprint, xmlcollected = xml.sprint, xml.cprint, xml.collected
+local xmlsprint, xmlcprint, xmlcollected, xmlelements = xml.sprint, xml.cprint, xml.collected, xml.elements
local n_todimen, s_todimen = number.todimen, string.todimen
-- there is room for speedups as well as cleanup (using context functions)
@@ -120,8 +119,7 @@ local function getspans(root, pattern, names, spans)
end
end
---local function texsprint(a,b) print(b) end
---local function xmlsprint(a) print(a) end
+local bTR, eTR, bTD, eTD = context.bTR, context.eTR, context.bTD, context.eTD
function cals.table(root,namespace)
@@ -142,25 +140,28 @@ function cals.table(root,namespace)
local frowspec = p .. "tfoot" .. p .. "row"
local function tablepart(root, xcolspec, xrowspec, before, after) -- move this one outside
- texsprint(ctxcatcodes,before)
+ before()
local at = root.at
local pphalign, ppvalign = at.align, at.valign
local names, widths, spans = { }, { }, { }
getspecs(root, colspec , names, widths)
getspecs(root, xcolspec, names, widths)
getspans(root, spanspec, names, spans)
- for r, d, k in xml.elements(root,xrowspec) do
- texsprint(ctxcatcodes,"\\bTR")
+ for r, d, k in xmlelements(root,xrowspec) do
+ bTR()
local dk = d[k]
local at = dk.at
local phalign, pvalign = at.align or pphalign, at.valign or ppvalign -- todo: __p__ test
local col = 1
- for rr, dd, kk in xml.elements(dk,entryspec) do
+ for rr, dd, kk in xmlelements(dk,entryspec) do
local dk = dd[kk]
if dk.tg == "entrytbl" then
- texsprint(ctxcatcodes,"\\bTD{")
+ -- bTD(function() cals.table(dk) end)
+ bTD()
+ context("{")
cals.table(dk)
- texsprint(ctxcatcodes,"}\\eTD")
+ context("}")
+ eTD()
col = col + 1
else
local at = dk.at
@@ -176,36 +177,42 @@ function cals.table(root,namespace)
valign = valignments[valign]
end
local width = widths[col]
- if s or m or halign or valign or width then -- only english interface !
- texsprint(ctxcatcodes,format("\\bTD[nx=%s,ny=%s,align={%s,%s},width=%s]",
- s or 1, (m or 0)+1, halign or "flushleft", valign or "high", width or "fit"))
- -- texsprint(ctxcatcodes,"\\bTD[nx=",s or 1,"ny=",(m or 0)+1,"align={",halign or "flushleft",",",valign or "high","},width=",width or "fit","]")
+ if s or m or halign or valign or width then -- currently only english interface !
+ bTD {
+ nx = s or 1,
+ ny = (m or 0) + 1,
+ align = format("{%s,%s}",halign or "flushleft",valign or "high"),
+ width = width or "fit",
+ }
else
- texsprint(ctxcatcodes,"\\bTD[align={flushleft,high},width=fit]") -- else problems with vertical material
+ bTD {
+ align = "{flushleft,high}",
+ width = "fit", -- else problems with vertical material
+ }
end
xmlcprint(dk)
- texsprint(ctxcatcodes,"\\eTD")
+ eTD()
col = col + (s or 1)
end
end
- texsprint(ctxcatcodes,"\\eTR")
+ eTR()
end
- texsprint(ctxcatcodes,after)
+ after()
end
for tgroup in lxml.collected(root,tgroupspec) do
- texsprint(ctxcatcodes, "\\directsetup{cals:table:before}")
+ context.directsetup("cals:table:before")
lxml.directives.before(root,"cdx") -- "cals:table"
- texsprint(ctxcatcodes, "\\bgroup")
+ context.bgroup()
lxml.directives.setup(root,"cdx") -- "cals:table"
- texsprint(ctxcatcodes, "\\bTABLE")
- tablepart(tgroup, hcolspec, hrowspec, "\\bTABLEhead", "\\eTABLEhead")
- tablepart(tgroup, bcolspec, browspec, "\\bTABLEbody", "\\eTABLEbody")
- tablepart(tgroup, fcolspec, frowspec, "\\bTABLEfoot", "\\eTABLEfoot")
- texsprint(ctxcatcodes, "\\eTABLE")
- texsprint(ctxcatcodes, "\\egroup")
+ context.bTABLE()
+ tablepart(tgroup, hcolspec, hrowspec, context.bTABLEhead, context.eTABLEhead)
+ tablepart(tgroup, bcolspec, browspec, context.bTABLEbody, context.eTABLEbody)
+ tablepart(tgroup, fcolspec, frowspec, context.bTABLEfoot, context.eTABLEfoot)
+ context.eTABLE()
+ context.egroup()
lxml.directives.after(root,"cdx") -- "cals:table"
- texsprint(ctxcatcodes, "\\directsetup{cals:table:after}")
+ context.directsetup("cals:table:after")
end
end
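
The rewrite depends on cld commands accepting a table argument, which is turned into the usual bracketed key/value list, as the bTD calls above demonstrate. Roughly, with made-up values:

context.bTD { nx = 2, ny = 1, align = "{flushleft,high}", width = "fit" }

-- is meant to produce the same input as the earlier
-- texsprint(ctxcatcodes,"\\bTD[nx=2,ny=1,align={flushleft,high},width=fit]")
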
diff --git a/tex/context/base/x-ct.lua b/tex/context/base/x-ct.lua
index 222a127ce..adaa0204f 100644
--- a/tex/context/base/x-ct.lua
+++ b/tex/context/base/x-ct.lua
@@ -6,8 +6,9 @@ if not modules then modules = { } end modules ['x-ct'] = {
license = "see context related readme files"
}
-local xmlsprint, xmlfilter, xmlcollected = xml.sprint, xml.filter, xml.collected
-local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
+-- needs testing
+
+local xmlsprint, xmlcprint, xmlfilter, xmlcollected = xml.sprint, xml.cprint, xml.filter, xml.collected
local format, concat, rep, find = string.format, table.concat, string.rep, string.find
lxml.context = { }
@@ -79,7 +80,7 @@ function lxml.context.tabulate(root,namespace)
local prefix = (namespace or "context") .. ":"
local templatespec = "/" .. prefix .. "template" .. "/" .. prefix .. "column"
- local bodyrowspec = "/" .. prefix .. "body" .. "/" .. prefix .. "row"
+ local bodyrowspec = "/" .. prefix .. "body" .. "/" .. prefix .. "row"
local cellspec = "/" .. prefix .. "cell"
local template =
@@ -90,22 +91,6 @@ function lxml.context.tabulate(root,namespace)
-- todo: head and foot
---~ lxml.directives.before(root,'cdx')
---~ texsprint(ctxcatcodes, "\\bgroup")
---~ lxml.directives.setup(root,'cdx')
---~ texsprint(ctxcatcodes, format("\\starttabulate[%s]",template))
---~ for e in xmlcollected(root,bodyrowspec) do
---~ texsprint(ctxcatcodes, "\\NC ")
---~ for e in xmlcollected(e,cellspec) do
---~ texsprint(xml.text(e)) -- use some xmlprint
---~ texsprint(ctxcatcodes, "\\NC")
---~ end
---~ texsprint(ctxcatcodes, "\\NR")
---~ end
---~ texsprint(ctxcatcodes, "\\stoptabulate")
---~ texsprint(ctxcatcodes, "\\egroup")
---~ lxml.directives.after(root,'cdx')
-
local NC, NR = context.NC, context.NR
lxml.directives.before(root,'cdx')
@@ -115,7 +100,7 @@ function lxml.context.tabulate(root,namespace)
for e in xmlcollected(root,bodyrowspec) do
NC()
for e in xmlcollected(e,cellspec) do
- texsprint(xml.text(e)) -- test: xmlcprint(e)
+ xmlcprint(e)
NC()
end
NR()
@@ -147,33 +132,20 @@ function lxml.context.combination(root,namespace)
end
local template = format("%s*%s", nx or 1, ny or 1)
- -- todo: alignments
-
---~ lxml.directives.before(root,'cdx')
---~ texsprint(ctxcatcodes, "\\bgroup")
---~ lxml.directives.setup(root,'cdx')
---~ texsprint(ctxcatcodes, "\\startcombination[",template,"]")
---~ for e in xmlcollected(root,pairspec) do
---~ texsprint(ctxcatcodes,"{")
---~ xmlfilter(e,contentspec)
---~ texsprint(ctxcatcodes,"}{")
---~ xmlfilter(e,captionspec)
---~ texsprint(ctxcatcodes,"}")
---~ end
---~ texsprint(ctxcatcodes, "\\stopcombination")
---~ texsprint(ctxcatcodes, "\\egroup")
---~ lxml.directives.after(root,'cdx')
-
lxml.directives.before(root,'cdx')
context.bgroup()
lxml.directives.setup(root,'cdx')
context.startcombination { template }
for e in xmlcollected(root,pairspec) do
- texsprint(ctxcatcodes,"{")
+ -- context.combination(
+ -- function() xmlfilter(e,contentspec) end,
+ -- function() xmlfilter(e,captionspec) end
+ -- )
+ context("{")
xmlfilter(e,contentspec)
- texsprint(ctxcatcodes,"}{")
+            context("}{")
xmlfilter(e,captionspec)
- texsprint(ctxcatcodes,"}")
+ context("}")
end
context.stopcombination()
context.egroup()
diff --git a/tex/context/base/x-dir-05.mkiv b/tex/context/base/x-dir-05.mkiv
index 5a1cfd96b..de1d3fa5f 100644
--- a/tex/context/base/x-dir-05.mkiv
+++ b/tex/context/base/x-dir-05.mkiv
@@ -46,7 +46,7 @@
else
value = fs[name] or ""
end
- tex.sprint(tex.vrbcatcodes,value)
+ context.sprint(tex.vrbcatcodes,value)
end
end
\stopluacode
diff --git a/tex/context/base/x-mathml.lua b/tex/context/base/x-mathml.lua
index ccd9c1e4b..30e770190 100644
--- a/tex/context/base/x-mathml.lua
+++ b/tex/context/base/x-mathml.lua
@@ -10,12 +10,11 @@ if not modules then modules = { } end modules ['x-mathml'] = {
local type, next = type, next
local utf = unicode.utf8
-local texsprint, ctxcatcodes, txtcatcodes = tex.sprint, tex.ctxcatcodes, tex.txtcatcodes
local format, lower, find, gsub = string.format, string.lower, string.find, string.gsub
local strip = string.strip
local utfchar, utffind, utfgmatch, utfgsub = utf.char, utf.find, utf.gmatch, utf.gsub
local xmlsprint, xmlcprint, xmltext, xmlcontent = xml.sprint, xml.cprint, xml.text, xml.content
-local lxmltext, getid = lxml.text, lxml.getid
+local getid = lxml.getid
local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
local lpegmatch = lpeg.match
@@ -57,6 +56,8 @@ local r_replacements = { -- in main table
[doublebar] = "\\mmlrightdelimiter\\Vert",
}
+-- todo: play with asciimode and avoid mmlchar
+
local o_replacements = { -- in main table
["@l"] = "\\mmlleftdelimiter.",
["@r"] = "\\mmlrightdelimiter.",
@@ -464,8 +465,8 @@ function mathml.stripped(str)
context(strip(str))
end
-function characters.remapentity(chr,slot) -- brrrrrr
- texsprint(format("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr))
+function characters.remapentity(chr,slot) -- Brrrrrr, this will be replaced!
+ context("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr)
end
function mathml.mn(id,pattern)
@@ -475,7 +476,6 @@ function mathml.mn(id,pattern)
local rep = gsub(str,"&.-;","")
local rep = gsub(rep,"(%s+)",utfchar(0x205F)) -- medspace e.g.: twenty one (nbsp is not seen)
local rep = gsub(rep,".",n_replacements)
- -- texsprint(ctxcatcodes,rep)
context.mn(rep)
end
@@ -508,14 +508,14 @@ function mathml.mfenced(id) -- multiple separators
id = getid(id)
local left, right, separators = id.at.open or "(", id.at.close or ")", id.at.separators or ","
local l, r = l_replacements[left], r_replacements[right]
- texsprint(ctxcatcodes,"\\enabledelimiter")
+ context.enabledelimiter()
if l then
- texsprint(ctxcatcodes,l_replacements[left] or o_replacements[left] or "")
+ context(l_replacements[left] or o_replacements[left] or "")
else
- texsprint(ctxcatcodes,o_replacements["@l"])
- texsprint(ctxcatcodes,left)
+ context(o_replacements["@l"])
+ context(left)
end
- texsprint(ctxcatcodes,"\\disabledelimiter")
+ context.disabledelimiter()
local collected = lxml.filter(id,"/*") -- check the *
if collected then
local n = #collected
@@ -541,19 +541,19 @@ function mathml.mfenced(id) -- multiple separators
elseif m == "}" then
m = "\\}"
end
- texsprint(ctxcatcodes,m)
+ context(m)
end
end
end
end
- texsprint(ctxcatcodes,"\\enabledelimiter")
+ context.enabledelimiter()
if r then
- texsprint(ctxcatcodes,r_replacements[right] or o_replacements[right] or "")
+ context(r_replacements[right] or o_replacements[right] or "")
else
- texsprint(ctxcatcodes,right)
- texsprint(ctxcatcodes,o_replacements["@r"])
+ context(right)
+ context(o_replacements["@r"])
end
- texsprint(ctxcatcodes,"\\disabledelimiter")
+ context.disabledelimiter()
end
--~ local function flush(e,tag,toggle)
@@ -677,7 +677,9 @@ str = gsub(str,"&.-;","")
collect(m,e)
end
end
- tex.sprint(ctxcatcodes,[[\halign\bgroup\hss\startimath\alignmark\stopimath\aligntab\startimath\alignmark\stopimath\cr]])
+ context.halign()
+ context.bgroup()
+ context([[\hss\startimath\alignmark\stopimath\aligntab\startimath\alignmark\stopimath\cr]])
for i=1,#matrix do
local m = matrix[i]
local mline = true
@@ -688,7 +690,7 @@ str = gsub(str,"&.-;","")
end
end
if mline then
- tex.sprint(ctxcatcodes,[[\noalign{\obeydepth\nointerlineskip}]])
+ context.noalign([[\obeydepth\nointerlineskip]])
end
for j=1,#m do
local mm = m[j]
@@ -726,14 +728,14 @@ str = gsub(str,"&.-;","")
chr = "\\mmlmcolumndigitspace" -- utfchar(0x2007)
end
if j == numbers + 1 then
- tex.sprint(ctxcatcodes,"&")
+ context("\\aligntab")
end
local nchr = n_replacements[chr]
- tex.sprint(ctxcatcodes,nchr or chr)
+ context(nchr or chr)
end
- tex.sprint(ctxcatcodes,"\\crcr")
+ context.crcr()
end
- tex.sprint(ctxcatcodes,"\\egroup")
+ context.egroup()
end
local spacesplitter = lpeg.tsplitat(" ")
@@ -751,11 +753,9 @@ function mathml.mtable(root)
local framespacing = at.framespacing or "0pt"
local framespacing = at.framespacing or "-\\ruledlinewidth" -- make this an option
- texsprint(ctxcatcodes, format("\\bTABLE[frame=%s,offset=%s]",frametypes[frame or "none"] or "off",framespacing))
---~ context.bTABLE { frame = frametypes[frame or "none"] or "off", offset = framespacing }
+ context.bTABLE { frame = frametypes[frame or "none"] or "off", offset = framespacing }
for e in lxml.collected(root,"/(mml:mtr|mml:mlabeledtr)") do
- texsprint(ctxcatcodes,"\\bTR")
---~ context.bTR()
+ context.bTR()
local at = e.at
local col = 0
local rfr = at.frame or (frames and frames [#frames])
@@ -772,28 +772,23 @@ function mathml.mtable(root)
local cra = rowalignments [at.rowalign or (rowaligns and rowaligns [col]) or rra or "center"] or "lohi"
local cca = columnalignments[at.columnalign or (columnaligns and columnaligns[col]) or rca or "center"] or "middle"
local cfr = frametypes [at.frame or (frames and frames [col]) or rfr or "none" ] or "off"
- texsprint(ctxcatcodes,format("\\bTD[align={%s,%s},frame=%s,nx=%s,ny=%s]$\\ignorespaces",cra,cca,cfr,columnspan,rowspan))
---~ texfprint("\\bTD[align={%s,%s},frame=%s,nx=%s,ny=%s]$\\ignorespaces",cra,cca,cfr,columnspan,rowspan)
---~ context.bTD { align = format("{%s,%s}",cra,cca), frame = cfr, nx = columnspan, ny = rowspan }
---~ context.bmath()
---~ context.ignorespaces()
+ context.bTD { align = format("{%s,%s}",cra,cca), frame = cfr, nx = columnspan, ny = rowspan }
+ context.startimath()
+ context.ignorespaces()
xmlcprint(e)
- texsprint(ctxcatcodes,"\\removeunwantedspaces$\\eTD") -- $
---~ context.emath()
---~ context.removeunwantedspaces()
---~ context.eTD()
+ context.stopimath()
+ context.removeunwantedspaces()
+ context.eTD()
end
end
---~ if e.tg == "mlabeledtr" then
---~ texsprint(ctxcatcodes,"\\bTD")
---~ xmlcprint(xml.first(e,"/!mml:mtd"))
---~ texsprint(ctxcatcodes,"\\eTD")
---~ end
- texsprint(ctxcatcodes,"\\eTR")
---~ context.eTR()
+ -- if e.tg == "mlabeledtr" then
+ -- context.bTD()
+ -- xmlcprint(xml.first(e,"/!mml:mtd"))
+ -- context.eTD()
+ -- end
+ context.eTR()
end
- texsprint(ctxcatcodes, "\\eTABLE")
---~ context.eTABLE()
+ context.eTABLE()
end
function mathml.csymbol(root)
diff --git a/tex/context/fonts/asana-math.lfg b/tex/context/fonts/asana-math.lfg
index f845ca4de..2c13600c1 100644
--- a/tex/context/fonts/asana-math.lfg
+++ b/tex/context/fonts/asana-math.lfg
@@ -14,9 +14,22 @@ local function patch(data,filename,threshold)
end
end
-patches.register("after","check math parameters","asana",function(data,filename) patch(data,filename,1350) end)
+patches.register("after","analyze math","asana",function(data,filename) patch(data,filename,1350) end)
+
+local function less(value,target,original)
+ -- officially we should check the original
+ return 0.25 * value
+end
+
+local function more(value,target,original)
+ local o = original.mathparameters.DisplayOperatorMinHeight
+ if o < 2800 then
+ return 2800 * target.parameters.factor
+ else
+ return value -- already scaled
+ end
+end
-local function less(value,target,original) return 0.25 * value end
return {
name = "asana-math",
@@ -26,6 +39,7 @@ return {
copyright = "ConTeXt development team",
mathematics = {
parameters = {
+ DisplayOperatorMinHeight = more,
-- StackBottomDisplayStyleShiftDown = 0,
-- StackBottomShiftDown = 0,
-- StackDisplayStyleGapMin = 0,
diff --git a/tex/context/fonts/cambria-math.lfg b/tex/context/fonts/cambria-math.lfg
index 3fd15d8a0..6415069e6 100644
--- a/tex/context/fonts/cambria-math.lfg
+++ b/tex/context/fonts/cambria-math.lfg
@@ -1,6 +1,11 @@
-- This patch code is moved from font-pat.lua to this goodies
-- files as it does not belong in the core code.
+-- This is a fix to the font itself i.e. the cached instance will
+-- be patched. When the goodie file is loaded the patch will be
+-- added to the patch list. No goodies setting is needed with
+-- the filename.
+
local patches = fonts.handlers.otf.enhancers.patches
local function patch(data,filename,threshold)
@@ -14,8 +19,21 @@ local function patch(data,filename,threshold)
end
end
-patches.register("after","check math parameters","cambria", function(data,filename) patch(data,filename,2800) end)
-patches.register("after","check math parameters","cambmath",function(data,filename) patch(data,filename,2800) end)
+patches.register("after","analyze math","cambria", function(data,filename) patch(data,filename,2800) end)
+patches.register("after","analyze math","cambmath",function(data,filename) patch(data,filename,2800) end)
+
+-- This is a runtime fix, but then we need to explicitly set
+-- the goodies parameter for the font. As a demonstration we
+-- do both.
+
+local function FixDisplayOperatorMinHeight(value,target,original)
+ local o = original.mathparameters.DisplayOperatorMinHeight
+ if o < 2800 then
+ return 2800 * target.parameters.factor
+ else
+ return value -- already scaled
+ end
+end
return {
name = "cambria-math",
@@ -23,4 +41,9 @@ return {
comment = "Goodies that complement cambria.",
author = "Hans Hagen",
copyright = "ConTeXt development team",
+ mathematics = {
+ parameters = {
+ DisplayOperatorMinHeight = FixDisplayOperatorMinHeight,
+ }
+ }
}
diff --git a/tex/context/fonts/lm-math.lfg b/tex/context/fonts/lm-math.lfg
index 07e8036e9..e6b64c638 100644
--- a/tex/context/fonts/lm-math.lfg
+++ b/tex/context/fonts/lm-math.lfg
@@ -1,33 +1,36 @@
-- This patch code is moved from font-pat.lua to this goodies
--- files as it does not belomg in the core code.
+-- files as it does not belong in the core code.
-local patches = fonts.handlers.otf.enhancers.patches
-
-local function patch(data,filename)
- local uni_to_ind = data.map.map
- if not uni_to_ind[0x391] then
- -- beware, this is a hack, features for latin often don't apply to greek
- -- but lm has not much features anyway (and only greek for math)
- patches.report("adding 13 greek capitals")
- uni_to_ind[0x391] = uni_to_ind[0x41]
- uni_to_ind[0x392] = uni_to_ind[0x42]
- uni_to_ind[0x395] = uni_to_ind[0x45]
- uni_to_ind[0x397] = uni_to_ind[0x48]
- uni_to_ind[0x399] = uni_to_ind[0x49]
- uni_to_ind[0x39A] = uni_to_ind[0x4B]
- uni_to_ind[0x39C] = uni_to_ind[0x4D]
- uni_to_ind[0x39D] = uni_to_ind[0x4E]
- uni_to_ind[0x39F] = uni_to_ind[0x4F]
- uni_to_ind[0x3A1] = uni_to_ind[0x52]
- uni_to_ind[0x3A4] = uni_to_ind[0x54]
- uni_to_ind[0x3A7] = uni_to_ind[0x58]
- uni_to_ind[0x396] = uni_to_ind[0x5A]
- end
-end
-
-patches.register("after","prepare glyphs","^lmroman", patch)
-patches.register("after","prepare glyphs","^lmsans", patch)
-patches.register("after","prepare glyphs","^lmtypewriter",patch)
+-- This patch is no longer needed but we keep it commented as
+-- it is an example.
+--
+-- local patches = fonts.handlers.otf.enhancers.patches
+--
+-- local function patch(data,filename)
+-- local uni_to_ind = data.map.map
+-- if not uni_to_ind[0x391] then
+-- -- beware, this is a hack, features for latin often don't apply to greek
+-- -- but lm has not much features anyway (and only greek for math)
+-- patches.report("adding 13 greek capitals")
+-- uni_to_ind[0x391] = uni_to_ind[0x41]
+-- uni_to_ind[0x392] = uni_to_ind[0x42]
+-- uni_to_ind[0x395] = uni_to_ind[0x45]
+-- uni_to_ind[0x397] = uni_to_ind[0x48]
+-- uni_to_ind[0x399] = uni_to_ind[0x49]
+-- uni_to_ind[0x39A] = uni_to_ind[0x4B]
+-- uni_to_ind[0x39C] = uni_to_ind[0x4D]
+-- uni_to_ind[0x39D] = uni_to_ind[0x4E]
+-- uni_to_ind[0x39F] = uni_to_ind[0x4F]
+-- uni_to_ind[0x3A1] = uni_to_ind[0x52]
+-- uni_to_ind[0x3A4] = uni_to_ind[0x54]
+-- uni_to_ind[0x3A7] = uni_to_ind[0x58]
+-- uni_to_ind[0x396] = uni_to_ind[0x5A]
+-- end
+-- end
+--
+-- patches.register("after","prepare glyphs","^lmroman", patch)
+-- patches.register("after","prepare glyphs","^lmsans", patch)
+-- patches.register("after","prepare glyphs","^lmtypewriter",patch)
-- rm-lmr5 : LMMathRoman5-Regular
-- rm-lmbx5 : LMMathRoman5-Bold ]
diff --git a/tex/context/fonts/xits-math.lfg b/tex/context/fonts/xits-math.lfg
index a97a7599d..8043a0093 100644
--- a/tex/context/fonts/xits-math.lfg
+++ b/tex/context/fonts/xits-math.lfg
@@ -4,6 +4,43 @@
-- $ABC$ $\cal ABC$ $\mathalternate{cal}\cal ABC$
-- \stoptext
+-- fonts.handlers.otf.enhancers.patches.register("after","check metadata","xits%-math", function(data,filename)
+-- local variants = {
+-- [0xFE00] = {
+-- [0x2229] = "uni2229.vs1",
+-- [0x222A] = "uni222A.vs1",
+-- [0x2268] = "uni2268.vs1",
+-- [0x2269] = "uni2269.vs1",
+-- [0x2272] = "uni2272.vs1",
+-- [0x2273] = "uni2273.vs1",
+-- [0x228A] = "uni228A.vs1",
+-- [0x228B] = "uni228B.vs1",
+-- [0x2293] = "uni2293.vs1",
+-- [0x2294] = "uni2294.vs1",
+-- [0x2295] = "uni2295.vs1",
+-- [0x2297] = "uni2297.vs1",
+-- [0x229C] = "uni229C.vs1",
+-- [0x22DA] = "uni22DA.vs1",
+-- [0x22DB] = "uni22DB.vs1",
+-- [0x2A3C] = "uni2A3C.vs1",
+-- [0x2A3D] = "uni2A3D.vs1",
+-- [0x2A9D] = "uni2A9D.vs1",
+-- [0x2A9E] = "uni2A9E.vs1",
+-- [0x2AAC] = "uni2AAC.vs1",
+-- [0x2AAD] = "uni2AAD.vs1",
+-- [0x2ACB] = "uni2ACB.vs1",
+-- [0x2ACC] = "uni2ACC.vs1",
+-- }
+-- }
+-- local unicodes = data.resources.unicodes
+-- for k, v in next, variants do
+-- for kk, vv in next, v do
+-- v[kk] = unicodes[vv]
+-- end
+-- end
+-- data.resources.variants = variants
+-- end)
+
return {
name = "xits-math",
version = "1.00",
@@ -20,6 +57,6 @@ return {
mathbbit = { feature = 'ss06', value = 1, comment = "Mathematical Italic Double-Struck Alphabet" },
mathbbbi = { feature = 'ss07', value = 1, comment = "Mathematical Bold Italic Double-Struck Alphabet" },
upint = { feature = 'ss08', value = 1, comment = "Upright Integrals" },
- }
+ },
}
}
diff --git a/tex/context/interface/keys-cs.xml b/tex/context/interface/keys-cs.xml
index a0b9b94b2..7027f8c0c 100644
--- a/tex/context/interface/keys-cs.xml
+++ b/tex/context/interface/keys-cs.xml
@@ -735,6 +735,7 @@
<cd:constant name='label' value='popisek'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
+ <cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='vlevo'/>
<cd:constant name='leftcolor' value='barvavlevo'/>
<cd:constant name='leftcompoundhyphen' value='leftcompoundhyphen'/>
diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml
index 60cf52da9..37a3ee77d 100644
--- a/tex/context/interface/keys-de.xml
+++ b/tex/context/interface/keys-de.xml
@@ -735,6 +735,7 @@
<cd:constant name='label' value='label'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
+ <cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='links'/>
<cd:constant name='leftcolor' value='linkerfarbe'/>
<cd:constant name='leftcompoundhyphen' value='leftcompoundhyphen'/>
diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml
index d8f525567..9bd938d43 100644
--- a/tex/context/interface/keys-en.xml
+++ b/tex/context/interface/keys-en.xml
@@ -735,6 +735,7 @@
<cd:constant name='label' value='label'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
+ <cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='left'/>
<cd:constant name='leftcolor' value='leftcolor'/>
<cd:constant name='leftcompoundhyphen' value='leftcompoundhyphen'/>
diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml
index 45c5dad8e..51588a244 100644
--- a/tex/context/interface/keys-fr.xml
+++ b/tex/context/interface/keys-fr.xml
@@ -735,6 +735,7 @@
<cd:constant name='label' value='etiquette'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
+ <cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='gauche'/>
<cd:constant name='leftcolor' value='couleurgauche'/>
<cd:constant name='leftcompoundhyphen' value='leftcompoundhyphen'/>
diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml
index aea63860f..f44709875 100644
--- a/tex/context/interface/keys-it.xml
+++ b/tex/context/interface/keys-it.xml
@@ -735,6 +735,7 @@
<cd:constant name='label' value='etichetta'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
+ <cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='sinistra'/>
<cd:constant name='leftcolor' value='coloresinistra'/>
<cd:constant name='leftcompoundhyphen' value='leftcompoundhyphen'/>
diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml
index 469c5e592..67e9152b2 100644
--- a/tex/context/interface/keys-nl.xml
+++ b/tex/context/interface/keys-nl.xml
@@ -735,6 +735,7 @@
<cd:constant name='label' value='label'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
+ <cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='links'/>
<cd:constant name='leftcolor' value='linkerkleur'/>
<cd:constant name='leftcompoundhyphen' value='linkerkoppelteken'/>
diff --git a/tex/context/interface/keys-pe.xml b/tex/context/interface/keys-pe.xml
index 365c903d4..0cdf2d2c1 100644
--- a/tex/context/interface/keys-pe.xml
+++ b/tex/context/interface/keys-pe.xml
@@ -735,6 +735,7 @@
<cd:constant name='label' value='برچسب'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
+ <cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='چپ'/>
<cd:constant name='leftcolor' value='رنگ‌چپ'/>
<cd:constant name='leftcompoundhyphen' value='leftcompoundhyphen'/>
diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml
index 9bcedba09..5b0df2880 100644
--- a/tex/context/interface/keys-ro.xml
+++ b/tex/context/interface/keys-ro.xml
@@ -735,6 +735,7 @@
<cd:constant name='label' value='eticheta'/>
<cd:constant name='lastnamesep' value='lastnamesep'/>
<cd:constant name='lastpubsep' value='lastpubsep'/>
+ <cd:constant name='layout' value='layout'/>
<cd:constant name='left' value='stanga'/>
<cd:constant name='leftcolor' value='culoarestanga'/>
<cd:constant name='leftcompoundhyphen' value='leftcompoundhyphen'/>
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 71a70aba0..679db7d2f 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 06/29/11 09:57:49
+-- merge date : 07/13/11 20:14:04
do -- begin closure to overcome local limits and interference
@@ -1710,7 +1710,7 @@ end
local sort, fastcopy, sortedpairs = table.sort, table.fastcopy, table.sortedpairs -- dependency!
-function lpeg.append(list,pp)
+function lpeg.append(list,pp,delayed)
local p = pp
if #list > 0 then
list = fastcopy(list)
@@ -1722,6 +1722,14 @@ function lpeg.append(list,pp)
p = P(list[l])
end
end
+ elseif delayed then
+ for k, v in sortedpairs(list) do
+ if p then
+ p = P(k)/list + p
+ else
+ p = P(k)/list
+ end
+ end
else
for k, v in sortedpairs(list) do
if p then
@@ -2270,10 +2278,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
+
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
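
-- A hedged illustration of the extended splitter, using a made-up path (drive
-- splitting depends on the drive pattern defined earlier in this file):
--
-- file.splitname("one/two/three.txt")   -- "one/two/", "three", "txt"
-- file.nametotable("one/two/three.txt") -- { path = "one/two/", name = "three.txt", base = "three", suffix = "txt" }
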
@@ -4990,7 +5030,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.732 -- beware: also sync font-mis.lua
+otf.version = 2.733 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
local fontdata = fonts.hashes.identifiers
@@ -5234,11 +5274,14 @@ end
-- patches.register("before","migrate metadata","cambria",function() end)
function patches.register(what,where,pattern,action)
- local ww = what[where]
- if ww then
- ww[pattern] = action
- else
- ww = { [pattern] = action}
+ local pw = patches[what]
+ if pw then
+ local ww = pw[where]
+ if ww then
+ ww[pattern] = action
+ else
+ pw[where] = { [pattern] = action}
+ end
end
end
@@ -5363,6 +5406,9 @@ function otf.load(filename,format,sub,featurefile)
duplicates = {
-- alternative unicodes
},
+ variants = {
+ -- alternative unicodes (variants)
+ },
lookuptypes = {
},
},
@@ -5387,6 +5433,7 @@ function otf.load(filename,format,sub,featurefile)
if packdata then
if cleanup > 0 then
collectgarbage("collect")
+--~ lua.collectgarbage()
end
enhance("pack",data,filename,nil)
end
@@ -5394,6 +5441,7 @@ function otf.load(filename,format,sub,featurefile)
data = containers.write(otf.cache, hash, data)
if cleanup > 1 then
collectgarbage("collect")
+--~ lua.collectgarbage()
end
stoptiming(data)
if elapsedtime then -- not in generic
@@ -5402,10 +5450,12 @@ function otf.load(filename,format,sub,featurefile)
fontloader.close(fontdata) -- free memory
if cleanup > 3 then
collectgarbage("collect")
+--~ lua.collectgarbage()
end
data = containers.read(otf.cache, hash) -- this frees the old table and load the sparse one
if cleanup > 2 then
collectgarbage("collect")
+--~ lua.collectgarbage()
end
else
data = nil
@@ -5543,6 +5593,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
local unicodes = resources.unicodes -- name to unicode
local indices = resources.indices -- index to unicode
local duplicates = resources.duplicates
+ local variants = resources.variants
if rawsubfonts then
@@ -5642,11 +5693,28 @@ actions["prepare glyphs"] = function(data,filename,raw)
}
local altuni = glyph.altuni
if altuni then
- local d = { }
+ local d
for i=1,#altuni do
- d[#d+1] = altuni[i].unicode
+ local a = altuni[i]
+ local u = a.unicode
+ local v = a.variant
+ if v then
+ local vv = variants[v]
+ if vv then
+ vv[u] = unicode
+ else -- xits-math has some:
+ vv = { [u] = unicode }
+ variants[v] = vv
+ end
+ elseif d then
+ d[#d+1] = u
+ else
+ d = { u }
+ end
+ end
+ if d then
+ duplicates[unicode] = d
end
- duplicates[unicode] = d
end
else
report_otf("potential problem: glyph 0x%04X is used but empty",index)
@@ -5668,9 +5736,8 @@ actions["check encoding"] = function(data,filename,raw)
local properties = data.properties
local unicodes = resources.unicodes -- name to unicode
local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
- -- begin of messy (not needed whwn cidmap)
+ -- begin of messy (not needed when cidmap)
local mapdata = raw.map or { }
local unicodetoindex = mapdata and mapdata.map or { }
@@ -5744,7 +5811,6 @@ actions["add duplicates"] = function(data,filename,raw)
end
end
end
-
end
-- class : nil base mark ligature component (maybe we don't need it in description)
@@ -6921,15 +6987,15 @@ local function check_otf(forced,specification,suffix,what)
if forced then
name = file.addsuffix(name,suffix,true)
end
- local fullname, tfmdata = findbinfile(name,suffix) or "", nil -- one shot
+ local fullname = findbinfile(name,suffix) or ""
if fullname == "" then
- fullname = fonts.names.getfilename(name,suffix)
+ fullname = fonts.names.getfilename(name,suffix) or ""
end
if fullname ~= "" then
- specification.filename, specification.format = fullname, what -- hm, so we do set the filename, then
- tfmdata = read_from_otf(specification) -- we need to do it for all matches / todo
+ specification.filename = fullname
+ specification.format = what
+ return read_from_otf(specification)
end
- return tfmdata
end
local function opentypereader(specification,suffix,what)