author     Hans Hagen <pragma@wxs.nl>  2010-05-08 13:33:00 +0200
committer  Hans Hagen <pragma@wxs.nl>  2010-05-08 13:33:00 +0200
commit     8ad1a9bed2cf3271f1922759060c2ba1c8e3ced1 (patch)
tree       5958b9a4206fab98782cd05e4f7a9532524dd3ab
parent     66a308adb8acc92c16afd883827e6e835d721a8e (diff)
download   context-8ad1a9bed2cf3271f1922759060c2ba1c8e3ced1.tar.gz

stable 2010.05.08 13:33
-rw-r--r--  scripts/context/lua/luatools.lua             |  10
-rw-r--r--  scripts/context/lua/mtx-mptopdf.lua          |  22
-rw-r--r--  scripts/context/lua/mtxrun.lua               | 281
-rw-r--r--  scripts/context/perl/mptopdf.pl              |   8
-rw-r--r--  scripts/context/stubs/mswin/luatools.lua     |  10
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.dll       | bin 9216 -> 9216 bytes
-rwxr-xr-x  scripts/context/stubs/mswin/mtxrun.exe       | bin 6144 -> 6144 bytes
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua       | 281
-rw-r--r--  scripts/context/stubs/source/mtxrun_dll.c    | 117
-rwxr-xr-x  scripts/context/stubs/unix/luatools          |  10
-rwxr-xr-x  scripts/context/stubs/unix/mtxrun            | 281
-rw-r--r--  tex/context/base/cont-new.tex                |   2
-rw-r--r--  tex/context/base/context.tex                 |   2
-rw-r--r--  tex/context/base/data-use.lua                |  10
-rw-r--r--  tex/context/base/metatex.tex                 |  10
-rw-r--r--  tex/context/base/mlib-ctx.lua                |   2
-rw-r--r--  tex/context/base/page-str.lua                | 221
-rw-r--r--  tex/context/base/page-str.mkiv               | 379
-rw-r--r--  tex/generic/context/luatex-fonts-merged.lua  |   2
19 files changed, 1021 insertions, 627 deletions
diff --git a/scripts/context/lua/luatools.lua b/scripts/context/lua/luatools.lua
index 35a61ea43..ccedd97cd 100644
--- a/scripts/context/lua/luatools.lua
+++ b/scripts/context/lua/luatools.lua
@@ -7419,11 +7419,13 @@ function statistics.check_fmt_status(texname)
local luv = dofile(luvname)
if luv and luv.sourcefile then
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
- if luv.enginebanner and luv.enginebanner ~= enginebanner then
- return "engine mismatch"
+ local luvbanner = luv.enginebanner or "?"
+ if luvbanner ~= enginebanner then
+ return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
end
- if luv.sourcehash and luv.sourcehash ~= sourcehash then
- return "source mismatch"
+ local luvhash = luv.sourcehash or "?"
+ if luvhash ~= sourcehash then
+ return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
end
else
return "invalid status file"
diff --git a/scripts/context/lua/mtx-mptopdf.lua b/scripts/context/lua/mtx-mptopdf.lua
index 69950e008..6fa5bbcb8 100644
--- a/scripts/context/lua/mtx-mptopdf.lua
+++ b/scripts/context/lua/mtx-mptopdf.lua
@@ -1,6 +1,6 @@
if not modules then modules = { } end modules ['mtx-mptopdf'] = {
version = 1.303,
- comment = "companion to mtxrun.lua",
+ comment = "companion to mtxrun.lua, patched by HH so errors are his",
author = "Taco Hoekwater, Elvenkind BV, Dordrecht NL",
copyright = "Elvenkind BV / ConTeXt Development Team",
license = "see context related readme files"
@@ -32,16 +32,18 @@ function scripts.mptopdf.aux.do_convert (fname)
else
command = string.format('%s \\\\relax "%s"',command,fname)
end
- os.execute(command)
- local name, suffix = file.nameonly(fname), file.extname(fname)
- local pdfsrc = name .. ".pdf"
- if lfs.isfile(pdfsrc) then
- pdfdest = name .. "-" .. suffix .. ".pdf"
- os.rename(pdfsrc, pdfdest)
- if lfs.isfile(pdfsrc) then -- rename failed
- file.copy(pdfsrc, pdfdest)
+ local result = os.execute(command)
+ if result == 0 then
+ local name, suffix = file.nameonly(fname), file.extname(fname)
+ local pdfsrc = name .. ".pdf"
+ if lfs.isfile(pdfsrc) then
+ pdfdest = name .. "-" .. suffix .. ".pdf"
+ os.rename(pdfsrc, pdfdest)
+ if lfs.isfile(pdfsrc) then -- rename failed
+ file.copy(pdfsrc, pdfdest)
+ end
+ done = 1
end
- done = 1
end
end
return done, pdfdest
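
do_convert now inspects the exit status of the TeX run before renaming the generated pdf, so a failed run no longer reports a result that does not exist. A standalone sketch of the same guard, assuming a Lua 5.1 style os.execute (numeric exit status, as under texlua) and luafilesystem; the command and file names are placeholders:

    local lfs = require("lfs")

    -- sketch of the guarded conversion step, not the mtx-mptopdf code itself
    local function convert(cmd, name, suffix)
        local done, pdfdest = 0, ""
        local result = os.execute(cmd)                       -- Lua 5.1: numeric exit status
        if result == 0 then                                  -- only touch files after a clean run
            local pdfsrc = name .. ".pdf"
            if lfs.attributes(pdfsrc, "mode") == "file" then
                pdfdest = name .. "-" .. suffix .. ".pdf"
                os.rename(pdfsrc, pdfdest)
            end
            done = 1
        end
        return done, pdfdest
    end
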
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 3c72f59d1..cefa3192b 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -3950,7 +3950,7 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined = false, false, false, false, false
+local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
local dcache, hcache, acache = { }, { }, { }
local mt = { }
@@ -4078,22 +4078,72 @@ function xml.unknown_dec_entity_format(str) return (str == "" and "&error;") or
function xml.unknown_hex_entity_format(str) return format("&#x%s;",str) end
function xml.unknown_any_entity_format(str) return format("&#x%s;",str) end
+local function fromhex(s)
+ local n = tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return format("h:%s",s), true
+ end
+end
+
+local function fromdec(s)
+ local n = tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return format("d:%s",s), true
+ end
+end
+
+-- one level expansion (simple case), no checking done
+
+local rest = (1-P(";"))^0
+local many = P(1)^0
+
+local parsedentity =
+ P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
+ (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
+
+-- parsing in the xml file
+
+local predefined_unified = {
+ [38] = "&amp;",
+ [42] = "&quot;",
+ [47] = "&apos;",
+ [74] = "&lt;",
+ [76] = "&gr;",
+}
+
+local predefined_simplified = {
+ [38] = "&", amp = "&",
+ [42] = '"', quot = '"',
+ [47] = "'", apos = "'",
+ [74] = "<", lt = "<",
+ [76] = ">", gt = ">",
+}
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
- if utfize then
- local n = tonumber(str,16)
+ local n = tonumber(str,16)
+ h = unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ end
+ elseif utfize then
h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or ""
if not n then
logs.report("xml","utfize, ignoring hex entity &#x%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,c)
+ logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
end
else
if trace_entities then
logs.report("xml","found entity &#x%s;",str)
end
- h = "&#c" .. str .. ";"
+ h = "&#x" .. str .. ";"
end
hcache[str] = h
end
@@ -4103,13 +4153,18 @@ end
local function handle_dec_entity(str)
local d = dcache[str]
if not d then
- if utfize then
- local n = tonumber(str)
+ local n = tonumber(str)
+ d = unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d)
+ end
+ elseif utfize then
d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or ""
if not n then
logs.report("xml","utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,c)
+ logs.report("xml","utfize, converting dec entity &#%s; into %s",str,h)
end
else
if trace_entities then
@@ -4122,48 +4177,13 @@ local function handle_dec_entity(str)
return d
end
--- one level expansion (simple case)
-
-local function fromhex(s)
- local n = tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return format("h:%s",s), true
- end
-end
-
-local function fromdec(s)
- local n = tonumber(s)
- if n then
- return utfchar(n)
- else
- return format("d:%s",s), true
- end
-end
-
-local rest = (1-P(";"))^0
-local many = P(1)^0
-
-local parsedentity =
- P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
- (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
-
xml.parsedentitylpeg = parsedentity
-local predefined = {
- amp = "&",
- lt = "<",
- gt = ">",
- quot = '"',
- apos = "'",
-}
-
local function handle_any_entity(str)
if resolve then
local a = acache[str] -- per instance ! todo
if not a then
- a = resolve_predefined and predefined[str]
+ a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
elseif type(resolve) == "function" then
@@ -4209,7 +4229,7 @@ local function handle_any_entity(str)
if trace_entities then
logs.report("xml","found entity &%s;",str)
end
- a = resolve_predefined and predefined[str]
+ a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
@@ -4359,6 +4379,7 @@ local function xmlconvert(data, settings)
utfize = settings.utfize_entities
resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
cleanup = settings.text_cleanup
stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { }
acache, hcache, dcache = { }, { }, { } -- not stored
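
With the extra unify_predefined flag, numeric references to the predefined XML entities can be folded back into their named forms at parse time instead of being expanded to raw characters. A hedged usage sketch, assuming xml.convert is the public wrapper that forwards these settings to the local xmlconvert shown above:

    -- assumption: xml.convert(data, settings) hands the settings table to xmlconvert
    local root = xml.convert([[<doc>fish &#38; chips</doc>]], {
        unify_predefined_entities = true,  -- &#38; is kept as the named entity &amp;
        utfize_entities           = true,  -- other numeric entities become utf characters
    })
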
@@ -4465,21 +4486,19 @@ the whole file first. The function accepts a string representing
a filename or a file handle.</p>
--ldx]]--
-function xml.load(filename)
+function xml.load(filename,settings)
+ local data = ""
if type(filename) == "string" then
+ -- local data = io.loaddata(filename) - -todo: check type in io.loaddata
local f = io.open(filename,'r')
if f then
- local root = xmlconvert(f:read("*all"))
+ data = f:read("*all")
f:close()
- return root
- else
- return xmlconvert("")
end
elseif filename then -- filehandle
- return xmlconvert(filename:read("*all"))
- else
- return xmlconvert("")
+ data = filename:read("*all")
end
+ return xmlconvert(data,settings)
end
--[[ldx--
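
Since xml.load now only gathers the data and delegates to xmlconvert, the same settings table can be passed when loading from disk or from an open file handle. A short usage sketch; the file name is a placeholder and the setting names are the ones wired up in the hunks above:

    -- "somefile.xml" is a placeholder; the settings table is handed straight to xmlconvert
    local root = xml.load("somefile.xml", {
        utfize_entities             = true,
        resolve_predefined_entities = true,
    })
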
@@ -5109,17 +5128,17 @@ apply_axis['child'] = function(list)
for l=1,#list do
local ll = list[l]
local dt = ll.dt
-local en = 0
+ local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
collected[#collected+1] = dk
dk.ni = k -- refresh
-en = en + 1
-dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
-ll.en = en
+ ll.en = en
end
return collected
end
@@ -5127,18 +5146,18 @@ end
local function collect(list,collected)
local dt = list.dt
if dt then
-local en = 0
+ local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
collected[#collected+1] = dk
dk.ni = k -- refresh
-en = en + 1
-dk.ei = en
+ en = en + 1
+ dk.ei = en
collect(dk,collected)
end
end
-list.en = en
+ list.en = en
end
end
apply_axis['descendant'] = function(list)
@@ -5152,18 +5171,18 @@ end
local function collect(list,collected)
local dt = list.dt
if dt then
-local en = 0
+ local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
collected[#collected+1] = dk
dk.ni = k -- refresh
-en = en + 1
-dk.ei = en
+ en = en + 1
+ dk.ei = en
collect(dk,collected)
end
end
-list.en = en
+ list.en = en
end
end
apply_axis['descendant-or-self'] = function(list)
@@ -5800,17 +5819,17 @@ parse_pattern = function (pattern) -- the gain of caching is rather minimal
add_comment(parsed, "initial-child removed") -- we could also make it a auto-self
remove(parsed,1)
end
-local np = #parsed -- can have changed
-if np > 1 then
- local pnp = parsed[np]
- if pnp.kind == "nodes" and pnp.nodetest == true then
- local nodes = pnp.nodes
- if nodes[1] == true and nodes[2] == false and nodes[3] == false then
- add_comment(parsed, "redundant final wildcard filter removed")
- remove(parsed,np)
- end
- end
-end
+ local np = #parsed -- can have changed
+ if np > 1 then
+ local pnp = parsed[np]
+ if pnp.kind == "nodes" and pnp.nodetest == true then
+ local nodes = pnp.nodes
+ if nodes[1] == true and nodes[2] == false and nodes[3] == false then
+ add_comment(parsed, "redundant final wildcard filter removed")
+ remove(parsed,np)
+ end
+ end
+ end
end
else
parsed = { pattern = pattern }
@@ -5836,6 +5855,10 @@ end
-- caching found lookups saves not that much (max .1 sec on a 8 sec run)
-- and it also messes up finalizers
+-- watch out: when there is a finalizer, it's always called as there
+-- can be cases that a finalizer returns (or does) something in case
+-- there is no match; an example of this is count()
+
local profiled = { } xml.profiled = profiled
local function profiled_apply(list,parsed,nofparsed,order)
@@ -5863,6 +5886,12 @@ local function profiled_apply(list,parsed,nofparsed,order)
return collected
end
if not collected or #collected == 0 then
+ local pn = i < nofparsed and parsed[nofparsed]
+ if pn and pn.kind == "finalizer" then
+ collected = pn.finalizer(collected)
+ p.finalized = p.finalized + 1
+ return collected
+ end
return nil
end
end
@@ -5894,10 +5923,16 @@ local function traced_apply(list,parsed,nofparsed,order)
logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
elseif kind == "finalizer" then
collected = pi.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(collected and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
return collected
end
if not collected or #collected == 0 then
+ local pn = i < nofparsed and parsed[nofparsed]
+ if pn and pn.kind == "finalizer" then
+ collected = pn.finalizer(collected)
+ logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ return collected
+ end
return nil
end
end
@@ -5922,6 +5957,10 @@ local function normal_apply(list,parsed,nofparsed,order)
return pi.finalizer(collected)
end
if not collected or #collected == 0 then
+ local pf = i < nofparsed and parsed[nofparsed].finalizer
+ if pf then
+ return pf(collected) -- can be anything
+ end
return nil
end
end
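
All three appliers (profiled, traced, normal) now fall through to a trailing finalizer even when an intermediate step matched nothing, which is what lets a finalizer such as count() report 0 instead of the query silently returning nil. A language-level sketch of that control flow, deliberately independent of the real lpath data structures:

    -- generic sketch of "always run a trailing finalizer", not the lpath code itself
    local function apply(steps, input)
        local collected = input
        for i = 1, #steps do
            local step = steps[i]
            if step.kind == "finalizer" then
                return step.finalizer(collected)              -- normal exit through the finalizer
            end
            collected = step.apply(collected)
            if not collected or #collected == 0 then
                local last = i < #steps and steps[#steps]
                if last and last.kind == "finalizer" then
                    return last.finalizer(collected)          -- e.g. count() can still return 0
                end
                return nil
            end
        end
        return collected
    end

    -- a count() style finalizer then reports 0 when nothing matched:
    local steps = {
        { kind = "nodes",     apply     = function(c) return { } end },            -- no match
        { kind = "finalizer", finalizer = function(c) return c and #c or 0 end },  -- count()
    }
    print(apply(steps, { "some", "input" }))  -- prints 0, not nil
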
@@ -6698,7 +6737,7 @@ function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and
end
end
else
---~ str.ni = i
+ --~ str.ni = i
t[#t+1] = str
end
end
@@ -6708,6 +6747,78 @@ function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and
end
end
+function xml.strip_whitespace(root, pattern, nolines, anywhere) -- strips all leading and trailing spacing
+ local collected = xmlparseapply({ root },pattern) -- beware, indices no longer are valid now
+ if collected then
+ for i=1,#collected do
+ local e = collected[i]
+ local edt = e.dt
+ if edt then
+ if anywhere then
+ local t = { }
+ for e=1,#edt do
+ local str = edt[e]
+ if type(str) ~= "string" then
+ t[#t+1] = str
+ elseif str ~= "" then
+ -- todo: lpeg for each case
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s*(.-)%s*$","%1")
+ if str ~= "" then
+ t[#t+1] = str
+ end
+ end
+ end
+ e.dt = t
+ else
+ -- we can assume a regular sparse xml table with no successive strings
+ -- otherwise we should use a while loop
+ if #edt > 0 then
+ -- strip front
+ local str = edt[1]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt,1)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s+","")
+ if str == "" then
+ remove(edt,1)
+ else
+ edt[1] = str
+ end
+ end
+ end
+ if #edt > 1 then
+ -- strip end
+ local str = edt[#edt]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"%s+$","")
+ if str == "" then
+ remove(edt)
+ else
+ edt[#edt] = str
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
local function rename_space(root, oldspace, newspace) -- fast variant
local ndt = #root.dt
for i=1,ndt or 0 do
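
The redefined xml.strip_whitespace gains an anywhere argument that trims every string fragment inside the matched elements instead of only the leading and trailing ones. A hedged usage sketch; the document and the lpath pattern are only illustrative, the call shape follows the signature introduced above:

    -- placeholders throughout; assumes xml.convert as the public parser entry point
    local root = xml.convert([[<doc> <p>  hello   world  </p> </doc>]])
    xml.strip_whitespace(root, "p", true, true)   -- nolines = true, anywhere = true
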
@@ -10623,11 +10734,13 @@ function statistics.check_fmt_status(texname)
local luv = dofile(luvname)
if luv and luv.sourcefile then
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
- if luv.enginebanner and luv.enginebanner ~= enginebanner then
- return "engine mismatch"
+ local luvbanner = luv.enginebanner or "?"
+ if luvbanner ~= enginebanner then
+ return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
end
- if luv.sourcehash and luv.sourcehash ~= sourcehash then
- return "source mismatch"
+ local luvhash = luv.sourcehash or "?"
+ if luvhash ~= sourcehash then
+ return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
end
else
return "invalid status file"
@@ -10982,7 +11095,7 @@ local _path_, libpaths, _cpath_, clibpaths
function package.libpaths()
if not _path_ or package.path ~= _path_ then
_path_ = package.path
- libpaths = file.split_path(_path_)
+ libpaths = file.split_path(_path_,";")
end
return libpaths
end
@@ -10990,7 +11103,7 @@ end
function package.clibpaths()
if not _cpath_ or package.cpath ~= _cpath_ then
_cpath_ = package.cpath
- clibpaths = file.split_path(_cpath_)
+ clibpaths = file.split_path(_cpath_,";")
end
return clibpaths
end
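
package.path and package.cpath always use ";" between entries, and the patch now passes that separator explicitly instead of relying on file.split_path's default. A standalone sketch in plain Lua of what the explicit split amounts to:

    -- plain-Lua equivalent of splitting a package path on ";"
    local function split_semicolons(str)
        local t = { }
        for entry in string.gmatch(str, "[^;]+") do
            t[#t+1] = entry
        end
        return t
    end

    local libpaths = split_semicolons(package.path)
    for i = 1, #libpaths do
        print(i, libpaths[i])
    end
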
diff --git a/scripts/context/perl/mptopdf.pl b/scripts/context/perl/mptopdf.pl
index a6b946baa..41d1ae1f7 100644
--- a/scripts/context/perl/mptopdf.pl
+++ b/scripts/context/perl/mptopdf.pl
@@ -104,7 +104,7 @@ if (($pattern eq '')||($Help)) {
my $error = system ($runner) ;
if ($error) {
print "\n$program : error while processing mp file\n" ;
- exit ;
+ exit 1 ;
} else {
$pattern =~ s/\.mp$//io ;
@files = glob "$pattern.*" ;
@@ -131,7 +131,11 @@ foreach my $file (@files) {
} else {
$command = "$command \\\\relax $file" ;
}
- system($command) ;
+ my $error = system($command) ;
+ if ($error) {
+ print "\n$program : error while processing tex file\n" ;
+ exit 1 ;
+ }
my $pdfsrc = basename($_).".pdf";
rename ($pdfsrc, "$_-$1.pdf") ;
if (-e $pdfsrc) {
diff --git a/scripts/context/stubs/mswin/luatools.lua b/scripts/context/stubs/mswin/luatools.lua
index 35a61ea43..ccedd97cd 100644
--- a/scripts/context/stubs/mswin/luatools.lua
+++ b/scripts/context/stubs/mswin/luatools.lua
@@ -7419,11 +7419,13 @@ function statistics.check_fmt_status(texname)
local luv = dofile(luvname)
if luv and luv.sourcefile then
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
- if luv.enginebanner and luv.enginebanner ~= enginebanner then
- return "engine mismatch"
+ local luvbanner = luv.enginebanner or "?"
+ if luvbanner ~= enginebanner then
+ return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
end
- if luv.sourcehash and luv.sourcehash ~= sourcehash then
- return "source mismatch"
+ local luvhash = luv.sourcehash or "?"
+ if luvhash ~= sourcehash then
+ return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
end
else
return "invalid status file"
diff --git a/scripts/context/stubs/mswin/mtxrun.dll b/scripts/context/stubs/mswin/mtxrun.dll
index 1ccb76efc..23e476cac 100644
--- a/scripts/context/stubs/mswin/mtxrun.dll
+++ b/scripts/context/stubs/mswin/mtxrun.dll
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxrun.exe b/scripts/context/stubs/mswin/mtxrun.exe
index 35c8c24d7..745eaf224 100755
--- a/scripts/context/stubs/mswin/mtxrun.exe
+++ b/scripts/context/stubs/mswin/mtxrun.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 3c72f59d1..cefa3192b 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -3950,7 +3950,7 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined = false, false, false, false, false
+local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
local dcache, hcache, acache = { }, { }, { }
local mt = { }
@@ -4078,22 +4078,72 @@ function xml.unknown_dec_entity_format(str) return (str == "" and "&error;") or
function xml.unknown_hex_entity_format(str) return format("&#x%s;",str) end
function xml.unknown_any_entity_format(str) return format("&#x%s;",str) end
+local function fromhex(s)
+ local n = tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return format("h:%s",s), true
+ end
+end
+
+local function fromdec(s)
+ local n = tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return format("d:%s",s), true
+ end
+end
+
+-- one level expansion (simple case), no checking done
+
+local rest = (1-P(";"))^0
+local many = P(1)^0
+
+local parsedentity =
+ P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
+ (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
+
+-- parsing in the xml file
+
+local predefined_unified = {
+ [38] = "&amp;",
+ [42] = "&quot;",
+ [47] = "&apos;",
+ [74] = "&lt;",
+ [76] = "&gr;",
+}
+
+local predefined_simplified = {
+ [38] = "&", amp = "&",
+ [42] = '"', quot = '"',
+ [47] = "'", apos = "'",
+ [74] = "<", lt = "<",
+ [76] = ">", gt = ">",
+}
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
- if utfize then
- local n = tonumber(str,16)
+ local n = tonumber(str,16)
+ h = unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ end
+ elseif utfize then
h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or ""
if not n then
logs.report("xml","utfize, ignoring hex entity &#x%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,c)
+ logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
end
else
if trace_entities then
logs.report("xml","found entity &#x%s;",str)
end
- h = "&#c" .. str .. ";"
+ h = "&#x" .. str .. ";"
end
hcache[str] = h
end
@@ -4103,13 +4153,18 @@ end
local function handle_dec_entity(str)
local d = dcache[str]
if not d then
- if utfize then
- local n = tonumber(str)
+ local n = tonumber(str)
+ d = unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d)
+ end
+ elseif utfize then
d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or ""
if not n then
logs.report("xml","utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,c)
+ logs.report("xml","utfize, converting dec entity &#%s; into %s",str,h)
end
else
if trace_entities then
@@ -4122,48 +4177,13 @@ local function handle_dec_entity(str)
return d
end
--- one level expansion (simple case)
-
-local function fromhex(s)
- local n = tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return format("h:%s",s), true
- end
-end
-
-local function fromdec(s)
- local n = tonumber(s)
- if n then
- return utfchar(n)
- else
- return format("d:%s",s), true
- end
-end
-
-local rest = (1-P(";"))^0
-local many = P(1)^0
-
-local parsedentity =
- P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
- (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
-
xml.parsedentitylpeg = parsedentity
-local predefined = {
- amp = "&",
- lt = "<",
- gt = ">",
- quot = '"',
- apos = "'",
-}
-
local function handle_any_entity(str)
if resolve then
local a = acache[str] -- per instance ! todo
if not a then
- a = resolve_predefined and predefined[str]
+ a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
elseif type(resolve) == "function" then
@@ -4209,7 +4229,7 @@ local function handle_any_entity(str)
if trace_entities then
logs.report("xml","found entity &%s;",str)
end
- a = resolve_predefined and predefined[str]
+ a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
@@ -4359,6 +4379,7 @@ local function xmlconvert(data, settings)
utfize = settings.utfize_entities
resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
cleanup = settings.text_cleanup
stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { }
acache, hcache, dcache = { }, { }, { } -- not stored
@@ -4465,21 +4486,19 @@ the whole file first. The function accepts a string representing
a filename or a file handle.</p>
--ldx]]--
-function xml.load(filename)
+function xml.load(filename,settings)
+ local data = ""
if type(filename) == "string" then
+ -- local data = io.loaddata(filename) - -todo: check type in io.loaddata
local f = io.open(filename,'r')
if f then
- local root = xmlconvert(f:read("*all"))
+ data = f:read("*all")
f:close()
- return root
- else
- return xmlconvert("")
end
elseif filename then -- filehandle
- return xmlconvert(filename:read("*all"))
- else
- return xmlconvert("")
+ data = filename:read("*all")
end
+ return xmlconvert(data,settings)
end
--[[ldx--
@@ -5109,17 +5128,17 @@ apply_axis['child'] = function(list)
for l=1,#list do
local ll = list[l]
local dt = ll.dt
-local en = 0
+ local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
collected[#collected+1] = dk
dk.ni = k -- refresh
-en = en + 1
-dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
-ll.en = en
+ ll.en = en
end
return collected
end
@@ -5127,18 +5146,18 @@ end
local function collect(list,collected)
local dt = list.dt
if dt then
-local en = 0
+ local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
collected[#collected+1] = dk
dk.ni = k -- refresh
-en = en + 1
-dk.ei = en
+ en = en + 1
+ dk.ei = en
collect(dk,collected)
end
end
-list.en = en
+ list.en = en
end
end
apply_axis['descendant'] = function(list)
@@ -5152,18 +5171,18 @@ end
local function collect(list,collected)
local dt = list.dt
if dt then
-local en = 0
+ local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
collected[#collected+1] = dk
dk.ni = k -- refresh
-en = en + 1
-dk.ei = en
+ en = en + 1
+ dk.ei = en
collect(dk,collected)
end
end
-list.en = en
+ list.en = en
end
end
apply_axis['descendant-or-self'] = function(list)
@@ -5800,17 +5819,17 @@ parse_pattern = function (pattern) -- the gain of caching is rather minimal
add_comment(parsed, "initial-child removed") -- we could also make it a auto-self
remove(parsed,1)
end
-local np = #parsed -- can have changed
-if np > 1 then
- local pnp = parsed[np]
- if pnp.kind == "nodes" and pnp.nodetest == true then
- local nodes = pnp.nodes
- if nodes[1] == true and nodes[2] == false and nodes[3] == false then
- add_comment(parsed, "redundant final wildcard filter removed")
- remove(parsed,np)
- end
- end
-end
+ local np = #parsed -- can have changed
+ if np > 1 then
+ local pnp = parsed[np]
+ if pnp.kind == "nodes" and pnp.nodetest == true then
+ local nodes = pnp.nodes
+ if nodes[1] == true and nodes[2] == false and nodes[3] == false then
+ add_comment(parsed, "redundant final wildcard filter removed")
+ remove(parsed,np)
+ end
+ end
+ end
end
else
parsed = { pattern = pattern }
@@ -5836,6 +5855,10 @@ end
-- caching found lookups saves not that much (max .1 sec on a 8 sec run)
-- and it also messes up finalizers
+-- watch out: when there is a finalizer, it's always called as there
+-- can be cases that a finalizer returns (or does) something in case
+-- there is no match; an example of this is count()
+
local profiled = { } xml.profiled = profiled
local function profiled_apply(list,parsed,nofparsed,order)
@@ -5863,6 +5886,12 @@ local function profiled_apply(list,parsed,nofparsed,order)
return collected
end
if not collected or #collected == 0 then
+ local pn = i < nofparsed and parsed[nofparsed]
+ if pn and pn.kind == "finalizer" then
+ collected = pn.finalizer(collected)
+ p.finalized = p.finalized + 1
+ return collected
+ end
return nil
end
end
@@ -5894,10 +5923,16 @@ local function traced_apply(list,parsed,nofparsed,order)
logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
elseif kind == "finalizer" then
collected = pi.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(collected and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
return collected
end
if not collected or #collected == 0 then
+ local pn = i < nofparsed and parsed[nofparsed]
+ if pn and pn.kind == "finalizer" then
+ collected = pn.finalizer(collected)
+ logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ return collected
+ end
return nil
end
end
@@ -5922,6 +5957,10 @@ local function normal_apply(list,parsed,nofparsed,order)
return pi.finalizer(collected)
end
if not collected or #collected == 0 then
+ local pf = i < nofparsed and parsed[nofparsed].finalizer
+ if pf then
+ return pf(collected) -- can be anything
+ end
return nil
end
end
@@ -6698,7 +6737,7 @@ function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and
end
end
else
---~ str.ni = i
+ --~ str.ni = i
t[#t+1] = str
end
end
@@ -6708,6 +6747,78 @@ function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and
end
end
+function xml.strip_whitespace(root, pattern, nolines, anywhere) -- strips all leading and trailing spacing
+ local collected = xmlparseapply({ root },pattern) -- beware, indices no longer are valid now
+ if collected then
+ for i=1,#collected do
+ local e = collected[i]
+ local edt = e.dt
+ if edt then
+ if anywhere then
+ local t = { }
+ for e=1,#edt do
+ local str = edt[e]
+ if type(str) ~= "string" then
+ t[#t+1] = str
+ elseif str ~= "" then
+ -- todo: lpeg for each case
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s*(.-)%s*$","%1")
+ if str ~= "" then
+ t[#t+1] = str
+ end
+ end
+ end
+ e.dt = t
+ else
+ -- we can assume a regular sparse xml table with no successive strings
+ -- otherwise we should use a while loop
+ if #edt > 0 then
+ -- strip front
+ local str = edt[1]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt,1)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s+","")
+ if str == "" then
+ remove(edt,1)
+ else
+ edt[1] = str
+ end
+ end
+ end
+ if #edt > 1 then
+ -- strip end
+ local str = edt[#edt]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"%s+$","")
+ if str == "" then
+ remove(edt)
+ else
+ edt[#edt] = str
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
local function rename_space(root, oldspace, newspace) -- fast variant
local ndt = #root.dt
for i=1,ndt or 0 do
@@ -10623,11 +10734,13 @@ function statistics.check_fmt_status(texname)
local luv = dofile(luvname)
if luv and luv.sourcefile then
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
- if luv.enginebanner and luv.enginebanner ~= enginebanner then
- return "engine mismatch"
+ local luvbanner = luv.enginebanner or "?"
+ if luvbanner ~= enginebanner then
+ return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
end
- if luv.sourcehash and luv.sourcehash ~= sourcehash then
- return "source mismatch"
+ local luvhash = luv.sourcehash or "?"
+ if luvhash ~= sourcehash then
+ return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
end
else
return "invalid status file"
@@ -10982,7 +11095,7 @@ local _path_, libpaths, _cpath_, clibpaths
function package.libpaths()
if not _path_ or package.path ~= _path_ then
_path_ = package.path
- libpaths = file.split_path(_path_)
+ libpaths = file.split_path(_path_,";")
end
return libpaths
end
@@ -10990,7 +11103,7 @@ end
function package.clibpaths()
if not _cpath_ or package.cpath ~= _cpath_ then
_cpath_ = package.cpath
- clibpaths = file.split_path(_cpath_)
+ clibpaths = file.split_path(_cpath_,";")
end
return clibpaths
end
diff --git a/scripts/context/stubs/source/mtxrun_dll.c b/scripts/context/stubs/source/mtxrun_dll.c
index 540bed38b..5b7cd31a0 100644
--- a/scripts/context/stubs/source/mtxrun_dll.c
+++ b/scripts/context/stubs/source/mtxrun_dll.c
@@ -5,8 +5,8 @@
Public Domain
Originally written in 2010 by Tomasz M. Trzeciak and Hans Hagen
- This program is derived from the 'runscript' program originally
- written in 2009 by T.M. Trzeciak. It has been adapted for use in
+ This program is derived from the 'runscript' program originally
+ written in 2009 by T.M. Trzeciak. It has been adapted for use in
ConTeXt MkIV.
Comment:
@@ -18,26 +18,26 @@
mtxrun --script font --reload
Here mtxrun is a lua script. In order to avoid the usage of a cmd
- file on windows this runner will start texlua directly. If the
- shared library luatex.dll is available, texlua will be started in
- the same process avoiding thus any additional overhead. Otherwise
+ file on windows this runner will start texlua directly. If the
+ shared library luatex.dll is available, texlua will be started in
+ the same process avoiding thus any additional overhead. Otherwise
it will be spawned in a new proces.
We also don't want to use other runners, like those that use kpse
to locate the script as this is exactly what mtxrun itself is doing
already. Therefore the runscript program is adapted to a more direct
approach suitable for mtxrun.
-
+
Compilation:
with gcc (size optimized):
- gcc -Os -s -shared -o mtxrun.dll mtxrun_dll.c
+ gcc -Os -s -shared -o mtxrun.dll mtxrun_dll.c
gcc -Os -s -o mtxrun.exe mtxrun_exe.c -L./ -lmtxrun
with tcc (extra small size):
-
- tcc -shared -o mtxrun.dll mtxrun_dll.c
+
+ tcc -shared -o mtxrun.dll mtxrun_dll.c
tcc -o mtxrun.exe mtxrun_exe.c mtxrun.def
************************************************************************/
@@ -60,46 +60,45 @@ static char cmdline[MAX_CMD];
static char dirpath[MAX_PATH];
static char progname[MAX_PATH];
static char scriptpath[MAX_PATH];
+static char luatexpath[MAX_PATH];
HMODULE dllluatex = NULL;
typedef int ( *mainlikeproc )( int, char ** );
#ifdef STATIC
-int main( int argc, char *argv[] )
+int main( int argc, char *argv[] )
#else
-__declspec(dllexport) int dllrunscript( int argc, char *argv[] )
+__declspec(dllexport) int dllrunscript( int argc, char *argv[] )
#endif
{
- char *s, *argstr, **lua_argv;
+ char *s, *luatexfname, *argstr, **lua_argv;
int k, quoted, lua_argc;
int passprogname = 0;
// directory of this module/executable
-
- HMODULE module_handle = GetModuleHandle( "mtxrun.dll" );
+
+ HMODULE module_handle = GetModuleHandle( "mtxrun.dll" );
// if ( module_handle == NULL ) exe path will be used, which is OK too
k = (int) GetModuleFileName( module_handle, dirpath, MAX_PATH );
- if ( !k || ( k == MAX_PATH ) )
+ if ( !k || ( k == MAX_PATH ) )
DIE( "unable to determine a valid module name\n" );
s = strrchr(dirpath, '\\');
if ( s == NULL ) DIE( "no directory part in module path: %s\n", dirpath );
*(++s) = '\0'; //remove file name, leave trailing backslash
-
+
// program name
-
+
k = strlen(argv[0]);
- while ( k && (argv[0][k] != '/') && (argv[0][k] != '\\') ) k--;
- if ((argv[0][k] == '/') || (argv[0][k] == '\\')) k++; // correct for slash
- // while ( k && (argv[0][k-1] != '/') && (argv[0][k-1] != '\\') ) k--;
+ while ( k && (argv[0][k-1] != '/') && (argv[0][k-1] != '\\') ) k--;
strcpy(progname, &argv[0][k]);
s = progname;
if ( s = strrchr(s, '.') ) *s = '\0'; // remove file extension part
-
+
// script path
-
+
strcpy( scriptpath, dirpath );
k = strlen(progname);
if ( k < 6 ) k = 6; // in case the program name is shorter than "mtxrun"
- if ( strlen(dirpath) + k + 4 >= MAX_PATH )
+ if ( strlen(dirpath) + k + 4 >= MAX_PATH )
DIE( "path too long: %s%s\n", dirpath, progname );
if ( ( strcmpi(progname,"mtxrun") == 0 ) || ( strcmpi(progname,"luatools") == 0 ) ) {
strcat( scriptpath, progname );
@@ -108,19 +107,31 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
strcat( scriptpath, "mtxrun.lua" );
if ( strcmpi(progname,"texmfstart") != 0 ) passprogname = 1;
}
- if ( GetFileAttributes(scriptpath) == INVALID_FILE_ATTRIBUTES )
+ if ( GetFileAttributes(scriptpath) == INVALID_FILE_ATTRIBUTES )
DIE( "file not found: %s\n", scriptpath );
+
+ // find texlua.exe
+
+ if ( !SearchPath(
+ getenv( "PATH" ), // path to search (optional)
+ "texlua.exe", // file name to search
+ NULL, // file extension to add (optional)
+ MAX_PATH, // output buffer size
+ luatexpath, // output buffer pointer
+ &luatexfname ) // pointer to a file part in the output buffer (optional)
+ ) DIE( "unable to locate texlua.exe on the search path" );
- // link with luatex.dll if available
+ // link directly with luatex.dll if available in texlua's dir
- if ( dllluatex = LoadLibrary("luatex.dll") )
+ strcpy( luatexfname, "luatex.dll" );
+ if ( dllluatex = LoadLibrary(luatexpath) )
{
mainlikeproc dllluatexmain = (mainlikeproc) GetProcAddress( dllluatex, "dllluatexmain" );
- if ( dllluatexmain == NULL )
+ if ( dllluatexmain == NULL )
DIE( "unable to locate dllluatexmain procedure in luatex.dll" );
-
+
// set up argument list for texlua script
-
+
lua_argv = (char **)malloc( (argc + 4) * sizeof(char *) );
if ( lua_argv == NULL ) DIE( "out of memory\n" );
lua_argv[lua_argc=0] = texlua_name;
@@ -128,29 +139,23 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
if (passprogname) {
lua_argv[++lua_argc] = "--script";
lua_argv[++lua_argc] = progname;
- }
+ }
for ( k = 1; k < argc; k++ ) lua_argv[++lua_argc] = argv[k];
lua_argv[++lua_argc] = NULL;
// call texlua interpreter
// dllluatexmain never returns, but we pretend that it does
-
+
k = dllluatexmain( lua_argc, lua_argv );
if (lua_argv) free( lua_argv );
return k;
}
-
+
// we are still here, so no luatex.dll; spawn texlua.exe instead
+ strcpy( luatexfname, "texlua.exe" );
strcpy( cmdline, "\"" );
- if ( !SearchPath(
- getenv( "PATH" ), // path to search (optional)
- "texlua.exe", // file name to search
- NULL, // file extension to add (optional)
- MAX_CMD - 3, // output buffer size
- &cmdline[1], // output buffer pointer
- NULL ) // pointer to a file part variable (optional)
- ) DIE( "unable to locate texlua.exe on the search path" );
+ strcat( cmdline, luatexpath );
strcat( cmdline, "\" \"" );
strcat( cmdline, scriptpath );
strcat( cmdline, "\"" );
@@ -158,24 +163,24 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
strcat( cmdline, " --script " );
strcat( cmdline, progname );
}
-
+
argstr = GetCommandLine(); // get the command line of this process
if ( argstr == NULL ) DIE( "unable to retrieve the command line string\n" );
// skip over argv[0] in the argument string
// (it can contain embedded double quotes if launched from cmd.exe!)
-
- for ( quoted = 0; (*argstr) && ( !IS_WHITESPACE(*argstr) || quoted ); argstr++ )
+
+ for ( quoted = 0; (*argstr) && ( !IS_WHITESPACE(*argstr) || quoted ); argstr++ )
if (*argstr == '"') quoted = !quoted;
-
+
// pass through all the arguments
-
- if ( strlen(cmdline) + strlen(argstr) >= MAX_CMD )
+
+ if ( strlen(cmdline) + strlen(argstr) >= MAX_CMD )
DIE( "command line string too long:\n%s%s\n", cmdline, argstr );
- strcat( cmdline, argstr );
-
+ strcat( cmdline, argstr );
+
// create child process
-
+
STARTUPINFO si;
PROCESS_INFORMATION pi;
ZeroMemory( &si, sizeof(si) );
@@ -187,7 +192,7 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
si.hStdOutput = GetStdHandle( STD_OUTPUT_HANDLE );
si.hStdError = GetStdHandle( STD_ERROR_HANDLE );
ZeroMemory( &pi, sizeof(pi) );
-
+
if( !CreateProcess(
NULL, // module name (uses command line if NULL)
cmdline, // command line
@@ -200,17 +205,17 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
&si, // STARTUPINFO structure
&pi ) // PROCESS_INFORMATION structure
) DIE( "command execution failed: %s\n", cmdline );
-
+
DWORD ret = 0;
CloseHandle( pi.hThread ); // thread handle is not needed
if ( WaitForSingleObject( pi.hProcess, INFINITE ) == WAIT_OBJECT_0 ) {
- if ( !GetExitCodeProcess( pi.hProcess, &ret) )
+ if ( !GetExitCodeProcess( pi.hProcess, &ret) )
DIE( "unable to retrieve process exit code: %s\n", cmdline );
} else DIE( "failed to wait for process termination: %s\n", cmdline );
CloseHandle( pi.hProcess );
-
+
// propagate exit code from the child process
-
- return ret;
-
+
+ return ret;
+
}
diff --git a/scripts/context/stubs/unix/luatools b/scripts/context/stubs/unix/luatools
index 35a61ea43..ccedd97cd 100755
--- a/scripts/context/stubs/unix/luatools
+++ b/scripts/context/stubs/unix/luatools
@@ -7419,11 +7419,13 @@ function statistics.check_fmt_status(texname)
local luv = dofile(luvname)
if luv and luv.sourcefile then
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
- if luv.enginebanner and luv.enginebanner ~= enginebanner then
- return "engine mismatch"
+ local luvbanner = luv.enginebanner or "?"
+ if luvbanner ~= enginebanner then
+ return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
end
- if luv.sourcehash and luv.sourcehash ~= sourcehash then
- return "source mismatch"
+ local luvhash = luv.sourcehash or "?"
+ if luvhash ~= sourcehash then
+ return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
end
else
return "invalid status file"
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 3c72f59d1..cefa3192b 100755
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -3950,7 +3950,7 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined = false, false, false, false, false
+local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
local dcache, hcache, acache = { }, { }, { }
local mt = { }
@@ -4078,22 +4078,72 @@ function xml.unknown_dec_entity_format(str) return (str == "" and "&error;") or
function xml.unknown_hex_entity_format(str) return format("&#x%s;",str) end
function xml.unknown_any_entity_format(str) return format("&#x%s;",str) end
+local function fromhex(s)
+ local n = tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return format("h:%s",s), true
+ end
+end
+
+local function fromdec(s)
+ local n = tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return format("d:%s",s), true
+ end
+end
+
+-- one level expansion (simple case), no checking done
+
+local rest = (1-P(";"))^0
+local many = P(1)^0
+
+local parsedentity =
+ P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
+ (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
+
+-- parsing in the xml file
+
+local predefined_unified = {
+ [38] = "&amp;",
+ [42] = "&quot;",
+ [47] = "&apos;",
+ [74] = "&lt;",
+ [76] = "&gr;",
+}
+
+local predefined_simplified = {
+ [38] = "&", amp = "&",
+ [42] = '"', quot = '"',
+ [47] = "'", apos = "'",
+ [74] = "<", lt = "<",
+ [76] = ">", gt = ">",
+}
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
- if utfize then
- local n = tonumber(str,16)
+ local n = tonumber(str,16)
+ h = unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ end
+ elseif utfize then
h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or ""
if not n then
logs.report("xml","utfize, ignoring hex entity &#x%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,c)
+ logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
end
else
if trace_entities then
logs.report("xml","found entity &#x%s;",str)
end
- h = "&#c" .. str .. ";"
+ h = "&#x" .. str .. ";"
end
hcache[str] = h
end
@@ -4103,13 +4153,18 @@ end
local function handle_dec_entity(str)
local d = dcache[str]
if not d then
- if utfize then
- local n = tonumber(str)
+ local n = tonumber(str)
+ d = unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d)
+ end
+ elseif utfize then
d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or ""
if not n then
logs.report("xml","utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,c)
+ logs.report("xml","utfize, converting dec entity &#%s; into %s",str,h)
end
else
if trace_entities then
@@ -4122,48 +4177,13 @@ local function handle_dec_entity(str)
return d
end
--- one level expansion (simple case)
-
-local function fromhex(s)
- local n = tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return format("h:%s",s), true
- end
-end
-
-local function fromdec(s)
- local n = tonumber(s)
- if n then
- return utfchar(n)
- else
- return format("d:%s",s), true
- end
-end
-
-local rest = (1-P(";"))^0
-local many = P(1)^0
-
-local parsedentity =
- P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
- (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
-
xml.parsedentitylpeg = parsedentity
-local predefined = {
- amp = "&",
- lt = "<",
- gt = ">",
- quot = '"',
- apos = "'",
-}
-
local function handle_any_entity(str)
if resolve then
local a = acache[str] -- per instance ! todo
if not a then
- a = resolve_predefined and predefined[str]
+ a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
elseif type(resolve) == "function" then
@@ -4209,7 +4229,7 @@ local function handle_any_entity(str)
if trace_entities then
logs.report("xml","found entity &%s;",str)
end
- a = resolve_predefined and predefined[str]
+ a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
@@ -4359,6 +4379,7 @@ local function xmlconvert(data, settings)
utfize = settings.utfize_entities
resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
cleanup = settings.text_cleanup
stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { }
acache, hcache, dcache = { }, { }, { } -- not stored
@@ -4465,21 +4486,19 @@ the whole file first. The function accepts a string representing
a filename or a file handle.</p>
--ldx]]--
-function xml.load(filename)
+function xml.load(filename,settings)
+ local data = ""
if type(filename) == "string" then
+ -- local data = io.loaddata(filename) - -todo: check type in io.loaddata
local f = io.open(filename,'r')
if f then
- local root = xmlconvert(f:read("*all"))
+ data = f:read("*all")
f:close()
- return root
- else
- return xmlconvert("")
end
elseif filename then -- filehandle
- return xmlconvert(filename:read("*all"))
- else
- return xmlconvert("")
+ data = filename:read("*all")
end
+ return xmlconvert(data,settings)
end
--[[ldx--
@@ -5109,17 +5128,17 @@ apply_axis['child'] = function(list)
for l=1,#list do
local ll = list[l]
local dt = ll.dt
-local en = 0
+ local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
collected[#collected+1] = dk
dk.ni = k -- refresh
-en = en + 1
-dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
-ll.en = en
+ ll.en = en
end
return collected
end
@@ -5127,18 +5146,18 @@ end
local function collect(list,collected)
local dt = list.dt
if dt then
-local en = 0
+ local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
collected[#collected+1] = dk
dk.ni = k -- refresh
-en = en + 1
-dk.ei = en
+ en = en + 1
+ dk.ei = en
collect(dk,collected)
end
end
-list.en = en
+ list.en = en
end
end
apply_axis['descendant'] = function(list)
@@ -5152,18 +5171,18 @@ end
local function collect(list,collected)
local dt = list.dt
if dt then
-local en = 0
+ local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
collected[#collected+1] = dk
dk.ni = k -- refresh
-en = en + 1
-dk.ei = en
+ en = en + 1
+ dk.ei = en
collect(dk,collected)
end
end
-list.en = en
+ list.en = en
end
end
apply_axis['descendant-or-self'] = function(list)
@@ -5800,17 +5819,17 @@ parse_pattern = function (pattern) -- the gain of caching is rather minimal
add_comment(parsed, "initial-child removed") -- we could also make it a auto-self
remove(parsed,1)
end
-local np = #parsed -- can have changed
-if np > 1 then
- local pnp = parsed[np]
- if pnp.kind == "nodes" and pnp.nodetest == true then
- local nodes = pnp.nodes
- if nodes[1] == true and nodes[2] == false and nodes[3] == false then
- add_comment(parsed, "redundant final wildcard filter removed")
- remove(parsed,np)
- end
- end
-end
+ local np = #parsed -- can have changed
+ if np > 1 then
+ local pnp = parsed[np]
+ if pnp.kind == "nodes" and pnp.nodetest == true then
+ local nodes = pnp.nodes
+ if nodes[1] == true and nodes[2] == false and nodes[3] == false then
+ add_comment(parsed, "redundant final wildcard filter removed")
+ remove(parsed,np)
+ end
+ end
+ end
end
else
parsed = { pattern = pattern }
@@ -5836,6 +5855,10 @@ end
-- caching found lookups saves not that much (max .1 sec on a 8 sec run)
-- and it also messes up finalizers
+-- watch out: when there is a finalizer, it's always called as there
+-- can be cases that a finalizer returns (or does) something in case
+-- there is no match; an example of this is count()
+
local profiled = { } xml.profiled = profiled
local function profiled_apply(list,parsed,nofparsed,order)
@@ -5863,6 +5886,12 @@ local function profiled_apply(list,parsed,nofparsed,order)
return collected
end
if not collected or #collected == 0 then
+ local pn = i < nofparsed and parsed[nofparsed]
+ if pn and pn.kind == "finalizer" then
+ collected = pn.finalizer(collected)
+ p.finalized = p.finalized + 1
+ return collected
+ end
return nil
end
end
@@ -5894,10 +5923,16 @@ local function traced_apply(list,parsed,nofparsed,order)
logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
elseif kind == "finalizer" then
collected = pi.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(collected and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
return collected
end
if not collected or #collected == 0 then
+ local pn = i < nofparsed and parsed[nofparsed]
+ if pn and pn.kind == "finalizer" then
+ collected = pn.finalizer(collected)
+ logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ return collected
+ end
return nil
end
end
@@ -5922,6 +5957,10 @@ local function normal_apply(list,parsed,nofparsed,order)
return pi.finalizer(collected)
end
if not collected or #collected == 0 then
+ local pf = i < nofparsed and parsed[nofparsed].finalizer
+ if pf then
+ return pf(collected) -- can be anything
+ end
return nil
end
end
@@ -6698,7 +6737,7 @@ function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and
end
end
else
---~ str.ni = i
+ --~ str.ni = i
t[#t+1] = str
end
end
@@ -6708,6 +6747,78 @@ function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and
end
end
+function xml.strip_whitespace(root, pattern, nolines, anywhere) -- strips all leading and trailing spacing
+ local collected = xmlparseapply({ root },pattern) -- beware, indices no longer are valid now
+ if collected then
+ for i=1,#collected do
+ local e = collected[i]
+ local edt = e.dt
+ if edt then
+ if anywhere then
+ local t = { }
+ for e=1,#edt do
+ local str = edt[e]
+ if type(str) ~= "string" then
+ t[#t+1] = str
+ elseif str ~= "" then
+ -- todo: lpeg for each case
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s*(.-)%s*$","%1")
+ if str ~= "" then
+ t[#t+1] = str
+ end
+ end
+ end
+ e.dt = t
+ else
+ -- we can assume a regular sparse xml table with no successive strings
+ -- otherwise we should use a while loop
+ if #edt > 0 then
+ -- strip front
+ local str = edt[1]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt,1)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s+","")
+ if str == "" then
+ remove(edt,1)
+ else
+ edt[1] = str
+ end
+ end
+ end
+ if #edt > 1 then
+ -- strip end
+ local str = edt[#edt]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"%s+$","")
+ if str == "" then
+ remove(edt)
+ else
+ edt[#edt] = str
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
local function rename_space(root, oldspace, newspace) -- fast variant
local ndt = #root.dt
for i=1,ndt or 0 do
@@ -10623,11 +10734,13 @@ function statistics.check_fmt_status(texname)
local luv = dofile(luvname)
if luv and luv.sourcefile then
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
- if luv.enginebanner and luv.enginebanner ~= enginebanner then
- return "engine mismatch"
+ local luvbanner = luv.enginebanner or "?"
+ if luvbanner ~= enginebanner then
+ return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
end
- if luv.sourcehash and luv.sourcehash ~= sourcehash then
- return "source mismatch"
+ local luvhash = luv.sourcehash or "?"
+ if luvhash ~= sourcehash then
+ return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
end
else
return "invalid status file"
@@ -10982,7 +11095,7 @@ local _path_, libpaths, _cpath_, clibpaths
function package.libpaths()
if not _path_ or package.path ~= _path_ then
_path_ = package.path
- libpaths = file.split_path(_path_)
+ libpaths = file.split_path(_path_,";")
end
return libpaths
end
@@ -10990,7 +11103,7 @@ end
function package.clibpaths()
if not _cpath_ or package.cpath ~= _cpath_ then
_cpath_ = package.cpath
- clibpaths = file.split_path(_cpath_)
+ clibpaths = file.split_path(_cpath_,";")
end
return clibpaths
end
diff --git a/tex/context/base/cont-new.tex b/tex/context/base/cont-new.tex
index 93f757f67..f15d6f259 100644
--- a/tex/context/base/cont-new.tex
+++ b/tex/context/base/cont-new.tex
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2010.05.07 14:21}
+\newcontextversion{2010.05.08 13:33}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/context.tex b/tex/context/base/context.tex
index 00eabe64c..79768a78b 100644
--- a/tex/context/base/context.tex
+++ b/tex/context/base/context.tex
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2010.05.07 14:21}
+\edef\contextversion{2010.05.08 13:33}
%D For those who want to use this:
diff --git a/tex/context/base/data-use.lua b/tex/context/base/data-use.lua
index 123cc0eb8..593b03ad9 100644
--- a/tex/context/base/data-use.lua
+++ b/tex/context/base/data-use.lua
@@ -109,11 +109,13 @@ function statistics.check_fmt_status(texname)
local luv = dofile(luvname)
if luv and luv.sourcefile then
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
- if luv.enginebanner and luv.enginebanner ~= enginebanner then
- return "engine mismatch"
+ local luvbanner = luv.enginebanner or "?"
+ if luvbanner ~= enginebanner then
+ return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
end
- if luv.sourcehash and luv.sourcehash ~= sourcehash then
- return "source mismatch"
+ local luvhash = luv.sourcehash or "?"
+ if luvhash ~= sourcehash then
+ return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
end
else
return "invalid status file"
diff --git a/tex/context/base/metatex.tex b/tex/context/base/metatex.tex
index 84c1268db..e90af709c 100644
--- a/tex/context/base/metatex.tex
+++ b/tex/context/base/metatex.tex
@@ -59,11 +59,11 @@
% needs stripping:
-\loadmarkfile{catc-ini} % catcode table management
-\loadcorefile{catc-act} % active character definition mechanisms
-\loadcorefile{catc-def} % some generic catcode tables
-\loadcorefile{catc-ctx} % a couple of context specific tables but expected by later modules
-\loadcorefile{catc-sym} % some definitions related to \letter<tokens>
+\loadmarkfile{catc-ini} % catcode table management
+\loadcorefile{catc-act} % active character definition mechanisms
+\loadcorefile{catc-def} % some generic catcode tables
+\loadcorefile{catc-ctx} % a couple of context specific tables but expected by later modules
+\loadcorefile{catc-sym} % some definitions related to \letter<tokens>
% helpers, maybe less
diff --git a/tex/context/base/mlib-ctx.lua b/tex/context/base/mlib-ctx.lua
index 7d7e936cf..cc5682e6f 100644
--- a/tex/context/base/mlib-ctx.lua
+++ b/tex/context/base/mlib-ctx.lua
@@ -62,7 +62,7 @@ end
function metapost.theclippath(...)
local result = metapost.getclippath(...)
if result then -- we could just print the table
- result = join(metapost.flushnormalpath(object.path),"\n")
+ result = join(metapost.flushnormalpath(result),"\n")
sprint(result)
end
end
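
The one-word change above replaces object.path, where object is not defined anywhere in the shown context, with the result that metapost.getclippath actually returned, so flushing the clip path no longer indexes a nil value. A minimal reproduction of the old failure mode, outside of ConTeXt:

    local object                                   -- plays the role of the undefined name
    local ok, err = pcall(function() return object.path end)
    print(ok, err)                                 -- false, "attempt to index ... (a nil value)"
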
diff --git a/tex/context/base/page-str.lua b/tex/context/base/page-str.lua
new file mode 100644
index 000000000..c4d1957c3
--- /dev/null
+++ b/tex/context/base/page-str.lua
@@ -0,0 +1,221 @@
+if not modules then modules = { } end modules ['page-str'] = {
+ version = 1.001,
+ comment = "companion to page-str.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- work in progress .. unfinished
+
+local concat = table.concat
+
+local find_tail, write_node, free_node, copy_nodelist = node.slide, node.write, node.free, node.copy_list
+local vpack_nodelist, hpack_nodelist = node.vpack, node.hpack
+local texdimen, texbox = tex.dimen, tex.box
+
+local new_kern = nodes.kern
+local new_glyph = nodes.glyph
+
+local trace_collecting = false trackers.register("streams.collecting", function(v) trace_collecting = v end)
+local trace_flushing = false trackers.register("streams.flushing", function(v) trace_flushing = v end)
+
+streams = streams or { }
+
+local data, name, stack = { }, nil, { }
+
+function streams.enable(newname)
+ if newname == "default" then
+ name = nil
+ else
+ name = newname
+ end
+end
+
+function streams.disable()
+    name = stack[#stack] -- fall back to the innermost started stream (if any), without popping
+end
+
+function streams.start(newname)
+ table.insert(stack,name)
+ name = newname
+end
+
+function streams.stop(newname) -- newname is unused; the name saved by streams.start is restored
+ name = table.remove(stack)
+end
+
+function streams.collect(head,where)
+ if name and head and name ~= "default" then
+ local tail = node.slide(head)
+ local dana = data[name]
+ if not dana then
+ dana = { }
+ data[name] = dana
+ end
+ local last = dana[#dana]
+ if last then
+ local tail = find_tail(last)
+ tail.next, head.prev = head, tail
+ elseif last == false then
+ dana[#dana] = head
+ else
+ dana[1] = head
+ end
+ if trace_collecting then
+ logs.report("streams","appending snippet '%s' to slot %s",name,#dana)
+ end
+ return nil, true
+ else
+ return head, false
+ end
+end
+
+function streams.push(thename)
+ if not thename or thename == "" then
+ thename = name
+ end
+ if thename and thename ~= "" then
+ local dana = data[thename]
+ if dana then
+ dana[#dana+1] = false
+ if trace_collecting then
+ logs.report("streams","pushing snippet '%s'",thename)
+ end
+ end
+ end
+end
+
+function streams.flush(name,copy) -- problem: we need to migrate afterwards
+ local dana = data[name]
+ if dana then
+ local dn = #dana
+ if dn == 0 then
+ -- nothing to flush
+ elseif copy then
+ if trace_flushing then
+ logs.report("streams","flushing copies of %s slots of '%s'",dn,name)
+ end
+ for i=1,dn do
+ local di = dana[i]
+ if di then
+ write_node(copy_nodelist(di.list)) -- list, will be option
+ end
+ end
+            if copy then -- always true in this branch: the collected data is dropped after copying
+                data[name] = nil
+            end
+ else
+ if trace_flushing then
+ logs.report("streams","flushing %s slots of '%s'",dn,name)
+ end
+ for i=1,dn do
+ local di = dana[i]
+ if di then
+ write_node(di.list) -- list, will be option
+ di.list = nil
+ free_node(di)
+ end
+ end
+ end
+ end
+end
+
+function streams.synchronize(list) -- this is an experiment !
+ -- we don't optimize this as we want to trace in detail
+ list = aux.settings_to_array(list)
+ local max = 0
+ if trace_flushing then
+ logs.report("streams","synchronizing list: %s",concat(list," "))
+ end
+ for i=1,#list do
+ local dana = data[list[i]]
+ if dana then
+ local n = #dana
+ if n > max then
+ max = n
+ end
+ end
+ end
+ if trace_flushing then
+ logs.report("streams","maximum number of slots: %s",max)
+ end
+ for m=1,max do
+ local height, depth = 0, 0
+ for i=1,#list do
+ local name = list[i]
+ local dana = data[name]
+            local slot = dana and dana[m] -- a listed stream may have collected nothing at all
+ if slot then
+ local vbox = vpack_nodelist(slot)
+ local ht, dp = vbox.height, vbox.depth
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ dana[m] = vbox
+ if trace_flushing then
+ logs.report("streams","slot %s of '%s' is packed to height %s and depth %s",m,name,ht,dp)
+ end
+ end
+ end
+ if trace_flushing then
+ logs.report("streams","slot %s has max height %s and max depth %s",m,height,depth)
+ end
+ local strutht, strutdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth
+ local struthtdp = strutht + strutdp
+ for i=1,#list do
+ local name = list[i]
+ local dana = data[name]
+            local vbox = dana and dana[m] -- as above, guard against a missing stream
+ if vbox then
+ local delta_height = height - vbox.height
+ local delta_depth = depth - vbox.depth
+ if delta_height > 0 or delta_depth > 0 then
+ if false then
+ -- actually we need to add glue and repack
+ vbox.height, vbox.depth = height, depth
+ if trace_flushing then
+ logs.report("streams","slot %s of '%s' with delta (%s,%s) is compensated",m,i,delta_height,delta_depth)
+ end
+ else
+ -- this is not yet ok as we also need to keep an eye on vertical spacing
+ -- so we might need to do some splitting or whatever
+ local tail = vbox.list and find_tail(vbox.list)
+ local n, delta = 0, delta_height -- for tracing
+ while delta > 0 do
+ -- we need to add some interline penalties
+ local line = copy_nodelist(tex.box.strutbox)
+ line.height, line.depth = strutht, strutdp
+ if tail then
+ tail.next, line.prev = line, tail
+ end
+ tail = line
+ n, delta = n +1, delta - struthtdp
+ end
+ dana[m] = vpack_nodelist(vbox.list)
+ vbox.list = nil
+ free_node(vbox)
+ if trace_flushing then
+ logs.report("streams","slot %s:%s with delta (%s,%s) is compensated by %s lines",m,i,delta_height,delta_depth,n)
+ end
+ end
+ end
+ else
+ -- make dummy
+ end
+ end
+ end
+end
+
+tasks.appendaction("mvlbuilders", "normalizers", "streams.collect")
+
+tasks.disableaction("mvlbuilders", "streams.collect")
+
+function streams.initialize()
+ tasks.enableaction ("mvlbuilders", "streams.collect")
+end
+
+-- todo: remove empty last { }'s
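
Because page-str.lua is new and explicitly marked as work in progress, a rough sketch of its bookkeeping may help: data[name] holds one array of slots per stream, streams.push appends a false placeholder that opens the next slot, and streams.collect either fills that placeholder or chains onto the current one; streams.synchronize later packs each slot and pads the shorter ones with strut lines. The sketch below uses plain strings instead of node lists, so it only illustrates the slot logic, not the node handling:

    local data = { }

    local function push(name)                -- cf. streams.push above
        local dana = data[name]
        if dana then
            dana[#dana+1] = false            -- open a new, still empty slot
        end
    end

    local function collect(name,snippet)     -- cf. streams.collect above
        local dana = data[name]
        if not dana then
            dana = { }
            data[name] = dana
        end
        local last = dana[#dana]
        if last == false then
            dana[#dana] = snippet            -- fill the slot opened by push
        elseif last then
            dana[#dana] = last .. " " .. snippet -- append to the current slot
        else
            dana[1] = snippet                -- very first snippet of this stream
        end
    end

    collect("en","one") push("en") collect("en","two")
    -- data.en is now { "one", "two" }: two slots, ready to be synchronized

Everything here is illustrative; the real module works on LuaTeX node lists, packs each slot with node.vpack and compensates height differences by appending copies of the strut box.
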
diff --git a/tex/context/base/page-str.mkiv b/tex/context/base/page-str.mkiv
index 4fd5d58a5..4610c7f71 100644
--- a/tex/context/base/page-str.mkiv
+++ b/tex/context/base/page-str.mkiv
@@ -1,6 +1,6 @@
%D \module
%D [ file=page-str,
-%D version=2006.03.21,
+%D version=2010.03.13, % 2006.03.21,
%D title=\CONTEXT\ Page Macros,
%D subtitle=Page Streams,
%D author=Hans Hagen,
@@ -22,103 +22,76 @@
%D
%D These macros were written while listening to and watching the DVD
%D \quotation {Rush In Rio}.
+%D
+%D The reimplementation (or rather the experiment with a complete
+%D rewrite) was done while looping over \quotation {Wende Snijders
+%D No.9}.
+%D
+%D Remark: marknotes are gone, at least for a while.
-% not yet ok in mkiv ... marknotes .. will be completely redone
+\writestatus{loading}{ConTeXt Page Macros / Page Streams}
-\endinput
+\registerctxluafile{page-str}{1.001}
\unprotect
-\let\currentoutputstream\s!default
-
-\newtoks\defaultstreamoutput \defaultstreamoutput=\OTRONEoutput
-
-\newtoks\normalstreamoutput \normalstreamoutput={\saveoutputstream[\currentoutputstream]}
-
-\newcount\streampenalty \streampenalty=-101010101
-
-\ifx\multicolumnseject\undefined \else
- \let\normalmulticolumnseject\multicolumnseject
- \def\multicolumnseject{\ifinoutputstream\else\normalmulticolumnseject\fi}
-\fi
-
-\newif\ifinoutputstream
-
+\let \currentoutputstream \empty
+\newif \ifinoutputstream
\newtoks \everyenableoutputstream
\appendtoks
- \flushsidefloats
+ \flushsidefloats
\to \everyenableoutputstream
+\def\initializeoutputstreams
+ {\ctxlua{streams.initialize()}%
+ \glet\initializeoutputstreams\relax}
+
\def\enableoutputstream[#1]%
- {\the\everyenableoutputstream
- \finishoutputstream
- \writestatus{otr}{switching to output stream #1}%
+ {\initializeoutputstreams
+ \the\everyenableoutputstream
\inoutputstreamtrue
- \xdef\currentoutputstream{#1}}
+ \xdef\currentoutputstream{#1}%
+ \ctxlua{streams.enable("#1")}}
\def\disableoutputstream
- {\finishoutputstream
- \writestatus{otr}{switching to default output stream}%
- \inoutputstreamfalse
- \global\let\currentoutputstream\s!default}
-
-\def\useoutputstream[#1]%
- {\writestatus{otr}{using output stream #1}%
- \xdef\currentoutputstream{#1}}
-
-\def\handlestreamoutput
- {\ifx\currentoutputstream\s!default % already expanded
- \ifnum\outputpenalty=\streampenalty
- \ifvoid\normalpagebox \else
- \unvbox\normalpagebox
- \fi
- \else
- \the\defaultstreamoutput
- \fi
- \else
- \the\normalstreamoutput
- \fi}
-
-\OTRONEoutput{\handlestreamoutput}
-
-\def\defineoutputstream[#1]%
- {\doifundefined{otrs:#1}{\expandafter\newbox\csname otrs:#1\endcsname}}
-
-\def\outputstreamtag#1%
- {\csname otrs:#1\endcsname}
-
-\def\finishoutputstream % todo: installoutput
- {\endgraf
- \penalty\streampenalty
- \endgraf}
-
-\def\saveoutputstream[#1]%
- {\writestatus{otr}{saving otr stream #1}%
- \ifvoid\normalpagebox
- \global\setbox\outputstreamtag{#1}\emptybox
- \else
- \global\setbox\outputstreamtag{#1}\vbox
- {\presetoutputstream
- \ifvoid\outputstreamtag{#1}\else\unvbox\outputstreamtag{#1}\fi
- \scratchdimen\dp\normalpagebox
- \unvbox\normalpagebox
- \vskip-\scratchdimen
- \kern\strutdepth}%
- \fi}
-
-\let\presetoutputstream\relax
-
-\def\outputstreamht [#1]{\ht\outputstreamtag{#1}}
-\def\outputstreamdp [#1]{\dp\outputstreamtag{#1}}
-\def\outputstreamwd [#1]{\wd\outputstreamtag{#1}}
-
-\def\dowithoutputstreambox#1[#2]{\ifvoid\outputstreamtag{#2}\else#1\outputstreamtag{#2}\fi}
-
-\def\outputstreamcopy {\dowithoutputstreambox\copy }
-\def\outputstreambox {\dowithoutputstreambox\box }
-\def\outputstreamunvcopy{\dowithoutputstreambox\unvcopy}
-\def\outputstreamunvbox {\dowithoutputstreambox\unvbox }
+ {\inoutputstreamfalse
+ \global\let\currentoutputstream\s!default
+ \ctxlua{streams.disable()}}
+
+\def\startoutputstream[#1]%
+ {\begingroup
+ \initializeoutputstreams
+ \the\everyenableoutputstream
+ \inoutputstreamtrue
+ \xdef\currentoutputstream{#1}%
+ \ctxlua{streams.start("#1")}}
+
+\def\stopoutputstream
+ {\ctxlua{streams.stop()}%
+ \endgroup}
+
+\def\flushoutputstream [#1]{\ctxlua{streams.flush("#1")}}
+\def\outputstreamcopy [#1]{\vbox{\ctxlua{streams.flush("#1",true)}}}
+\def\outputstreambox [#1]{\vbox{\ctxlua{streams.flush("#1")}}}
+\def\outputstreamunvcopy[#1]{\ctxlua{streams.flush("#1",true)}}
+\def\outputstreamunvbox [#1]{\ctxlua{streams.flush("#1")}}
+\def\synchronizestreams [#1]{\ctxlua{streams.synchronize("#1")}}
+\def\dopushoutputstream [#1]{\ctxlua{streams.push("#1")}}
+
+\def\pushoutputstream {\dosingleempty\dopushoutputstream}
+
+% \def\defineoutputstream[#1]%
+% {\doifundefined{otrs:#1}{\expandafter\newbox\csname otrs:#1\endcsname}}
+%
+% \def\useoutputstream[#1]%
+% {\writestatus{otr}{using output stream #1}%
+% \xdef\currentoutputstream{#1}}
+%
+% \directsetup{stream:\firstoutputstream:set}
+% \directsetup{stream:\firstoutputstream:top}
+% \directsetup{stream:\firstoutputstream:bottom}
+% \directsetup{stream:\firstoutputstream:reset}
%D Obsolete in \MKIV:
@@ -128,202 +101,44 @@
\def\flushmarknotes [#1]{}
\def\erasemarknotes [#1]{}
-%D The next section implements synchronization of (currently
-%D two) output streams. In due time we will implement both a
-%D vertical and horizontal system, as well as alternative
-%D splitters (firstpagevsize, succesivevsize etc).
-
-\def\synchronizeoutputstreams[#1]% [one,two] [left,right]
- {\bgroup
- \getfromcommalist[#1][\plusone]\let\firstoutputstream \commalistelement
- \getfromcommalist[#1][\plustwo]\let\secondoutputstream\commalistelement
- \forgeteverypar
- \def\roundingeps{50sp}%
- \getboxheight\dimen0\of\box\outputstreamtag\firstoutputstream
- \getboxheight\dimen2\of\box\outputstreamtag\secondoutputstream
- \scratchdimen\dimexpr\dimen0-\dimen2\relax
- \ifdim\scratchdimen<-\roundingeps\relax
- \scratchdimen-\scratchdimen
- \writestatus{sync}{compensating first stream: \the\scratchdimen/\number\scratchdimen}%
- \getroundednoflines\scratchdimen
- \global\setbox\outputstreamtag\firstoutputstream\vbox
- {\presetoutputstream
- \unvbox\outputstreamtag\firstoutputstream\dorecurse\noflines\crlf}%
- \else\ifdim\scratchdimen>\roundingeps\relax
- \writestatus{sync}{compensating second stream: \the\scratchdimen/\number\scratchdimen}%
- \getroundednoflines\scratchdimen
- \global\setbox\outputstreamtag\secondoutputstream\vbox
- {\presetoutputstream
- \unvbox\outputstreamtag\secondoutputstream\dorecurse\noflines\crlf}%
- \else
- \writestatus{sync}{no need to compensate streams: \the\scratchdimen/\number\scratchdimen}%
- \fi\fi
- \egroup}
-
-\def\nofoutputstreamsplitlines {\v!auto} % {40}
-\def\outputstreamsplittolerance {-5}
-
-\def\flushoutputstreampages[#1]%
- {\bgroup
- \getfromcommalist[#1][\plusone]\let\firstoutputstream \commalistelement
- \getfromcommalist[#1][\plustwo]\let\secondoutputstream\commalistelement
- \doloop
- {\flushoutputstreams[#1]%
- \ifvoid\outputstreamtag\firstoutputstream
- \ifvoid\outputstreamtag\secondoutputstream
- \exitloop
- \else
- \global\setbox\outputstreamtag\firstoutputstream\vbox{\strut}%
- \fi
- \else
- \ifvoid\outputstreamtag\secondoutputstream
- \global\setbox\outputstreamtag\secondoutputstream\vbox{\strut}%
- \else
- % okay
- \fi
- \fi}%
- \egroup}
-
-\def\flushoutputstreams[#1]%
- {\bgroup
- \getfromcommalist[#1][\plusone]\let\firstoutputstream \commalistelement
- \getfromcommalist[#1][\plustwo]\let\secondoutputstream\commalistelement
- \doif\nofoutputstreamsplitlines\v!auto
- {\getrawnoflines\textheight
- \edef\nofoutputstreamsplitlines{\the\noflines}}%
- \splittopskip\strutheight
- \scratchdimen\nofoutputstreamsplitlines\lineheight\relax
- \unless\iffalse
- \dimen0\scratchdimen
- \doloop
- {\setbox4\copy\outputstreamtag\firstoutputstream
- \setbox0\vsplit4 to \dimen0
- \setbox0\vbox
- {\directsetup{stream:\firstoutputstream:top}%
- \unvbox0
- \directsetup{stream:\firstoutputstream:bottom}}%
- \ifdim\ht0>\scratchdimen
- \advance\dimen0-\lineheight
- \else
- \exitloop
- \fi}%
- \scratchdimen\dimen0
- \dimen2\scratchdimen
- \doloop
- {\setbox6\copy\outputstreamtag\secondoutputstream
- \setbox2\vsplit6 to \dimen2
- \setbox2\vbox
- {\directsetup{stream:\secondoutputstream:top}%
- \unvbox0
- \directsetup{stream:\secondoutputstream:bottom}}%
- \ifdim\ht2>\scratchdimen
- \advance\dimen2-\lineheight
- \else
- \exitloop
- \fi}%
- \scratchdimen\dimen2
- \fi
- \setbox4\copy\outputstreamtag\firstoutputstream
- \setbox6\copy\outputstreamtag\secondoutputstream
- \scratchcounter\zerocount
- \doloop
- {\setbox0\vsplit4 to \scratchdimen
- \setbox0\vbox{\unvbox0}%
- \setbox2\vsplit6 to \scratchdimen
- \setbox2\vbox{\unvbox2}%
- \ifvoid4
- \exitloop
- \else\ifvoid6
- \exitloop
- \else
- \dimen8=\dimexpr\ht4-\ht6\relax
- \ifdim\dimen8<\zeropoint\dimen8=-\dimen8\relax\fi
- \advance\scratchcounter\plusone
- \ifdim\dimen8<.5\lineheight
- \exitloop
- \else\ifnum\outputstreamsplittolerance>\zeropoint
- \ifnum\scratchcounter>\outputstreamsplittolerance\relax
- \exitloop
- \else
- \advance\scratchdimen\lineheight
- \fi
- \else\ifnum\outputstreamsplittolerance<\zeropoint
- \ifnum-\scratchcounter<\outputstreamsplittolerance\relax
- \exitloop
- \else
- \advance\scratchdimen-\lineheight
- \fi
- \else\ifnum\outputstreamsplittolerance=\zeropoint
- \exitloop
- \fi\fi\fi\fi
- \fi\fi}%
- \setbox0\vsplit\outputstreamtag\firstoutputstream to \scratchdimen
- \setbox0\vbox to \textheight
- {\presetoutputstream
- \directsetup{stream:\firstoutputstream:top}%
- \unvbox0
- \vfill
- \directsetup{stream:\firstoutputstream:bottom}}%
- \setbox2\vsplit\outputstreamtag\secondoutputstream to \scratchdimen
- \setbox2\vbox to \textheight
- {\presetoutputstream
- \directsetup{stream:\secondoutputstream:top}%
- \unvbox2
- \vfill
- \directsetup{stream:\secondoutputstream:bottom}}%
- \directsetup{stream:\firstoutputstream:reset}%
- \directsetup{stream:\secondoutputstream:reset}%
- \page[even]
- \box0\vfill\page
- \box2\vfill\page
- \egroup}
-
- %D Although one can put floats in a stream, it sometimes makes sense
- %D to keep them apart and this is what local floats do.
-
- \def\setuplocalfloats
- {\getparameters[\??lf]}
-
- \setuplocalfloats
- [%before=\blank,
- %after=\blank,
- inbetween=\blank]
-
- \installfloathandler \v!local \somelocalfloat
-
- \initializeboxstack{localfloats}
-
- \newcounter\noflocalfloats
-
- \def\resetlocalfloats
- {\doglobal\newcounter\noflocalfloats
- \initializeboxstack{localfloats}}
-
- \def\somelocalfloat[#1]%
- {\doglobal\increment\noflocalfloats
- \savebox{localfloats}{\noflocalfloats}{\box\floatbox}}
-
- \def\getlocalfloats
- {\dorecurse\noflocalfloats
- {\ifnum\recurselevel=\plusone % 1\relax
- \getvalue{\??lf\c!before}%
- \else
- \getvalue{\??lf\c!inbetween}%
- \fi
- \dontleavehmode\hbox{\foundbox{localfloats}\recurselevel}%
- \ifnum\recurselevel=\noflocalfloats\relax
- \getvalue{\??lf\c!after}%
- \fi}}
-
- \def\flushlocalfloats
- {\getlocalfloats
- \resetlocalfloats}
-
- \def\getlocalfloat#1{\expanded{\foundbox{localfloats}{\number#1}}}
-
- \def\forcelocalfloats{\let\forcedfloatmethod\v!local}
-
-%D Because many arrangements are possible, we will implement
-%D some examples in a runtime loadable module \type {m-streams}.
-
\protect \endinput
+
+% \enabletrackers[streams.flushing]
+%
+% \setuplayout[grid=yes] \showgrid
+%
+% \starttext
+%
+% \input tufte
+%
+% \startoutputstream[nl]
+%
+% Wat doen we hier?
+%
+% \enableoutputstream[en]
+%
+% Are you sleeping, brother John?\footnote{xxx}
+%
+% \dorecurse{4}{x \footnote{note \recurselevel}\input tufte \par \pushoutputstream}
+%
+% \enableoutputstream[de]
+%
+% Bruder Jakob, schläfst du noch?\footnote{yyy}
+%
+% \dorecurse{4}{x \footnote{note \recurselevel}\input ward \par \pushoutputstream}
+%
+% \disableoutputstream
+%
+% \stopoutputstream
+%
+% Vader Jacob, slaap je nog?\footnote{zzz}
+%
+% \input tufte
+%
+% \synchronizestreams[en,de,nl]
+%
+% \page \flushoutputstream[en] \input knuth
+% \page \flushoutputstream[de] \input knuth
+% \page \flushoutputstream[nl] \input knuth
+%
+% \stoptext
diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua
index bc8b02906..3e3e7c34c 100644
--- a/tex/generic/context/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 05/07/10 14:21:56
+-- merge date : 05/08/10 13:33:54
do -- begin closure to overcome local limits and interference