-rw-r--r--  scripts/context/lua/mtxrun.lua                 37
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua         37
-rwxr-xr-x  scripts/context/stubs/unix/mtxrun              37
-rw-r--r--  tex/context/base/buff-ver.mkiv                  6
-rw-r--r--  tex/context/base/char-def.lua                   4
-rw-r--r--  tex/context/base/cont-new.tex                   2
-rw-r--r--  tex/context/base/context.tex                    2
-rw-r--r--  tex/context/base/l-lpeg.lua                    37
-rw-r--r--  tex/context/base/luat-lib.mkiv                  3
-rw-r--r--  tex/context/base/lxml-dir.lua                   4
-rw-r--r--  tex/context/base/lxml-ent.lua                  20
-rw-r--r--  tex/context/base/lxml-ini.mkiv                  4
-rw-r--r--  tex/context/base/lxml-lpt.lua                  27
-rw-r--r--  tex/context/base/lxml-tab.lua                  48
-rw-r--r--  tex/context/base/math-ini.mkiv                  2
-rw-r--r--  tex/context/base/supp-fil.lua                  57
-rw-r--r--  tex/context/base/supp-fil.mkiv                145
-rw-r--r--  tex/context/base/x-asciimath.lua              266
-rw-r--r--  tex/context/base/x-asciimath.mkiv              96
-rw-r--r--  tex/context/base/x-calcmath.lua               218
-rw-r--r--  tex/context/base/x-mathml.mkiv                 13
-rw-r--r--  tex/generic/context/luatex-fonts-merged.lua    39
22 files changed, 846 insertions, 258 deletions
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index c63727452..cbb27098d 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -317,7 +317,7 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
-local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local P, R, S, Ct, C, Cs, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
--~ l-lpeg.lua :
@@ -424,6 +424,41 @@ end
--~ return p
--~ end
+--~ from roberto's site:
+--~
+--~ -- decode a two-byte UTF-8 sequence
+--~ local function f2 (s)
+--~ local c1, c2 = string.byte(s, 1, 2)
+--~ return c1 * 64 + c2 - 12416
+--~ end
+--~
+--~ -- decode a three-byte UTF-8 sequence
+--~ local function f3 (s)
+--~ local c1, c2, c3 = string.byte(s, 1, 3)
+--~ return (c1 * 64 + c2) * 64 + c3 - 925824
+--~ end
+--~
+--~ -- decode a four-byte UTF-8 sequence
+--~ local function f4 (s)
+--~ local c1, c2, c3, c4 = string.byte(s, 1, 4)
+--~ return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168
+--~ end
+--~
+--~ local cont = lpeg.R("\128\191") -- continuation byte
+--~
+--~ local utf8 = lpeg.R("\0\127") / string.byte
+--~ + lpeg.R("\194\223") * cont / f2
+--~ + lpeg.R("\224\239") * cont * cont / f3
+--~ + lpeg.R("\240\244") * cont * cont * cont / f4
+--~
+--~ local decode_pattern = lpeg.Ct(utf8^0) * -1
+
+
+local cont = R("\128\191") -- continuation byte
+
+lpeg.utf8 = R("\0\127") + R("\194\223") * cont + R("\224\239") * cont * cont + R("\240\244") * cont * cont * cont
+
+
end -- of closure
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index c63727452..cbb27098d 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -317,7 +317,7 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
-local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local P, R, S, Ct, C, Cs, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
--~ l-lpeg.lua :
@@ -424,6 +424,41 @@ end
--~ return p
--~ end
+--~ from roberto's site:
+--~
+--~ -- decode a two-byte UTF-8 sequence
+--~ local function f2 (s)
+--~ local c1, c2 = string.byte(s, 1, 2)
+--~ return c1 * 64 + c2 - 12416
+--~ end
+--~
+--~ -- decode a three-byte UTF-8 sequence
+--~ local function f3 (s)
+--~ local c1, c2, c3 = string.byte(s, 1, 3)
+--~ return (c1 * 64 + c2) * 64 + c3 - 925824
+--~ end
+--~
+--~ -- decode a four-byte UTF-8 sequence
+--~ local function f4 (s)
+--~ local c1, c2, c3, c4 = string.byte(s, 1, 4)
+--~ return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168
+--~ end
+--~
+--~ local cont = lpeg.R("\128\191") -- continuation byte
+--~
+--~ local utf8 = lpeg.R("\0\127") / string.byte
+--~ + lpeg.R("\194\223") * cont / f2
+--~ + lpeg.R("\224\239") * cont * cont / f3
+--~ + lpeg.R("\240\244") * cont * cont * cont / f4
+--~
+--~ local decode_pattern = lpeg.Ct(utf8^0) * -1
+
+
+local cont = R("\128\191") -- continuation byte
+
+lpeg.utf8 = R("\0\127") + R("\194\223") * cont + R("\224\239") * cont * cont + R("\240\244") * cont * cont * cont
+
+
end -- of closure
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index c63727452..cbb27098d 100755
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -317,7 +317,7 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
-local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local P, R, S, Ct, C, Cs, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
--~ l-lpeg.lua :
@@ -424,6 +424,41 @@ end
--~ return p
--~ end
+--~ from roberto's site:
+--~
+--~ -- decode a two-byte UTF-8 sequence
+--~ local function f2 (s)
+--~ local c1, c2 = string.byte(s, 1, 2)
+--~ return c1 * 64 + c2 - 12416
+--~ end
+--~
+--~ -- decode a three-byte UTF-8 sequence
+--~ local function f3 (s)
+--~ local c1, c2, c3 = string.byte(s, 1, 3)
+--~ return (c1 * 64 + c2) * 64 + c3 - 925824
+--~ end
+--~
+--~ -- decode a four-byte UTF-8 sequence
+--~ local function f4 (s)
+--~ local c1, c2, c3, c4 = string.byte(s, 1, 4)
+--~ return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168
+--~ end
+--~
+--~ local cont = lpeg.R("\128\191") -- continuation byte
+--~
+--~ local utf8 = lpeg.R("\0\127") / string.byte
+--~ + lpeg.R("\194\223") * cont / f2
+--~ + lpeg.R("\224\239") * cont * cont / f3
+--~ + lpeg.R("\240\244") * cont * cont * cont / f4
+--~
+--~ local decode_pattern = lpeg.Ct(utf8^0) * -1
+
+
+local cont = R("\128\191") -- continuation byte
+
+lpeg.utf8 = R("\0\127") + R("\194\223") * cont + R("\224\239") * cont * cont + R("\240\244") * cont * cont * cont
+
+
end -- of closure
diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv
index e4f5872fe..1fa0fa447 100644
--- a/tex/context/base/buff-ver.mkiv
+++ b/tex/context/base/buff-ver.mkiv
@@ -492,9 +492,9 @@
{\setxvalue{\currenttypingclass\currenttyping#1}{#2}}
\setvalue{\??tp:\c!blank:\v!standard}{\ctxparskip}
-\setvalue{\??tp:\c!blank:\v!small }{\blankokleinmaat}
-\setvalue{\??tp:\c!blank:\v!medium }{\blankomiddelmaat}
-\setvalue{\??tp:\c!blank:\v!big }{\blankogrootmaat}
+\setvalue{\??tp:\c!blank:\v!small }{\smallskipamount}
+\setvalue{\??tp:\c!blank:\v!medium }{\medskipamount}
+\setvalue{\??tp:\c!blank:\v!big }{\bigskipamount}
\setvalue{\??tp:\c!blank:\v!halfline}{.5\baselineskip}
\setvalue{\??tp:\c!blank:\v!line }{\baselineskip}
\setvalue{\??tp:\c!blank:\v!none }{\zeropoint}
diff --git a/tex/context/base/char-def.lua b/tex/context/base/char-def.lua
index 005c41075..63a090986 100644
--- a/tex/context/base/char-def.lua
+++ b/tex/context/base/char-def.lua
@@ -59984,6 +59984,8 @@ characters.data={
description="BLACK RIGHT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
+ mathclass="bin",
+ mathname="blacktriangleright",
unicodeslot=0x25B6,
},
[0x25B7]={
@@ -59999,6 +60001,8 @@ characters.data={
description="BLACK RIGHT-POINTING SMALL TRIANGLE",
direction="on",
linebreak="al",
+ mathclass="bin",
+ mathname="blacktriangleleft",
unicodeslot=0x25B8,
},
[0x25B9]={
diff --git a/tex/context/base/cont-new.tex b/tex/context/base/cont-new.tex
index 0e8c16506..0668640ef 100644
--- a/tex/context/base/cont-new.tex
+++ b/tex/context/base/cont-new.tex
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2009.10.19 14:43}
+\newcontextversion{2009.10.21 10:28}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/context.tex b/tex/context/base/context.tex
index b4f667749..231252724 100644
--- a/tex/context/base/context.tex
+++ b/tex/context/base/context.tex
@@ -20,7 +20,7 @@
%D your styles and modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2009.10.19 14:43}
+\edef\contextversion{2009.10.21 10:28}
%D For those who want to use this:
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
index 2c95730c4..b2a646fcb 100644
--- a/tex/context/base/l-lpeg.lua
+++ b/tex/context/base/l-lpeg.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
-local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local P, R, S, Ct, C, Cs, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
--~ l-lpeg.lua :
@@ -112,3 +112,38 @@ end
--~ end
--~ return p
--~ end
+
+--~ from roberto's site:
+--~
+--~ -- decode a two-byte UTF-8 sequence
+--~ local function f2 (s)
+--~ local c1, c2 = string.byte(s, 1, 2)
+--~ return c1 * 64 + c2 - 12416
+--~ end
+--~
+--~ -- decode a three-byte UTF-8 sequence
+--~ local function f3 (s)
+--~ local c1, c2, c3 = string.byte(s, 1, 3)
+--~ return (c1 * 64 + c2) * 64 + c3 - 925824
+--~ end
+--~
+--~ -- decode a four-byte UTF-8 sequence
+--~ local function f4 (s)
+--~ local c1, c2, c3, c4 = string.byte(s, 1, 4)
+--~ return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168
+--~ end
+--~
+--~ local cont = lpeg.R("\128\191") -- continuation byte
+--~
+--~ local utf8 = lpeg.R("\0\127") / string.byte
+--~ + lpeg.R("\194\223") * cont / f2
+--~ + lpeg.R("\224\239") * cont * cont / f3
+--~ + lpeg.R("\240\244") * cont * cont * cont / f4
+--~
+--~ local decode_pattern = lpeg.Ct(utf8^0) * -1
+
+
+local cont = R("\128\191") -- continuation byte
+
+lpeg.utf8 = R("\0\127") + R("\194\223") * cont + R("\224\239") * cont * cont + R("\240\244") * cont * cont * cont
+
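Note (not part of the patch): the commented block above keeps Roberto Ierusalimschy's decoding variant for reference; the change itself only installs lpeg.utf8 as a raw matcher without captures, so a caller has to wrap it. The same definition is mirrored verbatim in the mtxrun stubs above and in luatex-fonts-merged.lua below. A minimal usage sketch, assuming the patched l-lpeg.lua has been loaded so that lpeg.utf8 exists ("splitter" and the sample string are made up for illustration):

-- split a UTF-8 string into its characters
local Ct, C = lpeg.Ct, lpeg.C
local splitter = Ct(C(lpeg.utf8)^0)
local t = splitter:match("h€llo")
-- t == { "h", "€", "l", "l", "o" } for valid UTF-8 input; a malformed
-- byte simply ends the match early
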
diff --git a/tex/context/base/luat-lib.mkiv b/tex/context/base/luat-lib.mkiv
index d1aea092c..b52d9facb 100644
--- a/tex/context/base/luat-lib.mkiv
+++ b/tex/context/base/luat-lib.mkiv
@@ -42,13 +42,12 @@
\registerctxluafile{luat-ini} {1.001}
\registerctxluafile{luat-env} {1.001}
-%registerctxluafile{l-xml} {1.001} % we need to load lxml-tab earlier so this will change ! ! ! ! ! ! !
\registerctxluafile{lxml-tab} {1.001}
\registerctxluafile{lxml-lpt} {1.001}
\registerctxluafile{lxml-xml} {1.001}
\registerctxluafile{lxml-aux} {1.001}
\registerctxluafile{lxml-mis} {1.001}
-\registerctxluafile{lxml-ent} {1.001}
+%registerctxluafile{lxml-ent} {1.001}
\startruntimeluacode
\edef\asciia{\ctxlua{tex.sprint(logs.mode)}}
diff --git a/tex/context/base/lxml-dir.lua b/tex/context/base/lxml-dir.lua
index f57208a91..617ce3e20 100644
--- a/tex/context/base/lxml-dir.lua
+++ b/tex/context/base/lxml-dir.lua
@@ -60,9 +60,9 @@ local function load_setup(filename)
valid = valid + 1
end
end
- logs.report("lxml","%s directives found in '%s', %s valid",#collection,filename,valid)
+ commands.writestatus("lxml","%s directives found in '%s', %s valid",#collection,filename,valid)
else
- logs.report("lxml","no directives found in '%s'",filename)
+ commands.writestatus("lxml","no directives found in '%s'",filename)
end
end
diff --git a/tex/context/base/lxml-ent.lua b/tex/context/base/lxml-ent.lua
index 9003c9d83..9d93fded7 100644
--- a/tex/context/base/lxml-ent.lua
+++ b/tex/context/base/lxml-ent.lua
@@ -6,10 +6,11 @@ if not modules then modules = { } end modules ['lxml-ent'] = {
license = "see context related readme files"
}
-local type, next = type, next
+local type, next, tonumber = type, next, tonumber
local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
local utf = unicode.utf8
-local utfupper = utf.upper
+local byte, format = string.byte, string.format
+local utfupper, utfchar = utf.upper, utf.char
--[[ldx--
<p>We provide (at least here) two entity handlers. The more extensive
@@ -24,6 +25,8 @@ xml.entities = xml.entities or { } -- xml.entity_handler == function
-- experimental, this will be done differently
+local parsedentity = xml.parsedentitylpeg
+
function xml.merge_entities(root)
local documententities = root.entities
local allentities = xml.entities
@@ -40,11 +43,20 @@ function xml.resolved_entity(str)
local te = type(e)
if te == "function" then
e(str)
- else
+ elseif e then
texsprint(ctxcatcodes,e)
end
else
- texsprint(ctxcatcodes,"\\xmle{",str,"}{",utfupper(str),"}") -- we need to use our own upper
+ -- resolve hex and dec, todo: escape # & etc for ctxcatcodes
+ -- normally this is already solved while loading the file
+ local chr, err = parsedentity:match(str)
+ if chr then
+ texsprint(ctxcatcodes,chr)
+ elseif err then
+ texsprint(ctxcatcodes,err)
+ else
+ texsprint(ctxcatcodes,"\\xmle{",str,"}{",utfupper(str),"}") -- we need to use our own upper
+ end
end
end
diff --git a/tex/context/base/lxml-ini.mkiv b/tex/context/base/lxml-ini.mkiv
index 01b80562d..864c4729c 100644
--- a/tex/context/base/lxml-ini.mkiv
+++ b/tex/context/base/lxml-ini.mkiv
@@ -20,7 +20,7 @@
%registerctxluafile{lxml-xml}{1.001} % xml finalizers
%registerctxluafile{lxml-aux}{1.001} % extras using parser
%registerctxluafile{lxml-mis}{1.001} % extras independent of parser
-%registerctxluafile{lxml-ent}{1.001} % entity hacks
+\registerctxluafile{lxml-ent}{1.001} % entity hacks
\registerctxluafile{lxml-tex}{1.001} % tex finalizers
\registerctxluafile{lxml-dir}{1.001} % ctx hacks
@@ -83,7 +83,7 @@
\def\xmlloadbuffer #1#2{\ctxlua{lxml.loadbuffer("#1","#2","\@@xmentities","\@@xmcompress")}}
\def\xmlloaddata #1#2{\ctxlua{lxml.loaddata("#1",\!!bs#2\!!es,"\@@xmentities","\@@xmcompress")}}
\def\xmlloadregistered #1#2{\ctxlua{lxml.loadregistered("#1","\@@xmentities","\@@xmcompress")}}
-\def\xmlloaddirectives #1{\ctxlua{lxml.directives.load("#1")}}
+\def\xmlloaddirectives #1{\ctxlua{lxml.directives.load("any:///#1")}}
\def\xmlpos #1{\ctxlua{lxml.pos("#1")}}
%def\xmldoifelse #1#2{\ctxlua{cs.testcase(xml.found(lxml.id("#1"),"#2",false))}}
diff --git a/tex/context/base/lxml-lpt.lua b/tex/context/base/lxml-lpt.lua
index 07c112943..c656381c0 100644
--- a/tex/context/base/lxml-lpt.lua
+++ b/tex/context/base/lxml-lpt.lua
@@ -367,15 +367,13 @@ end
local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb
-local spaces = S(" \n\r\t\f")^0
-
-local lp_space = S(" \n\r\t\f")
-local lp_any = P(1)
-
-local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==")
-local lp_doequal = P("=") / "=="
-local lp_or = P("|") / " or "
-local lp_and = P("&") / " and "
+local spaces = S(" \n\r\t\f")^0
+local lp_space = S(" \n\r\t\f")
+local lp_any = P(1)
+local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==")
+local lp_doequal = P("=") / "=="
+local lp_or = P("|") / " or "
+local lp_and = P("&") / " and "
local lp_builtin = P (
P("first") / "1" +
@@ -389,9 +387,8 @@ local lp_builtin = P (
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
-local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * R("az","AZ","--","__")^1 * Cc("'])")
-local lp_fastpos = ((R("09","--","++")^1 * P(-1)) / function(s) return "l==" .. s end)
-
+local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * R("az","AZ","--","__")^1 * Cc("'])")
+local lp_fastpos = ((R("09","--","++")^1 * P(-1)) / function(s) return "l==" .. s end)
local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
@@ -412,9 +409,9 @@ local noparent = 1 - (lparent+rparent)
local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
-local lp_child = Cc("expr.child(e,'") * R("az","AZ","--","__")^1 * Cc("')")
-local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'")
-local lp_content= (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"'))
+local lp_child = Cc("expr.child(e,'") * R("az","AZ","--","__")^1 * Cc("')")
+local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'")
+local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"'))
local cleaner
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index 15ed7a26f..e5273d92e 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -284,12 +284,13 @@ local function handle_hex_entity(str)
if trace_entities then
logs.report("xml","found entity &#x%s;",str)
end
- h = "&#" .. str .. ";"
+ h = "&#c" .. str .. ";"
end
hcache[str] = h
end
return h
end
+
local function handle_dec_entity(str)
local d = dcache[str]
if not d then
@@ -305,12 +306,44 @@ local function handle_dec_entity(str)
if trace_entities then
logs.report("xml","found entity &#%s;",str)
end
- d = "&" .. str .. ";"
+ d = "&#" .. str .. ";"
end
dcache[str] = d
end
return d
end
+
+-- one level expansion (simple case)
+
+local function fromhex(s)
+ local n = tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return format("h:%s",s), true
+ end
+end
+
+local function fromdec(s)
+ local n = tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return format("d:%s",s), true
+ end
+end
+
+local P, S, R, C, V, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cs
+
+local rest = (1-P(";"))^0
+local many = P(1)^0
+
+local parsedentity =
+ P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
+ (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
+
+xml.parsedentitylpeg = parsedentity
+
local function handle_any_entity(str)
if resolve then
local a = entities[str] -- per instance !
@@ -328,11 +361,11 @@ local function handle_any_entity(str)
end
elseif trace_entities then
if not acache[str] then
- logs.report("xml","converting entity &%s; into %s",str,r)
+ logs.report("xml","converting entity &%s; into %s",str,a)
acache[str] = a
end
end
- return a
+ return (a and parsedentity:match(a)) or a
else
local a = acache[str]
if not a then
@@ -346,8 +379,6 @@ local function handle_any_entity(str)
end
end
-local P, S, R, C, V, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cs
-
local space = S(' \r\n\t')
local open = P('<')
local close = P('>')
@@ -369,12 +400,11 @@ local utfbom = P('\000\000\254\255') + P('\255\254\000\000') +
local spacing = C(space^0)
local entitycontent = (1-open-semicolon)^0
-local entity = ampersand/"" * (
- P("#")/"" * (
+local parsedentity = P("#")/"" * (
P("x")/"" * (entitycontent/handle_hex_entity) +
(entitycontent/handle_dec_entity)
) + (entitycontent/handle_any_entity)
- ) * (semicolon/"")
+local entity = ampersand/"" * parsedentity * (semicolon/"")
local text_unparsed = C((1-open)^1)
local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
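A quick illustration (not part of the patch) of what the exported xml.parsedentitylpeg accepts: the first branch handles a full numeric character reference, the second the bare form that lxml-ent passes in (the entity name without ampersand and semicolon), and handle_any_entity uses the same pattern to expand one level of numeric references inside entity replacement texts. The calls below assume the patched lxml-tab.lua has been loaded:

local parsedentity = xml.parsedentitylpeg

print(parsedentity:match("&#x41;"))  -- A            full hexadecimal reference
print(parsedentity:match("&#65;"))   -- A            full decimal reference
print(parsedentity:match("#x203B"))  -- ※           bare form, as passed by lxml-ent
print(parsedentity:match("#q"))      -- d:q   true   error marker plus flag (fromdec failed)
print(parsedentity:match("eacute"))  -- nil          named entities are left to the caller
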
diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv
index be343cd39..17b020563 100644
--- a/tex/context/base/math-ini.mkiv
+++ b/tex/context/base/math-ini.mkiv
@@ -641,4 +641,6 @@
\ifx\text\undefined \let\text\hbox \fi
+\def\mathoptext#1{\mathop{\text{#1}}}
+
\protect \endinput
diff --git a/tex/context/base/supp-fil.lua b/tex/context/base/supp-fil.lua
index 42db290a9..23b70782a 100644
--- a/tex/context/base/supp-fil.lua
+++ b/tex/context/base/supp-fil.lua
@@ -12,9 +12,8 @@ if not modules then modules = { } end modules ['supp-fil'] = {
at the <l n='tex'/> side.</p>
--ldx]]--
-local find, gsub, match = string.find, string.gsub, string.match
-
-local ctxcatcodes = tex.ctxcatcodes
+local find, gsub, match, format = string.find, string.gsub, string.match, string.format
+local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
support = support or { }
environment = environment or { }
@@ -87,3 +86,55 @@ function support.lastexistingfile()
tex.sprint(ctxcatcodes,lastexistingfile)
end
+-- more, we can cache matches
+
+local finders, loaders, openers = resolvers.finders, resolvers.loaders, resolvers.openers
+
+local found = { } -- can best be done in the resolver itself
+
+-- todo: tracing
+
+local function readfile(specification,backtrack,treetoo)
+ local fnd = found[specification]
+ if not fnd then
+ local splitspec = resolvers.splitmethod(specification)
+ local filename = splitspec.path or ""
+ if lfs.isfile(filename) then
+ fnd = filename
+ end
+ if not fnd and backtrack then
+ local fname = filename
+ for i=1,backtrack,1 do
+ fname = "../" .. fname
+ if lfs.isfile(fname) then
+ fnd = fname
+ break
+ end
+ end
+ end
+ if not fnd and treetoo then
+ fnd = resolvers.find_file(filename)
+ end
+ found[specification] = fnd
+ end
+ return fnd or ""
+end
+
+function finders.job(filename) return readfile(filename,nil,false) end -- current path, no backtracking
+function finders.loc(filename) return readfile(filename,2, false) end -- current path, backtracking
+function finders.sys(filename) return readfile(filename,nil,true ) end -- current path, obeys tex search
+function finders.fix(filename) return readfile(filename,2, false) end -- specified path, backtracking
+function finders.set(filename) return readfile(filename,nil,false) end -- specified path, no backtracking
+function finders.any(filename) return readfile(filename,2, true ) end -- loc job sys
+
+openers.job = openers.generic loaders.job = loaders.generic
+openers.loc = openers.generic loaders.loc = loaders.generic
+openers.sys = openers.generic loaders.sys = loaders.generic
+openers.fix = openers.generic loaders.fix = loaders.generic
+openers.set = openers.generic loaders.set = loaders.generic
+openers.any = openers.generic loaders.any = loaders.generic
+
+function support.doreadfile(protocol,path,name)
+ local specification = ((path == "") and format("%s:///%s",protocol,name)) or format("%s:///%s/%s",protocol,path,name)
+ texsprint(ctxcatcodes,resolvers.findtexfile(specification))
+end
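For orientation (not part of the patch): the TeX side now only passes a protocol, a path and a name, and the Lua side turns that into a resolver specification. A hedged sketch of the flow for the "loc" protocol, with a made-up file name:

-- \doreadfile{loc}{.}{myfile.tex} ends up in this call:
support.doreadfile("loc", ".", "myfile.tex")
-- it builds the specification "loc:///./myfile.tex" and prints the result
-- of resolvers.findtexfile() back to TeX; for the "loc" protocol the
-- finder checks the given path first, then backtracks up to two parent
-- directories ("../", "../../"), and returns "" when nothing is found,
-- which makes the TeX macro take the \doreadfilenop branch
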
diff --git a/tex/context/base/supp-fil.mkiv b/tex/context/base/supp-fil.mkiv
index 86c01fe0b..bfcefdfa7 100644
--- a/tex/context/base/supp-fil.mkiv
+++ b/tex/context/base/supp-fil.mkiv
@@ -265,97 +265,25 @@
% We need to postpone loading, else we end up with frozen type-* files, for instance when
% a format is generated on a source path.
-%
-% It's about time we move this to lua.
-\def\doreadfile#1#2#3#4%
- {\sanitizefilename#2\to\readfilename
+% tracefiles -> tracker
+
+\def\doreadfile#1#2#3% protocol path filename true false
+ {\edef\readfilename{\ctxlua{support.doreadfile("#1","#2","#3")}}%
\ifx\readfilename\empty
- % silently ignore
+ \expandafter\doreadfilenop
\else
- \let\trackedfilename\readfilename
- \ifconditional\trackfilenames
- \doifundefinedelse{fn..\trackedfilename}\donetrue\donefalse
- \else
- \donetrue
- \fi
- \ifdone
- \checkfilename\readfilename
- \ifcase\kindoffile
- \iftracefiles\writestatus\m!systems{searching for \readfilename\space on #1}\fi
- % not a full path or url, check for existence
- \doifelsenothing{#1}
- {\def\next{\redoreadfile\readfilename{#3}{#4}}}%
- {\def\next{\redoreadfile{\pathplusfile{#1}{\readfilename}}{#3}{#4}}}%
- \else
- % a full path or url, no further checking done
- \doiffileexistselse\readfilename
- {\iftracefiles\writestatus\m!systems{located \readfilename}\fi
- \def\next{#3\dodoreadfile}}%
- {\iftracefiles\writestatus\m!systems{not found \readfilename}\fi
- \def\next{#4}}%
- \fi
- \else
- \edef\readfilename{\getvalue{fn..\readfilename}}%
- \iftracefiles\writestatus\m!systems{already located \readfilename}\fi
- \def\next{#3\dodoreadfile}%
- \fi
- \expandafter\next
+ \expandafter\doreadfileyes
\fi}
-\def\redoreadfile#1#2#3%
- {\doiffileexistselse{#1}%
- {\edef\readfilename{#1}%
- \iftracefiles\writestatus\m!systems{#1 located}\fi
- \def\next{#2\dodoreadfile}}%
- {\iftracefiles\writestatus\m!systems{cannot locate #1}\fi
- \advance\readlevel\minusone
- \ifnum\readlevel>\zerocount
- \edef\readfilename{\pathplusfile{\f!parentpath}{\readfilename}}%
- \def\next{\redoreadfile\readfilename{#2}{#3}}%
- \else
- \def\next{#3}%
- \fi}%
- \next}
-
-\def\dodoreadfile % we provide hooks, for instance for \enableXML
- {\ifconditional\trackfilenames
- \setxvalue{fn..\trackedfilename}{\readfilename}%
- \fi
+\long\def\doreadfileyes#1#2%
+ {#1\relax
\the\everybeforereadfile
\relax\inputgivenfile\readfilename\relax
\the\everyafterreadfile}
-% too less:
-%
-% \unexpanded\def\readfile% #1%
-% {\readlevel\maxreadlevel
-% \doreadfile\empty} % {#1}
-%
-% too much:
-%
-% \unexpanded\def\readfile#1#2#3%
-% {\readlocfile{#1}{#2}
-% {\readjobfile{#1}{#2}
-% {\readsysfile{#1}{#2}{#3}}}}
-%
-% just ok:
-
-\unexpanded\def\readfile#1#2#3%
- {\readlocfile{#1}{#2}{\readsysfile{#1}{#2}{#3}}}
-
-\def\readtexfile#1#2#3%
- {\pushcatcodetable \catcodetable \ctxcatcodes
- \readfile{#1}{#2}{#3}%
- \popcatcodetable}
-
-\def\readxmlfile#1#2#3%
- {\pushcatcodetable \catcodetable \xmlcatcodes
- \readfile{#1}{#2}{#3}%
- \popcatcodetable}
-
-\unexpanded\def\ReadFile#1%
- {\readfile{#1}\donothing\donothing}
+\long\def\doreadfilenop#1#2%
+ {#2}
%D \macros
%D {readjobfile,readlocfile,readsysfile,
@@ -370,43 +298,24 @@
%D file and do no backtracking, while \type{\readlocfile}
%D backtracks~\number\readlevel\ directories, including the current
%D one.
-
-\unexpanded\def\readjobfile % #1% current path, no backtracking
- {\readlevel\zerocount
- \doreadfile\f!currentpath} % {#1}}
-
-\unexpanded\def\readlocfile % #1% current path, backtracking
- {\readlevel\maxreadlevel
- \doreadfile\f!currentpath} % {#1}}
-
+%D
%D System files can be anywhere and therefore
%D \type{\readsysfile} is not bound to the current directory
%D and obeys the \TEX\ implementation.
-
-\unexpanded\def\readsysfile % #1% current path, obeys tex search
- {\readlevel\zerocount
- \doreadfile\empty} % {#1}}
-
+%D
%D Of the last two, \type{\readfixfile} searches on the
%D directory specified and backtracks too, while
%D \type{\readsetfile} does only search on the specified path.
+%D
+%D The most liberal is \type {\readfile}.
-\unexpanded\def\readfixfile % #1#2% specified path, backtracking
- {\readlevel\maxreadlevel
- \doreadfile} % {#1}{#2}}
-
-\unexpanded\def\readsetfile % #1#2% specified path, no backtracking
- {\readlevel\zerocount
- \doreadfile} % {#1}{#2}}
-
-%D After having defined this commands, we reconsidered the
-%D previously defined \type{\readfile}. This time we more or
-%D less impose the search order.
-
-\unexpanded\def\readfile#1#2#3%
- {\readlocfile{#1}{#2}
- {\readjobfile{#1}{#2}
- {\readsysfile{#1}{#2}{#3}}}}
+\unexpanded\def\readjobfile #1{\doreadfile{job} {.}{#1}} % current path, no backtracking
+\unexpanded\def\readlocfile #1{\doreadfile{loc} {.}{#1}} % current path, backtracking
+\unexpanded\def\readsysfile #1{\doreadfile{sys} {.}{#1}} % current path, obeys tex search
+\unexpanded\def\readfixfile#1#2{\doreadfile{fix}{#1}{#2}} % specified path, backtracking
+\unexpanded\def\readsetfile#1#2{\doreadfile{set}{#1}{#2}} % specified path, no backtracking
+\unexpanded\def\readfile #1{\doreadfile{any} {.}{#1}}
+\unexpanded\def\ReadFile #1{\doreadfile{any} {.}{#1}\donothing\donothing}
%D So now we've got ourselves five file loading commands:
%D
@@ -419,8 +328,18 @@
%D \readsysfile {directory} {filename} {before loading} {not found}
%D \stoptyping
+\def\readtexfile#1#2#3%
+ {\pushcatcodetable \catcodetable \ctxcatcodes
+ \readfile{#1}{#2}{#3}%
+ \popcatcodetable}
+
+\def\readxmlfile#1#2#3%
+ {\pushcatcodetable \catcodetable \xmlcatcodes
+ \readfile{#1}{#2}{#3}%
+ \popcatcodetable}
+
%D \macros
-%D {readjobfile,readlocfile,readsysfile,readfixfile}
+%D {openjobfile,openlocfile,opensysfile,openfixfile}
%D
%D The next four alternatives can be used for opening files
%D for reading on a line||by||line basis. These commands get
diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua
new file mode 100644
index 000000000..55fe6c991
--- /dev/null
+++ b/tex/context/base/x-asciimath.lua
@@ -0,0 +1,266 @@
+if not modules then modules = { } end modules ['x-asciimath'] = {
+ version = 1.001,
+ comment = "companion to x-asciimath.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+<p>Some backgrounds are discussed in <t>x-asciimath.mkiv</t>.</p>
+--ldx]]--
+
+local trace_mapping = false if trackers then trackers.register("asciimath.mapping", function(v) trace_mapping = v end) end
+
+local format = string.format
+local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
+
+local S, P, R, C, V, Cc, Ct, Cs = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct, lpeg.Cs
+
+local letter = lpeg.utf8
+local space = S(" \n\r\t")
+local spaces = space^0/""
+local integer = P("-")^-1 * R("09")^1
+local realpart = P("-")^-1 * R("09")^1 * S(".")^1 * R("09")^1
+local number = integer -- so we can support nice formatting if needed
+local real = realpart -- so we can support nice formatting if needed
+local float = realpart * P("E") * integer -- so we can support nice formatting if needed
+local texnic = P("\\") * (R("az","AZ")^1)
+
+local premapper = Cs ( (
+
+ P("@") / "\\degrees " +
+ P("O/") / "\\varnothing " +
+ P("o+") / "\\oplus " +
+ P("o.") / "\\ocirc " +
+ P("!in") / "\\not\\in " +
+ P("!=") / "\\neq " +
+ P("**") / "\\star " +
+ P("*") / "\\cdot " +
+ P("//") / "\\slash " +
+ P("/_") / "\\angle " +
+ P("\\\\") / "\\backslash " +
+ P("^^^") / "\\wedge " +
+ P("^^") / "\\wedge " +
+ P("<<") / "\\left\\langle " +
+ P(">>") / "\\right\\rangle " +
+ P("<=") / "\\leq " +
+ P(">=") / "\\geq " +
+ P("-<") / "\\precc " +
+ P(">-") / "\\succ " +
+ P("~=") / "\\cong " +
+ P("~~") / "\\approx " +
+ P("=>") / "\\Rightarrow " +
+ P("(:") / "\\left\\langle " +
+ P(":)") / "\\right\\rangle " +
+ P(":.") / "\\therefore " +
+ P("~|") / "\\right\\rceil " +
+ P("_|_") / "\\bot " +
+ P("_|") / "\\right\\rfloor " +
+ P("+-") / "\\pm " +
+ P("|--") / "\\vdash " +
+ P("|==") / "\\models " +
+ P("|_") / "\\left\\lfloor " +
+ P("|~") / "\\left\\lceil " +
+ P("-:") / "\\div " +
+ P("_=") / "\\equiv " +
+
+ P("|") / "\\middle\\| " +
+
+ P("dx") / "(dx)" +
+ P("dy") / "(dy)" +
+ P("dz") / "(dz)" +
+
+ letter + P(1)
+
+)^0 )
+
+local reserved = {
+ ["aleph"] = "\\aleph ",
+ ["vdots"] = "\\vdots ",
+ ["ddots"] = "\\ddots ",
+ ["oint"] = "\\oint ",
+ ["grad"] = "\\nabla ",
+ ["prod"] = "\\prod ",
+ ["prop"] = "\\propto ",
+ ["sube"] = "\\subseteq ",
+ ["supe"] = "\\supseteq ",
+ ["sinh"] = "\\sinh ",
+ ["cosh"] = "\\cosh ",
+ ["tanh"] = "\\tanh ",
+ ["sum"] = "\\sum ",
+ ["vvv"] = "\\vee ",
+ ["nnn"] = "\\cap ",
+ ["uuu"] = "\\cup ",
+ ["sub"] = "\\subset ",
+ ["sup"] = "\\supset ",
+ ["not"] = "\\lnot ",
+ ["iff"] = "\\Leftrightarrow ",
+ ["int"] = "\\int ",
+ ["del"] = "\\partial ",
+ ["and"] = "\\and ",
+ ["not"] = "\\not ",
+ ["sin"] = "\\sin ",
+ ["cos"] = "\\cos ",
+ ["tan"] = "\\tan ",
+ ["csc"] = "\\csc ",
+ ["sec"] = "\\sec ",
+ ["cot"] = "\\cot ",
+ ["log"] = "\\log ",
+ ["det"] = "\\det ",
+ ["lim"] = "\\lim ",
+ ["mod"] = "\\mod ",
+ ["gcd"] = "\\gcd ",
+ ["lcm"] = "\\lcm ",
+ ["min"] = "\\min ",
+ ["max"] = "\\max ",
+ ["xx"] = "\\times ",
+ ["in"] = "\\in ",
+ ["ox"] = "\\otimes ",
+ ["vv"] = "\\vee ",
+ ["nn"] = "\\cap ",
+ ["uu"] = "\\cup ",
+ ["oo"] = "\\infty ",
+ ["ln"] = "\\ln ",
+ ["or"] = "\\or ",
+
+ ["AA"] = "\\forall ",
+ ["EE"] = "\\exists ",
+ ["TT"] = "\\top ",
+ ["CC"] = "\\Bbb{C}",
+ ["NN"] = "\\Bbb{N}",
+ ["QQ"] = "\\Bbb{Q}",
+ ["RR"] = "\\Bbb{R}",
+ ["ZZ"] = "\\Bbb{Z}",
+
+}
+
+local postmapper = Cs ( (
+
+ P("\\mathoptext ") * spaces * (P("\\bgroup ")/"{") * (1-P("\\egroup "))^1 * (P("\\egroup ")/"}") +
+
+ (P("\\bgroup ")) / "{" +
+ (P("\\egroup ")) / "}" +
+
+ P("\\") * (R("az","AZ")^2) +
+
+ (R("AZ","az")^2) / reserved +
+
+ P("{:") / "\\left." +
+ P(":}") / "\\right." +
+ P("(") / "\\left(" +
+ P(")") / "\\right)" +
+ P("[") / "\\left[" +
+ P("]") / "\\right]" +
+ P("{") / "\\left\\{" +
+ P("}") / "\\right\\}" +
+
+ letter + P(1)
+)^0 )
+
+local parser
+
+local function converted(original,totex)
+ local ok, result
+ if trace_mapping then
+ logs.report("asciimath","original : %s",original)
+ end
+ local premapped = premapper:match(original)
+ if premapped then
+ if trace_mapping then
+ logs.report("asciimath","prepared : %s",premapped)
+ end
+ local parsed = parser:match(premapped)
+ if parsed then
+ if trace_mapping then
+ logs.report("asciimath","parsed : %s",parsed)
+ end
+ local postmapped = postmapper:match(parsed)
+ if postmapped then
+ if trace_mapping then
+ logs.report("asciimath","finalized : %s",postmapped)
+ end
+ result, ok = postmapped, true
+ else
+ result = "error in postmapping"
+ end
+ else
+ result = "error in mapping"
+ end
+ else
+ result = "error in premapping"
+ end
+ if totex then
+ if ok then
+ texsprint(ctxcatcodes,"\\mathematics{",result,"}")
+ else
+ texsprint(ctxcatcodes,"{\\tt",result,"}")
+ end
+ else
+ return result
+ end
+end
+
+local function onlyconverted(str)
+ local parsed = parser:match(str)
+ return parsed or str
+end
+
+local sqrt = P("sqrt") / "\\rootradical \\bgroup \\egroup "
+local root = P("root") / "\\rootradical "
+local frac = P("frac") / "\\frac "
+local stackrel = P("stackrel") / "\\stackrel "
+local text = P("text") / "\\mathoptext "
+local hat = P("hat") / "\\widehat "
+local overbar = P("bar") / "\\overbar "
+local underline = P("ul") / "\\underline "
+local vec = P("vec") / "\\overrightarrow "
+local dot = P("dot") / "\\dot "
+local ddot = P("ddot") / "\\ddot "
+
+local left = P("(:") + P("{:") + P("(") + P("[") + P("{")
+local right = P(":)") + P(":}") + P(")") + P("]") + P("}")
+local leftnorright = 1 - left - right
+local singles = sqrt + text + hat + underline + overbar + vec + ddot + dot
+local doubles = root + frac + stackrel
+local ignoreleft = (left/"") * spaces * spaces
+local ignoreright = spaces * (right/"") * spaces
+local ignoreslash = spaces * (P("/")/"") * spaces
+local comma = P(",")
+local nocomma = 1-comma
+local anychar = P(1)
+local openmatrix = left * spaces * Cc("\\matrix\\bgroup ")
+local closematrix = Cc("\\egroup ") * spaces * right
+local nextcolumn = spaces * (comma/"&") * spaces
+local nextrow = spaces * (comma/"\\cr ") * spaces
+local finishrow = Cc("\\cr ")
+local opengroup = left/"\\bgroup "
+local closegroup = right/"\\egroup "
+local somescript = S("^_") * spaces
+local beginargument = Cc("\\bgroup ")
+local endargument = Cc("\\egroup ")
+
+parser = Cs { "main",
+
+ scripts = somescript * V("argument"),
+ division = Cc("\\frac") * V("argument") * spaces * ignoreslash * spaces * V("argument"),
+ double = doubles * spaces * V("argument") * spaces * V("argument"),
+ single = singles * spaces * V("argument"),
+
+ balanced = opengroup * (C((leftnorright + V("balanced"))^0)/onlyconverted) * closegroup,
+ argument = V("balanced") + V("token"),
+
+ element = (V("step") + (V("argument") + V("step")) - ignoreright - nextcolumn - comma)^1,
+ commalist = ignoreleft * V("element") * (nextcolumn * spaces * V("element"))^0 * ignoreright,
+ matrix = openmatrix * spaces * (V("commalist") * (nextrow * V("commalist"))^0) * finishrow * closematrix,
+
+ token = beginargument * (texnic + float + real + number + letter) * endargument,
+
+ step = V("scripts") + V("division") + V("single") + V("double"),
+ main = (V("matrix") + V("step") + anychar)^0,
+
+}
+
+asciimath = { }
+asciimath.reserved = reserved
+asciimath.convert = converted
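The converter is a three stage pipeline: the premapper rewrites multi-character operators, the grammar turns groups, scripts, fractions and matrices into TeX-ish markup, and the postmapper resolves reserved words and fences. A small usage sketch (not part of the patch; the formula is just an example) for calling it from the Lua end:

-- with totex=true the result is handed to TeX wrapped in \mathematics{...},
-- with totex=false (or nil) the TeX string is returned to the caller
local result = asciimath.convert("d/dx f(x) = lim_(h->0) (f(x+h)-f(x))/h", false)
-- enabling the asciimath.mapping tracker reports the original, prepared,
-- parsed and finalized stages in the log, which is the easiest way to see
-- why a formula comes out wrong
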
diff --git a/tex/context/base/x-asciimath.mkiv b/tex/context/base/x-asciimath.mkiv
new file mode 100644
index 000000000..d7a9d1b2e
--- /dev/null
+++ b/tex/context/base/x-asciimath.mkiv
@@ -0,0 +1,96 @@
+%D \module
+%D [ file=m-asciimath,
+%D version=2006.04.24, % 1999.11.06,
+%D title=\CONTEXT\ Modules,
+%D subtitle=AsciiMath,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright=PRAGMA]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D Lua code.
+
+\ctxloadluafile{x-asciimath}{}
+
+%D The following code is not officially supported and is only meant
+%D for the \MATHFORALL\ project.
+%D
+%D The following code kind of maps ascii math
+%D http://www1.chapman.edu/~jipsen/mathml/asciimath.html onto \TEX. The
+%D code was written for the math4all project but in retrospect we
+%D could have used just tex code as the web version can handle that
+%D as well. Anyhow, as we use \MATHML\ as a basis it makes sense to add
+%D this to the repertoire as an annotation variant, so now we have
+%D content \MATHML\ (preferred), presentation \MATHML\ (often messy),
+%D \OPENMATH\ (which is what we started with in this project),
+%D calcmath (handy for students who are accustomed to calculators),
+%D asciimath (to make Frits's life easier) and of course \TEX. Of
+%D course all of these are used mixed.
+%D
+%D We don't support all quirks of asciimath as I am not in the mood to
+%D write a complex parser while a bit of sane coding can work as well.
+%D
+%D \startitemize
+%D \item We support only the syntactically clear variants and as long
+%D as lpeg does not support left recursion this is as far as we
+%D want to go.
+%D \item The parser is rather insensitive to spaces, but the advice is still
+%D to avoid weird coding like \type {d/dxf(x)} and use \type {d/dx
+%D f(x)} instead. After all we're not in a compact coding cq.\
+%D parser challenge.
+%D \item We also don't support the somewhat confusing \type {sqrt sqrt 2}
+%D nor \type {root3x} (although the second one kind of works). A bit
+%D of defensive coding does not hurt.
+%D \item We can process \type {a/b/c/d} but it's not compatible with the
+%D default behaviour of asciimath. Use grouping instead. Yes, we do
+%D support the somewhat nonstandard grouping token mix.
+%D \item You should use explicit \type {text(..)} directives as one can
+%D never be sure what is a reserved word and what is not.
+%D \stopitemize
+%D
+%D Actually, as the only parsing sensitive elements of \TEX\ are
+%D fractions (\type {\over} and friends), a restricted use of \TEX\
+%D coding is probably just as comprehensive and parseable.
+%D
+%D The webpage with examples served as a starting point, so anything beyond
+%D what can be found there isn't supported.
+
+\unprotect
+
+\writestatus{asciimath}{beware, this is an experimental (m4all only) module}
+
+\unexpanded\def\asciimath#1{\ctxlua{asciimath.convert(\!!bs\detokenize{#1}\!!es,true)}}
+
+\protect
+
+\doifnotmode{demo}{\endinput}
+
+\enabletrackers[asciimath.mapping]
+
+\starttext
+
+\startlines
+\asciimath{x^2+y_1+z_12^34}
+\asciimath{sin^-1(x)}
+\asciimath{d/dx f(x)=lim_(h->0) (f(x+h)-f(x))/h}
+\asciimath{f(x)=sum_(n=0)^oo(f^((n))(a))/(n!)(x-a)^n}
+\asciimath{int_0^1 f(x)dx}
+\asciimath{int^1_0 f(x)dx}
+\asciimath{a//b}
+\asciimath{(a/b)/(d/c)}
+\asciimath{((a*b))/(d/c)}
+\asciimath{[[a,b],[c,d]]((n),(k))}
+\asciimath{1/x={(1,text{if } x!=0),(text{undefined},if x=0):}}
+\asciimath{{ (1,2), (x,(x + text(x))) }}
+\asciimath{{(1,2),(x,(x+text(x))),(x,text(x))}}
+\asciimath{{(1,2),(x,(x+text(x))),(x,x text(x))}}
+\asciimath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,x text(xyz))}}
+\asciimath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,text(xyz)+1+text(hans))}}
+\asciimath{<<a,b>> text{and} {:(x,y),(u,v):}}
+\asciimath{(a,b] = {x text(in) RR | a < x <= b}}
+\stoplines
+
+\stoptext
diff --git a/tex/context/base/x-calcmath.lua b/tex/context/base/x-calcmath.lua
index fcb6b7470..0939411be 100644
--- a/tex/context/base/x-calcmath.lua
+++ b/tex/context/base/x-calcmath.lua
@@ -191,19 +191,20 @@ if false then
-- Df Dg {\rm f}^{\prime}
-- f() g() {\rm f}()
+
-- valid utf8
local S, P, R, C, V, Cc, Ct = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct
local space = S(" \n\r\t")^0
- local number_x = P("-")^-1 * R("09")^1
- local real_x = P("-")^-1 * R("09")^1 * S(".")^1 * R("09")^1
- local number = Cc("number") * C(number_x) * space
- local real = Cc("real") * C(real_x) * space
- local float = Cc("float") * C(real_x) * lpeg.P("E") * lpeg.C(number_x) * space
- local identifier = Cc("identifier") * C(R("az","AZ")^1) * space
- local compareop = P("<") + P("=") + P(">") + P(">=") + P("<=") + P("&gt;") + P("&lt;")
- local factorop = Cc("factor") * C(S("+-^,") + compareop ) * space
+ local integer = P("-")^-1 * R("09")^1
+ local realpart = P("-")^-1 * R("09")^1 * S(".")^1 * R("09")^1
+ local number = Cc("number") * C(integer) * space
+ local real = Cc("real") * C(realpart) * space
+ local float = Cc("float") * C(realpart) * lpeg.P("E") * lpeg.C(integer) * space
+ local identifier = Cc("identifier") * C(R("az","AZ")) * space
+ local compareop = Cc("compare") * C(P("<") + P("=") + P(">") + P(">=") + P("<=") + P("&gt;") + P("&lt;")) * space
+ local factorop = Cc("factor") * C(S("+-^_,")) * space
local termop = Cc("term") * C(S("*/")) * space
local constant = Cc("constant") * C(P("pi") + lpeg.P("inf")) * space
local functionop = Cc("function") * C(R("az")^1) * space
@@ -212,108 +213,139 @@ if false then
local grammar = P {
"expression",
- expression = Ct(V("factor" ) * (factorop * V("factor" ))^0),
- factor = Ct(V("term" ) * (termop * V("term" ))^0),
+ expression = Ct(V("factor") * ((factorop+compareop) * V("factor"))^0),
+ factor = Ct(V("term") * (termop * V("term"))^0),
term = Ct(
float + real + number +
(open * V("expression") * close) +
- (functionop * open * V("expression") * close) +
+ (functionop * open * (V("expression") * (P(",") * V("expression"))^0) * close) +
+ (functionop * V("term")) +
constant + identifier
),
}
+
local parser = space * grammar * -1
+ local texprint = function(...) texio.write(table.concat{ ... }) end
+
+ local function has_factor(t)
+ for i=1,#t do
+ if t[i] == "factor" then
+ return true
+ end
+ end
+ end
+
function totex(t)
if t then
- local one, two, three = t[1], t[2], t[3]
- if one == "number" then
- return two
- elseif one == "real" then
- return two
- elseif one == "float" then
- return format("\\scinot{%s}{%s}", two, three)
- elseif one == "identifier" then
- return format(" %s ", two)
- elseif one == "constant" then
- return format("\\%s ", two)
- elseif one == "function" then
- if two == "sqrt" then
- return format("\\sqrt{%s}", totex(three))
- elseif two == "exp" then
- return format(" e^{%s}", totex(three))
- elseif two == "abs" then
- return format("\\left|%s\\right|", totex(three))
- elseif two == "mean" then
- return format("\\overline{%s}", totex(three))
- elseif two == "int" or two == "prod" or two == "sum" then --brrr, we need to parse better for ,,
- local tt = three
- if #tt == 1 then
- return format("\\%s{%s}", two ,totex(tt[1]))
- elseif #tt == 4 then
- return format("\\%s^{%s}{%s}", two ,totex(tt[1]), totex(tt[4]))
- elseif #tt == 7 then
- return format("\\%s^{%s}_{%s}{%s}", two ,totex(tt[1]), totex(tt[4]), totex(tt[7]))
- end
- elseif #two == 1 then
- return format("%s(%s)", two, totex(three))
- else
- return format("\\%s(%s)", two, totex(three))
- end
- elseif one == "factor" then
- if two == '^' then
- return format("^{%s}%s",totex(three), (#t>3 and totex({unpack(t,4,#t)})) or "")
- else
- if two == ">=" then
- two = "\\ge "
- elseif two == "<=" then
- two = "\\le "
- elseif two == "&gt;" then
- two = "> "
- elseif two == "&lt;" then
- two = "< "
- end
- return format("%s%s%s", two, totex(three), (#t>3 and totex({unpack(t,4,#t)})) or "")
- end
- elseif one == "term" then
- if two == '/' then
- if #t > 4 then
- return format("\\frac{%s}{%s}", totex(three), totex({unpack(t,4,#t)}))
+ local one = t[1]
+ if type(one) == "string" then
+ local two, three = t[2], t[3]
+ if one == "number" then
+ texprint(two)
+ elseif one == "real" then
+ texprint(two)
+ elseif one == "float" then
+ texprint("\\scinot{",two,"}{",three,"}")
+ elseif one == "identifier" then
+ texprint(two)
+ elseif one == "constant" then
+ texprint("\\"..two)
+ elseif one == "function" then
+ if two == "sqrt" then
+ texprint("\\sqrt{")
+ totex(three)
+ texprint("}")
+ elseif two == "exp" then
+ texprint(" e^{")
+ totex(three)
+ texprint("}")
+ elseif two == "abs" then
+ texprint("\\left|")
+ totex(three)
+ texprint("\\right|")
+ elseif two == "mean" then
+ texprint("\\overline{")
+ totex(three)
+ texprint("}")
+ elseif two == "int" or two == "prod" or two == "sum" then
+ local four, five = t[4], t[5]
+ if five then
+ texprint("\\"..two.."^{")
+ totex(three)
+ texprint("}_{")
+ totex(four)
+ texprint("}")
+ totex(five)
+ elseif four then
+ texprint("\\"..two.."^{")
+ totex(three)
+ texprint("}")
+ totex(four)
+ elseif three then
+ texprint("\\"..two.." ") -- " " not needed
+ totex(three)
+ else
+ texprint("\\"..two)
+ end
else
- return format("\\frac{%s}{%s}", totex(three), totex(t[4]))
+ texprint("\\"..two.."(")
+ totex(three)
+ texprint(")")
end
- elseif two == '*' then
- local times = "\\times "
- return format("%s%s%s", times, totex(three), (#t>3 and totex({unpack(t,4,#t)})) or "")
- else
- return format("%s%s%s", two, totex(three), (#t>3 and totex({unpack(t,4,#t)})) or "")
end
- elseif two == "factor" then
- if three == '^' then
- return format("%s^{%s}", totex(one), totex(t[4]))
- else
- if two == ">=" then
- two = "\\ge "
- elseif two == "<=" then
- two = "\\le "
- elseif two == "&gt;" then
- two = "> "
- elseif two == "&lt;" then
- two = "< "
+ else
+ local nt = #t
+ local hasfactor = has_factor(t)
+ if hasfactor then
+ texprint("\\left(")
+ end
+ totex(one)
+ for i=2,nt,3 do
+ local what, how, rest = t[i], t[i+1], t[i+2]
+ if what == "factor" then
+ if how == '^' or how == "_" then
+ texprint(how)
+ texprint("{")
+ totex(rest)
+ texprint("}")
+ else
+ texprint(how)
+ totex(rest)
+ end
+ elseif what == "term" then
+ if how == '/' then
+ texprint("\\frac{")
+ totex(rest)
+ texprint("}{")
+ totex(t[i+3] or "")
+ texprint("}")
+ elseif how == '*' then
+ texprint("\\times")
+ totex(rest)
+ else
+ texprint(how)
+ totex(three)
+ end
+ elseif what == "compare" then
+ if two == ">=" then
+ texprint("\\ge")
+ elseif two == "<=" then
+ texprint("\\le")
+ elseif two == "&gt;" then
+ texprint(">")
+ elseif two == "&lt;" then
+ texprint("<")
+ end
+ totex(three)
end
- return format("%s%s", totex(one), (#t>1 and totex({unpack(t,2,#t)})) or "")
end
- elseif two == "term" then
- if three == '/' then
- return format("\\frac{%s}{%s}", totex(one), (#t>3 and totex({unpack(t,4,#t)})) or "")
- else
- return format("%s%s", totex(one), (#t>1 and totex({unpack(t,2,#t)})) or "")
+ if hasfactor then
+ texprint("\\right)")
end
- else
- return totex(one)
end
end
- return ""
end
calcmath = { }
@@ -324,11 +356,7 @@ if false then
function calcmath.tex(str)
str = totex(parser:match(str))
- print(str)
return (str == "" and "[error]") or str
end
end
-
---~ compareop = Cc("compare") * C(P("<") + P("=") + P(">") + P(">=") + P("<=") + P("&gt;")/">" + P("&lt;")/"<") * space
---~ comparison = Ct(V("expression") * (compareop * V("expression"))^0),
diff --git a/tex/context/base/x-mathml.mkiv b/tex/context/base/x-mathml.mkiv
index e58dee765..6ddf00cac 100644
--- a/tex/context/base/x-mathml.mkiv
+++ b/tex/context/base/x-mathml.mkiv
@@ -1658,6 +1658,7 @@
\stopxmlsetups
\usemodule[x][calcmath]
+%usemodule[x][asciimath]
\startxmlsetups mml:annotation
\xmldoifelse {#1} {[oneof(@encoding,'TeX','tex','TEX','ConTeXt','context','CONTEXT','ctx')]} {
@@ -1667,9 +1668,17 @@
\endgroup
} {
\xmldoifelse {#1} {[oneof(@encoding,'calcmath','cm')]} {
- \calcmath{\xmlflush{#1}}
+ \expanded{\calcmath{\xmlflush{#1}}}
} {
- \xmlall{#1}{../!mml:annotation}
+ \xmldoifelse {#1} {[oneof(@encoding,'asciimath','am')]} {
+ \ifdefined\asciimath
+ \expanded{\asciimath{\xmlflush{#1}}}
+ \else
+ \hbox{\tt no am loaded}%
+ \fi
+ } {
+ \xmlall{#1}{../!mml:annotation}
+ }
}
}
\stopxmlsetups
diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua
index fec779d9e..6c9a341dc 100644
--- a/tex/generic/context/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : c:/data/develop/context/texmf/tex/generic/context/luatex-fonts-merged.lua
-- parent file : c:/data/develop/context/texmf/tex/generic/context/luatex-fonts.lua
--- merge date : 10/19/09 14:48:40
+-- merge date : 10/21/09 10:32:02
do -- begin closure to overcome local limits and interference
@@ -274,7 +274,7 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
-local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local P, R, S, Ct, C, Cs, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
--~ l-lpeg.lua :
@@ -381,6 +381,41 @@ end
--~ return p
--~ end
+--~ from roberto's site:
+--~
+--~ -- decode a two-byte UTF-8 sequence
+--~ local function f2 (s)
+--~ local c1, c2 = string.byte(s, 1, 2)
+--~ return c1 * 64 + c2 - 12416
+--~ end
+--~
+--~ -- decode a three-byte UTF-8 sequence
+--~ local function f3 (s)
+--~ local c1, c2, c3 = string.byte(s, 1, 3)
+--~ return (c1 * 64 + c2) * 64 + c3 - 925824
+--~ end
+--~
+--~ -- decode a four-byte UTF-8 sequence
+--~ local function f4 (s)
+--~ local c1, c2, c3, c4 = string.byte(s, 1, 4)
+--~ return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168
+--~ end
+--~
+--~ local cont = lpeg.R("\128\191") -- continuation byte
+--~
+--~ local utf8 = lpeg.R("\0\127") / string.byte
+--~ + lpeg.R("\194\223") * cont / f2
+--~ + lpeg.R("\224\239") * cont * cont / f3
+--~ + lpeg.R("\240\244") * cont * cont * cont / f4
+--~
+--~ local decode_pattern = lpeg.Ct(utf8^0) * -1
+
+
+local cont = R("\128\191") -- continuation byte
+
+lpeg.utf8 = R("\0\127") + R("\194\223") * cont + R("\224\239") * cont * cont + R("\240\244") * cont * cont * cont
+
+
end -- closure
do -- begin closure to overcome local limits and interference