author     Context Git Mirror Bot <phg42.2a@gmail.com>   2014-06-08 20:39:19 +0200
committer  Context Git Mirror Bot <phg42.2a@gmail.com>   2014-06-08 20:39:19 +0200
commit     8c0a3f520d195ebe1c0f2ae06ecf8abeb915eec7 (patch)
tree       57e47071315fee9ec63964b89eacd9a56bff3d64 /tex
parent     9160707731bd822be1cf9dc4001e44cb4e5d4929 (diff)
download   context-8c0a3f520d195ebe1c0f2ae06ecf8abeb915eec7.tar.gz
2014-06-06 23:54:00
Diffstat (limited to 'tex')
-rw-r--r--  tex/context/base/anch-pgr.lua  11
-rw-r--r--  tex/context/base/anch-pos.lua  27
-rw-r--r--  tex/context/base/attr-neg.lua  4
-rw-r--r--  tex/context/base/back-exp.mkiv  6
-rw-r--r--  tex/context/base/back-ini.lua  7
-rw-r--r--  tex/context/base/back-pdf.lua  2
-rw-r--r--  tex/context/base/back-pdf.mkiv  12
-rw-r--r--  tex/context/base/buff-ver.mkiv  11
-rw-r--r--  tex/context/base/char-act.mkiv  7
-rw-r--r--  tex/context/base/colo-ini.lua  12
-rw-r--r--  tex/context/base/colo-ini.mkiv  42
-rw-r--r--  tex/context/base/colo-xwi.mkii  7
-rw-r--r--  tex/context/base/cont-new.mkiv  2
-rw-r--r--  tex/context/base/context-version.pdf  bin 4321 -> 4323 bytes
-rw-r--r--  tex/context/base/context.mkiv  2
-rw-r--r--  tex/context/base/font-ctx.lua  8
-rw-r--r--  tex/context/base/l-lpeg.lua  82
-rw-r--r--  tex/context/base/lang-txt.lua  6
-rw-r--r--  tex/context/base/lpdf-ini.lua  33
-rw-r--r--  tex/context/base/lpdf-mis.lua  11
-rw-r--r--  tex/context/base/math-def.mkiv  3
-rw-r--r--  tex/context/base/math-dir.lua  1
-rw-r--r--  tex/context/base/math-rad.mkvi  4
-rw-r--r--  tex/context/base/meta-fnt.lua  32
-rw-r--r--  tex/context/base/meta-pdf.lua  2
-rw-r--r--  tex/context/base/mlib-pdf.lua  231
-rw-r--r--  tex/context/base/mlib-pps.lua  99
-rw-r--r--  tex/context/base/mult-aux.mkiv  25
-rw-r--r--  tex/context/base/mult-def.mkiv  155
-rw-r--r--  tex/context/base/mult-low.lua  4
-rw-r--r--  tex/context/base/node-fin.lua  1
-rw-r--r--  tex/context/base/pack-rul.mkiv  29
-rw-r--r--  tex/context/base/phys-dim.lua  37
-rw-r--r--  tex/context/base/s-abr-01.tex  1
-rw-r--r--  tex/context/base/sort-ini.lua  96
-rw-r--r--  tex/context/base/spac-chr.lua  2
-rw-r--r--  tex/context/base/spac-hor.mkiv  4
-rw-r--r--  tex/context/base/spac-ver.lua  19
-rw-r--r--  tex/context/base/spac-ver.mkiv  4
-rw-r--r--  tex/context/base/status-files.pdf  bin 24914 -> 24884 bytes
-rw-r--r--  tex/context/base/status-lua.pdf  bin 246731 -> 246770 bytes
-rw-r--r--  tex/context/base/strc-mar.lua  1
-rw-r--r--  tex/context/base/strc-pag.lua  6
-rw-r--r--  tex/context/base/strc-syn.lua  32
-rw-r--r--  tex/context/base/strc-tag.mkiv  6
-rw-r--r--  tex/context/base/trac-jus.lua  2
-rw-r--r--  tex/context/base/typo-del.mkiv  148
-rw-r--r--  tex/context/base/typo-dha.lua  1
-rw-r--r--  tex/context/base/typo-dub.lua  1
-rw-r--r--  tex/context/base/typo-mar.lua  4
-rw-r--r--  tex/context/base/typo-mar.mkiv  2
-rw-r--r--  tex/context/base/typo-rep.lua  1
-rw-r--r--  tex/context/base/util-str.lua  3
-rw-r--r--  tex/context/base/x-asciimath.lua  1866
-rw-r--r--  tex/context/base/x-asciimath.mkiv  324
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua  31
56 files changed, 2796 insertions, 673 deletions
diff --git a/tex/context/base/anch-pgr.lua b/tex/context/base/anch-pgr.lua
index c7f56a92b..7c8c4545f 100644
--- a/tex/context/base/anch-pgr.lua
+++ b/tex/context/base/anch-pgr.lua
@@ -59,7 +59,7 @@ local function add(t,x,y,last,direction)
if x == lx and y == ly then
-- quick skip
elseif n == 1 then
--- if abs(lx-x) <= eps or abs(ly-y) <= eps then
+ -- if abs(lx-x) <= eps or abs(ly-y) <= eps then
if abs(lx-x) > eps or abs(ly-y) > eps then
t[n+1] = { x, y }
end
@@ -67,10 +67,9 @@ local function add(t,x,y,last,direction)
local tm = t[n-1]
local px = tm[1]
local py = tm[2]
-if (direction == "down" and y > ly) or (direction == "up" and y < ly) then
- -- move back from too much hang
-else
- if abs(lx-px) <= eps and abs(lx-x) <= eps then
+ if (direction == "down" and y > ly) or (direction == "up" and y < ly) then
+ -- move back from too much hang
+ elseif abs(lx-px) <= eps and abs(lx-x) <= eps then
if abs(ly-y) > eps then
tn[2] = y
end
@@ -81,7 +80,6 @@ else
elseif not last then
t[n+1] = { x, y }
end
-end
end
end
end
@@ -430,7 +428,6 @@ local function calculatemultipar(tag,obeyhang)
end
-- Obeying intermediate changes of left/rightskip makes no sense as it will
-- look bad, so we only look at the begin situation.
- --
local bn = b.n
if bn then
local bp = collected[f_p_tag(bn)]
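
[editor's note, not part of the patch] The refactoring of add() above folds the nested if/else into an elseif chain; the underlying idea is to merge points that sit on the same (near-)vertical line, within an eps tolerance, instead of appending them all. A minimal standalone sketch of that idea in plain Lua, with illustrative names:

local abs = math.abs
local eps = 2

local function addpoint(t,x,y)
    local n = #t
    if n > 1 then
        local last, previous = t[n], t[n-1]
        if abs(last[1]-previous[1]) <= eps and abs(last[1]-x) <= eps then
            last[2] = y -- collinear within tolerance: just move the endpoint
            return
        end
    end
    t[n+1] = { x, y }
end

local path = { }
addpoint(path,0,0) addpoint(path,0,10) addpoint(path,0,20)
-- path is now { {0,0}, {0,20} } instead of three separate points
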
diff --git a/tex/context/base/anch-pos.lua b/tex/context/base/anch-pos.lua
index c2b62bae7..4230d1d16 100644
--- a/tex/context/base/anch-pos.lua
+++ b/tex/context/base/anch-pos.lua
@@ -259,9 +259,15 @@ local function enhance(data)
return data
end
-local function set(name,index,val)
+-- analyze some files (with lots of margindata) and then, when there is only one key, optionally
+-- use that one instead of a table (so, a 3rd / 4th argument: key, e.g. "x")
+
+local function set(name,index,val) -- ,key
local data = enhance(val or index)
if val then
+-- if data[key] and not next(next(data)) then
+-- data = data[key]
+-- end
container = tobesaved[name]
if not container then
tobesaved[name] = {
@@ -284,6 +290,25 @@ local function get(id,index)
end
end
+-- local function get(id,index) -- ,key
+-- local data
+-- if index then
+-- local container = collected[id]
+-- if container then
+-- data = container[index]
+-- if not data then
+-- -- nothing
+-- elseif type(data) == "table" then
+-- return data
+-- else
+-- return { [key] = data }
+-- end
+-- end
+-- else
+-- return collected[id]
+-- end
+-- end
+
jobpositions.setdim = setdim
jobpositions.setall = setall
jobpositions.set = set
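
[editor's note, not part of the patch] The commented set/get variants above sketch an optimization: when a stored position record has only one field (say "x"), store the bare value instead of a one-entry table and re-wrap it on access. A rough standalone illustration of that packing idea; the key argument is taken from the comment, the names are hypothetical:

-- collapse: reduce a single-field record to its bare value; expand: restore it
local function collapse(data,key)
    if data[key] ~= nil and next(data,(next(data))) == nil then
        return data[key] -- exactly one entry, and it is 'key'
    else
        return data
    end
end

local function expand(data,key)
    if type(data) == "table" then
        return data
    else
        return { [key] = data }
    end
end

local stored = collapse({ x = 12345 },"x") -- 12345
local record = expand(stored,"x")          -- { x = 12345 }
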
diff --git a/tex/context/base/attr-neg.lua b/tex/context/base/attr-neg.lua
index c32cec956..37de9a4da 100644
--- a/tex/context/base/attr-neg.lua
+++ b/tex/context/base/attr-neg.lua
@@ -18,7 +18,7 @@ local tex = tex
local states = attributes.states
local tasks = nodes.tasks
local nodeinjections = backends.nodeinjections
-local settexattribute = tex.setattribute
+local texsetattribute = tex.setattribute
local variables = interfaces.variables
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -94,5 +94,5 @@ function commands.triggernegative(stamp)
enable()
enabled = true
end
- settexattribute(a_negative,register(stamp))
+ texsetattribute(a_negative,register(stamp))
end
diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv
index c7696d383..7fd1b5799 100644
--- a/tex/context/base/back-exp.mkiv
+++ b/tex/context/base/back-exp.mkiv
@@ -113,9 +113,9 @@
\unexpanded\def\dotagsetnotesymbol{\taggedctxcommand{settagdescriptionsymbol("\currentnote",\currentnotenumber)}}%
\to \everyenableelements
-\appendtoks
- \unexpanded\def\doverbatimspace{\char32\relax}% will be done permanently
-\to \everyenableelements
+% \appendtoks
+% \unexpanded\def\doverbatimspace{\asciispacechar}% will be done permanently
+% \to \everyenableelements
% The action: \setupbackend[export=yes] % or filename
diff --git a/tex/context/base/back-ini.lua b/tex/context/base/back-ini.lua
index c1f973a0c..129e7b75c 100644
--- a/tex/context/base/back-ini.lua
+++ b/tex/context/base/back-ini.lua
@@ -6,6 +6,13 @@ if not modules then modules = { } end modules ['back-ini'] = {
license = "see context related readme files"
}
+-- -- how to create a shortcut:
+--
+-- local function something(...)
+-- something = backends.codeinjections.something
+-- return something(...)
+-- end
+
local next, type = next, type
local format = string.format
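
[editor's note, not part of the patch] The shortcut comment added above relies on a Lua idiom: a stub function that, on its first call, rebinds itself to the real backend injection and then forwards the call. A self-contained sketch of that lazy-binding trick with illustrative names (the same pattern is used for unicodetoactualtext in meta-fnt.lua below):

local backends = { codeinjections = { } }

-- the stub rebinds the local on first use, so later calls go straight to the
-- real implementation and the load order of the backend no longer matters
local function something(...)
    something = backends.codeinjections.something
    return something(...)
end

backends.codeinjections.something = function(n) return n + 1 end

print(something(1)) -- 2 (first call resolves the real function)
print(something(2)) -- 3 (already resolved)
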
diff --git a/tex/context/base/back-pdf.lua b/tex/context/base/back-pdf.lua
index 34a28e3f7..9eb4a9b73 100644
--- a/tex/context/base/back-pdf.lua
+++ b/tex/context/base/back-pdf.lua
@@ -24,7 +24,7 @@ local context = context
local sind, cosd = math.sind, math.cosd
local insert, remove = table.insert, table.remove
-local f_matrix = string.formatters["%0.8F %0.8F %0.8F %0.8F"]
+local f_matrix = string.formatters["%F %F %F %F"] -- 0.8 is default
function commands.pdfrotation(a)
-- todo: check for 1 and 0 and flush sparse
diff --git a/tex/context/base/back-pdf.mkiv b/tex/context/base/back-pdf.mkiv
index df9594507..3fbad1350 100644
--- a/tex/context/base/back-pdf.mkiv
+++ b/tex/context/base/back-pdf.mkiv
@@ -527,7 +527,7 @@
\xmin \space \ymin \space \xmin \space \yymin\space y
\or % 28
\fi
- \ifnum\mode>8\space
+ \ifnum\mode>\pluseight\space
S
\else
\ifnum\dostroke=\plusone S \fi
@@ -541,13 +541,15 @@
\box\scratchbox
\egroup}
-\unexpanded\def\pdfactualtext#1#2% not interfaced
- {\pdfliteral direct{/Span <</ActualText \ctxlua{tex.write(lpdf.tosixteen("#2"))} >> BDC}%
+\unexpanded\def\pdfbackendactualtext#1#2% not interfaced
+ {\ctxcommand{startactualtext(\!!bs#2\!!es)}%
#1%
- \pdfliteral direct{EMC}}
+ \ctxcommand{stopactualtext()}}
+
+\let\pdfactualtext\pdfbackendactualtext
% \starttext
-% text \pdfactualtext{Meier}{Müller} text
+% text \pdfbackendactualtext{Meier}{Müller} text
% \stoptext
\protect \endinput
diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv
index 10002c4a7..7a4f28253 100644
--- a/tex/context/base/buff-ver.mkiv
+++ b/tex/context/base/buff-ver.mkiv
@@ -376,6 +376,15 @@
\unexpanded\def\specialstretchedspace{\hskip.5\interwordspace\s!plus.125\interwordspace\relax} % \interwordstretch can be zero
\unexpanded\def\specialcontrolspace {\hskip\zeropoint\hbox{\normalcontrolspace}\hskip\zeropoint\relax}
+% \unexpanded\def\taggedspecialfixedspace {\hskip\zeropoint\asciispacechar\hskip\zeropoint}
+% \unexpanded\def\taggedspecialobeyedspace {\hskip\zeropoint\asciispacechar\hskip\zeropoint}
+% \unexpanded\def\taggedspecialstretchedspace{\hskip\zeropoint\asciispacechar\hskip\zeropoint}
+% \unexpanded\def\taggedspecialcontrolspace {\hskip\zeropoint\hbox{\normalcontrolspace}\hskip\zeropoint\relax}
+
+\appendtoks
+ \unexpanded\def\obeyedspace{\hskip\zeropoint\asciispacechar\hskip\zeropoint}%
+\to \everyenableelements
+
\unexpanded\def\obeyhyphens
{\let\obeyedspace \specialobeyedspace % maybe \specialstretchedspace
\let\controlspace\specialcontrolspace
@@ -873,7 +882,7 @@
\newcount \c_buff_verbatim_current
\newconditional\c_buff_optimize_linebreaks
-\def\doverbatimspace {\obeyedspace}
+ \def\doverbatimspace {\obeyedspace}
\unexpanded\def\doinlineverbatimstart {}
\unexpanded\def\doinlineverbatimstop {}
diff --git a/tex/context/base/char-act.mkiv b/tex/context/base/char-act.mkiv
index 011c29d07..7d7268c8b 100644
--- a/tex/context/base/char-act.mkiv
+++ b/tex/context/base/char-act.mkiv
@@ -24,6 +24,8 @@
%D \NEWLINE\ and \NEWPAGE\ active and assigning them
%D \type{\obeysomething}, but first we set some default values.
+% These are expandable!
+
\def\obeyedspace {\space}
\def\obeyedtab {\obeyedspace}
\def\obeyedline {\par}
@@ -36,7 +38,10 @@
%D spaces (control spaces) we only have to adapt the definition
%D of \type{\obeyedspace} to:
-\unexpanded\def\controlspace{\hbox{\char32}} % rather tex, we need the unicode value
+\chardef\asciispacechar\spaceasciicode % a real space character
+
+\unexpanded\def\naturalspace{\asciispacechar}
+\unexpanded\def\controlspace{\hbox{\asciispacechar}} % rather tex, we need the unicode value
\unexpanded\def\normalspaces{\catcode\spaceasciicode\spacecatcode}
\bgroup
diff --git a/tex/context/base/colo-ini.lua b/tex/context/base/colo-ini.lua
index 94e9e6615..174ee9546 100644
--- a/tex/context/base/colo-ini.lua
+++ b/tex/context/base/colo-ini.lua
@@ -26,8 +26,8 @@ local colors = attributes.colors
local transparencies = attributes.transparencies
local colorintents = attributes.colorintents
local registrations = backends.registrations
-local settexattribute = tex.setattribute
-local gettexattribute = tex.getattribute
+local texsetattribute = tex.setattribute
+local texgetattribute = tex.getattribute
local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
@@ -512,7 +512,7 @@ local function mpcolor(model,ca,ta,default)
end
local function mpnamedcolor(name)
- return mpcolor(gettexattribute(a_colorspace),l_color[name] or l_color.black)
+ return mpcolor(texgetattribute(a_colorspace),l_color[name] or l_color.black)
end
local function mpoptions(model,ca,ta,default) -- will move to mlib-col
@@ -812,11 +812,11 @@ end
local setcolormodel = colors.setmodel
function commands.setcolormodel(model,weight)
- settexattribute(a_colorspace,setcolormodel(model,weight))
+ texsetattribute(a_colorspace,setcolormodel(model,weight))
end
-- function commands.setrastercolor(name,s)
--- settexattribute(a_color,colors.definesimplegray(name,s))
+-- texsetattribute(a_color,colors.definesimplegray(name,s))
-- end
function commands.registermaintextcolor(a)
@@ -851,7 +851,7 @@ function commands.doifblackelse(a)
end
function commands.doifdrawingblackelse()
- commands.doifelse(colors.isblack(gettexattribute(a_color)))
+ commands.doifelse(colors.isblack(texgetattribute(a_color)))
end
-- function commands.withcolorsinset(name,command)
diff --git a/tex/context/base/colo-ini.mkiv b/tex/context/base/colo-ini.mkiv
index 6aa51b218..98ba2904c 100644
--- a/tex/context/base/colo-ini.mkiv
+++ b/tex/context/base/colo-ini.mkiv
@@ -688,21 +688,39 @@
\attribute\colorattribute\attributeunsetvalue
\attribute\transparencyattribute\attributeunsetvalue}
+% todo: check if color is overloading a non-color command
+
+\newcount\c_colo_protection
+
+\unexpanded\def\startprotectedcolors
+ {\advance\c_colo_protection\plusone}
+
+\unexpanded\def\stopprotectedcolors
+ {\advance\c_colo_protection\minusone}
+
\def\colo_basics_define[#1][#2]%
{\ctxcommand{defineprocesscolor("#1","#2",false,\v_colo_freeze_state)}%
- \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
+ \ifcase\c_colo_protection
+ \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\colo_basics_define_global[#1][#2]%
{\ctxcommand{defineprocesscolor("#1","#2",true,\v_colo_freeze_state)}%
- \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}}
+ \ifcase\c_colo_protection
+ \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\colo_basics_define_named[#1][#2]% currently same as define
{\ctxcommand{defineprocesscolor("#1","#2",false,\v_colo_freeze_state)}%
- \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
+ \ifcase\c_colo_protection
+ \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\dodefinefastcolor[#1][#2]% still not fast but ok (might change)
{\ctxcommand{defineprocesscolor("#1","#2",false,\v_colo_freeze_state)}%
- \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
+ \ifcase\c_colo_protection
+ \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\colo_basics_defined_and_activated#1%
{\ctxcommand{defineprocesscolor("\v_colo_dummy_name","#1",false,false)}%
@@ -717,11 +735,15 @@
\def\colo_basics_define_process_yes[#1][#2][#3]%
{\ctxcommand{defineprocesscolor("#1","\processcolorcomponents{#2},#3",false,\v_colo_freeze_state)}%
- \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
+ \ifcase\c_colo_protection
+ \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\colo_basics_define_process_nop[#1][#2][#3]%
{\ctxcommand{defineprocesscolor("#1","#2",false,\v_colo_freeze_state)}%
- \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
+ \ifcase\c_colo_protection
+ \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
% Spotcolors used setxvalue but that messes up currentcolor
% and probably no global is needed either but they are global
@@ -730,11 +752,15 @@
\def\colo_basics_define_spot[#1][#2][#3]%
{\ctxcommand{definespotcolor("#1","#2","#3",true)}%
- \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}}
+ \ifcase\c_colo_protection
+ \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\colo_basics_define_multitone[#1][#2][#3][#4]%
{\ctxcommand{definemultitonecolor("#1","#2","#3","#4",true)}%
- \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}}
+ \ifcase\c_colo_protection
+ \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
%D Transparencies (only):
diff --git a/tex/context/base/colo-xwi.mkii b/tex/context/base/colo-xwi.mkii
index 557e9c57c..13d04759e 100644
--- a/tex/context/base/colo-xwi.mkii
+++ b/tex/context/base/colo-xwi.mkii
@@ -8,11 +8,12 @@
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
%D I've forgotten where I got these definitions from, but maybe
-%D they can be of use.
+%D they can be of use. Watch out, these colors are in the rgb
+%D color space, so cyan is not defined in cmyk!
\definecolor [aliceblue] [r=0.94,g=0.97,b=1.00]
\definecolor [antiquewhite] [r=0.98,g=0.92,b=0.84]
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 11d62d298..512dbb282 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2014.05.30 23:26}
+\newcontextversion{2014.06.06 23:52}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index a3983ea30..6bf00b133 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 22a770b9d..1290a7d92 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -28,7 +28,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2014.05.30 23:26}
+\edef\contextversion{2014.06.06 23:52}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua
index 2bfcf3859..2f5ae57e2 100644
--- a/tex/context/base/font-ctx.lua
+++ b/tex/context/base/font-ctx.lua
@@ -1395,10 +1395,10 @@ end
local mappings = fonts.mappings
local loaded = { -- prevent loading (happens in cont-sys files)
- ["original-base.map" ] = true,
- ["original-ams-base.map" ] = true,
- ["original-ams-euler.map"] = true,
- ["original-public-lm.map"] = true,
+ -- ["original-base.map" ] = true,
+ -- ["original-ams-base.map" ] = true,
+ -- ["original-ams-euler.map"] = true,
+ -- ["original-public-lm.map"] = true,
}
function mappings.loadfile(name)
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
index 666af2103..c203d8044 100644
--- a/tex/context/base/l-lpeg.lua
+++ b/tex/context/base/l-lpeg.lua
@@ -225,9 +225,12 @@ patterns.integer = sign^-1 * digit^1
patterns.unsigned = digit^0 * period * digit^1
patterns.float = sign^-1 * patterns.unsigned
patterns.cunsigned = digit^0 * comma * digit^1
+patterns.cpunsigned = digit^0 * (period + comma) * digit^1
patterns.cfloat = sign^-1 * patterns.cunsigned
+patterns.cpfloat = sign^-1 * patterns.cpunsigned
patterns.number = patterns.float + patterns.integer
patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.cpnumber = patterns.cpfloat + patterns.integer
patterns.oct = zero * octdigit^1
patterns.octal = patterns.oct
patterns.HEX = zero * P("X") * (digit+uppercase)^1
@@ -813,21 +816,76 @@ end
-- experiment:
-local function make(t)
- local p
+-- local function make(t)
+-- local p
+-- local keys = sortedkeys(t)
+-- for i=1,#keys do
+-- local k = keys[i]
+-- local v = t[k]
+-- if not p then
+-- if next(v) then
+-- p = P(k) * make(v)
+-- else
+-- p = P(k)
+-- end
+-- else
+-- if next(v) then
+-- p = p + P(k) * make(v)
+-- else
+-- p = p + P(k)
+-- end
+-- end
+-- end
+-- return p
+-- end
+
+-- local function make(t)
+-- local p = P(false)
+-- local keys = sortedkeys(t)
+-- for i=1,#keys do
+-- local k = keys[i]
+-- local v = t[k]
+-- if next(v) then
+-- p = p + P(k) * make(v)
+-- else
+-- p = p + P(k)
+-- end
+-- end
+-- return p
+-- end
+
+-- function lpeg.utfchartabletopattern(list) -- goes to util-lpg
+-- local tree = { }
+-- for i=1,#list do
+-- local t = tree
+-- for c in gmatch(list[i],".") do
+-- local tc = t[c]
+-- if not tc then
+-- tc = { }
+-- t[c] = tc
+-- end
+-- t = tc
+-- end
+-- end
+-- return make(tree)
+-- end
+
+local function make(t,hash)
+ local p = P(false)
local keys = sortedkeys(t)
for i=1,#keys do
local k = keys[i]
local v = t[k]
- if not p then
+ local h = hash[v]
+ if h then
if next(v) then
- p = P(k) * make(v)
+ p = p + P(k) * (make(v,hash) + P(true))
else
- p = P(k)
+ p = p + P(k) * P(true)
end
else
if next(v) then
- p = p + P(k) * make(v)
+ p = p + P(k) * make(v,hash)
else
p = p + P(k)
end
@@ -838,16 +896,20 @@ end
function lpeg.utfchartabletopattern(list) -- goes to util-lpg
local tree = { }
+ local hash = { }
for i=1,#list do
local t = tree
for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
+ local tc = t[c]
+ if not tc then
+ tc = { }
+ t[c] = tc
end
- t = t[c]
+ t = tc
end
+ hash[t] = list[i]
end
- return make(tree)
+ return make(tree,hash)
end
-- inspect ( lpeg.utfchartabletopattern {
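
[editor's note, not part of the patch] The rewritten make now receives a hash that marks which tree nodes correspond to complete entries, so an entry that is also the prefix of a longer entry still produces a match on its own. A small usage sketch, assuming ConTeXt's l-lpeg extensions are loaded (utfchartabletopattern lives there):

-- with the hash-aware tree, "ab" matches even though it is a prefix of "abc"
local pattern = lpeg.utfchartabletopattern { "ab", "abc" }

print(lpeg.match(pattern,"ab"))   -- 3: "ab" now matches as a complete entry
print(lpeg.match(pattern,"abc"))  -- 4: the longer entry still matches too
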
diff --git a/tex/context/base/lang-txt.lua b/tex/context/base/lang-txt.lua
index e4935aa88..f96ee52a1 100644
--- a/tex/context/base/lang-txt.lua
+++ b/tex/context/base/lang-txt.lua
@@ -2756,3 +2756,9 @@ data.labels={
},
}
}
+
+local functions = data.labels.functions
+
+functions.asin = functions.arcsin
+functions.acos = functions.arccos
+functions.atan = functions.arctan
diff --git a/tex/context/base/lpdf-ini.lua b/tex/context/base/lpdf-ini.lua
index 025f62c4f..eee5e2280 100644
--- a/tex/context/base/lpdf-ini.lua
+++ b/tex/context/base/lpdf-ini.lua
@@ -532,7 +532,7 @@ end
-- lpdf.object = pdfdeferredobject
-- lpdf.referenceobject = pdfreferenceobject
-local pagereference = pdf.pageref or tex.pdfpageref
+local pagereference = pdf.pageref -- tex.pdfpageref is obsolete
local nofpages = 0
function lpdf.pagereference(n)
@@ -1002,3 +1002,34 @@ end
-- end
-- end,
-- })
+
+
+-- The next variant of ActualText is what Taco and I could come up with
+-- eventually. As of September 2013 Acrobat copies okay, Sumatra copies a
+-- question mark, pdftotext injects an extra space and Okular adds a
+-- newline plus space.
+
+-- return formatters["BT /Span << /ActualText (CONTEXT) >> BDC [<feff>] TJ % t EMC ET"](code)
+
+local f_actual_text_one = formatters["BT /Span << /ActualText <feff%04x> >> BDC [<feff>] TJ %s EMC ET"]
+local f_actual_text_two = formatters["BT /Span << /ActualText <feff%04x%04x> >> BDC [<feff>] TJ %s EMC ET"]
+local f_actual_text = formatters["/Span <</ActualText %s >> BDC"]
+
+local context = context
+local pdfdirect = nodes.pool.pdfdirect
+
+function codeinjections.unicodetoactualtext(unicode,pdfcode)
+ if unicode < 0x10000 then
+ return f_actual_text_one(unicode,pdfcode)
+ else
+ return f_actual_text_two(unicode/1024+0xD800,unicode%1024+0xDC00,pdfcode)
+ end
+end
+
+function commands.startactualtext(str)
+ context(pdfdirect(f_actual_text(tosixteen(str))))
+end
+
+function commands.stopactualtext()
+ context(pdfdirect("EMC"))
+end
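
[editor's note, not part of the patch] For reference, a sketch of what the one-word case of the new injection produces; the pdfcode argument is whatever content-stream snippet draws the glyph, and the ActualText wrapper carries the feff-marked UTF-16 replacement text around it. This reproduction uses plain string.format instead of ConTeXt's formatters, assuming the same %04x expansion:

local function actualtext(unicode,pdfcode)
    return string.format("BT /Span << /ActualText <feff%04x> >> BDC [<feff>] TJ %s EMC ET",unicode,pdfcode)
end

print(actualtext(0x00E9,"(e) Tj"))
-- BT /Span << /ActualText <feff00e9> >> BDC [<feff>] TJ (e) Tj EMC ET
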
diff --git a/tex/context/base/lpdf-mis.lua b/tex/context/base/lpdf-mis.lua
index 6efbd3882..748567125 100644
--- a/tex/context/base/lpdf-mis.lua
+++ b/tex/context/base/lpdf-mis.lua
@@ -253,21 +253,26 @@ local pagespecs = {
local pagespec, topoffset, leftoffset, height, width, doublesided = "default", 0, 0, 0, 0, false
+local pdfpaperheight = tex.pdfpageheight
+local pdfpaperwidth = tex.pdfpagewidth
+
function codeinjections.setupcanvas(specification)
local paperheight = specification.paperheight
local paperwidth = specification.paperwidth
local paperdouble = specification.doublesided
if paperheight then
texset('global','pdfpageheight',paperheight)
+ pdfpaperheight = paperheight
end
if paperwidth then
texset('global','pdfpagewidth',paperwidth)
+ pdfpaperwidth = paperwidth
end
pagespec = specification.mode or pagespec
topoffset = specification.topoffset or 0
leftoffset = specification.leftoffset or 0
- height = specification.height or tex.pdfpageheight
- width = specification.width or tex.pdfpagewidth
+ height = specification.height or pdfpaperheight
+ width = specification.width or pdfpaperwidth
if paperdouble ~= nil then
doublesided = paperdouble
end
@@ -313,7 +318,7 @@ local function boxvalue(n) -- we could share them
end
local function pagespecification()
- local pageheight = tex.pdfpageheight
+ local pageheight = pdfpaperheight
local box = pdfarray { -- can be cached
boxvalue(leftoffset),
boxvalue(pageheight+topoffset-height),
diff --git a/tex/context/base/math-def.mkiv b/tex/context/base/math-def.mkiv
index 250986959..0888081f0 100644
--- a/tex/context/base/math-def.mkiv
+++ b/tex/context/base/math-def.mkiv
@@ -30,6 +30,9 @@
\definemathcommand [arccos] [nolop] {\mfunctionlabeltext{arccos}}
\definemathcommand [arcsin] [nolop] {\mfunctionlabeltext{arcsin}}
\definemathcommand [arctan] [nolop] {\mfunctionlabeltext{arctan}}
+\definemathcommand [acos] [nolop] {\mfunctionlabeltext{acos}}
+\definemathcommand [asin] [nolop] {\mfunctionlabeltext{asin}}
+\definemathcommand [atan] [nolop] {\mfunctionlabeltext{atan}}
\definemathcommand [arg] [nolop] {\mfunctionlabeltext{arg}}
\definemathcommand [cosh] [nolop] {\mfunctionlabeltext{cosh}}
\definemathcommand [cos] [nolop] {\mfunctionlabeltext{cos}}
diff --git a/tex/context/base/math-dir.lua b/tex/context/base/math-dir.lua
index bcc5461e9..525d07831 100644
--- a/tex/context/base/math-dir.lua
+++ b/tex/context/base/math-dir.lua
@@ -33,7 +33,6 @@ local getid = nuts.getid
local getlist = nuts.getlist
local setfield = nuts.setfield
local getattr = nuts.getattr
-local setattr = nuts.setattr
local insert_node_before = nuts.insert_before
local insert_node_after = nuts.insert_after
diff --git a/tex/context/base/math-rad.mkvi b/tex/context/base/math-rad.mkvi
index 027b5c27d..f22d62374 100644
--- a/tex/context/base/math-rad.mkvi
+++ b/tex/context/base/math-rad.mkvi
@@ -33,8 +33,8 @@
\def\styledrootradical#1#2% so that \text works ok ... \rootradical behaves somewhat weird
{\normalexpanded{\rootradical{\normalunexpanded{#1}}{\noexpand\triggermathstyle{\normalmathstyle}\normalunexpanded{#2}}}}
-\def\rootwithdegree[#1]{\rootradical{#1}}
-\def\rootwithoutdegree {\rootradical {}}
+\unexpanded\def\rootwithdegree[#1]{\rootradical{#1}}
+\unexpanded\def\rootwithoutdegree {\rootradical {}}
%D Even older stuff:
diff --git a/tex/context/base/meta-fnt.lua b/tex/context/base/meta-fnt.lua
index 596d0f456..02bf5d6b2 100644
--- a/tex/context/base/meta-fnt.lua
+++ b/tex/context/base/meta-fnt.lua
@@ -21,32 +21,18 @@ mpfonts.version = mpfonts.version or 1.20
mpfonts.inline = true
mpfonts.cache = containers.define("fonts", "mp", mpfonts.version, true)
-metapost.fonts = metapost.fonts or { }
+metapost.fonts = metapost.fonts or { }
+
+local function unicodetoactualtext(...)
+ unicodetoactualtext = backends.codeinjections.unicodetoactualtext
+ return unicodetoactualtext(...)
+end
-- a few glocals
local characters, descriptions = { }, { }
local factor, code, slot, width, height, depth, total, variants, bbox, llx, lly, urx, ury = 100, { }, 0, 0, 0, 0, 0, 0, true, 0, 0, 0, 0
--- The next variant of ActualText is what Taco and I could come up with
--- eventually. As of September 2013 Acrobat copies okay, Sumatra copies a
--- question mark, pdftotext injects an extra space and Okular adds a
--- newline plus space.
-
--- return formatters["BT /Span << /ActualText (CONTEXT) >> BDC [<feff>] TJ % t EMC ET"](code)
-
-local function topdf(n,code)
- if n < 0x10000 then
- return formatters["BT /Span << /ActualText <feff%04x> >> BDC [<feff>] TJ % t EMC ET"](n,code)
- else
- return formatters["BT /Span << /ActualText <feff%04x%04x> >> BDC [<feff>] TJ % t EMC ET"](n/1024+0xD800,n%1024+0xDC00,code)
- end
-end
-
--- local function topdf(n,code)
--- return formatters["/Span << /ActualText (CTX) >> BDC % t EMC"](code)
--- end
-
local flusher = {
startfigure = function(_chr_,_llx_,_lly_,_urx_,_ury_)
code = { }
@@ -68,6 +54,7 @@ local flusher = {
end,
stopfigure = function()
local cd = chardata[n]
+ local code = unicodetoactualtext(slot,concat(code," ")) or ""
descriptions[slot] = {
-- unicode = slot,
name = cd and cd.adobename,
@@ -79,7 +66,7 @@ local flusher = {
if inline then
characters[slot] = {
commands = {
- { "special", "pdf:" .. topdf(slot,code) },
+ { "special", "pdf:" .. code },
}
}
else
@@ -88,13 +75,14 @@ local flusher = {
{
"image",
{
- stream = topdf(slot,code),
+ stream = code,
bbox = { 0, -depth * 65536, width * 65536, height * 65536 }
},
},
}
}
end
+ code = nil -- no need to keep that
end
}
diff --git a/tex/context/base/meta-pdf.lua b/tex/context/base/meta-pdf.lua
index 512384450..03ea1358b 100644
--- a/tex/context/base/meta-pdf.lua
+++ b/tex/context/base/meta-pdf.lua
@@ -329,7 +329,7 @@ handlers[50] = function() report_mptopdf("skipping special %s",50) end
--end of not supported
function mps.setrgbcolor(r,g,b) -- extra check
- r, g = tonumber(r), tonumber(g) -- needed when we use lpeg
+ r, g, b = tonumber(r), tonumber(g), tonumber(b) -- needed when we use lpeg
if r == 0.0123 and g < 0.1 then
g, b = round(g*10000), round(b*10000)
local s = specials[b]
diff --git a/tex/context/base/mlib-pdf.lua b/tex/context/base/mlib-pdf.lua
index d25dde884..82db18fec 100644
--- a/tex/context/base/mlib-pdf.lua
+++ b/tex/context/base/mlib-pdf.lua
@@ -53,9 +53,9 @@ local f_J = formatters["%i J"]
local f_d = formatters["[%s] %F d"]
local f_w = formatters["%F w"]
-local pdfliteral = function(s)
+local pdfliteral = function(pdfcode)
local literal = copy_node(mpsliteral)
- literal.data = s
+ literal.data = pdfcode
return literal
end
@@ -64,18 +64,36 @@ end
-- get a new result table and the stored objects are forgotten. Otherwise they
-- are reused.
-local function getobjects(result,figure,f)
+-- local function getobjects(result,figure,index)
+-- if metapost.optimize then
+-- local objects = result.objects
+-- if not objects then
+-- result.objects = { }
+-- end
+-- objects = result.objects[index]
+-- if not objects then
+-- objects = figure:objects()
+-- result.objects[index] = objects
+-- end
+-- return objects
+-- else
+-- return figure:objects()
+-- end
+-- end
+
+local function getobjects(result,figure,index)
if metapost.optimize then
- local objects = result.objects
- if not objects then
- result.objects = { }
+ local robjects = result.objects
+ if not robjects then
+ robjects = { }
+ result.objects = robjects
end
- objects = result.objects[f]
- if not objects then
- objects = figure:objects()
- result.objects[f] = objects
+ local fobjects = robjects[index]
+ if not fobjects then
+ fobjects = figure:objects()
+ robjects[index] = fobjects
end
- return objects
+ return fobjects
else
return figure:objects()
end
@@ -323,11 +341,8 @@ local variable =
local pattern_lst = (variable * newline^0)^0
-metapost.variables = { } -- to be stacked
-metapost.llx = 0 -- to be stacked
-metapost.lly = 0 -- to be stacked
-metapost.urx = 0 -- to be stacked
-metapost.ury = 0 -- to be stacked
+metapost.variables = { } -- to be stacked
+metapost.properties = { } -- to be stacked
function commands.mprunvar(key,n) -- should be defined in another lib
local value = metapost.variables[key]
@@ -374,6 +389,30 @@ function metapost.processspecial(str)
end
end
+local function setproperties(figure)
+ local boundingbox = figure:boundingbox()
+ local properties = {
+ llx = boundingbox[1],
+ lly = boundingbox[2],
+ urx = boundingbox[3],
+ ury = boundingbox[4],
+ slot = figure:charcode(),
+ width = figure:width(),
+ height = figure:height(),
+ depth = figure:depth(),
+ italic = figure:italcorr(),
+ number = figure:charcode() or 0,
+ }
+ metapost.properties = properties
+ return properties
+end
+
+local function setvariables(figure)
+ local variables = { }
+ metapost.variables = variables
+ return variables
+end
+
function metapost.flush(result,flusher,askedfig)
if result then
local figures = result.fig
@@ -388,42 +427,27 @@ function metapost.flush(result,flusher,askedfig)
local flushfigure = flusher.flushfigure
local textfigure = flusher.textfigure
local processspecial = flusher.processspecial or metapost.processspecial
- for f=1,#figures do
- local figure = figures[f]
- local objects = getobjects(result,figure,f)
- local fignum = figure:charcode() or 0
- if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
- local t = { }
+ local variables = setvariables(figure) -- also resets then in case of not found
+ for index=1,#figures do
+ local figure = figures[index]
+ local properties = setproperties(figure)
+ if askedfig == "direct" or askedfig == "all" or askedfig == properties.number then
+ local objects = getobjects(result,figure,index)
+ local result = { }
local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
- local bbox = figure:boundingbox()
- local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4]
- local variables = { }
- metapost.variables = variables
- metapost.properties = {
- llx = llx,
- lly = lly,
- urx = urx,
- ury = ury,
- slot = figure:charcode(),
- width = figure:width(),
- height = figure:height(),
- depth = figure:depth(),
- italic = figure:italcorr(),
- }
- -- replaced by the above
- metapost.llx = llx
- metapost.lly = lly
- metapost.urx = urx
- metapost.ury = ury
+ local llx = properties.llx
+ local lly = properties.lly
+ local urx = properties.urx
+ local ury = properties.ury
if urx < llx then
-- invalid
- startfigure(fignum,0,0,0,0,"invalid",figure)
+ startfigure(properties.number,0,0,0,0,"invalid",figure)
stopfigure()
else
- startfigure(fignum,llx,lly,urx,ury,"begin",figure)
- t[#t+1] = "q"
+ startfigure(properties.number,llx,lly,urx,ury,"begin",figure)
+ result[#result+1] = "q"
if objects then
- resetplugins(t) -- we should move the colorinitializer here
+ resetplugins(result) -- we should move the colorinitializer here
for o=1,#objects do
local object = objects[o]
local objecttype = object.type
@@ -434,20 +458,20 @@ function metapost.flush(result,flusher,askedfig)
processspecial(object.prescript)
end
elseif objecttype == "start_clip" then
- t[#t+1] = "q"
- flushnormalpath(object.path,t,false)
- t[#t+1] = "W n"
+ result[#result+1] = "q"
+ flushnormalpath(object.path,result,false)
+ result[#result+1] = "W n"
elseif objecttype == "stop_clip" then
- t[#t+1] = "Q"
+ result[#result+1] = "Q"
miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
elseif objecttype == "text" then
- t[#t+1] = "q"
+ result[#result+1] = "q"
local ot = object.transform -- 3,4,5,6,1,2
- t[#t+1] = f_cm(ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%F %F m %F %F %F %F 0 0 cm"](unpack(ot))
- flushfigure(t) -- flush accumulated literals
- t = { }
+ result[#result+1] = f_cm(ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%F %F m %F %F %F %F 0 0 cm"](unpack(ot))
+ flushfigure(result) -- flush accumulated literals
+ result = { }
textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth)
- t[#t+1] = "Q"
+ result[#result+1] = "Q"
else
-- we use an indirect table as we want to overload
-- entries but this is not possible in userdata
@@ -463,32 +487,32 @@ function metapost.flush(result,flusher,askedfig)
local before, after = processplugins(object)
local objecttype = object.type -- can have changed
if before then
- t = pluginactions(before,t,flushfigure)
+ result = pluginactions(before,result,flushfigure)
end
local ml = object.miterlimit
if ml and ml ~= miterlimit then
miterlimit = ml
- t[#t+1] = f_M(ml)
+ result[#result+1] = f_M(ml)
end
local lj = object.linejoin
if lj and lj ~= linejoin then
linejoin = lj
- t[#t+1] = f_j(lj)
+ result[#result+1] = f_j(lj)
end
local lc = object.linecap
if lc and lc ~= linecap then
linecap = lc
- t[#t+1] = f_J(lc)
+ result[#result+1] = f_J(lc)
end
local dl = object.dash
if dl then
local d = f_d(concat(dl.dashes or {}," "),dl.offset)
if d ~= dashed then
dashed = d
- t[#t+1] = dashed
+ result[#result+1] = dashed
end
elseif dashed then
- t[#t+1] = "[] 0 d"
+ result[#result+1] = "[] 0 d"
dashed = false
end
local path = object.path -- newpath
@@ -498,7 +522,7 @@ function metapost.flush(result,flusher,askedfig)
if pen then
if pen.type == 'elliptical' then
transformed, penwidth = pen_characteristics(original) -- boolean, value
- t[#t+1] = f_w(penwidth) -- todo: only if changed
+ result[#result+1] = f_w(penwidth) -- todo: only if changed
if objecttype == 'fill' then
objecttype = 'both'
end
@@ -507,48 +531,48 @@ function metapost.flush(result,flusher,askedfig)
end
end
if transformed then
- t[#t+1] = "q"
+ result[#result+1] = "q"
end
if path then
if transformed then
- flushconcatpath(path,t,open)
+ flushconcatpath(path,result,open)
else
- flushnormalpath(path,t,open)
+ flushnormalpath(path,result,open)
end
if objecttype == "fill" then
- t[#t+1] = "h f"
+ result[#result+1] = "h f"
elseif objecttype == "outline" then
- t[#t+1] = open and "S" or "h S"
+ result[#result+1] = open and "S" or "h S"
elseif objecttype == "both" then
- t[#t+1] = "h B"
+ result[#result+1] = "h B"
end
end
if transformed then
- t[#t+1] = "Q"
+ result[#result+1] = "Q"
end
local path = object.htap
if path then
if transformed then
- t[#t+1] = "q"
+ result[#result+1] = "q"
end
if transformed then
- flushconcatpath(path,t,open)
+ flushconcatpath(path,result,open)
else
- flushnormalpath(path,t,open)
+ flushnormalpath(path,result,open)
end
if objecttype == "fill" then
- t[#t+1] = "h f"
+ result[#result+1] = "h f"
elseif objecttype == "outline" then
- t[#t+1] = open and "S" or "h S"
+ result[#result+1] = open and "S" or "h S"
elseif objecttype == "both" then
- t[#t+1] = "h B"
+ result[#result+1] = "h B"
end
if transformed then
- t[#t+1] = "Q"
+ result[#result+1] = "Q"
end
end
if after then
- t = pluginactions(after,t,flushfigure)
+ result = pluginactions(after,result,flushfigure)
end
if object.grouped then
-- can be qQ'd so changes can end up in groups
@@ -557,8 +581,8 @@ function metapost.flush(result,flusher,askedfig)
end
end
end
- t[#t+1] = "Q"
- flushfigure(t)
+ result[#result+1] = "Q"
+ flushfigure(result)
stopfigure("end")
end
if askedfig ~= "all" then
@@ -575,16 +599,11 @@ function metapost.parse(result,askedfig)
local figures = result.fig
if figures then
local analyzeplugins = metapost.analyzeplugins -- each object
- for f=1,#figures do
- local figure = figures[f]
- local fignum = figure:charcode() or 0
- if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
- local bbox = figure:boundingbox()
- metapost.llx = bbox[1]
- metapost.lly = bbox[2]
- metapost.urx = bbox[3]
- metapost.ury = bbox[4]
- local objects = getobjects(result,figure,f)
+ for index=1,#figures do
+ local figure = figures[index]
+ local properties = setproperties(figure)
+ if askedfig == "direct" or askedfig == "all" or askedfig == properties.number then
+ local objects = getobjects(result,figure,index)
if objects then
for o=1,#objects do
analyzeplugins(objects[o])
@@ -601,18 +620,17 @@ end
-- tracing:
-local t = { }
+local result = { }
local flusher = {
startfigure = function()
- t = { }
+ result = { }
context.startnointerference()
end,
flushfigure = function(literals)
- local n = #t
- for i=1, #literals do
- n = n + 1
- t[n] = literals[i]
+ local n = #result
+ for i=1,#literals do
+ result[n+i] = literals[i]
end
end,
stopfigure = function()
@@ -622,7 +640,7 @@ local flusher = {
function metapost.pdfliterals(result)
metapost.flush(result,flusher)
- return t
+ return result
end
-- so far
@@ -630,22 +648,27 @@ end
function metapost.totable(result)
local figure = result and result.fig and result.fig[1]
if figure then
- local t = { }
+ local results = { }
local objects = figure:objects()
for o=1,#objects do
local object = objects[o]
- local tt = { }
- local fields = mplib.fields(object)
+ local result = { }
+ local fields = mplib.fields(object) -- hm, is this the whole list, if so, we can get it once
for f=1,#fields do
local field = fields[f]
- tt[field] = object[field]
+ result[field] = object[field]
end
- t[o] = tt
+ results[o] = result
end
- local b = figure:boundingbox()
+ local boundingbox = figure:boundingbox()
return {
- boundingbox = { llx = b[1], lly = b[2], urx = b[3], ury = b[4] },
- objects = t
+ boundingbox = {
+ llx = boundingbox[1],
+ lly = boundingbox[2],
+ urx = boundingbox[3],
+ ury = boundingbox[4],
+ },
+ objects = results
}
else
return nil
diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua
index ce95d5ca7..0f38c271e 100644
--- a/tex/context/base/mlib-pps.lua
+++ b/tex/context/base/mlib-pps.lua
@@ -217,21 +217,34 @@ local function checkandconvert(ca,cb)
end
end
+-- We keep textexts in a shared list (as it's easier that way and we also had that in
+-- the beginning). Each graphic gets its own (1 based) subtable so that we can also
+-- handle multiple conversions in one go which is needed when we process mp files
+-- directly.
+
local stack = { } -- quick hack, we will pass topofstack around
local top = nil
local nofruns = 0 -- askedfig: "all", "first", number
-local function startjob(texmode)
- top = {
- textexts = { }, -- all boxes, optionally with a different color
- texslots = { }, -- references to textexts in order or usage
- texorder = { }, -- references to textexts by mp index
+local function preset(t,k)
+ -- references to textexts by mp index
+ local v = {
textrial = 0,
texfinal = 0,
- -- used by tx plugin
+ texslots = { },
+ texorder = { },
texhash = { },
+ }
+ t[k] = v
+ return v
+end
+
+local function startjob(texmode)
+ top = {
+ textexts = { }, -- all boxes, optionally with a different color
texlast = 0,
- texmode = texmode, -- some day we can then skip all pre/postscripts
+ texdata = setmetatableindex({},preset), -- references to textexts in order or usage
+ texmode = texmode, -- some day we can then skip all pre/postscripts
}
insert(stack,top)
if trace_runs then
@@ -245,7 +258,7 @@ local function stopjob()
for n, tn in next, top.textexts do
free_list(tn)
if trace_textexts then
- report_textexts("freeing box %s",n)
+ report_textexts("freeing text %s",n)
end
end
if trace_runs then
@@ -508,21 +521,23 @@ local do_safeguard = ";"
local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"]
function metapost.textextsdata()
- local texorder = top.texorder
local textexts = top.textexts
local collected = { }
local nofcollected = 0
- for n=1,#texorder do
- local box = textexts[texorder[n]]
- if box then
- local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor
- if trace_textexts then
- report_textexts("passed data item %s: (%p,%p,%p)",n,wd,ht,dp)
+ for k, data in sortedhash(top.texdata) do -- sort is nicer in trace
+ local texorder = data.texorder
+ for n=1,#texorder do
+ local box = textexts[texorder[n]]
+ if box then
+ local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor
+ if trace_textexts then
+ report_textexts("passed data item %s:%s > (%p,%p,%p)",k,n,wd,ht,dp)
+ end
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = f_text_data(n,wd,n,ht,n,dp)
+ else
+ break
end
- nofcollected = nofcollected + 1
- collected[nofcollected] = f_text_data(n,wd,n,ht,n,dp)
- else
- break
end
end
return collected
@@ -860,6 +875,7 @@ end
local function tx_reset()
if top then
+ -- why ?
top.texhash = { }
top.texlast = 0
end
@@ -873,10 +889,11 @@ local ctx_MPLIBsetNtext = context.MPLIBsetNtext
local ctx_MPLIBsetCtext = context.MPLIBsetCtext
local function tx_analyze(object,prescript) -- todo: hash content and reuse them
+ local data = top.texdata[metapost.properties.number]
local tx_stage = prescript.tx_stage
if tx_stage == "trial" then
- local tx_trial = top.textrial + 1
- top.textrial = tx_trial
+ local tx_trial = data.textrial + 1
+ data.textrial = tx_trial
local tx_number = tonumber(prescript.tx_number)
local s = object.postscript or ""
local c = object.color -- only simple ones, no transparency
@@ -889,7 +906,7 @@ local function tx_analyze(object,prescript) -- todo: hash content and reuse them
local a = prescript.tr_alternative
local t = prescript.tr_transparency
local h = fmt(tx_number,a or "-",t or "-",c or "-")
- local n = top.texhash[h] -- todo: hashed variant with s (nicer for similar labels)
+ local n = data.texhash[h] -- todo: hashed variant with s (nicer for similar labels)
if not n then
local tx_last = top.texlast + 1
top.texlast = tx_last
@@ -918,31 +935,31 @@ local function tx_analyze(object,prescript) -- todo: hash content and reuse them
end
top.multipass = true
metapost.multipass = true -- ugly
- top.texhash[h] = tx_last
- top.texslots[tx_trial] = tx_last
- top.texorder[tx_number] = tx_last
+ data.texhash [h] = tx_last
+ data.texslots[tx_trial] = tx_last
+ data.texorder[tx_number] = tx_last
if trace_textexts then
report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,tx_trial,tx_number,tx_last,h)
end
else
- top.texslots[tx_trial] = n
+ data.texslots[tx_trial] = n
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,tx_trial,tx_number,n,h)
+ report_textexts("stage %a, usage %a, number %a, old %a, hash %a",tx_stage,tx_trial,tx_number,n,h)
end
end
elseif tx_stage == "extra" then
- local tx_trial = top.textrial + 1
- top.textrial = tx_trial
+ local tx_trial = data.textrial + 1
+ data.textrial = tx_trial
local tx_number = tonumber(prescript.tx_number)
- if not top.texorder[tx_number] then
+ if not data.texorder[tx_number] then
local s = object.postscript or ""
local tx_last = top.texlast + 1
top.texlast = tx_last
context.MPLIBsettext(tx_last,s)
top.multipass = true
metapost.multipass = true -- ugly
- top.texslots[tx_trial] = tx_last
- top.texorder[tx_number] = tx_last
+ data.texslots[tx_trial] = tx_last
+ data.texorder[tx_number] = tx_last
if trace_textexts then
report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,tx_trial,tx_number,tx_last)
end
@@ -951,15 +968,16 @@ local function tx_analyze(object,prescript) -- todo: hash content and reuse them
end
local function tx_process(object,prescript,before,after)
- local tx_number = prescript.tx_number
+ local data = top.texdata[metapost.properties.number]
+ local tx_number = tonumber(prescript.tx_number)
if tx_number then
- tx_number = tonumber(tx_number)
local tx_stage = prescript.tx_stage
if tx_stage == "final" then
- top.texfinal = top.texfinal + 1
- local n = top.texslots[top.texfinal]
+ local tx_final = data.texfinal + 1
+ data.texfinal = tx_final
+ local n = data.texslots[tx_final]
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,top.texfinal,tx_number,n)
+ report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,tx_final,tx_number,n)
end
local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function
local box = top.textexts[n]
@@ -983,7 +1001,7 @@ local function tx_process(object,prescript,before,after)
if not trace_textexts then
object.path = false -- else: keep it
end
- object.color = false
+ object.color = false
object.grouped = true
end
end
@@ -1120,8 +1138,9 @@ local function ps_process(object,prescript,before,after)
local first, third = op[1], op[3]
local x, y = first.x_coord, first.y_coord
local w, h = third.x_coord - x, third.y_coord - y
- x = x - metapost.llx
- y = metapost.ury - y
+ local properties = metapost.properties
+ x = x - properties.llx
+ y = properties.ury - y
before[#before+1] = function()
context.MPLIBpositionwhd(ps_label,x,y,w,h)
end
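
[editor's note, not part of the patch] The per-graphic texdata table introduced above is auto-populated: indexing it with a figure number that has no entry yet calls preset and stores a fresh subtable. A minimal sketch of that behavior; setmetatableindex here is a local stand-in with the behavior assumed from ConTeXt's table.setmetatableindex:

local function setmetatableindex(t,f)
    return setmetatable(t,{ __index = f })
end

local function preset(t,k)
    local v = { textrial = 0, texfinal = 0, texslots = { }, texorder = { }, texhash = { } }
    t[k] = v
    return v
end

local texdata = setmetatableindex({ },preset)

texdata[3].textrial = texdata[3].textrial + 1 -- the subtable for figure 3 is created on first access
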
diff --git a/tex/context/base/mult-aux.mkiv b/tex/context/base/mult-aux.mkiv
index 427be69ce..d4f9d3b7b 100644
--- a/tex/context/base/mult-aux.mkiv
+++ b/tex/context/base/mult-aux.mkiv
@@ -1117,3 +1117,28 @@
% \global\advance\commalevel \minusone
\protect
+
+%\unprotect
+% \installcorenamespace {test} \installcommandhandler \??test {test} \??test
+% \unexpanded\def\TestMeA[#1]%
+% {\edef\currenttest{#1}
+% \edef\p_before{\testparameter\c!before}%
+% \ifx\p_before\empty \relax \else \relax \fi}
+% \unexpanded\def\TestMeB[#1]%
+% {\edef\currenttest{#1}
+% \doifelsenothing{\testparameter\c!before}\relax\relax}
+% \unexpanded\def\TestMeC[#1]%
+% {\edef\currenttest{#1}
+% \expandafter\expandafter\expandafter\ifx\testparameter\c!before\empty \relax \else \relax \fi}
+% \unexpanded\def\TestMeD[#1]%
+% {\edef\currenttest{#1}
+% \doubleexpandafter\ifx\testparameter\c!before\empty \relax \else \relax \fi}
+% \protect
+%
+% \starttext
+% \definetest[foo] \definetest[bar][foo] \setuptest[bar][before=indeed]
+% \resettimer \dorecurse{100000}{\TestMeA[bar]} A:\elapsedtime \par % 0.502
+% \resettimer \dorecurse{100000}{\TestMeB[bar]} B:\elapsedtime \par % 0.530
+% \resettimer \dorecurse{100000}{\TestMeC[bar]} C:\elapsedtime \par % 0.487
+% \resettimer \dorecurse{100000}{\TestMeD[bar]} D:\elapsedtime \par % 0.493
+% \stoptext
diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv
index 321f06154..d029a2a3e 100644
--- a/tex/context/base/mult-def.mkiv
+++ b/tex/context/base/mult-def.mkiv
@@ -34,81 +34,86 @@
% start todo:
-\def\c!fences {fences}
-\def\c!keeptogether {keeptogether}
-\def\c!viewerprefix {viewerprefix}
-
-\def\c!dataset {dataset}
-\def\c!sectionblock {sectionblock}
-\def\c!language {language}
-\def\c!compressseparator{compressseparator}
-\def\c!renderingsetup {renderingsetup}
-\def\c!filler {filler}
-\def\c!resources {resources}
-\def\c!first {first}
-\def\c!last {last}
-\def\c!quotechar {quotechar}
-\def\c!commentchar {commentchar}
-\def\c!symbolcommand {symbolcommand}
-\def\c!xmlsetup {xmlsetup}
-\def\c!comma {comma}
-\def\c!period {period}
-\def\c!monthconversion {monthconversion}
-\def\c!authorconversion {authorconversion}
-\def\c!comment {comment}
-\def\c!textalign {textalign}
-\def\c!up {up}
-\def\c!down {down}
-\def\c!instance {instance}
-\def\c!database {database}
-\def\c!group {group}
-\def\c!groupsuffix {groupsuffix}
-
-\def\c!referencemethod {referencemethod} % forward both
-
-\def\v!dataset {dataset}
-\def\v!compressseparator{compressseparator}
-\def\v!notation {notation}
-\def\v!endnote {endnote}
-\def\v!interactive {interactive}
-\def\v!autopunctuation {autopunctuation}
-\def\v!integral {integral}
-\def\v!shiftup {shiftup}
-\def\v!shiftdown {shiftdown}
-\def\v!construction {construction}
-\def\v!unframed {unframed}
-\def\v!chemical {chemical}
-\def\v!chemicals {chemicals}
-\def\v!words {words}
-\def\v!combination {combination}
-\def\v!norepeat {norepeat}
-\def\v!mixed {mixed}
-\def\v!centerlast {centerlast}
-
-\def\s!lcgreek {lcgreek}
-\def\s!ucgreek {ucgreek}
-\def\s!sygreek {sygreek}
-\def\s!italics {italics}
-\def\s!integral {integral}
-\def\s!insert {insert} % maybe insertclass
-\def\s!marker {marker}
-
-\def\s!mixedcolumn {mixedcolumn}
-
-\def\s!double {double}
-\def\s!decimal {decimal}
-\def\s!binary {binary}
-
-\def\s!internal {internal}
-
-\def\s!current {current}
-
-\def\s!rel {rel}
-\def\s!ord {ord}
-
-\def\c!HL {HL}
-\def\c!VL {VL}
-\def\c!NL {NL}
+\def\c!nextleft {nextleft}
+\def\c!nextright {nextright}
+\def\c!nextleftquotation {nextleftquotation}
+\def\c!nextrightquotation{nextrightquotation}
+
+\def\c!fences {fences}
+\def\c!keeptogether {keeptogether}
+\def\c!viewerprefix {viewerprefix}
+
+\def\c!dataset {dataset}
+\def\c!sectionblock {sectionblock}
+\def\c!language {language}
+\def\c!compressseparator {compressseparator}
+\def\c!renderingsetup {renderingsetup}
+\def\c!filler {filler}
+\def\c!resources {resources}
+\def\c!first {first}
+\def\c!last {last}
+\def\c!quotechar {quotechar}
+\def\c!commentchar {commentchar}
+\def\c!symbolcommand {symbolcommand}
+\def\c!xmlsetup {xmlsetup}
+\def\c!comma {comma}
+\def\c!period {period}
+\def\c!monthconversion {monthconversion}
+\def\c!authorconversion {authorconversion}
+\def\c!comment {comment}
+\def\c!textalign {textalign}
+\def\c!up {up}
+\def\c!down {down}
+\def\c!instance {instance}
+\def\c!database {database}
+\def\c!group {group}
+\def\c!groupsuffix {groupsuffix}
+
+\def\c!referencemethod {referencemethod} % forward both
+
+\def\v!dataset {dataset}
+\def\v!compressseparator {compressseparator}
+\def\v!notation {notation}
+\def\v!endnote {endnote}
+\def\v!interactive {interactive}
+\def\v!autopunctuation {autopunctuation}
+\def\v!integral {integral}
+\def\v!shiftup {shiftup}
+\def\v!shiftdown {shiftdown}
+\def\v!construction {construction}
+\def\v!unframed {unframed}
+\def\v!chemical {chemical}
+\def\v!chemicals {chemicals}
+\def\v!words {words}
+\def\v!combination {combination}
+\def\v!norepeat {norepeat}
+\def\v!mixed {mixed}
+\def\v!centerlast {centerlast}
+
+\def\s!lcgreek {lcgreek}
+\def\s!ucgreek {ucgreek}
+\def\s!sygreek {sygreek}
+\def\s!italics {italics}
+\def\s!integral {integral}
+\def\s!insert {insert} % maybe insertclass
+\def\s!marker {marker}
+
+\def\s!mixedcolumn {mixedcolumn}
+
+\def\s!double {double}
+\def\s!decimal {decimal}
+\def\s!binary {binary}
+
+\def\s!internal {internal}
+
+\def\s!current {current}
+
+\def\s!rel {rel}
+\def\s!ord {ord}
+
+\def\c!HL {HL}
+\def\c!VL {VL}
+\def\c!NL {NL}
\ifdefined\v!kerncharacters\else \def\v!kerncharacters{kerncharacters} \fi % no time now for translations should be a e! actually
\ifdefined\v!letterspacing \else \def\v!letterspacing {letterspacing} \fi % no time now for translations should be a e! actually
diff --git a/tex/context/base/mult-low.lua b/tex/context/base/mult-low.lua
index 2bae5a0b0..faa1302a9 100644
--- a/tex/context/base/mult-low.lua
+++ b/tex/context/base/mult-low.lua
@@ -126,7 +126,7 @@ return {
"twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace",
"figurespace", "punctuationspace", "hairspace",
"zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj",
- "optionalspace",
+ "optionalspace", "asciispacechar",
},
["helpers"] = {
--
@@ -241,7 +241,7 @@ return {
"removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to",
--
"endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "nbsp",
- "obeyspaces", "obeylines", "obeyedspace", "obeyedline",
+ "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "obeyedtab", "obeyedpage",
"normalspace",
--
"executeifdefined",
diff --git a/tex/context/base/node-fin.lua b/tex/context/base/node-fin.lua
index 250035f39..76ad973da 100644
--- a/tex/context/base/node-fin.lua
+++ b/tex/context/base/node-fin.lua
@@ -27,7 +27,6 @@ local getid = nuts.getid
local getlist = nuts.getlist
local getleader = nuts.getleader
local getattr = nuts.getattr
-local setattr = nuts.setattr
local copy_node = nuts.copy
local insert_node_before = nuts.insert_before
diff --git a/tex/context/base/pack-rul.mkiv b/tex/context/base/pack-rul.mkiv
index 5bda9bb29..5aa633207 100644
--- a/tex/context/base/pack-rul.mkiv
+++ b/tex/context/base/pack-rul.mkiv
@@ -778,6 +778,33 @@
\newcount\c_pack_framed_nesting
+% to be tested (slightly more efficient):
+%
+% \unexpanded\def\pack_frame_common % #1 #2
+% {\bgroup
+% \advance\c_pack_framed_nesting\plusone
+% \expandafter\let\csname\??framed>\the\c_pack_framed_nesting:\s!parent\endcsname\??framed
+% \edef\currentframed{>\the\c_pack_framed_nesting}%
+% \pack_framed_initialize
+% \bgroup
+% \doifnextoptionalcselse} % #1 #2
+%
+% \unexpanded\def\framed {\pack_frame_common\pack_framed_process_framed_pickup\pack_framed_process_indeed}
+% \unexpanded\def\startframed{\pack_frame_common\pack_framed_start_framed_pickup \pack_framed_start_indeed }
+%
+% \def\pack_framed_process_framed_pickup[#1]%
+% {\setupcurrentframed[#1]%
+% \pack_framed_process_indeed}
+%
+% \def\pack_framed_start_framed_pickup[#1]%
+% {\setupcurrentframed[#1]% here !
+% \secondargumenttrue % dirty trick
+% \pack_framed_start_framed_indeed}
+%
+% \def\pack_framed_start_framed_indeed
+% {\pack_framed_process_indeed
+% \bgroup}
+
\unexpanded\def\pack_framed_process_framed[#1]%
{\bgroup
\iffirstargument % faster
@@ -806,6 +833,8 @@
\pack_framed_process_framed[#1]% can be inlined
\bgroup}
+% till here
+
\let\stopframed\egroup
\unexpanded\def\normalframedwithsettings[#1]%
diff --git a/tex/context/base/phys-dim.lua b/tex/context/base/phys-dim.lua
index 870cbd29b..db0b5740d 100644
--- a/tex/context/base/phys-dim.lua
+++ b/tex/context/base/phys-dim.lua
@@ -287,18 +287,29 @@ local long_units = {
-- synonyms
- ["Metric Ton"] = "tonne",
+ MetricTon = "tonne",
Litre = "liter",
+ ["Metric Ton"] = "tonne",
+
-- non-SI units whose values must be obtained experimentally (Table 7)
- ["Electron Volt"] = "electronvolt",
+ AtomicMassUnit = "atomicmassunit",
+ AstronomicalUnit = "astronomicalunit",
+ ElectronVolt = "electronvolt",
Dalton = "dalton",
+
["Atomic Mass Unit"] = "atomicmassunit",
["Astronomical Unit"] = "astronomicalunit",
+ ["Electron Volt"] = "electronvolt",
-- special cases (catch doubles, okay, a bit over the top)
+ DegreesCelsius = "celsius",
+ DegreesFahrenheit = "fahrenheit",
+ DegreeCelsius = "celsius",
+ DegreeFahrenheit = "fahrenheit",
+
["Degrees Celsius"] = "celsius",
["Degrees Fahrenheit"] = "fahrenheit",
["Degree Celsius"] = "celsius",
@@ -323,12 +334,14 @@ local long_units = {
Hg = "mercury",
-- ["Millimetre Of Mercury"] = [[mmHg]],
Angstrom = "angstrom", -- strictly Ångström
- ["Nautical Mile"] = "nauticalmile",
+ NauticalMile = "nauticalmile",
Barn = "barn",
Knot = "knot",
Neper = "neper",
Bel = "bel", -- in practice only decibel used
+ ["Nautical Mile"] = "nauticalmile",
+
-- other non-SI units from CGS system (Table 9)
Erg = "erg",
@@ -601,7 +614,7 @@ labels.units = allocate {
electronvolt = { labels = { en = [[eV]] } },
dalton = { labels = { en = [[Da]] } },
atomicmassunit = { labels = { en = [[u]] } },
- astronomicalunit = { labels = { en = [[ua]] } },
+ astronomicalunit = { labels = { en = [[au]] } },
bar = { labels = { en = [[bar]] } },
angstrom = { labels = { en = [[Å]] } }, -- strictly Ångström
nauticalmile = { labels = { en = [[M]] } },
@@ -799,6 +812,18 @@ local function update_parsers() -- todo: don't remap utf sequences
+ V("nothing") * V("shortunit")
+ V("longprefix") * V("shortunit") -- centi m
+ V("shortprefix") * V("longunit"), -- c meter
+
+-- combination = ( V("longprefix") -- centi meter
+-- + V("nothing")
+-- ) * V("longunit")
+-- + ( V("shortprefix") -- c m
+-- + V("nothing")
+-- + V("longprefix")
+-- ) * V("shortunit") -- centi m
+-- + ( V("shortprefix") -- c meter
+-- ) * V("longunit"),
+
+
dimension = V("somespace")
* (
V("packaged") / dimpre
@@ -812,9 +837,7 @@ local function update_parsers() -- todo: don't remap utf sequences
* V("somespace"),
snippet = V("dimension")
+ V("somesymbol"),
- unit = (
- V("snippet")
- * (V("operator") * V("snippet"))^0
+ unit = ( V("snippet") * (V("operator") * V("snippet"))^0
+ V("somepackaged")
)^1,
}
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index 733eebf7b..044b30f32 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -94,6 +94,7 @@
\logo [EMTEX] {em\TeX}
\logo [ENCODING] {enc}
\logo [ENCTEX] {enc\TeX}
+\logo [EPUB] {ePub}
\logo [EPS] {eps}
\logo [ETEX] {\eTeX}
\logo [EUROBACHOTEX] {EuroBacho\TeX}
diff --git a/tex/context/base/sort-ini.lua b/tex/context/base/sort-ini.lua
index 42d83188e..d1eaacd15 100644
--- a/tex/context/base/sort-ini.lua
+++ b/tex/context/base/sort-ini.lua
@@ -39,11 +39,18 @@ relatively easy to do.</p>
how they map onto this mechanism. I've learned that users can come up
with any demand so nothing here is frozen.</p>
+<p>Todo: I ran into the Unicode Collation document and noticed that
+there are some similarities (like the weights) but using that method
+would still demand extra code for language specifics. One option is
+to use the allkeys.txt file for the uc vectors but then we would also
+use the collapsed key (sq, code is now commented). In fact, we could
+just hook those into the replacer code that we run beforehand.</p>
+
<p>In the future index entries will become more clever, i.e. they will
have language etc properties that then can be used.</p>
]]--
-local gsub, rep, sub, sort, concat = string.gsub, string.rep, string.sub, table.sort, table.concat
+local gsub, rep, sub, sort, concat, tohash, format = string.gsub, string.rep, string.sub, table.sort, table.concat, table.tohash, string.format
local utfbyte, utfchar, utfcharacters, utfvalues = utf.byte, utf.char, utf.characters, utf.values
local next, type, tonumber, rawget, rawset = next, type, tonumber, rawget, rawset
@@ -52,6 +59,7 @@ local setmetatableindex = table.setmetatableindex
local trace_tests = false trackers.register("sorters.tests", function(v) trace_tests = v end)
local trace_methods = false trackers.register("sorters.methods", function(v) trace_methods = v end)
+local trace_orders = false trackers.register("sorters.orders", function(v) trace_orders = v end)
local report_sorters = logs.reporter("languages","sorters")
@@ -65,7 +73,9 @@ local digitsoffset = 0x20000 -- frozen
local digitsmaximum = 0xFFFFF -- frozen
local lccodes = characters.lccodes
+local uccodes = characters.uccodes
local lcchars = characters.lcchars
+local ucchars = characters.ucchars
local shchars = characters.shchars
local fscodes = characters.fscodes
local fschars = characters.fschars
@@ -81,7 +91,7 @@ local v_after = variables.after
local v_first = variables.first
local v_last = variables.last
-local validmethods = table.tohash {
+local validmethods = tohash {
"ch", -- raw character (for tracing)
"mm", -- minus mapping
"zm", -- zero mapping
@@ -169,12 +179,12 @@ local function preparetables(data)
__index = function(t,k)
local n, nn
if k then
- if trace_tests then
+ if trace_orders then
report_sorters("simplifing character %C",k)
end
local l = lower[k] or lcchars[k]
if l then
- if trace_tests then
+ if trace_orders then
report_sorters(" 1 lower: %C",l)
end
local ml = rawget(t,l)
@@ -185,7 +195,7 @@ local function preparetables(data)
nn = nn + 1
n[nn] = ml[i] + (t.__delta or 0)
end
- if trace_tests then
+ if trace_orders then
report_sorters(" 2 order: % t",n)
end
end
@@ -193,7 +203,7 @@ local function preparetables(data)
if not n then
local s = shchars[k] -- maybe all components?
if s and s ~= k then
- if trace_tests then
+ if trace_orders then
report_sorters(" 3 shape: %C",s)
end
n = { }
@@ -201,7 +211,7 @@ local function preparetables(data)
for l in utfcharacters(s) do
local ml = rawget(t,l)
if ml then
- if trace_tests then
+ if trace_orders then
report_sorters(" 4 keep: %C",l)
end
if ml then
@@ -213,7 +223,7 @@ local function preparetables(data)
else
l = lower[l] or lcchars[l]
if l then
- if trace_tests then
+ if trace_orders then
report_sorters(" 5 lower: %C",l)
end
local ml = rawget(t,l)
@@ -232,7 +242,7 @@ local function preparetables(data)
--
-- s = fschars[k]
-- if s and s ~= k then
- -- if trace_tests then
+ -- if trace_orders then
-- report_sorters(" 6 split: %s",s)
-- end
-- local ml = rawget(t,s)
@@ -247,24 +257,24 @@ local function preparetables(data)
-- end
local b = utfbyte(k)
n = decomposed[b] or { b }
- if trace_tests then
+ if trace_orders then
report_sorters(" 6 split: %s",utf.tostring(b)) -- todo
end
end
if n then
- if trace_tests then
+ if trace_orders then
report_sorters(" 7 order: % t",n)
end
else
n = noorder
- if trace_tests then
+ if trace_orders then
report_sorters(" 8 order: 0")
end
end
end
else
n = noorder
- if trace_tests then
+ if trace_orders then
report_sorters(" 9 order: 0")
end
end
@@ -334,8 +344,8 @@ local function setlanguage(l,m,d,u)
report_sorters("invalid sorter method %a in %a",s,method)
end
end
+ usedinsequence = tohash(sequence)
data.sequence = sequence
- usedinsequence = table.tohash(sequence)
data.usedinsequence = usedinsequence
-- usedinsequence.ch = true -- better just store the string
if trace_tests then
@@ -387,7 +397,6 @@ local function basic(a,b) -- trace ea and eb
for j=1,#sequence do
local m = sequence[j]
result = basicsort(ea[m],eb[m])
--- print(m,result)
if result ~= 0 then
return result
end
@@ -439,6 +448,36 @@ local function basic(a,b) -- trace ea and eb
end
end
+-- if we use sq:
+--
+-- local function basic(a,b) -- trace ea and eb
+-- local ea, eb = a.split, b.split
+-- local na, nb = #ea, #eb
+-- if na == 0 and nb == 0 then
+-- -- simple variant (single word)
+-- return basicsort(ea.sq,eb.sq)
+-- else
+-- -- complex variant, used in register (multiple words)
+-- local result = 0
+-- for i=1,nb < na and nb or na do
+-- local eai, ebi = ea[i], eb[i]
+-- result = basicsort(ea.sq,eb.sq)
+-- if result ~= 0 then
+-- return result
+-- end
+-- end
+-- if result ~= 0 then
+-- return result
+-- elseif na > nb then
+-- return 1
+-- elseif nb > na then
+-- return -1
+-- else
+-- return 0
+-- end
+-- end
+-- end
+
comparers.basic = basic
function sorters.basicsorter(a,b)
@@ -531,10 +570,15 @@ function splitters.utf(str,checked) -- we could append m and u but this is clean
else
n = n + 1
local l = lower[sc]
- l = l and utfbyte(l) or lccodes[b]
+ l = l and utfbyte(l) or lccodes[b] or b
+ -- local u = upper[sc]
+ -- u = u and utfbyte(u) or uccodes[b] or b
if type(l) == "table" then
l = l[1] -- there are currently no tables in lccodes but it can be some day
end
+ -- if type(u) == "table" then
+ -- u = u[1] -- there are currently no tables in uccodes but it can be some day
+ -- end
z_case[n] = l
if l ~= b then
m_case[n] = l - 1
@@ -593,9 +637,9 @@ function splitters.utf(str,checked) -- we could append m and u but this is clean
-- p_mapping = { p_mappings[fs][1] }
-- end
-- end
-
+ local result
if checked then
- return {
+ result = {
ch = trace_tests and char or nil, -- not in sequence
uc = usedinsequence.uc and byte or nil,
mc = usedinsequence.mc and m_case or nil,
@@ -606,7 +650,7 @@ function splitters.utf(str,checked) -- we could append m and u but this is clean
pm = usedinsequence.pm and p_mapping or nil,
}
else
- return {
+ result = {
ch = char,
uc = byte,
mc = m_case,
@@ -617,7 +661,15 @@ function splitters.utf(str,checked) -- we could append m and u but this is clean
pm = p_mapping,
}
end
-
+ -- local sq, n = { }, 0
+ -- for i=1,#byte do
+ -- for s=1,#sequence do
+ -- n = n + 1
+ -- sq[n] = result[sequence[s]][i]
+ -- end
+ -- end
+ -- result.sq = sq
+ return result
end
local function packch(entry)
@@ -648,11 +700,11 @@ local function packuc(entry)
if #split > 0 then -- useless test
local t = { }
for i=1,#split do
- t[i] = concat(split[i].uc, " ")
+ t[i] = concat(split[i].uc, " ") -- sq
end
return concat(t," + ")
else
- return concat(split.uc," ")
+ return concat(split.uc," ") -- sq
end
end
diff --git a/tex/context/base/spac-chr.lua b/tex/context/base/spac-chr.lua
index 5b3a15478..cdc56e552 100644
--- a/tex/context/base/spac-chr.lua
+++ b/tex/context/base/spac-chr.lua
@@ -14,7 +14,7 @@ local byte, lower = string.byte, string.lower
-- to be redone: characters will become tagged spaces instead as then we keep track of
-- spaceskip etc
--- todo: only setattr when export
+-- todo: only setattr when export / use properties
local next = next
diff --git a/tex/context/base/spac-hor.mkiv b/tex/context/base/spac-hor.mkiv
index 92491ce32..e3ccc5dd6 100644
--- a/tex/context/base/spac-hor.mkiv
+++ b/tex/context/base/spac-hor.mkiv
@@ -733,8 +733,8 @@
\unexpanded\def\dosetleftskipadaption #1{\leftskipadaption \ifcsname\??skipadaptionleft #1\endcsname\csname\??skipadaptionleft #1\endcsname\else#1\fi\relax}
\unexpanded\def\dosetrightskipadaption#1{\rightskipadaption\ifcsname\??skipadaptionright#1\endcsname\csname\??skipadaptionright#1\endcsname\else#1\fi\relax}
-\unexpanded\def\doadaptleftskip #1{\dosetleftskipadaption {#1}\advance\leftskip \leftskipadaption }
-\unexpanded\def\doadaptrightskip#1{\dosetrightskipadaption{#1}\advance\rightskip\rightskipadaption}
+\unexpanded\def\doadaptleftskip #1{\normalexpanded{\dosetleftskipadaption {#1}}\advance\leftskip \leftskipadaption }
+\unexpanded\def\doadaptrightskip#1{\normalexpanded{\dosetrightskipadaption{#1}}\advance\rightskip\rightskipadaption}
\unexpanded\def\forgetbothskips
{\leftskip\zeropoint
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index 018881663..55c135cf6 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -879,6 +879,8 @@ local special_penalty_xxx = 0
-- headers don't break but also make sure that we have at least a decent
-- break when we have successive ones (often when testing)
+-- todo: mark headers as such so that we can recognize them
+
local specialmethods = { }
local specialmethod = 1
@@ -927,10 +929,21 @@ specialmethods[1] = function(start,penalty)
return
end
elseif trace_specials then
- report_specials(" context %a, higher level, continue",p)
+ report_specials(" context penalty %a, higher level, continue",p)
+ end
+ else
+ local p = getfield(current,"penalty")
+ if p < 10000 then
+ -- assume some other mechanism kicks in so we seem to have content
+ if trace_specials then
+ report_specials(" regular penalty %a, quitting",p)
+ end
+ break
+ else
+ if trace_specials then
+ report_specials(" regular penalty %a, continue",p)
+ end
end
- elseif trace_specials then
- report_specials(" regular penalty, continue")
end
end
current = getprev(current)
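
The added else branch encodes the rule sketched in the comment above: while walking back through the penalties before a header, a regular penalty below 10000 means that some other mechanism already provides a breakpoint, so the special handling quits. A hedged, plain-Lua sketch of just that decision, using simple records instead of real node lists and omitting the level comparison for context penalties:

    -- each record stands in for one penalty node
    local function keepscanning(record)
        if record.special then
            return true              -- context penalty: the level check goes here
        elseif record.penalty < 10000 then
            return false             -- regular penalty: another mechanism kicks in, quit
        else
            return true              -- regular (infinite) penalty: continue scanning
        end
    end

    print(keepscanning { penalty = 10000 })                  -- true
    print(keepscanning { penalty = 50 })                     -- false
    print(keepscanning { penalty = 10000, special = true })  -- true
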
diff --git a/tex/context/base/spac-ver.mkiv b/tex/context/base/spac-ver.mkiv
index 0c84958be..409dd985c 100644
--- a/tex/context/base/spac-ver.mkiv
+++ b/tex/context/base/spac-ver.mkiv
@@ -17,6 +17,8 @@
\registerctxluafile{spac-ver}{1.001}
+% todo: use usernodes ?
+
% todo: itemize : intro ... only when there is one or two lines preceding and then
% keep these together i.e. \blank[intro]
@@ -1802,7 +1804,7 @@
\def\directvspacing#1%
{\par\ctxcommand{vspacing("#1")}}
-% handy (and faste):
+% handy (and faster):
\unexpanded\def\directvpenalty#1%
{\begingroup
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 55046b375..2801e2b05 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index c1435146e..dba0dc0b7 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/strc-mar.lua b/tex/context/base/strc-mar.lua
index 9c6259de4..951cf3ced 100644
--- a/tex/context/base/strc-mar.lua
+++ b/tex/context/base/strc-mar.lua
@@ -29,7 +29,6 @@ local getprev = nuts.getprev
local getid = nuts.getid
local getlist = nuts.getlist
local getattr = nuts.getattr
-local setattr = nuts.setattr
local getbox = nuts.getbox
local traversenodes = nuts.traverse
diff --git a/tex/context/base/strc-pag.lua b/tex/context/base/strc-pag.lua
index c294a4645..35b288888 100644
--- a/tex/context/base/strc-pag.lua
+++ b/tex/context/base/strc-pag.lua
@@ -61,10 +61,12 @@ function pages.save(prefixdata,numberdata,extradata)
if trace_pages then
report_pages("saving page %s.%s",realpage,userpage)
end
+ local viewerprefix = extradata.viewerprefix
+ local state = extradata.state
local data = {
number = userpage,
- viewerprefix = extradata.viewerprefix,
- state = extradata.state,
+ viewerprefix = viewerprefix ~= "" and viewerprefix or nil,
+ state = state ~= "" and state or nil, -- maybe let "start" be default
block = sections.currentblock(),
prefixdata = prefixdata and helpers.simplify(prefixdata),
numberdata = numberdata and helpers.simplify(numberdata),
diff --git a/tex/context/base/strc-syn.lua b/tex/context/base/strc-syn.lua
index e27974eb2..2ca428455 100644
--- a/tex/context/base/strc-syn.lua
+++ b/tex/context/base/strc-syn.lua
@@ -139,23 +139,26 @@ function synonyms.sort(data,options)
sorters.sort(data.result,synonyms.compare)
end
-function synonyms.finalize(data,options)
+function synonyms.finalize(data,options) -- mostly the same as registers so we will generalize it: sorters.split
local result = data.result
data.metadata.nofsorted = #result
- local split = { }
+ local split, nofsplit, lasttag, done, nofdone = { }, 0, nil, nil, 0
+ local firstofsplit = sorters.firstofsplit
for k=1,#result do
local v = result[k]
local entry, tag = firstofsplit(v)
- local s = split[entry] -- keeps track of change
- local d
- if not s then
- d = { }
- s = { tag = tag, data = d }
- split[entry] = s
- else
- d = s.data
+ if tag ~= lasttag then
+ -- if trace_registers then
+ -- report_registers("splitting at %a",tag)
+ -- end
+ done = { }
+ nofdone = 0
+ nofsplit = nofsplit + 1
+ lasttag = tag
+ split[nofsplit] = { tag = tag, data = done }
end
- d[#d+1] = v
+ nofdone = nofdone + 1
+ done[nofdone] = v
end
data.result = split
end
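
The rewritten finalize thus trades the hash keyed on the first-of-split entry for an ordered array of { tag, data } runs, which is what lets the new flush below walk the result without sorting keys again. A hedged sketch of that grouping step over an already sorted list, with tagof standing in for sorters.firstofsplit:

    local function splitbytag(result, tagof)
        local split, nofsplit, lasttag, done, nofdone = { }, 0, nil, nil, 0
        for i=1,#result do
            local v   = result[i]
            local tag = tagof(v)                 -- e.g. the first letter of the sort key
            if tag ~= lasttag then
                done            = { }            -- start a new run
                nofdone         = 0
                nofsplit        = nofsplit + 1
                lasttag         = tag
                split[nofsplit] = { tag = tag, data = done }
            end
            nofdone       = nofdone + 1
            done[nofdone] = v
        end
        return split                             -- ordered runs, ready for flushing
    end
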
@@ -168,10 +171,9 @@ local ctx_synonymentry = context.synonymentry
function synonyms.flush(data,options)
local kind = data.metadata.kind -- hack, will be done better
local result = data.result
- local sorted = table.sortedkeys(result)
- for k=1,#sorted do
- local letter = sorted[k]
- local sublist = result[letter]
+ for i=1,#result do
+ local sublist = result[i]
+ local letter = sublist.tag
local data = sublist.data
for d=1,#data do
local entry = data[d].definition
diff --git a/tex/context/base/strc-tag.mkiv b/tex/context/base/strc-tag.mkiv
index 6e792fd3f..7e15be4a3 100644
--- a/tex/context/base/strc-tag.mkiv
+++ b/tex/context/base/strc-tag.mkiv
@@ -11,6 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% labels: no language needed
% key/values and other names might change (and probably will)
\writestatus{loading}{ConTeXt Structure Macros / Tags}
@@ -176,6 +177,11 @@
\expandafter\strc_tags_element_stop_yes
\fi}
+% if mainlanguage == en we can even omit the label (default to tag) which is faster
+%
+% \unexpanded\def\strc_tags_element_start_yes_indeed_yes[#1][#2]%
+% {\ctxcommand{starttag("#1",{label="#1",userdata=\!!bs#2\!!es})}}
+
\unexpanded\def\strc_tags_element_start_yes_indeed_yes[#1][#2]%
{\ctxcommand{starttag("#1",{label="\dogetupsometaglabeltext{#1}",userdata=\!!bs#2\!!es})}}
diff --git a/tex/context/base/trac-jus.lua b/tex/context/base/trac-jus.lua
index 00c871159..e4cbdb2e8 100644
--- a/tex/context/base/trac-jus.lua
+++ b/tex/context/base/trac-jus.lua
@@ -77,7 +77,7 @@ end)
function checkers.handler(head)
for current in traverse_id(hlist_code,tonut(head)) do
if getattr(current,a_justification) == 1 then
- setattr(current,a_justification,0)
+ setattr(current,a_justification,0) -- kind of reset
local width = getfield(current,"width")
if width > 0 then
local list = getlist(current)
diff --git a/tex/context/base/typo-del.mkiv b/tex/context/base/typo-del.mkiv
index 603471f75..f2f2055a1 100644
--- a/tex/context/base/typo-del.mkiv
+++ b/tex/context/base/typo-del.mkiv
@@ -205,6 +205,14 @@
[\rightboundarycharacter\c!rightquotation{quotation}]
\definesymbol
+ [\c!nextleftquotation]
+ [\rightboundarycharacter\c!leftquotation{quotation}]
+
+\definesymbol
+ [\c!nextrightquotation]
+ [\leftboundarycharacter\c!rightquotation{quotation}]
+
+\definesymbol
[\c!leftquote]
[\leftboundarycharacter\c!leftquote{quote}]
@@ -298,8 +306,8 @@
{\begingroup
\typo_delimited_push{#1}%
\dostarttagged\t!delimitedblock\currentdelimitedtext
- \edef\p_method{\delimitedtextparameter\c!method}%
- \ifx\p_method\s!font
+ \edef\p_delimited_method{\delimitedtextparameter\c!method}%
+ \ifx\p_delimited_method\s!font
\expandafter\typo_delimited_start_font
\else
\expandafter\typo_delimited_start_other
@@ -313,16 +321,16 @@
\ignorespaces}
\def\typo_delimited_start_other
- {\edef\p_repeat{\delimitedtextparameter\c!repeat}%
- \ifx\p_repeat\v!yes
+ {\edef\p_delimited_repeat{\delimitedtextparameter\c!repeat}%
+ \ifx\p_delimited_repeat\v!yes
\let\typo_delimited_repeat\typo_delimited_repeat_ideed
\else
\let\typo_delimited_repeat\relax
\fi
- \edef\p_location{\delimitedtextparameter\c!location}%
- \ifx\p_location\v!paragraph
+ \edef\p_delimited_location{\delimitedtextparameter\c!location}%
+ \ifx\p_delimited_location\v!paragraph
\singleexpandafter\typo_delimited_start_par
- \else\ifx\p_location\v!margin
+ \else\ifx\p_delimited_location\v!margin
\doubleexpandafter\typo_delimited_start_par
\else
\doubleexpandafter\typo_delimited_start_txt
@@ -338,8 +346,10 @@
\def\typo_delimited_start_par_indeed[#1]%
{\let\typo_delimited_stop\typo_delimited_stop_par
- \doifsomething{\delimitedtextparameter\c!spacebefore}
- {\blank[\delimitedtextparameter\c!spacebefore]}%
+ \edef\p_delimited_spacebefore{\delimitedtextparameter\c!spacebefore}%
+ \ifx\p_delimited_spacebefore\empty \else
+ \blank[\p_delimited_spacebefore]%
+ \fi
\delimitedtextparameter\c!before
\edef\m_typo_delimited_narrower{#1}%
\ifx\m_typo_delimited_narrower\empty
@@ -353,11 +363,24 @@
\fi
% so far
\pushmacro\checkindentation
- \doifsomething{\delimitedtextparameter\c!indenting} % WS
- {\setupindenting[\delimitedtextparameter\c!indenting]}%
+ \edef\p_delimited_indenting{\delimitedtextparameter\c!indenting}%
+ \ifx\p_delimited_indenting\empty \else
+ \setupindenting[\p_delimited_indenting]% todo: use fast one (no [] checking)
+ \fi
+ %
\begingroup
\usedelimitedtextstyleandcolor\c!style\c!color
+ %
+ \edef\p_delimited_left {\delimitedtextparameter{\c!left}}%
+ \edef\p_delimited_right {\delimitedtextparameter{\c!right}}%
+ \edef\p_delimited_nextleft {\delimitedtextparameter{\c!nextleft}}%
+ \edef\p_delimited_nextright{\delimitedtextparameter{\c!nextright}}%
+ %
\leftdelimitedtextmark
+ %
+ \setnextleftdelimitedtextmark
+ \setnextrightdelimitedtextmark
+ %
\ignorespaces}
\def\typo_delimited_stop_par
@@ -368,8 +391,10 @@
\popmacro\checkindentation
\typo_delimited_stop_par_indeed
\delimitedtextparameter\c!after
- \doifsomething{\delimitedtextparameter\c!spaceafter}
- {\blank[\delimitedtextparameter\c!spaceafter]}%
+ \edef\p_delimited_spaceafter{\delimitedtextparameter\c!spaceafter}%
+ \ifx\p_delimited_spaceafter\empty \else
+ \blank[\p_delimited_spaceafter]%
+ \fi
\useindentnextparameter\delimitedtextparameter
\dorechecknextindentation}% AM: This was missing!
@@ -394,18 +419,18 @@
\unexpanded\def\delimitedtext[#1]%
{\dontleavehmode % following ones can be omitted
\typo_delimited_push{#1}%
- \edef\p_method{\delimitedtextparameter\c!method}%
- \ifx\p_method\s!font
+ \edef\p_delimited_method{\delimitedtextparameter\c!method}%
+ \ifx\p_delimited_method\s!font
\expandafter\typo_delimited_fontdriven
\else
\expandafter\typo_delimited_other
\fi}
\def\typo_delimited_other
- {\edef\p_location{\delimitedtextparameter\c!location}%
- \ifx\p_location\v!paragraph
+ {\edef\p_delimited_location{\delimitedtextparameter\c!location}%
+ \ifx\p_delimited_location\v!paragraph
\singleexpandafter\typo_delimited_par
- \else\ifx\p_location\v!margin
+ \else\ifx\p_delimited_location\v!margin
\doubleexpandafter\typo_delimited_par
\else
\doubleexpandafter\typo_delimited_txt
@@ -417,16 +442,89 @@
\unexpanded\def\stopdelimited {\stopdelimitedtext} % no let, dynamically assigned
\def\delimited {\delimitedtext}
+% todo: \dostarttagged\t!nothing\empty % for left/right boxes
+
+%D We have 4 different location and symbol handlers (two pairs):
+%D
+%D \starttyping
+%D \input tufte \startquotation \input tufte \stopquotation
+%D
+%D \setupdelimitedtext
+%D [quotation]
+%D [nextleft=right,
+%D nextright=left]
+%D
+%D \input tufte \startquotation \input tufte \stopquotation
+%D
+%D \setupdelimitedtext
+%D [quotation]
+%D [nextleft={\symbol[nextleftquotation]},
+%D nextright={\symbol[nextrightquotation]}]
+%D
+%D \input tufte \startquotation \input tufte \stopquotation
+%D \stoptyping
+
+\unexpanded\def\setnextleftdelimitedtextmark
+ {\ifx\p_delimited_nextleft\empty
+ % nothing
+ \else\ifx\p_delimited_nextleft\v!left
+ \typo_delimited_nextleft_symbol\p_delimited_left
+ \else\ifx\p_delimited_nextleft\v!right
+ \typo_delimited_nextleft_symbol\p_delimited_right
+ \else
+ \typo_delimited_nextleft_symbol\p_delimited_nextleft
+ \fi\fi\fi}
+
+\unexpanded\def\setnextrightdelimitedtextmark
+ {\ifx\p_delimited_nextright\empty
+ % nothing
+ \else\ifx\p_delimited_nextright\v!right
+ \typo_delimited_nextright_symbol\p_delimited_right
+ \else\ifx\p_delimited_nextright\v!left
+ \typo_delimited_nextright_symbol\p_delimited_left
+ \else
+ \typo_delimited_nextright_symbol\p_delimited_nextright
+ \fi\fi\fi}
+
\unexpanded\def\leftdelimitedtextmark
- {\doifsomething{\delimitedtextparameter\c!left}
- {\setbox\scratchbox\hbox{\delimitedtextparameter\c!left}%
- \dontleavehmode
- \doif{\delimitedtextparameter\c!location}\v!margin{\hskip-\wd\scratchbox}%
- \box\scratchbox}}
+ {\ifx\p_delimited_left\empty
+ % nothing
+ \else
+ \typo_delimited_left_symbol\p_delimited_left
+ \fi}
\unexpanded\def\rightdelimitedtextmark
- {\doifsomething{\delimitedtextparameter\c!right}
- {\hsmash{\delimitedtextparameter\c!right}}}
+ {\ifx\p_delimited_right\empty
+ % nothing
+ \else
+ \typo_delimited_right_symbol\p_delimited_right
+ \fi}
+
+\def\typo_delimited_left_symbol#1%
+ {\setbox\scratchbox\hbox{#1}%
+ \dontleavehmode
+ \edef\p_delimited_margin{\delimitedtextparameter\c!location}%
+ \ifx\p_delimited_margin\v!margin
+ \hskip-\wd\scratchbox
+ \fi
+ \box\scratchbox}
+
+\def\typo_delimited_right_symbol#1%
+ {\hsmash{#1}}
+
+\def\typo_delimited_nextleft_symbol#1%
+ {\localleftbox\bgroup
+ \swapmacros\leftboundarycharacter\rightboundarycharacter
+ \boundarycharactermode\plusone
+ \typo_delimited_left_symbol#1%
+ \egroup}
+
+\def\typo_delimited_nextright_symbol#1%
+ {\localrightbox\bgroup
+ \swapmacros\leftboundarycharacter\rightboundarycharacter
+ \boundarycharactermode\plusone
+ \typo_delimited_right_symbol#1%
+ \egroup}
% \starttext
% \hyphenatedword{groepsvrijstellingsverordeningen}\par
diff --git a/tex/context/base/typo-dha.lua b/tex/context/base/typo-dha.lua
index 3410c2dfc..4bce53481 100644
--- a/tex/context/base/typo-dha.lua
+++ b/tex/context/base/typo-dha.lua
@@ -64,7 +64,6 @@ local getlist = nuts.getlist
local getfield = nuts.getfield
local setfield = nuts.setfield
local getattr = nuts.getattr
-local setattr = nuts.setattr
local getprop = nuts.getprop
local setprop = nuts.setprop
diff --git a/tex/context/base/typo-dub.lua b/tex/context/base/typo-dub.lua
index b6581137b..5408b3cae 100644
--- a/tex/context/base/typo-dub.lua
+++ b/tex/context/base/typo-dub.lua
@@ -65,7 +65,6 @@ local getid = nuts.getid
local getsubtype = nuts.getsubtype
local getlist = nuts.getlist
local getattr = nuts.getattr
-local setattr = nuts.setattr
local getfield = nuts.getfield
local setfield = nuts.setfield
diff --git a/tex/context/base/typo-mar.lua b/tex/context/base/typo-mar.lua
index 4ea6b1e1d..5eb129ed5 100644
--- a/tex/context/base/typo-mar.lua
+++ b/tex/context/base/typo-mar.lua
@@ -374,11 +374,13 @@ end
local status, nofstatus = { }, 0
local f_anchor = formatters["_plib_.set('md:h',%i,{x=true,c=true})"]
+
local function setanchor(h_anchor)
return new_latelua(f_anchor(h_anchor))
end
-- local t_anchor = { x = true, c = true }
+--
-- local function setanchor(h_anchor)
-- return lateluafunction(function() setposition("md:h",h_anchor,t_anchor) end)
-- end
@@ -449,7 +451,7 @@ local function realign(current,candidate)
anchor = v_text
end
if inline or anchor ~= v_text or candidate.psubtype == alignment_code then
- -- the alignment_code check catches margintexts ste before a tabulate
+ -- the alignment_code check catches margintexts before a tabulate
h_anchors = h_anchors + 1
anchornode = setanchor(h_anchors)
local blob = getposition('md:h',h_anchors)
diff --git a/tex/context/base/typo-mar.mkiv b/tex/context/base/typo-mar.mkiv
index 2b89f5777..0a113dbc2 100644
--- a/tex/context/base/typo-mar.mkiv
+++ b/tex/context/base/typo-mar.mkiv
@@ -14,6 +14,8 @@
%C details.
% todo: tags
+% todo: force inline with option (saves pos)
+% todo: margintitle (also less position then)
\writestatus{loading}{ConTeXt Typesetting Macros / Margindata}
diff --git a/tex/context/base/typo-rep.lua b/tex/context/base/typo-rep.lua
index 15e3f9746..aa75fbf46 100644
--- a/tex/context/base/typo-rep.lua
+++ b/tex/context/base/typo-rep.lua
@@ -29,7 +29,6 @@ local getchar = nuts.getchar
local getid = nuts.getid
local getattr = nuts.getattr
-local setattr = nuts.setattr
local delete_node = nuts.delete
local replace_node = nuts.replace
diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua
index 6f952545c..73a586b61 100644
--- a/tex/context/base/util-str.lua
+++ b/tex/context/base/util-str.lua
@@ -216,6 +216,7 @@ local striplinepatterns = {
["retain"] = p_retain_normal,
["retain and collapse"] = p_retain_collapse,
["retain and no empty"] = p_retain_noempty,
+ ["collapse"] = patterns.collapser, -- how about: stripper fullstripper
}
strings.striplinepatterns = striplinepatterns
@@ -224,6 +225,8 @@ function strings.striplines(str,how)
return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str
end
+-- also see: string.collapsespaces
+
strings.striplong = strings.striplines -- for old times sake
-- local str = table.concat( {
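
The new "collapse" entry simply routes the same striplines interface to patterns.collapser; a hedged usage sketch, assuming the module is reachable as utilities.strings as elsewhere in util-str.lua:

    local str   = "  one  \n\n  two  "
    local clean = utilities.strings.striplines(str, "collapse")
    -- presumably collapses runs of whitespace instead of only pruning empty lines
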
diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua
index dd438d42e..a6883f66c 100644
--- a/tex/context/base/x-asciimath.lua
+++ b/tex/context/base/x-asciimath.lua
@@ -5,11 +5,22 @@ if not modules then modules = { } end modules ['x-asciimath'] = {
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
+
--[[ldx--
-<p>Some backgrounds are discussed in <t>x-asciimath.mkiv</t>.</p>
+<p>Some backgrounds are discussed in <t>x-asciimath.mkiv</t>. This is a third version. I first
+tried to make a proper expression parser but it's not that easy. First we have to avoid left
+recursion, which is not that trivial (maybe a future version of lpeg will provide that), and
+second there is not really a syntax but a mix of expressions and sequences with some fuzzy logic
+applied. Most problematic are fractions and we also need to handle incomplete expressions. So,
+instead we (sort of) tokenize the string and then do some passes over the result. Yes, it's a really
+ugly and unsatisfying code mess down here. Don't take this as an example.</p>
--ldx]]--
+-- todo: spaces around all elements in cleanup?
+-- todo: filter from files listed in tuc file
+
local trace_mapping = false if trackers then trackers.register("modules.asciimath.mapping", function(v) trace_mapping = v end) end
+local trace_detail = false if trackers then trackers.register("modules.asciimath.detail", function(v) trace_detail = v end) end
local asciimath = { }
local moduledata = moduledata or { }
@@ -18,264 +29,1677 @@ moduledata.asciimath = asciimath
if not characters then
require("char-def")
require("char-ini")
+ require("char-ent")
end
+local entities = characters.entities or { }
+
local report_asciimath = logs.reporter("mathematics","asciimath")
-local format = string.format
-local lpegmatch = lpeg.match
+local type, rawget = type, rawget
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
local S, P, R, C, V, Cc, Ct, Cs = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct, lpeg.Cs
-
-local letter = lpeg.patterns.utf8
-local space = S(" \n\r\t")
-local spaces = space^0/""
-local integer = P("-")^-1 * R("09")^1
-local realpart = P("-")^-1 * R("09")^1 * S(".")^1 * R("09")^1
-local number = integer -- so we can support nice formatting if needed
-local real = realpart -- so we can support nice formatting if needed
-local float = realpart * P("E") * integer -- so we can support nice formatting if needed
-local texnic = P("\\") * (R("az","AZ")^1)
-
-local premapper = Cs ( (
-
- P("@") / "\\degrees " +
- P("O/") / "\\varnothing " +
- P("o+") / "\\oplus " +
- P("o.") / "\\ocirc " +
- P("!in") / "\\not\\in " +
- P("!=") / "\\neq " +
- P("**") / "\\star " +
- P("*") / "\\cdot " +
- P("//") / "\\slash " +
- P("/_") / "\\angle " +
- P("\\\\") / "\\backslash " +
- P("^^^") / "\\wedge " +
- P("^^") / "\\wedge " +
- P("<<") / "\\left\\langle " +
- P(">>") / "\\right\\rangle " +
- P("<=") / "\\leq " +
- P(">=") / "\\geq " +
- P("-<") / "\\precc " +
- P(">-") / "\\succ " +
- P("~=") / "\\cong " +
- P("~~") / "\\approx " +
- P("=>") / "\\Rightarrow " +
- P("(:") / "\\left\\langle " +
- P(":)") / "\\right\\rangle " +
- P(":.") / "\\therefore " +
- P("~|") / "\\right\\rceil " +
- P("_|_") / "\\bot " +
- P("_|") / "\\right\\rfloor " +
- P("+-") / "\\pm " +
- P("|--") / "\\vdash " +
- P("|==") / "\\models " +
- P("|_") / "\\left\\lfloor " +
- P("|~") / "\\left\\lceil " +
- P("-:") / "\\div " +
- P("_=") / "\\equiv " +
-
- P("|") / "\\middle\\| " +
-
- P("dx") / "(dx)" +
- P("dy") / "(dy)" +
- P("dz") / "(dz)" +
-
- letter + P(1)
-
-)^0 )
+local concat, remove, sortedhash, sortedkeys, keys = table.concat, table.remove, table.sortedhash, table.sortedkeys, table.keys
+local rep, gmatch, gsub, find = string.rep, string.gmatch, string.gsub, string.find
+local formatters = string.formatters
local reserved = {
- ["aleph"] = "\\aleph ",
- ["vdots"] = "\\vdots ",
- ["ddots"] = "\\ddots ",
- ["oint"] = "\\oint ",
- ["grad"] = "\\nabla ",
- ["prod"] = "\\prod ",
- ["prop"] = "\\propto ",
- ["sube"] = "\\subseteq ",
- ["supe"] = "\\supseteq ",
- ["sinh"] = "\\sinh ",
- ["cosh"] = "\\cosh ",
- ["tanh"] = "\\tanh ",
- ["sum"] = "\\sum ",
- ["vvv"] = "\\vee ",
- ["nnn"] = "\\cap ",
- ["uuu"] = "\\cup ",
- ["sub"] = "\\subset ",
- ["sup"] = "\\supset ",
- ["not"] = "\\lnot ",
- ["iff"] = "\\Leftrightarrow ",
- ["int"] = "\\int ",
- ["del"] = "\\partial ",
- ["and"] = "\\and ",
- ["not"] = "\\not ",
- ["sin"] = "\\sin ",
- ["cos"] = "\\cos ",
- ["tan"] = "\\tan ",
- ["csc"] = "\\csc ",
- ["sec"] = "\\sec ",
- ["cot"] = "\\cot ",
- ["log"] = "\\log ",
- ["det"] = "\\det ",
- ["lim"] = "\\lim ",
- ["mod"] = "\\mod ",
- ["gcd"] = "\\gcd ",
- ["lcm"] = "\\lcm ",
- ["min"] = "\\min ",
- ["max"] = "\\max ",
- ["xx"] = "\\times ",
- ["in"] = "\\in ",
- ["ox"] = "\\otimes ",
- ["vv"] = "\\vee ",
- ["nn"] = "\\cap ",
- ["uu"] = "\\cup ",
- ["oo"] = "\\infty ",
- ["ln"] = "\\ln ",
- ["or"] = "\\or ",
-
- ["AA"] = "\\forall ",
- ["EE"] = "\\exists ",
- ["TT"] = "\\top ",
- ["CC"] = "\\Bbb{C}",
- ["NN"] = "\\Bbb{N}",
- ["QQ"] = "\\Bbb{Q}",
- ["RR"] = "\\Bbb{R}",
- ["ZZ"] = "\\Bbb{Z}",
+ -- ["aleph"] = "\\aleph",
+ -- ["vdots"] = "\\vdots",
+ -- ["ddots"] = "\\ddots",
+ -- ["oint"] = "\\oint",
+ -- ["grad"] = "\\nabla",
+ ["prod"] = "\\prod",
+ -- ["prop"] = "\\propto",
+ -- ["sube"] = "\\subseteq",
+ -- ["supe"] = "\\supseteq",
+ ["sinh"] = "\\sinh",
+ ["cosh"] = "\\cosh",
+ ["tanh"] = "\\tanh",
+ ["sum"] = "\\sum",
+ -- ["vvv"] = "\\vee",
+ -- ["nnn"] = "\\cap",
+ -- ["uuu"] = "\\cup",
+ -- ["sub"] = "\\subset",
+ -- ["sup"] = "\\supset",
+ -- ["iff"] = "\\Leftrightarrow",
+ ["int"] = "\\int",
+ -- ["del"] = "\\partial",
+ ["sin"] = "\\sin",
+ ["cos"] = "\\cos",
+ ["tan"] = "\\tan",
+ ["csc"] = "\\csc",
+ ["sec"] = "\\sec",
+ ["cot"] = "\\cot",
+ ["log"] = "\\log",
+ ["det"] = "\\det",
+ ["lim"] = "\\lim",
+ ["mod"] = "\\mod",
+ ["gcd"] = "\\gcd",
+ -- ["lcm"] = "\\lcm", -- undefined in context
+ ["min"] = "\\min",
+ ["max"] = "\\max",
+ -- ["xx"] = "\\times",
+ ["in"] = "\\in",
+ -- ["ox"] = "\\otimes",
+ -- ["vv"] = "\\vee",
+ -- ["nn"] = "\\cap",
+ -- ["uu"] = "\\cup",
+ -- ["oo"] = "\\infty",
+ ["ln"] = "\\ln",
+
+ -- ["not"] = "\\not",
+ ["and"] = "\\text{and}",
+ ["or"] = "\\text{or}",
+ ["if"] = "\\text{if}",
+
+ -- ["AA"] = "\\forall",
+ -- ["EE"] = "\\exists",
+ -- ["TT"] = "\\top",
+
+ ["sqrt"] = "\\rootradical{}",
+ ["root"] = "\\rootradical",
+ ["frac"] = "\\frac",
+ ["stackrel"] = "\\stackrel",
+ -- ["text"] = "\\mathoptext",
+ -- ["bb"] = "\\bb",
+ ["hat"] = "\\widehat",
+ ["overbar"] = "\\overbar",
+ ["underline"] = "\\underline",
+ ["vec"] = "\\overrightarrow",
+ ["dot"] = "\\dot",
+ ["ddot"] = "\\ddot",
+
+ -- binary operators
+
+ -- ["+"] = "+",
+ -- ["-"] = "-",
+ ["*"] = "⋅",
+ ["**"] = "⋆",
+ ["//"] = "\\slash",
+ ["\\"] = "\\",
+ ["xx"] = "×",
+ ["times"] = "×",
+ ["-:"] = "÷",
+ ["@"] = "∘",
+ ["o+"] = "⊕",
+ ["ox"] = "⊗",
+ ["o."] = "⊙",
+ ["^^"] = "∧",
+ ["vv"] = "∨",
+ ["nn"] = "∩",
+ ["uu"] = "∪",
+
+ -- big operators
+
+ -- ["sum"] = "∑",
+ -- ["prod"] = "∏",
+ ["^^^"] = "⋀",
+ ["vvv"] = "⋁",
+ ["nnn"] = "⋂",
+ ["uuu"] = "⋃",
+ ["int"] = "∫",
+ ["oint"] = "∮",
+
+ -- brackets
+
+-- ["("] = "(,
+-- [")"] = "),
+-- ["["] = "[,
+-- ["]"] = "],
+-- ["{"] = "{,
+-- ["}"] = "},
+-- ["(:"] = "〈",
+-- [":)"] = "〉",
+
+ -- binary relations
+
+ ["="] = "=",
+ ["!="] = "≠",
+ ["<"] = "<",
+ [">"] = ">",
+ ["<="] = "≤",
+ [">="] = "≥",
+ ["-<"] = "≺",
+ [">-"] = "≻",
+ ["in"] = "∈",
+ ["!in"] = "∉",
+ ["sub"] = "⊂",
+ ["sup"] = "⊃",
+ ["sube"] = "⊆",
+ ["supe"] = "⊇",
+ ["-="] = "≡",
+ ["~="] = "≅",
+ ["~~"] = "≈",
+ ["prop"] = "∝",
+
+ -- arrows
+
+ ["rarr"] = "→",
+ ["->"] = "→",
+ ["larr"] = "←",
+ ["harr"] = "↔",
+ ["uarr"] = "↑",
+ ["darr"] = "↓",
+ ["rArr"] = "⇒",
+ ["lArr"] = "⇐",
+ ["hArr"] = "⇔",
+ ["|->"] = "↦",
+
+ -- logical
+
+ -- ["and"] = "and",
+ -- ["or"] = "or",
+ -- ["if"] = "if",
+ ["not"] = "¬",
+ ["=>"] = "⇒",
+ ["iff"] = "⇔",
+ ["AA"] = "∀",
+ ["EE"] = "∃",
+ ["_|_"] = "⊥",
+ ["TT"] = "⊤",
+ ["|--"] = "⊢",
+ ["|=="] = "⊨",
+
+ -- miscellaneous
+
+ ["del"] = "∂",
+ ["grad"] = "∇",
+ ["+-"] = "±",
+ ["O/"] = "∅",
+ ["oo"] = "∞",
+ ["aleph"] = "ℵ",
+ ["angle"] = "∠",
+ ["/_"] = "∠",
+ [":."] = "∴",
+ ["..."] = "...", -- ldots
+ ["ldots"] = "...", -- ldots
+ ["cdots"] = "⋯",
+ ["vdots"] = "⋮",
+ ["ddots"] = "⋱",
+ ["diamond"] = "⋄",
+ ["square"] = "□",
+ ["|__"] = "⌊",
+ ["__|"] = "⌋",
+ ["|~"] = "⌈",
+ ["~|"] = "⌉",
+
+ -- more
+ ["_="] = "≡",
+
+ -- blackboard
+
+ ["CC"] = "ℂ",
+ ["NN"] = "ℕ",
+ ["QQ"] = "ℚ",
+ ["RR"] = "ℝ",
+ ["ZZ"] = "ℤ",
+
+ -- greek lowercase
+
+ alpha = "α",
+ beta = "β",
+ gamma = "γ",
+ delta = "δ",
+ epsilon = "ε",
+ varepsilon = "ɛ",
+ zeta = "ζ",
+ eta = "η",
+ theta = "θ",
+ vartheta = "ϑ",
+ iota = "ι",
+ kappa = "κ",
+ lambda = "λ",
+ mu = "μ",
+ nu = "ν",
+ xi = "ξ",
+ pi = "π",
+ rho = "ρ",
+ sigma = "σ",
+ tau = "τ",
+ upsilon = "υ",
+ phi = "φ",
+ varphi = "ϕ",
+ chi = "χ",
+ psi = "ψ",
+ omega = "ω",
+
+ -- greek uppercase
+
+ Gamma = "Γ",
+ Delta = "Δ",
+ Theta = "Θ",
+ Lambda = "Λ",
+ Xi = "Ξ",
+ Pi = "Π",
+ Sigma = "Σ",
+ Phi = "Φ",
+ Psi = "Ψ",
+ Omega = "Ω",
+
+ -- alternatively we could just inject a style switch + following character
+
+ -- blackboard
+
+ ["bbb a"] = "𝕒",
+ ["bbb b"] = "𝕓",
+ ["bbb c"] = "𝕔",
+ ["bbb d"] = "𝕕",
+ ["bbb e"] = "𝕖",
+ ["bbb f"] = "𝕗",
+ ["bbb g"] = "𝕘",
+ ["bbb h"] = "𝕙",
+ ["bbb i"] = "𝕚",
+ ["bbb j"] = "𝕛",
+ ["bbb k"] = "𝕜",
+ ["bbb l"] = "𝕝",
+ ["bbb m"] = "𝕞",
+ ["bbb n"] = "𝕟",
+ ["bbb o"] = "𝕠",
+ ["bbb p"] = "𝕡",
+ ["bbb q"] = "𝕢",
+ ["bbb r"] = "𝕣",
+ ["bbb s"] = "𝕤",
+ ["bbb t"] = "𝕥",
+ ["bbb u"] = "𝕦",
+ ["bbb v"] = "𝕧",
+ ["bbb w"] = "𝕨",
+ ["bbb x"] = "𝕩",
+ ["bbb y"] = "𝕪",
+ ["bbb z"] = "𝕫",
+
+ ["bbb A"] = "𝔸",
+ ["bbb B"] = "𝔹",
+ ["bbb C"] = "ℂ",
+ ["bbb D"] = "𝔻",
+ ["bbb E"] = "𝔼",
+ ["bbb F"] = "𝔽",
+ ["bbb G"] = "𝔾",
+ ["bbb H"] = "ℍ",
+ ["bbb I"] = "𝕀",
+ ["bbb J"] = "𝕁",
+ ["bbb K"] = "𝕂",
+ ["bbb L"] = "𝕃",
+ ["bbb M"] = "𝕄",
+ ["bbb N"] = "ℕ",
+ ["bbb O"] = "𝕆",
+ ["bbb P"] = "ℙ",
+ ["bbb Q"] = "ℚ",
+ ["bbb R"] = "ℝ",
+ ["bbb S"] = "𝕊",
+ ["bbb T"] = "𝕋",
+ ["bbb U"] = "𝕌",
+ ["bbb V"] = "𝕍",
+ ["bbb W"] = "𝕎",
+ ["bbb X"] = "𝕏",
+ ["bbb Y"] = "𝕐",
+ ["bbb Z"] = "ℤ",
+
+ -- fraktur
+
+ ["fr a"] = "𝔞",
+ ["fr b"] = "𝔟",
+ ["fr c"] = "𝔠",
+ ["fr d"] = "𝔡",
+ ["fr e"] = "𝔢",
+ ["fr f"] = "𝔣",
+ ["fr g"] = "𝔤",
+ ["fr h"] = "𝔥",
+ ["fr i"] = "𝔦",
+ ["fr j"] = "𝔧",
+ ["fr k"] = "𝔨",
+ ["fr l"] = "𝔩",
+ ["fr m"] = "𝔪",
+ ["fr n"] = "𝔫",
+ ["fr o"] = "𝔬",
+ ["fr p"] = "𝔭",
+ ["fr q"] = "𝔮",
+ ["fr r"] = "𝔯",
+ ["fr s"] = "𝔰",
+ ["fr t"] = "𝔱",
+ ["fr u"] = "𝔲",
+ ["fr v"] = "𝔳",
+ ["fr w"] = "𝔴",
+ ["fr x"] = "𝔵",
+ ["fr y"] = "𝔶",
+ ["fr z"] = "𝔷",
+ ["fr A"] = "𝔄",
+ ["fr B"] = "𝔅",
+ ["fr C"] = "ℭ",
+ ["fr D"] = "𝔇",
+ ["fr E"] = "𝔈",
+ ["fr F"] = "𝔉",
+ ["fr G"] = "𝔊",
+ ["fr H"] = "ℌ",
+ ["fr I"] = "ℑ",
+ ["fr J"] = "𝔍",
+ ["fr K"] = "𝔎",
+ ["fr L"] = "𝔏",
+ ["fr M"] = "𝔐",
+ ["fr N"] = "𝔑",
+ ["fr O"] = "𝔒",
+ ["fr P"] = "𝔓",
+ ["fr Q"] = "𝔔",
+ ["fr R"] = "ℜ",
+ ["fr S"] = "𝔖",
+ ["fr T"] = "𝔗",
+ ["fr U"] = "𝔘",
+ ["fr V"] = "𝔙",
+ ["fr W"] = "𝔚",
+ ["fr X"] = "𝔛",
+ ["fr Y"] = "𝔜",
+ ["fr Z"] = "ℨ",
+
+ -- script
+
+ ["cc a"] = "𝒶",
+ ["cc b"] = "𝒷",
+ ["cc c"] = "𝒸",
+ ["cc d"] = "𝒹",
+ ["cc e"] = "ℯ",
+ ["cc f"] = "𝒻",
+ ["cc g"] = "ℊ",
+ ["cc h"] = "𝒽",
+ ["cc i"] = "𝒾",
+ ["cc j"] = "𝒿",
+ ["cc k"] = "𝓀",
+ ["cc l"] = "𝓁",
+ ["cc m"] = "𝓂",
+ ["cc n"] = "𝓃",
+ ["cc o"] = "ℴ",
+ ["cc p"] = "𝓅",
+ ["cc q"] = "𝓆",
+ ["cc r"] = "𝓇",
+ ["cc s"] = "𝓈",
+ ["cc t"] = "𝓉",
+ ["cc u"] = "𝓊",
+ ["cc v"] = "𝓋",
+ ["cc w"] = "𝓌",
+ ["cc x"] = "𝓍",
+ ["cc y"] = "𝓎",
+ ["cc z"] = "𝓏",
+
+ ["cc A"] = "𝒜",
+ ["cc B"] = "ℬ",
+ ["cc C"] = "𝒞",
+ ["cc D"] = "𝒟",
+ ["cc E"] = "ℰ",
+ ["cc F"] = "ℱ",
+ ["cc G"] = "𝒢",
+ ["cc H"] = "ℋ",
+ ["cc I"] = "ℐ",
+ ["cc J"] = "𝒥",
+ ["cc K"] = "𝒦",
+ ["cc L"] = "ℒ",
+ ["cc M"] = "ℳ",
+ ["cc N"] = "𝒩",
+ ["cc O"] = "𝒪",
+ ["cc P"] = "𝒫",
+ ["cc Q"] = "𝒬",
+ ["cc R"] = "ℛ",
+ ["cc S"] = "𝒮",
+ ["cc T"] = "𝒯",
+ ["cc U"] = "𝒰",
+ ["cc V"] = "𝒱",
+ ["cc W"] = "𝒲",
+ ["cc X"] = "𝒳",
+ ["cc Y"] = "𝒴",
+ ["cc Z"] = "𝒵",
+
+ -- bold
+
+ ["bb a"] = "𝒂",
+ ["bb b"] = "𝒃",
+ ["bb c"] = "𝒄",
+ ["bb d"] = "𝒅",
+ ["bb e"] = "𝒆",
+ ["bb f"] = "𝒇",
+ ["bb g"] = "𝒈",
+ ["bb h"] = "𝒉",
+ ["bb i"] = "𝒊",
+ ["bb j"] = "𝒋",
+ ["bb k"] = "𝒌",
+ ["bb l"] = "𝒍",
+ ["bb m"] = "𝒎",
+ ["bb n"] = "𝒏",
+ ["bb o"] = "𝒐",
+ ["bb p"] = "𝒑",
+ ["bb q"] = "𝒒",
+ ["bb r"] = "𝒓",
+ ["bb s"] = "𝒔",
+ ["bb t"] = "𝒕",
+ ["bb u"] = "𝒖",
+ ["bb v"] = "𝒗",
+ ["bb w"] = "𝒘",
+ ["bb x"] = "𝒙",
+ ["bb y"] = "𝒚",
+ ["bb z"] = "𝒛",
+
+ ["bb A"] = "𝑨",
+ ["bb B"] = "𝑩",
+ ["bb C"] = "𝑪",
+ ["bb D"] = "𝑫",
+ ["bb E"] = "𝑬",
+ ["bb F"] = "𝑭",
+ ["bb G"] = "𝑮",
+ ["bb H"] = "𝑯",
+ ["bb I"] = "𝑰",
+ ["bb J"] = "𝑱",
+ ["bb K"] = "𝑲",
+ ["bb L"] = "𝑳",
+ ["bb M"] = "𝑴",
+ ["bb N"] = "𝑵",
+ ["bb O"] = "𝑶",
+ ["bb P"] = "𝑷",
+ ["bb Q"] = "𝑸",
+ ["bb R"] = "𝑹",
+ ["bb S"] = "𝑺",
+ ["bb T"] = "𝑻",
+ ["bb U"] = "𝑼",
+ ["bb V"] = "𝑽",
+ ["bb W"] = "𝑾",
+ ["bb X"] = "𝑿",
+ ["bb Y"] = "𝒀",
+ ["bb Z"] = "𝒁",
+
+ -- sans
+
+ ["sf a"] = "𝖺",
+ ["sf b"] = "𝖻",
+ ["sf c"] = "𝖼",
+ ["sf d"] = "𝖽",
+ ["sf e"] = "𝖾",
+ ["sf f"] = "𝖿",
+ ["sf g"] = "𝗀",
+ ["sf h"] = "𝗁",
+ ["sf i"] = "𝗂",
+ ["sf j"] = "𝗃",
+ ["sf k"] = "𝗄",
+ ["sf l"] = "𝗅",
+ ["sf m"] = "𝗆",
+ ["sf n"] = "𝗇",
+ ["sf o"] = "𝗈",
+ ["sf p"] = "𝗉",
+ ["sf q"] = "𝗊",
+ ["sf r"] = "𝗋",
+ ["sf s"] = "𝗌",
+ ["sf t"] = "𝗍",
+ ["sf u"] = "𝗎",
+ ["sf v"] = "𝗏",
+ ["sf w"] = "𝗐",
+ ["sf x"] = "𝗑",
+ ["sf y"] = "𝗒",
+ ["sf z"] = "𝗓",
+
+ ["sf A"] = "𝖠",
+ ["sf B"] = "𝖡",
+ ["sf C"] = "𝖢",
+ ["sf D"] = "𝖣",
+ ["sf E"] = "𝖤",
+ ["sf F"] = "𝖥",
+ ["sf G"] = "𝖦",
+ ["sf H"] = "𝖧",
+ ["sf I"] = "𝖨",
+ ["sf J"] = "𝖩",
+ ["sf K"] = "𝖪",
+ ["sf L"] = "𝖫",
+ ["sf M"] = "𝖬",
+ ["sf N"] = "𝖭",
+ ["sf O"] = "𝖮",
+ ["sf P"] = "𝖯",
+ ["sf Q"] = "𝖰",
+ ["sf R"] = "𝖱",
+ ["sf S"] = "𝖲",
+ ["sf T"] = "𝖳",
+ ["sf U"] = "𝖴",
+ ["sf V"] = "𝖵",
+ ["sf W"] = "𝖶",
+ ["sf X"] = "𝖷",
+ ["sf Y"] = "𝖸",
+ ["sf Z"] = "𝖹",
+
+ -- monospace
+
+ ["tt a"] = "𝚊",
+ ["tt b"] = "𝚋",
+ ["tt c"] = "𝚌",
+ ["tt d"] = "𝚍",
+ ["tt e"] = "𝚎",
+ ["tt f"] = "𝚏",
+ ["tt g"] = "𝚐",
+ ["tt h"] = "𝚑",
+ ["tt i"] = "𝚒",
+ ["tt j"] = "𝚓",
+ ["tt k"] = "𝚔",
+ ["tt l"] = "𝚕",
+ ["tt m"] = "𝚖",
+ ["tt n"] = "𝚗",
+ ["tt o"] = "𝚘",
+ ["tt p"] = "𝚙",
+ ["tt q"] = "𝚚",
+ ["tt r"] = "𝚛",
+ ["tt s"] = "𝚜",
+ ["tt t"] = "𝚝",
+ ["tt u"] = "𝚞",
+ ["tt v"] = "𝚟",
+ ["tt w"] = "𝚠",
+ ["tt x"] = "𝚡",
+ ["tt y"] = "𝚢",
+ ["tt z"] = "𝚣",
+
+ ["tt A"] = "𝙰",
+ ["tt B"] = "𝙱",
+ ["tt C"] = "𝙲",
+ ["tt D"] = "𝙳",
+ ["tt E"] = "𝙴",
+ ["tt F"] = "𝙵",
+ ["tt G"] = "𝙶",
+ ["tt H"] = "𝙷",
+ ["tt I"] = "𝙸",
+ ["tt J"] = "𝙹",
+ ["tt K"] = "𝙺",
+ ["tt L"] = "𝙻",
+ ["tt M"] = "𝙼",
+ ["tt N"] = "𝙽",
+ ["tt O"] = "𝙾",
+ ["tt P"] = "𝙿",
+ ["tt Q"] = "𝚀",
+ ["tt R"] = "𝚁",
+ ["tt S"] = "𝚂",
+ ["tt T"] = "𝚃",
+ ["tt U"] = "𝚄",
+ ["tt V"] = "𝚅",
+ ["tt W"] = "𝚆",
+ ["tt X"] = "𝚇",
+ ["tt Y"] = "𝚈",
+ ["tt Z"] = "𝚉",
+
+ -- some more undocumented
+
+ ["dx"] = { "d", "x" }, -- "{dx}" "\\left(dx\\right)"
+ ["dy"] = { "d", "y" }, -- "{dy}" "\\left(dy\\right)"
+ ["dz"] = { "d", "z" }, -- "{dz}" "\\left(dz\\right)"
+
+ ["atan"] = "\\atan",
+ ["acos"] = "\\acos",
+ ["asin"] = "\\asin",
+
+ ["arctan"] = "\\arctan",
+ ["arccos"] = "\\arccos",
+ ["arcsin"] = "\\arcsin",
+
+ ["prime"] = "′",
+ ["'"] = "′",
+ ["''"] = "″",
+ ["'''"] = "‴",
}
-table.setmetatableindex(reserved,characters.entities)
+local isbinary = {
+ ["\\frac"] = true,
+ ["\\root"] = true,
+ ["\\rootradical"] = true,
+ ["\\stackrel"] = true,
+}
+
+local isunary = {
+ ["\\sqrt"] = true,
+ ["\\rootradical{}"] = true,
+ -- ["\\bb"] = true,
+ ["\\text"] = true, -- mathoptext
+ ["\\mathoptext"] = true, -- mathoptext
+ ["\\hat"] = true, -- widehat
+ ["\\widehat"] = true, -- widehat
+ ["\\overbar"] = true, --
+ ["\\underline"] = true, --
+ ["\\vec"] = true, -- overrightarrow
+ ["\\overrightarrow"] = true, -- overrightarrow
+ ["\\dot"] = true, --
+ ["\\ddot"] = true, --
-local postmapper = Cs ( (
+-- ["^"] = true,
+-- ["_"] = true,
- P("\\mathoptext ") * spaces * (P("\\bgroup ")/"{") * (1-P("\\egroup "))^1 * (P("\\egroup ")/"}") +
+}
- (P("\\bgroup ")) / "{" +
- (P("\\egroup ")) / "}" +
+local isinfix = {
+ ["^"] = true,
+ ["_"] = true,
+}
- P("\\") * (R("az","AZ")^2) +
+local isleft = {
+ ["\\left\\lparent"] = true,
+ ["\\left\\lbrace"] = true,
+ ["\\left\\lbracket"] = true,
+ ["\\left."] = true,
+}
+local isright = {
+ ["\\right\\rparent"] = true,
+ ["\\right\\rbrace"] = true,
+ ["\\right\\rbracket"] = true,
+ ["\\right."] = true,
+}
- (R("AZ","az")^2) / reserved +
+local issimplified = {
+}
- P("{:") / "\\left." +
- P(":}") / "\\right." +
- P("(") / "\\left(" +
- P(")") / "\\right)" +
- P("[") / "\\left[" +
- P("]") / "\\right]" +
- P("{") / "\\left\\{" +
- P("}") / "\\right\\}" +
+local p_number_base = patterns.cpnumber or patterns.cnumber or patterns.number
+local p_number = C(p_number_base)
+local p_spaces = patterns.whitespace
- letter + P(1)
-)^0 )
+----- p_number = Cs((patterns.cpnumber or patterns.cnumber or patterns.number)/function(s) return (gsub(s,",","{,}")) end)
-local parser
+local sign = P("-")^-1
+local digits = R("09")^1
+local integer = sign * digits
+----- real = sign * digits * (S(".,") * digits)^-1
+local real = digits * (S(".,") * digits)^-1
+local float = real * (P("E") * integer)^-1
-local function converted(original,totex)
- local ok, result
- if trace_mapping then
- report_asciimath("original : %s",original)
+-- local number = C(float + integer)
+local p_number = C(float)
+
+local p_utf_base =
+ patterns.utf8character
+local p_utf =
+ C(p_utf_base)
+
+local p_entity_base =
+ P("&") * ((1-P(";"))^2) * P(";")
+local p_entity =
+ P("&") * (((1-P(";"))^2) / entities) * P(";")
+
+-- This is (given the large match):
+--
+-- local s = sortedkeys(reserved)
+-- local p = P(false)
+-- for i=#s,1,-1 do
+-- local k = s[i]
+-- p = p + P(k)
+-- end
+-- local p_reserved = p / reserved
+--
+-- twice as slow as:
+
+local k_reserved = sortedkeys(reserved)
+
+asciimath.keys = {
+ reserved = k_reserved
+}
+
+local k_reserved_different = { }
+local k_reserved_words = { }
+
+for k, v in sortedhash(reserved) do
+ if k ~= v then
+ k_reserved_different[#k_reserved_different+1] = k
end
- local premapped = lpegmatch(premapper,original)
- if premapped then
- if trace_mapping then
- report_asciimath("prepared : %s",premapped)
+ if not find(k,"[^a-zA-Z]") then
+ k_reserved_words[#k_reserved_words+1] = k
+ end
+end
+
+local p_reserved =
+ lpeg.utfchartabletopattern(k_reserved_different) / reserved
+
+-- local p_text =
+-- P("text")
+-- * p_spaces^0
+-- * Cc("\\mathoptext")
+-- * ( -- maybe balanced
+-- Cs((P("{") ) * (1-P("}"))^0 * P("}") )
+-- + Cs((P("(")/"{") * (1-P(")"))^0 * (P(")")/"}"))
+-- )
+-- + Cc("\\mathoptext") * Cs(Cc("{") * patterns.undouble * Cc("}"))
+
+local p_text =
+ P("text")
+ * p_spaces^0
+ * Cc("\\mathoptext")
+ * ( -- maybe balanced
+ Cs( P("{") * (1-P("}"))^0 * P("}") )
+ + Cs((P("(")/"{") * (1-P(")"))^0 * (P(")")/"}"))
+ )
+ + Cc("\\mathoptext") * Cs(Cc("{") * patterns.undouble * Cc("}"))
+
+-- either map to \left<utf> or map to \left\name
+
+-- local p_open = S("{[") * P(":")
+-- local p_close = P(":") * S("]}")
+
+-- local p_open_left = (S("{[") * P(":")) / "\\left."
+-- local p_close_right = (P(":") * S("]}")) / "\\right."
+
+-- local p_left =
+-- P("(:") / "\\left\\langle"
+-- + P("{:") / "\\left."
+-- + P("[:") / "\\left."
+-- + P("(") / "\\left\\lparent"
+-- + P("[") / "\\left\\lbracket"
+-- + P("{") / "\\left\\lbrace"
+-- + P("<<") / "\\left\\langle" -- why not <:
+-- + P("|_") / "\\left\\lfloor"
+-- + P("|~") / "\\left\\lceil"
+-- + P("⟨") / "\\left\\langle"
+-- + P("〈") / "\\left\\langle"
+-- + P("〈") / "\\left\\langle"
+
+-- local p_right =
+-- P(")") / "\\right\\rparent"
+-- + P(":)") / "\\right\\rangle"
+-- + P(":}") / "\\right."
+-- + P(":]") / "\\right."
+-- + P("]") / "\\right\\rbracket"
+-- + P("}") / "\\right\\rbrace"
+-- + P(">>") / "\\right\\rangle" -- why not :>
+-- + P("~|") / "\\right\\rceil"
+-- + P("_|") / "\\right\\rfloor"
+-- + P("⟩") / "\\right\\rangle"
+-- + P("〉") / "\\right\\rangle"
+-- + P("〉") / "\\right\\rangle"
+
+local m_left = {
+ ["(:"] = "\\left\\langle",
+ ["{:"] = "\\left.",
+ ["[:"] = "\\left.",
+ ["("] = "\\left\\lparent",
+ ["["] = "\\left\\lbracket",
+ ["{"] = "\\left\\lbrace",
+ ["<<"] = "\\left\\langle", -- why not <:
+ ["|_"] = "\\left\\lfloor",
+ ["|~"] = "\\left\\lceil",
+ ["⟨"] = "\\left\\langle",
+ ["〈"] = "\\left\\langle",
+ ["〈"] = "\\left\\langle",
+}
+
+local m_right = {
+ [")"] = "\\right\\rparent",
+ [":)"] = "\\right\\rangle",
+ [":}"] = "\\right.",
+ [":]"] = "\\right.",
+ ["]"] = "\\right\\rbracket",
+ ["}"] = "\\right\\rbrace",
+ [">>"] = "\\right\\rangle", -- why not :>
+ ["~|"] = "\\right\\rceil",
+ ["_|"] = "\\right\\rfloor",
+ ["⟩"] = "\\right\\rangle",
+ ["〉"] = "\\right\\rangle",
+ ["〉"] = "\\right\\rangle",
+}
+
+local p_left =
+ lpeg.utfchartabletopattern(keys(m_left)) / m_left
+local p_right =
+ lpeg.utfchartabletopattern(keys(m_right)) / m_right
+
+-- special cases
+
+-- local p_special =
+-- C("/")
+-- + P("\\ ") * Cc("{}") * p_spaces^0 * C(S("^_"))
+-- + P("\\ ") * Cc("\\space")
+-- + P("\\\\") * Cc("\\backslash")
+-- + P("\\") * (R("az","AZ")^1/entities)
+-- + P("|") * Cc("\\|") -- "\\middle\\|" -- maybe always add left / right as in mml ?
+--
+-- faster but also uglier:
+
+local p_special =
+-- C("/")
+-- +
+ P("|") * Cc("\\|") -- "\\middle\\|" -- maybe always add left / right as in mml ?
+ +
+ P("\\") * (
+ (
+ P(" ") * (
+ Cc("{}") * p_spaces^0 * C(S("^_"))
+ + Cc("\\space")
+ )
+ )
+ + P("\\") * Cc("\\backslash")
+ + (R("az","AZ")^1/entities)
+ )
+
+-- open | close :: {: | :}
+
+
+local parser = Ct { "tokenizer",
+ tokenizer = (
+ p_spaces
+ + p_number
+ + p_text
+-- + Ct(p_open * V("tokenizer") * p_close) -- {: (a+b,=,1),(a+b,=,7) :}
+-- + Ct(p_open * V("tokenizer") * p_close_right) -- { (a+b,=,1),(a+b,=,7) :}
+-- + Ct(p_open_left * V("tokenizer") * p_right) -- {: (a+b,=,1),(a+b,=,7) }
+ + Ct(p_left * V("tokenizer") * p_right) -- { (a+b,=,1),(a+b,=,7) }
+ + p_special
+ + p_reserved
+ + p_entity
+-- + p_utf - p_close - p_right
+ + p_utf - p_right
+ )^1,
+}
+
+local function show_state(state,level,t)
+ state = state + 1
+ report_asciimath(table.serialize(t,formatters["stage %s:%s"](level,state)))
+ return state
+end
+
+local function show_result(str,result)
+ report_asciimath("input > %s",str)
+ report_asciimath("result > %s",result)
+end
+
+local function collapse(t,level)
+ if not t then
+ return ""
+ end
+ local state = 0
+ if trace_detail then
+ if level then
+ level = level + 1
+ else
+ level = 1
end
- local parsed = lpegmatch(parser,premapped)
- if parsed then
- if trace_mapping then
- report_asciimath("parsed : %s",parsed)
+ state = show_state(state,level,t)
+ end
+ --
+ local n = #t
+ if n > 4 and t[3] == "," then
+ local l1 = t[1]
+ local r1 = t[n]
+ if isleft[l1] and isright[r1] then
+ local l2 = t[2]
+ local r2 = t[n-1]
+ if type(l2) == "table" and type(r2) == "table" then
+ -- we have a matrix
+ local valid = true
+ for i=3,n-2,2 do
+ if t[i] ~= "," then
+ valid = false
+ break
+ end
+ end
+ if valid then
+ for i=2,n-1,2 do
+ local ti = t[i]
+ local tl = ti[1]
+ local tr = ti[#ti]
+ if isleft[tl] and isright[tr] then
+ -- ok
+ else
+ valid = false
+ break
+ end
+ end
+ if valid then
+ local omit = l1 == "\\left." and r1 == "\\right."
+ if omit then
+ t[1] = "\\startmatrix"
+ else
+ t[1] = l1 .. "\\startmatrix"
+ end
+ for i=2,n-1 do
+ if t[i] == "," then
+ t[i] = "\\NR"
+ else
+ local ti = t[i]
+ ti[1] = "\\NC"
+ for i=2,#ti-1 do
+ if ti[i] == "," then
+ ti[i] = "\\NC"
+ end
+ end
+ ti[#ti] = nil
+ end
+ end
+ if omit then
+ t[n] = "\\NR\\stopmatrix"
+ else
+ t[n] = "\\NR\\stopmatrix" .. r1
+ end
+ end
+ end
end
- local postmapped = lpegmatch(postmapper,parsed)
- if postmapped then
- if trace_mapping then
- report_asciimath("finalized: %s",postmapped)
+ end
+ end
+ --
+ if trace_detail then
+ state = show_state(state,level,t)
+ end
+ --
+ local n, i = #t, 1
+ while i < n do
+ local current = t[i]
+ if current == "/" and i > 1 then
+ local tl = t[i-1]
+ local tr = t[i+1]
+ if type(tl) == "table" then
+ if isleft[tl[1]] and isright[tl[#tl]] then
+ tl[1] = "" -- todo: remove
+ tl[#tl] = nil
end
- result, ok = postmapped, true
+ end
+ if type(tr) == "table" then
+ if isleft[tr[1]] and isright[tr[#tr]] then
+ tr[1] = "" -- todo: remove
+ tr[#tr] = nil
+ end
+ end
+ i = i + 2
+ elseif current == "," or current == ";" then
+ t[i] = current .. "\\thinspace"
+ i = i + 1
+ else
+ i = i + 1
+ end
+ end
+ --
+ if trace_detail then
+ state = show_state(state,level,t)
+ end
+ --
+ local n, i = #t, 1
+ if n > 2 then
+ while i < n do
+ local current = t[i]
+ if type(current) == "table" and isleft[t[i-1]] and isright[t[i+1]] then
+ local c = #current
+ if c > 2 and isleft[current[1]] and isright[current[c]] then
+-- current[c] = nil
+-- current[1] = ""
+ remove(current,c)
+ remove(current,1)
+ end
+ i = i + 3
else
- result = "error in postmapping"
+ i = i + 1
+ end
+ end
+ end
+ --
+ if trace_detail then
+ state = show_state(state,level,t)
+ end
+ --
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if isunary[current] then
+ local one = t[i+1]
+ if not one then
+ m = m + 1
+ t[m] = current .. "{}" -- error
+ break
+ end
+ if type(one) == "table" then
+ if isleft[one[1]] and isright[one[#one]] then
+-- one[1] = ""
+-- one[#one] = nil
+ remove(one,#one)
+ remove(one,1)
+ end
+ one = collapse(one,level)
+ elseif one == "-" and i + 2 <= n then -- or another sign ? or unary ?
+ local t2 = t[i+2]
+ if type(t2) == "string" then
+ one = one .. t2
+ i = i + 1
+ end
end
+ t[m] = current .. "{" .. one .. "}"
+ i = i + 2
else
- result = "error in mapping"
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ --
+ if trace_detail then
+ state = show_state(state,level,t)
+ end
+ --
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if isbinary[current] then
+ local one = t[i+1]
+ local two = t[i+2]
+ if not one then
+ t[m] = current .. "{}{}" -- error
+ break
+ end
+ if type(one) == "table" then
+ if isleft[one[1]] and isright[one[#one]] then
+-- one[1] = ""
+-- one[#one] = nil
+ remove(one,#one)
+ remove(one,1)
+ end
+ one = collapse(one,level)
+ end
+ if not two then
+ t[m] = current .. "{" .. one .. "}{}"
+ break
+ end
+ if type(two) == "table" then
+ if isleft[two[1]] and isright[two[#two]] then
+-- two[1] = ""
+-- two[#two] = nil
+ remove(two,#two)
+ remove(two,1)
+ end
+ two = collapse(two,level)
+ end
+ t[m] = current .. "{" .. one .. "}{" .. two .. "}"
+ i = i + 3
+ else
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
end
+ end
+ --
+ if trace_detail then
+ state = show_state(state,level,t)
+ end
+ --
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if type(current) == "table" then
+ if current[1] == "\\NC" then
+ t[m] = collapse(current,level)
+ else
+ t[m] = "{" .. collapse(current,level) .. "}"
+ end
+ i = i + 1
+ else
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ --
+ if trace_detail then
+ state = show_state(state,level,t)
+ end
+ --
+ local n, m, i = #t, 0, 1
+ while i < n do
+ local current = t[i]
+ if isinfix[current] and i > 1 then
+ local tl = t[i-1]
+ local tr = t[i+1]
+ t[m] = tl .. current .. "{" .. tr .. "}"
+ i = i + 2
+ else
+ m = m + 1
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ --
+ if trace_detail then
+ state = show_state(state,level,t)
+ end
+ --
+ local n, m, i = #t, 0, 1
+ while i < n do
+ local current = t[i]
+ if current == "/" and i > 1 then
+ local tl = t[i-1]
+ local tr = t[i+1]
+ -- if type(tl) == "table" then
+ -- if isleft[tl[1]] and isright[tl[#tl]] then
+ -- tl[1] = ""
+ -- tl[#tl] = ""
+ -- end
+ -- end
+ -- if type(tr) == "table" then
+ -- if isleft[tr[1]] and isright[tr[#tr]] then
+ -- tr[1] = ""
+ -- tr[#tr] = ""
+ -- end
+ -- end
+ t[m] = "\\frac{" .. tl .. "}{" .. tr .. "}"
+ i = i + 2
+ else
+ m = m + 1
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ --
+ if trace_detail then
+ state = show_state(state,level,t)
+ end
+ --
+ local n, m, i = #t, 0, 1
+ while i < n do
+ local current = t[i]
+ if current == "\\slash" and i > 1 then
+-- t[m] = "{\\left(" .. t[i-1] .. "\\middle/" .. t[i+1] .. "\\right)}"
+ t[m] = "{\\left." .. t[i-1] .. "\\middle/" .. t[i+1] .. "\\right.}"
+ i = i + 2
+ else
+ m = m + 1
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ --
+ if trace_detail then
+ state = show_state(state,level,t)
+ end
+ --
+ local n = #t
+ if t[1] == "\\left." and t[n] == "\\right." then
+ return concat(t," ",2,n-1)
else
- result = "error in premapping"
+ return concat(t," ")
+ end
+end
+
+-- todo: cache simple ones, say #str < 10, maybe weak
+
+local ctx_mathematics = context and context.mathematics or report_asciimath
+local ctx_type = context and context.type or function() end
+local ctx_inleft = context and context.inleft or function() end
+
+local function convert(str,totex)
+ local texcode = collapse(lpegmatch(parser,str))
+ if trace_mapping then
+ show_result(str,texcode)
end
if totex then
- if ok then
- context.mathematics(result)
+ ctx_mathematics(texcode)
+ else
+ return texcode
+ end
+end
+
+local n = 0
+local p = (
+ (S("{[(") + P("\\left" )) / function() n = n + 1 end
+ + (S("}])") + P("\\right")) / function() n = n - 1 end
+ + P(1)
+)^0
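+-- the pattern above only counts fences: n goes up for every opening token and
+-- down for every closing one, so a nonzero n after matching means the formula
+-- is unbalanced and invalidtex below reports it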
+
+local function invalidtex(str)
+ n = 0
+ local result = lpegmatch(p,str)
+ if n == 0 then
+ return false
+ elseif n < 0 then
+ return formatters["too many right fences: %s"](-n)
+ elseif n > 0 then
+ return formatters["not enough right fences: %s"](n)
+ end
+end
+
+local collected = { }
+local indexed = { }
+
+-- bonus
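+--
+-- cleanedup normalizes a formula for comparison: reserved words (and the text
+-- keyword) are set off by spaces, the arguments of text(..) and quoted strings
+-- are left untouched, and runs of spaces are collapsed, so differently spaced
+-- variants of the same input end up under one key in the collected table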
+
+local p_reserved_spaced =
+ C(lpeg.utfchartabletopattern(k_reserved_words)) / " %1 "
+
+local p_text =
+ C(P("text")) / " %1 "
+ * p_spaces^0
+ * ( -- maybe balanced
+ (P("{") * (1-P("}"))^0 * P("}"))
+ + (P("(") * (1-P(")"))^0 * P(")"))
+ )
+ + patterns.doublequoted
+
+local p_expand = Cs((p_text + p_reserved_spaced + p_entity_base + p_utf_base)^0)
+local p_compress = patterns.collapser
+
+local function cleanedup(str)
+ return lpegmatch(p_compress,lpegmatch(p_expand,str)) or str
+end
+
+-- so far
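+--
+-- collect scans all files matching fpattern for <element>...</element> snippets
+-- (the element name defaults to "am"), converts each unique cleaned up formula
+-- once, and records how often it occurs, in which files, and which differently
+-- spaced ("dirty") variants were seen; an invalid conversion is kept as a message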
+
+function collect(fpattern,element,collected,indexed)
+ local element = element or "am"
+ local mpattern = formatters["<%s>(.-)</%s>"](element,element)
+ local filenames = dir.glob(fpattern)
+ local cfpattern = gsub(fpattern,"^%./",lfs.currentdir())
+ local cfpattern = gsub(cfpattern,"\\","/")
+ local wildcard = string.split(cfpattern,"*")[1]
+ if not collected then
+ collected = { }
+ indexed = { }
+ end
+ for i=1,#filenames do
+ local filename = gsub(filenames[i],"\\","/")
+ local splitname = (wildcard and wildcard ~= "" and string.split(filename,wildcard)[2]) or filename
+ local shortname = gsub(splitname or file.basename(filename),"^%./","")
+ for s in gmatch(io.loaddata(filename),mpattern) do
+ local c = cleanedup(s)
+ local f = collected[c]
+ if f then
+ f.count = f.count + 1
+ f.files[shortname] = (f.files[shortname] or 0) + 1
+ if s ~= c then
+ f.cleanedup = f.cleanedup + 1
+ end
+ f.dirty[s] = (f.dirty[s] or 0) + 1
+ else
+ local texcode = convert(s)
+ local message = invalidtex(texcode)
+ if message then
+ report_asciimath("%s: %s",message,s)
+ end
+ collected[c] = {
+ count = 1,
+ files = { [shortname] = 1 },
+ texcode = texcode,
+ message = message,
+ cleanedup = s ~= c and 1 or 0,
+ dirty = { [s] = 1 }
+ }
+ end
+ end
+ end
+ local n = 0
+ for k, v in sortedhash(collected) do
+ n = n + 1
+ v.n = n
+ indexed[n] = k
+ end
+ return collected, indexed
+end
+
+asciimath.convert = convert
+asciimath.reserved = reserved
+asciimath.collect = collect
+asciimath.invalidtex = invalidtex
+asciimath.cleanedup = cleanedup
+
+-- sin(x) = 1 : 3.3 uncached, 1.2 cached, so no real gain (better optimize the converter then)
+
+local function convert(str)
+ if #str == 1 then
+ ctx_mathematics(str)
+ else
+ local texcode = collapse(lpegmatch(parser,str))
+ if trace_mapping then
+ show_result(str,texcode)
+ end
+ if #texcode == 0 then
+ report_asciimath("error in asciimath: %s",str)
+ else
+ local message = invalidtex(texcode)
+ if message then
+ report_asciimath("%s: %s",message,str)
+ ctx_type(formatters["<%s>"](message))
+ else
+ ctx_mathematics(texcode)
+ end
+ end
+ end
+end
+
+commands.asciimath = convert
+
+if not context then
+
+-- trace_mapping = true
+-- trace_detail = true
+
+-- report_asciimath(cleanedup([[ac+sinx+xsqrtx+sinsqrtx+sinsqrt(x)]]))
+-- report_asciimath(cleanedup([[a "αsinsqrtx" b]]))
+-- report_asciimath(cleanedup([[a "α" b]]))
+-- report_asciimath(cleanedup([[//4]]))
+
+-- convert([[D_f=[0 ,→〉]])
+-- convert([[ac+sinx+xsqrtx]])
+-- convert([[ac+\alpha x+xsqrtx-cc b*pi**psi-3alephx / bb X]])
+-- convert([[ac+\ ^ x+xsqrtx]])
+-- convert([[d/dx(x^2+1)]])
+-- convert([[a "αsinsqrtx" b]])
+-- convert([[a "α" b]])
+-- convert([[//4]])
+-- convert([[ {(a+b,=,1),(a+b,=,7)) ]])
+
+-- convert([[ 2/a // 5/b = (2 b) / ( a b) // ( 5 a ) / ( a b ) = (2 b ) / ( 5 a ) ]])
+-- convert([[ (2+x)/a // 5/b ]])
+
+-- convert([[ ( 2/a ) // ( 5/b ) = ( (2 b) / ( a b) ) // ( ( 5 a ) / ( a b ) ) = (2 b ) / ( 5 a ) ]])
+
+-- convert([[ (x/y)^3 = x^3/y^3 ]])
+
+-- convert([[ {: (1,2) :} ]])
+-- convert([[ {: (a+b,=,1),(a+b,=,7) :} ]])
+-- convert([[ { (a+b,=,1),(a+b,=,7) :} ]])
+-- convert([[ {: (a+b,=,1),(a+b,=,7) } ]])
+-- convert([[ { (a+b,=,1),(a+b,=,7) } ]])
+
+-- convert([[(1,5 ±sqrt(1,25 ),0 )]])
+-- convert([[1//2]])
+-- convert([[(p)/sqrt(p)]])
+-- convert([[u_tot]])
+-- convert([[u_tot=4,4 L+0,054 T]])
+
+-- convert([[ [←;0,2] ]])
+-- convert([[ [←;0,2⟩ ]])
+-- convert([[ ⟨←;0,2 ) ]])
+-- convert([[ ⟨←;0,2 ] ]])
+-- convert([[ ⟨←;0,2⟩ ]])
+
+-- convert([[ x^2(x-1/16)=0 ]])
+-- convert([[ y = ax + 3 - 3a ]])
+-- convert([[ y= ((1/4)) ^x ]])
+-- convert([[ x=\ ^ (1/4) log(0 ,002 )= log(0,002) / (log(1/4) ]])
+-- convert([[ x=\ ^glog(y) ]])
+-- convert([[ x^ (-1 1/2) =1/x^ (1 1/2)=1/ (x^1*x^ (1/2)) =1/ (xsqrt(x)) ]])
+-- convert([[ x^2(10 -x)&gt;2 x^2 ]])
+-- convert([[ x^4&gt;x ]])
+
+ return
+
+end
+
+local context = context
+
+local ctx_typebuffer = context.typebuffer
+local ctx_mathematics = context.mathematics
+local ctx_color = context.color
+
+local sequenced = table.sequenced
+local assign_buffer = buffers.assign
+
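+-- the show helpers below drive the checking document: load collects formulas
+-- from one or more file patterns, count, files, input, dirty and result typeset
+-- one entry, statistics summarizes the collection and save dumps it to a file
+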
+asciimath.show = { }
+
+local collected, indexed, ignored = { }, { }, { }
+
+local color = { "darkred" }
+
+function asciimath.show.ignore(n)
+ if type(n) == "string" then
+ local c = collected[n]
+ n = c and c.n
+ end
+ if n then
+ ignored[n] = true
+ end
+end
+
+function asciimath.show.count(n,showcleanedup)
+ local v = collected[indexed[n]]
+ local count = v.count
+ local cleanedup = v.cleanedup
+ if not showcleanedup or cleanedup == 0 then
+ context(count)
+ elseif count == cleanedup then
+ ctx_color(color,count)
+ else
+ context("%s+",count-cleanedup)
+ ctx_color(color,cleanedup)
+ end
+end
+
+local h = { }
+
+function asciimath.show.nofdirty(n)
+ local k = indexed[n]
+ local v = collected[k]
+ local n = v.cleanedup
+ h = { }
+ if n > 0 then
+ for d, n in sortedhash(v.dirty) do
+ if d ~= k then
+ h[#h+1] = { d, n }
+ end
+ end
+ end
+ context(#h)
+end
+
+function asciimath.show.dirty(m,wrapped)
+ local d = h[m]
+ if d then
+ ctx_inleft(d[2])
+ if wrapped then
+ assign_buffer("am",'"' .. d[1] .. '"')
else
- context.type(result) -- some day monospaced
+ assign_buffer("am",d[1])
end
+ ctx_typebuffer { "am" }
+ end
+end
+
+function asciimath.show.files(n)
+ context(sequenced(collected[indexed[n]].files," "))
+end
+
+function asciimath.show.input(n,wrapped)
+ if wrapped then
+ assign_buffer("am",'"' .. indexed[n] .. '"')
else
- return result
+ assign_buffer("am",indexed[n])
end
+ ctx_typebuffer { "am" }
end
-local function onlyconverted(str)
- local parsed = lpegmatch(parser,str)
- return parsed or str
+function asciimath.show.result(n)
+ local v = collected[indexed[n]]
+ if ignored[n] then
+ context("ignored")
+ elseif v.message then
+ ctx_color(color, v.message)
+ else
+ ctx_mathematics(v.texcode)
+ end
end
-local sqrt = P("sqrt") / "\\rootradical \\bgroup \\egroup "
-local root = P("root") / "\\rootradical "
-local frac = P("frac") / "\\frac "
-local stackrel = P("stackrel") / "\\stackrel "
-local text = P("text") / "\\mathoptext "
-local hat = P("hat") / "\\widehat "
-local overbar = P("bar") / "\\overbar "
-local underline = P("ul") / "\\underline "
-local vec = P("vec") / "\\overrightarrow "
-local dot = P("dot") / "\\dot "
-local ddot = P("ddot") / "\\ddot "
-
-local left = S("{(") * P(":") + S("([{")
-local right = P(":") * S(")}") + S(")]}")
-local leftnorright = 1 - left - right
-local singles = sqrt + text + hat + underline + overbar + vec + ddot + dot
-local doubles = root + frac + stackrel
-local ignoreleft = (left/"") * spaces * spaces
-local ignoreright = spaces * (right/"") * spaces
-local ignoreslash = spaces * (P("/")/"") * spaces
-local comma = P(",")
-local nocomma = 1-comma
-local anychar = P(1)
-local openmatrix = left * spaces * Cc("\\matrix\\bgroup ")
-local closematrix = Cc("\\egroup ") * spaces * right
-local nextcolumn = spaces * (comma/"&") * spaces
-local nextrow = spaces * (comma/"\\cr ") * spaces
-local finishrow = Cc("\\cr ")
-local opengroup = left/"\\bgroup "
-local closegroup = right/"\\egroup "
-local somescript = S("^_") * spaces
-local beginargument = Cc("\\bgroup ")
-local endargument = Cc("\\egroup ")
-local macro = P("\\") * R("az","AZ")^1
-
-parser = Cs { "main",
-
- scripts = somescript * V("argument"),
- division = Cc("\\frac") * V("argument") * spaces * ignoreslash * spaces * V("argument")
- + Cc("\\left.") * V("balanced") * spaces * (P("\\slash ")/"\\middle/") * spaces * V("balanced") * Cc("\\right."),
- double = doubles * spaces * V("argument") * spaces * V("argument"),
- single = singles * spaces * V("argument"),
- macro = macro,
-
- balanced = opengroup * (C((leftnorright + V("balanced"))^0)/onlyconverted) * closegroup,
- argument = V("balanced") + V("token"),
-
- element = (V("step") + (V("argument") + V("step")) - ignoreright - nextcolumn - comma)^1,
- commalist = ignoreleft * V("element") * (nextcolumn * spaces * V("element"))^0 * ignoreright,
- matrix = openmatrix * spaces * (V("commalist") * (nextrow * V("commalist"))^0) * finishrow * closematrix,
-
- token = beginargument * (texnic + float + real + number + letter) * endargument,
-
- step = V("scripts") + V("division") + V("macro") + V("single") + V("double"),
- main = (V("matrix") + V("step") + anychar)^0,
+function asciimath.show.load(str,element)
+ collected, indexed, ignored = { }, { }, { }
+ local t = utilities.parsers.settings_to_array(str)
+ for i=1,#t do
+ asciimath.collect(t[i],element or "am",collected,indexed)
+ end
+end
-}
+function asciimath.show.max()
+ context(#indexed)
+end
+
+function asciimath.show.statistics()
+ local usedfiles = { }
+ local noffiles = 0
+ local nofokay = 0
+ local nofbad = 0
+ local nofcleanedup = 0
+ for k, v in next, collected do
+ if ignored[v.n] then
+ nofbad = nofbad + v.count
+ elseif v.message then
+ nofbad = nofbad + v.count
+ else
+ nofokay = nofokay + v.count
+ end
+ nofcleanedup = nofcleanedup + v.cleanedup
+ for k, v in next, v.files do
+ local u = usedfiles[k]
+ if u then
+ usedfiles[k] = u + 1
+ else
+ noffiles = noffiles + 1
+ usedfiles[k] = 1
+ end
+ end
+ end
+ context.starttabulate { "|B||" }
+ context.NC() context("files") context.EQ() context(noffiles) context.NC() context.NR()
+ context.NC() context("formulas") context.EQ() context(nofokay+nofbad) context.NC() context.NR()
+ context.NC() context("uniques") context.EQ() context(#indexed) context.NC() context.NR()
+ context.NC() context("cleanedup") context.EQ() context(nofcleanedup) context.NC() context.NR()
+ context.NC() context("errors") context.EQ() context(nofbad) context.NC() context.NR()
+ context.stoptabulate()
+end
+
+function asciimath.show.save(name)
+ table.save(name ~= "" and name or "dummy.lua",collected)
+end
+
+-- maybe:
-asciimath.reserved = reserved
-asciimath.convert = converted
+-- \backslash \
+-- \times ×
+-- \divide ÷
+-- \circ ∘
+-- \oplus ⊕
+-- \otimes ⊗
+-- \sum ∑
+-- \prod ∏
+-- \wedge ∧
+-- \bigwedge ⋀
+-- \vee ∨
+-- \bigvee ⋁
+-- \cup ∪
+-- \bigcup ⋃
+-- \cap ∩
+-- \bigcap ⋂
-commands.convert = converted
+-- \ne ≠
+-- \le ≤
+-- \leq ≤
+-- \ge ≥
+-- \geq ≥
+-- \prec ≺
+-- \succ ≻
+-- \in ∈
+-- \notin ∉
+-- \subset ⊂
+-- \supset ⊃
+-- \subseteq ⊆
+-- \supseteq ⊇
+-- \equiv ≡
+-- \cong ≅
+-- \approx ≈
+-- \propto ∝
+--
+-- \neg ¬
+-- \implies ⇒
+-- \iff ⇔
+-- \forall ∀
+-- \exists ∃
+-- \bot ⊥
+-- \top ⊤
+-- \vdash ⊢
+-- \models ⊨
+--
+-- \int ∫
+-- \oint ∮
+-- \partial ∂
+-- \nabla ∇
+-- \pm ±
+-- \emptyset ∅
+-- \infty ∞
+-- \aleph ℵ
+-- \ldots ...
+-- \cdots ⋯
+-- \quad
+-- \diamond ⋄
+-- \square □
+-- \lfloor ⌊
+-- \rfloor ⌋
+-- \lceiling ⌈
+-- \rceiling ⌉
+--
+-- \sin sin
+-- \cos cos
+-- \tan tan
+-- \csc csc
+-- \sec sec
+-- \cot cot
+-- \sinh sinh
+-- \cosh cosh
+-- \tanh tanh
+-- \log log
+-- \ln ln
+-- \det det
+-- \dim dim
+-- \lim lim
+-- \mod mod
+-- \gcd gcd
+-- \lcm lcm
+--
+-- \uparrow ↑
+-- \downarrow ↓
+-- \rightarrow →
+-- \to →
+-- \leftarrow ←
+-- \leftrightarrow ↔
+-- \Rightarrow ⇒
+-- \Leftarrow ⇐
+-- \Leftrightarrow ⇔
+--
+-- \mathbf
+-- \mathbb
+-- \mathcal
+-- \mathtt
+-- \mathfrak
diff --git a/tex/context/base/x-asciimath.mkiv b/tex/context/base/x-asciimath.mkiv
index 18283398a..4eff848e0 100644
--- a/tex/context/base/x-asciimath.mkiv
+++ b/tex/context/base/x-asciimath.mkiv
@@ -1,6 +1,6 @@
%D \module
-%D [ file=m-asciimath,
-%D version=2006.04.24, % 1999.11.06,
+%D [ file=x-asciimath,
+%D version=2014.06.01, % 2006.04.24, % 1999.11.06,
%D title=\CONTEXT\ Modules,
%D subtitle=AsciiMath,
%D author=Hans Hagen,
@@ -11,73 +11,238 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D Lua code.
-
\registerctxluafile{x-asciimath}{}
-\def\ctxmoduleasciimath#1{\ctxlua{moduledata.asciimath.#1}}
-
-%D The following code is not officially supported and is only meant
-%D for the Math4All project.
+%D When the Math4All project started, we immediately started using content \MATHML.
+%D Because in school math there is often a reference to calculator input, we also
+%D provided what we called \quote {calcmath}: a predictable expression based way
+%D entering math. At some point \OPENMATH\ was also used but that was later
+%D abandoned because editing is more cumbersome.
%D
-%D The following code kind of maps ascii math
-%D http://www1.chapman.edu/~jipsen/mathml/asciimath.html onto \TEX. The
-%D code was written for the math4all project but in retrospect we
-%D could have used just tex code as the web version can handle that
-%D as well. Anyhow, as we use \MATHML\ as basis it makes sense to add
-%D this to the repertoire as annotation variant, so now we have
-%D content \MATHML\ (prefered), presentation \MATHML\ (often messy),
-%D \OPENMATH\ (what was which we started with in this project)
-%D calcmath (handy for students who are accustomed to calculators),
-%D asciimath (to make Frits's live easier) and of course \TEX. Of
-%D course all are used mixed.
+%D Due to limitations in the web variant (which is independent of rendering for
+%D paper but often determines the coding of the document, not seldom for the worse) the
+%D switch was made to presentational \MATHML. But even that proved to be too complex
+%D for rendering on the web, so it got converted to so called \ASCIIMATH\ which
+%D can be rendered using some \JAVASCRIPT\ magic. However, all the formulas (and
+%D we're talking of tens of thousands of them) were very precisely coded by the main
+%D author. Because in intermediate stages of the editing (by additional authors) a
+%D mixture of \MATHML\ and \ASCIIMATH\ was used, we wrote the first version of this
+%D module. As reference we took \url
+%D {http://www1.chapman.edu/~jipsen/mathml/asciimath.html} and. The idea was to
+%D stick to \MATHML\ as reference and if needed use \ASCIIMATH\ as annotation.
%D
-%D We don't support all quirks of asciimath as I am not in the mood to
-%D write a complex parser while a bit of sane coding can work as well.
+%D Eventually we ended up supporting several math encodings in \CONTEXT\ that
+%D could be used mixed: content \MATHML\ (preferred), presentation \MATHML\ (often
+%D messy), \OPENMATH\ (somewhat minimalistic), calcmath (handy for students who are
+%D accustomed to calculators), \ASCIIMATH\ (to make web support easier) and of
+%D course \TEX.
%D
+%D The first version had some limitations as we didn't want to support all quirks of
+%D \ASCIIMATH\ and also because I was not really in the mood to write a complex parser
+%D when a bit of sane coding can work equally well. Some comments from that version:
+%D
+%D \startnarrower
%D \startitemize
-%D \item We support only the syntactically clear variants and as long
-%D as lpeg does not support left recursion this is as far as we
-%D want to go.
-%D \item The parser is rather insensitive for spaces but yet the advice is
-%D to avoid weird coding like \type {d/dxf(x)} but use \type {d/dx
-%D f(x)} instead. After all we're not in a compact coding cq.\
-%D parser challenge.
-%D \item We also don't support the somewhat confusing \type {sqrt sqrt 2}
-%D nor \type {root3x} (although the second one kind of works). A bit
-%D of defensive coding does not hurt.
-%D \item We can process \type {a/b/c/d} but it's not compatible with the
-%D default behaviour of asciimath. Use grouping instead. Yes, we do
-%D support the somewhat nonstandard grouping token mix.
-%D \item You should use explicit \type {text(..)} directives as one can
-%D never be sure what is a reserved word and not.
+%D \item We support only the syntactically clear variants and as long as lpeg does
+%D not support left recursion this is as far as we want to go.
+%D \item The parser is rather insensitive for spaces but yet the advice is to avoid
+%D weird coding like \type {d/dxf(x)} but use \type {d/dx f(x)} instead. After
+%D all we're not in a compact coding cq.\ parser challenge.
+%D \item We also don't support the somewhat confusing \type {sqrt sqrt 2} nor \type
+%D {root3x} (although the second one kind of works). A bit of defensive coding
+%D does not hurt.
+%D \item We can process \type {a/b/c/d} but it's not compatible with the default
+%D behaviour of \ASCIIMATH. Use grouping instead. Yes, we do support the somewhat
+%D nonstandard grouping token mix.
+%D \item You should use explicit \type {text(..)} directives as one can never be sure
+%D what is a reserved word and not.
%D \stopitemize
%D
-%D Actually, as the only parsing sensitive elements of \TEX\ are
-%D fractions (\type {\over} and friends, a restricted use of \TEX\
-%D coding is probably as comprehensive and parseble.
-%D
-%D The webpage with examples served as starting point so anything beyond
+%D Actually, as the only parsing sensitive elements of \TEX\ are fractions (\type {\over}
+%D and friends), a restricted use of \TEX\ coding is probably as comprehensive and
+%D parsable. The webpage with examples served as starting point so anything beyond
%D what can be found there isn't supported.
+%D \stopnarrower
+%D
+%D Then in 2014 something bad happened. Following the fashion of minimal encoding
+%D (which of course means messy encoding of complex cases and which can make authors
+%D sloppy too) the web based support workflow of the mentioned project ran into some
+%D limitations and magically one day all carefully coded \MATHML\ was converted into
+%D \ASCIIMATH. As there was no way to recover the original thousands of files and
+%D tens of thousands of formulas we were suddenly stuck with \ASCIIMATH. Because the
+%D conversion had been done automagically, we also saw numerous errors and were forced
+%D to come up with some methods to check formulas. Because \MATHML\ poses some
+%D restrictions it has predictable rendering; \ASCIIMATH\ on the other hand enforces
+%D no structure. Also, because \MATHML\ has to be valid \XML\ it always processes.
+%D Of course, during the decade that the project had run we also had to build in
+%D some catches for abuse but at least we had a relatively stable and configurable
+%D subsystem. So, in order to deal with less predictable cases as well as extensive
+%D checking, a new \ASCIIMATH\ parser was written, one that could also be used to
+%D trace bad coding.
+%D
+%D Because the formal description is incomplete, and because some links to resources
+%D are broken, and because some testing on the web showed that sequences of characters
+%D are interpreted that were not mentioned anywhere (visible), and because we noticed
+%D that the parser was dangerously tolerant, the new code is quite different from the
+%D old code.
+%D
+%D One needs to keep in mind that, because spaces are optional, the only robust way to
+%D edit \ASCIIMATH\ is to use a \WYSIWYG\ editor and hope that the parser doesn't
+%D change ever. Keys are picked up from spaceless sequences and when not recognized
+%D a (sequence of) characters is considered to be variables. So, \type {xsqrtx} is
+%D valid and renders as \type {$x\sqrt{x}$}, \type {xx} becomes \type {×} (times)
+%D but \type {ac} becomes \type {$a c$} (a times c). We're lucky that \type {AC} is
+%D not turned into Alternating Current, but who knows what happens a few years from
+%D now. So, we do support this spaceless mess, but users are warned: best use a
+%D spacy sequence. The extra amount of spaces (at one byte each) an author has to
+%D include in his|/|her active writing time probably stays below the size of one
+%D holiday picture. Another complication is that numbers (in Dutch) use commas instead
+%D of periods, but vectors use commas as well. We also have some different names for
+%D functions which then can conflict with the expectations about collapsed variables.
+%D
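+%D As an illustration (an editorial sketch, not a normative example), the
+%D behaviour just described can be checked at the \LUA\ end with the exported
+%D converter, which returns the generated \TEX\ code as a string:
+%D
+%D \starttyping
+%D moduledata.asciimath.convert("xsqrtx")    -- x times the square root of x
+%D moduledata.asciimath.convert("x sqrt x")  -- the same, but easier to read
+%D moduledata.asciimath.convert("xx")        -- a times sign
+%D moduledata.asciimath.convert("a c")       -- the product of two variables
+%D \stoptyping
+%D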
+%D It must be noted that simplified encodings (that seem to be the fashion today)
+%D can demand that applications apply fuzzy logic to make something work out
+%D well. Because we have sequential data that gets rendered, sometimes wrong input
+%D gets obscured simply by the rendering: like the commas in numbers as well as
+%D for separators (depending on space usage), or plain wrong symbols that somehow
+%D get a representation anyway. This in itself is more a side effect of trying to
+%D use the simplified encoding without applying rules (in the input) or to use it
+%D beyond its intended usage, which then of course can lead to adapted parsers and
+%D catches that themselves trigger further abuse. Imagine that instead of developing
+%D new cars, planes, space ships, mobile phones, computers we would have adapted
+%D horse cars, kites, firework, old fashioned phones and mechanical calculators in a
+%D similar way: patch upon patch of traditional means for sure would not have
+%D worked. So, when you use \ASCIIMATH\ best check immediately how it gets rendered
+%D in the browser as well as on paper. And be prepared to check the more complex
+%D code in the future again. We don't offer any guarantees but of course will try to
+%D keep up.
+%D
+%D In retrospect I sometimes wonder if the energy put into constantly adapting to
+%D the fashion of the day pays off. Probably not. It definitely doesn't pay off.
\unprotect
\writestatus{asciimath}{beware, this is an experimental (m4all only) module}
-%unexpanded\def\asciimath#1{\ctxmoduleasciimath{convert(\!!bs\detokenize{#1}\!!es,true)}}
-\unexpanded\def\asciimath#1{\ctxcommand{convert(\!!bs\detokenize\expandafter{\normalexpanded{#1}}\!!es,true)}}
+%D The core commands:
+
+\unexpanded\def\asciimath#1%
+ {\ctxcommand{asciimath(\!!bs\detokenize\expandafter{\normalexpanded{#1}}\!!es)}}
+
+\unexpanded\def\ctxmoduleasciimath#1%
+ {\ctxlua{moduledata.asciimath.#1}}
+
+%D Some tracing commands. Using tex commands is 10\% slower than directly piping
+%D from \LUA, but this is non|-|critical code.
+
+\unexpanded\def\ShowAsciiMathLoad [#1]{\ctxlua{moduledata.asciimath.show.load("#1")}}
+\unexpanded\def\ShowAsciiMathIgnore[#1]{\ctxlua{moduledata.asciimath.show.ignore("#1")}}
+\unexpanded\def\ShowAsciiMathStats {\ctxlua{moduledata.asciimath.show.statistics()}}
+\unexpanded\def\ShowAsciiMathMax {\ctxlua{moduledata.asciimath.show.max()}}
+
+\unexpanded\def\ShowAsciiMathResult#1%
+ {\begingroup
+ \blank
+ % if we are in vmode, we don't get positions i.e. a smaller tuc file
+ \inleft{\ttbf#1\hfill\ctxlua{moduledata.asciimath.show.count(#1,true)}}%
+ \dontleavehmode
+ \begingroup
+ \ttbf
+ \ctxlua{moduledata.asciimath.show.files(#1)}
+ \endgroup
+ \blank[medium,samepage]
+ \startcolor[darkblue]
+ \ctxlua{moduledata.asciimath.show.input(#1,true)}
+ \stopcolor
+ \blank[medium,samepage]
+ \doifmode{asciimath:show:dirty} {
+ \dorecurse{\ctxlua{moduledata.asciimath.show.nofdirty(#1)}} {
+ \ctxlua{moduledata.asciimath.show.dirty(\recurselevel,true)}
+ \blank[medium,samepage]
+ }
+ }
+ \ctxlua{moduledata.asciimath.show.result(#1)}
+ \blank
+ \endgroup}
+
+\unexpanded\def\ShowAsciiMathStart
+ {\begingroup
+ \let\normalmathoptext\mathoptext
+ \unexpanded\def\mathoptext##1{\normalmathoptext{\color[darkgreen]{##1}}}%
+ \setuptyping[\v!buffer][\c!before=,\c!after=]
+ \setupmargindata[\v!left][\c!style=]}
+
+\unexpanded\def\ShowAsciiMathStop
+ {\endgroup}
+
+\unexpanded\def\ShowAsciiMath
+ {\dodoubleempty\doShowAsciiMath}
+
+\unexpanded\def\doShowAsciiMath[#1][#2]%
+ {\iffirstargument
+ \ShowAsciiMathStart
+ \ShowAsciiMathLoad[#1]
+ \ifsecondargument
+ \ShowAsciiMathIgnore[#2]
+ \fi
+ \dorecurse{\ShowAsciiMathMax}{\ShowAsciiMathResult\recurselevel}
+ \page
+ \ShowAsciiMathStats
+ \ShowAsciiMathStop
+ \fi}
+
+\unexpanded\def\ShowAsciiMathSave
+ {\dosingleempty\doShowAsciiMathSave}
+
+\unexpanded\def\doShowAsciiMathSave[#1]%
+ {\ctxlua{moduledata.asciimath.show.save("#1")}}
\protect
\continueifinputfile{x-asciimath.mkiv}
-\enabletrackers[modules.asciimath.mapping]
+%D This will become an extra.
+
+\setupbodyfont
+% [pagella,10pt]
+ [dejavu,10pt]
+
+\setuplayout
+ [backspace=35mm,
+ leftmargin=20mm,
+ rightmargindistance=0pt,
+ leftmargindistance=5mm,
+ cutspace=1cm,
+ topspace=1cm,
+ bottomspace=1cm,
+ width=middle,
+ height=middle,
+ header=0cm,
+ footer=1cm]
+
+\setupheadertexts
+ []
+
+\setupfootertexts
+ [\currentdate][\pagenumber]
+
+\setupalign
+ [flushleft,verytolerant,stretch]
-\starttext
+\dontcomplain
-\def\MyAsciiMath#1{\startformula\asciimath{#1}\stopformula}
+% \enabletrackers[modules.asciimath.mapping]
+% \enabletrackers[modules.asciimath.detail]
-\startlines
+% \starttext
+% \enablemode[asciimath:show:dirty]
+% \ShowAsciiMath[e:/temporary/asciimath/*.xml]
+% % \ShowAsciiMathSave[e:/temporary/asciimath/asciimath.lua]
+% \stoptext
+
+% \starttext
+% \unexpanded\def\MyAsciiMath#1{\startformula\asciimath{#1}\stopformula}
+% \startlines
% \MyAsciiMath{x^2 / 10 // z_12^34 / 20}
% \MyAsciiMath{{:{:x^2:} / 10:} // {:{:z_12^34 :} / 20:}}
% \MyAsciiMath{x^2+y_1+z_12^34}
@@ -101,9 +266,66 @@
% \MyAsciiMath{(a,b] = {x text(in) RR | a < x <= b}}
% \MyAsciiMath{a/b / c/d = (a * d) / (b * d) / (b * c) / (b * d) = (a * d) / (b * c)}
% \MyAsciiMath{ (a/b) // (c/d) = ( (a * d) / (b * d) ) // ( (b * c) / (b * d) ) = (a * d) / (b * c)}
-\MyAsciiMath{sin(x+1)_3^2/b / c/d}
+% \MyAsciiMath{sin(x+1)_3^2/b / c/d}
% \MyAsciiMath{{:{:sin(x+1)_3^2:}/b:} / {:c/d:}}
% \MyAsciiMath{cos(a) + sin(x+1)_3^2/b / c/d = (a * d) / (b * d) / (b * c) / (b * d) = (a * d) / (b * c)}
-\stoplines
-
-\stoptext
+% \MyAsciiMath{S_(11)}
+% \MyAsciiMath{f(x)}
+% \MyAsciiMath{sin(x)}
+% \MyAsciiMath{sin(x+1)}
+% \MyAsciiMath{sin^-1(x)}
+% \MyAsciiMath{sin(2x)}
+% \MyAsciiMath{a_2^2}
+% \MyAsciiMath{( (S_(11),S_(12),S_(1n)),(vdots,ddots,vdots),(S_(m1),S_(m2),S_(mn)) ]}
+% \MyAsciiMath{frac a b}
+% \MyAsciiMath{sin(x)/2 // cos(x)/pi}
+% \MyAsciiMath{a/13 // c/d}
+% \MyAsciiMath{a/b // c/d}
+% \MyAsciiMath{x}
+% \MyAsciiMath{x^2}
+% \MyAsciiMath{sqrt x}
+% \MyAsciiMath{sqrt (x)}
+% \MyAsciiMath{root 2 x}
+% \MyAsciiMath{x+x}
+% \MyAsciiMath{x/3}
+% \MyAsciiMath{x^2 / 10}
+% \MyAsciiMath{x^2 / 10 // z_12^34 / 20}
+% \MyAsciiMath{a^23}
+% \MyAsciiMath{a^{:b^23:}+3x}
+% \MyAsciiMath{a/b / c/d}
+% \MyAsciiMath{sin(x)/b / c/d}
+% \MyAsciiMath{sin(x)/b // c/d}
+% \MyAsciiMath{a/b / c/d = (a * d) / (b * d) / (b * c) / (b * d) = (a * d) / (b * c) }
+% \MyAsciiMath{{:{:x^2:} / 10:} // {:{:z_12^34 :} / 20:}}
+% \MyAsciiMath{x^2+y_1+z_12^34}
+% \MyAsciiMath{sin^-1(x)}
+% \MyAsciiMath{d/dx f(x)=lim_(h->0) (f(x+h)-f(x))/h}
+% \MyAsciiMath{f(x)=sum_(n=0)^oo(f^((n))(a))/(n!)(x-a)^n}
+% \MyAsciiMath{int_0^1 f(x)dx}
+% \MyAsciiMath{int^1_0 f(x)dx}
+% \MyAsciiMath{2x}
+% \MyAsciiMath{a//b}
+% \MyAsciiMath{a//\alpha}
+% \MyAsciiMath{(a/b)/(d/c)}
+% \MyAsciiMath{((a*b))/(d/c)}
+% \MyAsciiMath{[[a,b],[c,d]]((n),(k))}
+% \MyAsciiMath{1/x={(1,text{if } x!=0),(text{undefined},if x=0):}}
+% \MyAsciiMath{{ (1,2), (x,(x + text(x))) }}
+% \MyAsciiMath{{(1,2),(x,(x+text(x))),(x,text(x))}}
+% \MyAsciiMath{{(1,2),(x,(x+text(x))),(x,x text(x))}}
+% \MyAsciiMath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,x text(xyz))}}
+% \MyAsciiMath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,text(xyz)+1+text(hans))}}
+% \MyAsciiMath{<<a,b>> text{and} {:(x,y),(u,v):}}
+% \MyAsciiMath{(a,b] = {x text(in) RR | a < x <= b}}
+% \MyAsciiMath{x^-2}
+% \MyAsciiMath{x^2(x-1/16)=0}
+% \MyAsciiMath{y= ((1/4)) ^x}
+% \MyAsciiMath{log (0,002) / (log(1/4))}
+% \MyAsciiMath{x=ax+b \ oeps}
+% \MyAsciiMath{x=\ ^ (1/4) log(x)}
+% \MyAsciiMath{x=\ ^ (1/4) log(0 ,002 )= log(0,002) / (log(1/4))}
+% \MyAsciiMath{x^ (-1 1/2) =1/x^ (1 1/2)=1/ (x^1*x^ (1/2)) =1/ (xsqrt(x))}
+% \MyAsciiMath{x^2(10 -x)&gt;2 x^2}
+% \MyAsciiMath{x^4&gt;x}
+% \stoplines
+% \stoptext
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 1732a2345..0f4cdc112 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 05/30/14 23:26:41
+-- merge date : 06/06/14 23:52:32
do -- begin closure to overcome local limits and interference
@@ -217,9 +217,12 @@ patterns.integer=sign^-1*digit^1
patterns.unsigned=digit^0*period*digit^1
patterns.float=sign^-1*patterns.unsigned
patterns.cunsigned=digit^0*comma*digit^1
+patterns.cpunsigned=digit^0*(period+comma)*digit^1
patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.cpfloat=sign^-1*patterns.cpunsigned
patterns.number=patterns.float+patterns.integer
patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.cpnumber=patterns.cpfloat+patterns.integer
patterns.oct=zero*octdigit^1
patterns.octal=patterns.oct
patterns.HEX=zero*P("X")*(digit+uppercase)^1
@@ -636,21 +639,22 @@ function lpeg.append(list,pp,delayed,checked)
end
return p
end
-local function make(t)
- local p
+local function make(t,hash)
+ local p=P(false)
local keys=sortedkeys(t)
for i=1,#keys do
local k=keys[i]
local v=t[k]
- if not p then
+ local h=hash[v]
+ if h then
if next(v) then
- p=P(k)*make(v)
+ p=p+P(k)*(make(v,hash)+P(true))
else
- p=P(k)
+ p=p+P(k)*P(true)
end
else
if next(v) then
- p=p+P(k)*make(v)
+ p=p+P(k)*make(v,hash)
else
p=p+P(k)
end
@@ -660,16 +664,20 @@ local function make(t)
end
function lpeg.utfchartabletopattern(list)
local tree={}
+ local hash={}
for i=1,#list do
local t=tree
for c in gmatch(list[i],".") do
- if not t[c] then
- t[c]={}
+ local tc=t[c]
+ if not tc then
+ tc={}
+ t[c]=tc
end
- t=t[c]
+ t=tc
end
+ hash[t]=list[i]
end
- return make(tree)
+ return make(tree,hash)
end
patterns.containseol=lpeg.finder(eol)
local function nextstep(n,step,result)
@@ -2668,6 +2676,7 @@ local striplinepatterns={
["retain"]=p_retain_normal,
["retain and collapse"]=p_retain_collapse,
["retain and no empty"]=p_retain_noempty,
+ ["collapse"]=patterns.collapser,
}
strings.striplinepatterns=striplinepatterns
function strings.striplines(str,how)