author     Hans Hagen <pragma@wxs.nl>                  2021-03-05 12:17:06 +0100
committer  Context Git Mirror Bot <phg@phi-gamma.net>  2021-03-05 12:17:06 +0100
commit     cd05787a9d41bac345695564011d333974afe1d9 (patch)
tree       ecc998d3de192ddcccdf1fcb1ec56fc3d539c2f9
parent     0d300509bdd7497fd376844b2326f5917636590e (diff)
download   context-cd05787a9d41bac345695564011d333974afe1d9.tar.gz
2021-03-05 11:16:00
-rw-r--r--  doc/context/scripts/mkiv/context.html | 2
-rw-r--r--  doc/context/scripts/mkiv/context.man | 3
-rw-r--r--  doc/context/scripts/mkiv/context.xml | 5
-rw-r--r--  doc/context/scripts/mkiv/mtx-context.html | 2
-rw-r--r--  doc/context/scripts/mkiv/mtx-context.man | 3
-rw-r--r--  doc/context/scripts/mkiv/mtx-context.xml | 5
-rw-r--r--  doc/context/sources/general/manuals/metafun/metafun-basics.tex | 223
-rw-r--r--  doc/context/sources/general/manuals/metafun/metafun-debugging.tex | 74
-rw-r--r--  doc/context/sources/general/manuals/metafun/metafun-environment.tex | 4
-rw-r--r--  doc/context/sources/general/manuals/metafun/metafun-text-lmtx.tex | 35
-rw-r--r--  metapost/context/base/mpiv/mp-back.mpiv | 336
-rw-r--r--  metapost/context/base/mpiv/mp-tool.mpiv | 2
-rw-r--r--  metapost/context/base/mpxl/mp-text.mpxl | 35
-rw-r--r--  metapost/context/base/mpxl/mp-tool.mpxl | 4
-rw-r--r--  scripts/context/lua/mtx-context.lua | 65
-rw-r--r--  scripts/context/lua/mtx-context.xml | 5
-rw-r--r--  scripts/context/lua/mtxrun.lua | 37
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua | 37
-rw-r--r--  scripts/context/stubs/unix/mtxrun | 37
-rw-r--r--  scripts/context/stubs/win64/mtxrun.lua | 37
-rw-r--r--  tex/context/base/mkii/cont-new.mkii | 2
-rw-r--r--  tex/context/base/mkii/context.mkii | 2
-rw-r--r--  tex/context/base/mkiv/back-exp.lua | 4
-rw-r--r--  tex/context/base/mkiv/cont-new.mkiv | 2
-rw-r--r--  tex/context/base/mkiv/context.mkiv | 2
-rw-r--r--  tex/context/base/mkiv/core-con.mkiv | 9
-rw-r--r--  tex/context/base/mkiv/l-package.lua | 22
-rw-r--r--  tex/context/base/mkiv/luat-log.lua | 4
-rw-r--r--  tex/context/base/mkiv/mult-fmt.lua | 34
-rw-r--r--  tex/context/base/mkiv/page-ini.lua | 17
-rw-r--r--  tex/context/base/mkiv/status-files.pdf | bin 25371 -> 25388 bytes
-rw-r--r--  tex/context/base/mkiv/status-lua.pdf | bin 256137 -> 257036 bytes
-rw-r--r--  tex/context/base/mkiv/strc-pag.lua | 1
-rw-r--r--  tex/context/base/mkiv/typo-bld.lua | 77
-rw-r--r--  tex/context/base/mkiv/util-lib.lua | 2
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-http.lua | 10
-rw-r--r--  tex/context/base/mkxl/back-exp-imp-mth.lmt | 742
-rw-r--r--  tex/context/base/mkxl/back-exp-imp-ref.lmt | 261
-rw-r--r--  tex/context/base/mkxl/back-exp-imp-tag.lmt | 846
-rw-r--r--  tex/context/base/mkxl/back-exp.lmt | 2719
-rw-r--r--  tex/context/base/mkxl/back-exp.mkxl | 5
-rw-r--r--  tex/context/base/mkxl/cont-new.mkxl | 2
-rw-r--r--  tex/context/base/mkxl/context.mkxl | 2
-rw-r--r--  tex/context/base/mkxl/core-con.mkxl | 6
-rw-r--r--  tex/context/base/mkxl/luat-log.lmt | 4
-rw-r--r--  tex/context/base/mkxl/mlib-fio.lmt | 2
-rw-r--r--  tex/context/base/mkxl/typo-bld.lmt | 414
-rw-r--r--  tex/context/base/mkxl/typo-bld.mkxl | 2
-rw-r--r--  tex/context/base/mkxl/typo-shp.lmt | 2
-rw-r--r--  tex/context/base/mkxl/typo-shp.mkxl | 34
-rw-r--r--  tex/context/fonts/mkiv/type-imp-dejavu.mkiv | 3
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua | 2
52 files changed, 5809 insertions, 376 deletions
diff --git a/doc/context/scripts/mkiv/context.html b/doc/context/scripts/mkiv/context.html
index 808da3209..682b8621f 100644
--- a/doc/context/scripts/mkiv/context.html
+++ b/doc/context/scripts/mkiv/context.html
@@ -76,6 +76,8 @@
<tr><th>--arrange</th><td></td><td>run extra imposition pass, given that the style sets up imposition</td></tr>
<tr><th>--noarrange</th><td></td><td>ignore imposition specifications in the style</td></tr>
<tr><th/><td/><td/></tr>
+ <tr><th>--pages</th><td></td><td>report pagenumbers of named pages and list references (--page=... or --list=...)</td></tr>
+ <tr><th/><td/><td/></tr>
<tr><th>--jit</th><td></td><td>use luajittex with jit turned off (only use the faster virtual machine)</td></tr>
<tr><th>--jiton</th><td></td><td>use luajittex with jit turned on (in most cases not faster, even slower)</td></tr>
<tr><th/><td/><td/></tr>
diff --git a/doc/context/scripts/mkiv/context.man b/doc/context/scripts/mkiv/context.man
index 99c0c42aa..7d9e949bb 100644
--- a/doc/context/scripts/mkiv/context.man
+++ b/doc/context/scripts/mkiv/context.man
@@ -101,6 +101,9 @@ run extra imposition pass, given that the style sets up imposition
.B --noarrange
ignore imposition specifications in the style
.TP
+.B --pages
+report pagenumbers of named pages and list references (--page=... or --list=...)
+.TP
.B --jit
use luajittex with jit turned off (only use the faster virtual machine)
.TP
diff --git a/doc/context/scripts/mkiv/context.xml b/doc/context/scripts/mkiv/context.xml
index 40751c613..98a75244b 100644
--- a/doc/context/scripts/mkiv/context.xml
+++ b/doc/context/scripts/mkiv/context.xml
@@ -112,6 +112,11 @@
</flag>
</subcategory>
<subcategory>
+ <flag name="pages">
+ <short>report pagenumbers of named pages and list references (<ref name="page"/>=... or <ref name="list"/>=...)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
<flag name="jit">
<short>use luajittex with jit turned off (only use the faster virtual machine)</short>
</flag>
diff --git a/doc/context/scripts/mkiv/mtx-context.html b/doc/context/scripts/mkiv/mtx-context.html
index 808da3209..682b8621f 100644
--- a/doc/context/scripts/mkiv/mtx-context.html
+++ b/doc/context/scripts/mkiv/mtx-context.html
@@ -76,6 +76,8 @@
<tr><th>--arrange</th><td></td><td>run extra imposition pass, given that the style sets up imposition</td></tr>
<tr><th>--noarrange</th><td></td><td>ignore imposition specifications in the style</td></tr>
<tr><th/><td/><td/></tr>
+ <tr><th>--pages</th><td></td><td>report pagenumbers of named pages and list references (--page=... or --list=...)</td></tr>
+ <tr><th/><td/><td/></tr>
<tr><th>--jit</th><td></td><td>use luajittex with jit turned off (only use the faster virtual machine)</td></tr>
<tr><th>--jiton</th><td></td><td>use luajittex with jit turned on (in most cases not faster, even slower)</td></tr>
<tr><th/><td/><td/></tr>
diff --git a/doc/context/scripts/mkiv/mtx-context.man b/doc/context/scripts/mkiv/mtx-context.man
index 99c0c42aa..7d9e949bb 100644
--- a/doc/context/scripts/mkiv/mtx-context.man
+++ b/doc/context/scripts/mkiv/mtx-context.man
@@ -101,6 +101,9 @@ run extra imposition pass, given that the style sets up imposition
.B --noarrange
ignore imposition specifications in the style
.TP
+.B --pages
+report pagenumbers of named pages and list references (--page=... or --list=...)
+.TP
.B --jit
use luajittex with jit turned off (only use the faster virtual machine)
.TP
diff --git a/doc/context/scripts/mkiv/mtx-context.xml b/doc/context/scripts/mkiv/mtx-context.xml
index 40751c613..98a75244b 100644
--- a/doc/context/scripts/mkiv/mtx-context.xml
+++ b/doc/context/scripts/mkiv/mtx-context.xml
@@ -112,6 +112,11 @@
</flag>
</subcategory>
<subcategory>
+ <flag name="pages">
+ <short>report pagenumbers of named pages and list references (<ref name="page"/>=... or <ref name="list"/>=...)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
<flag name="jit">
<short>use luajittex with jit turned off (only use the faster virtual machine)</short>
</flag>
diff --git a/doc/context/sources/general/manuals/metafun/metafun-basics.tex b/doc/context/sources/general/manuals/metafun/metafun-basics.tex
index 92d2f2c07..479f39602 100644
--- a/doc/context/sources/general/manuals/metafun/metafun-basics.tex
+++ b/doc/context/sources/general/manuals/metafun/metafun-basics.tex
@@ -51,8 +51,8 @@ wish, your comments, your grocery list, whatever. Comments in \METAPOST, prefixe
by a percent sign, as in \typ {% Let's draw a circle}, are ignored by the
interpreter, but useful reminders for the programmer.
-If the file is saved as \type {yourfile.mp}, then the file is processed by
-\METAPOST\ by issuing the following command:
+Traditionally, if the file is saved as \type {yourfile.mp}, then the file is
+processed by \METAPOST\ by issuing the following command:
\starttyping
mpost yourfile
@@ -88,7 +88,7 @@ If you have installed \CONTEXT, somewhere on your system there resides a file
following line at the top of your file:
\starttyping
-input mp-tool ; % or input metafun ;
+input metafun ;
\stoptyping
By loading this file, the resulting graphic will provide a high resolution
@@ -96,10 +96,44 @@ bounding box, which enables more accurate placement. The file also sets the \typ
{prologues := 1} so that viewers like \GHOSTVIEW\ can refresh the file when it is
changed.
+{\bf However!} When you use \METAPOST\ in \CONTEXT, you will not run the
+mentioned program at all: you embed your graphic in the document and we will
+discuss this later. You can still use separate files but then you process them as
+follows:
+
+\starttyping
+mtxrun --script metapost somefile.mp
+\stoptyping
+
+Alternatively you can wrap the individual pictures in a \TEX\ file, say \type
+{yourfile.tex}:
+
+\starttyping
+\starttext
+ \startMPpage
+ % code
+ \stopMPpage
+ \startMPpage
+ % code
+ \stopMPpage
+\stoptext
+\stoptyping
+
+and then run:
+
+\starttyping
+context somefile.tex
+\stoptyping
+
+after which you can use the resulting \PDF\ file (if it has multiple pages you just
+filter the page you need from that file).
+
Next we will introduce some more \METAPOST\ commands. From now on, we will omit
the encapsulating \type {beginfig} and \type {endfig} macros. If you want to
process these examples yourself, you should add those commands yourself, or if
-you use \CONTEXT\ you don't need them at all.
+you use \CONTEXT\ you don't need them at all. You can for instance wrap them in a
+\type {\startMPpage} \unknown\ \type {\stopMPpage} environment if you want to
+play around.
\startbuffer
pickup pencircle scaled .5cm ;
@@ -3377,7 +3411,43 @@ context("%3!texexp!", 10.4698E30,"@2.3f","@2i")
\typebuffer
-This gives: \inlinebuffer .
+This gives: \inlinebuffer . In \in {figure} [fig:formatters] we see some more
+formatters.
+
+\startbuffer
+for i=1 upto 12 :
+ draw
+ thefmttext("\letterpercent 3!date!","month,space,year",2019,i)
+ shifted (0,i *cm / 2);
+ draw
+ thefmttext("@3!date!","month,space,year",2019,i)
+ shifted (4cm,i *cm / 2);
+endfor ;
+
+for i=1 upto 20 :
+ draw
+ thefmttext(decimal i)
+ shifted (1cm,-i/2*cm);
+ draw
+ thefmttext("@!month!",i)
+ shifted (4cm,-i/2*cm);
+ draw
+ thefmttext("@!weekday!",i)
+ shifted (7cm,-i/2*cm);
+ draw
+ thefmttext("@!monthshort!",i)
+ shifted (10cm,-i/2*cm);
+ draw
+ thefmttext("@3!dayoftheweek!",i,1,2018)
+ shifted (13cm,-i/2*cm);
+endfor ;
+\stopbuffer
+
+\typebuffer
+
+\startplacefigure[title=formatters,reference=fig:formatters]
+ \framed{\scale[width=.8\textwidth]{\processMPbuffer}}
+\stopplacefigure
\stopsection
@@ -3660,6 +3730,149 @@ We could have said \type {-- hide(a := a shifted point i of p) a} because the
\stopsection
+\startsection[title=External files]
+
+You can put code in an external file and load that one when needed. The low level
+command is \type {input}:
+
+\starttyping
+input myfile ;
+input myfile.mp ;
+input myfile.mpiv ;
+input myfile.mpxl ;
+input "myfile.mpxl" ;
+\stoptyping
+
+In \CONTEXT\ we have modules; their names start with \type {mp-} and the
+suffix can depend on what version you use (\type {mpii}, \type {mpiv}, \type
+{mpxl}), but that will be dealt with automatically.
+
+\starttyping
+loadmodule("mine") ;
+\stoptyping
+
+This will locate \type {mp-mine} with the proper suffix and often a module will
+be set up to be loaded only once. For loading files \LUAMETAFUN\ provides:
+
+\starttyping
+loadfile("somefile.mp") ;
+\stoptyping
+
+This is a bit more robust than the \type {input} command, which is kind of fuzzy
+because it takes either a sequence of tokens or a string. When you embed code in a
+document source there are some environments that permit loading of definitions
+and extensions.
+
+\starttyping
+\startMPdefinitions
+ % code passed to the library directly
+\stopMPdefinitions
+
+\startMPextensions
+ % code passed to the library but via tex (expansion step)
+\stopMPextensions
+\stoptyping
+
+\stopsection
+
+\startsection[title=Instances]
+
+The library supports several numbering models. The default is \type {scaled}, which
+uses integers that represent a (relatively small) float. Although you can go up
+to 32K it's wise to stay below 4096. The \type {double} number model supports 64
+bit floating point and the \type {decimal} model arbitrary precision. In \LUATEX\
+there is also \type {binary} for arbitrary precision but in \LUAMETATEX\ that
+model is not provided.
+
+You can use different models and instances alongside each other. In \MKIV\ and
+\LMTX\ we have a few predefined ones. The \type {metafun} instance is the default.
+Here we show the
+ones that use \METAFUN:
+
+\starttyping[style=\ttx]
+\defineMPinstance[metafun] [format=metafun,extensions=yes,initializations=yes]
+\defineMPinstance[minifun] [format=minifun,extensions=yes,initializations=yes]
+\defineMPinstance[extrafun] [format=metafun,extensions=yes,initializations=yes]
+\defineMPinstance[lessfun] [format=metafun]
+\defineMPinstance[doublefun] [format=metafun,extensions=yes,initializations=yes,method=double]
+\defineMPinstance[decimalfun][format=metafun,extensions=yes,initializations=yes,method=decimal]
+\defineMPinstance[mprun] [format=metafun,extensions=yes,initializations=yes]
+\defineMPinstance[simplefun] [format=metafun,method=double] % for internal usage
+\stoptyping
+
+All instances will load additional definitions, but extensions (expanded
+definitions) and initializations (run before each graphic) are optional.
+
+\starttyping
+\startMPdefinitions
+\stopMPdefinitions
+
+\startMPextensions
+\stopMPextensions
+
+\startMPinitializations
+\stopMPinitializations
+\stoptyping
+
+You can search the sources of the manuals and the test suite for examples of usage,
+but here are some:
+
+\startbuffer
+\defineMPinstance[foo][definitions=yes]
+\defineMPinstance[bar][definitions=yes]
+
+\startMPdefinitions{foo}
+ vardef p = image (
+ draw fullsquare scaled 1cm withcolor "darkgreen" ;
+ draw textext("1") ;
+ )
+ enddef ;
+\stopMPdefinitions
+
+\startMPdefinitions{bar}
+ vardef p = image (
+ draw (fullsquare rotated 45) scaled 1cm withcolor "darkyellow" ;
+ draw textext("2") ;
+ )
+ enddef ;
+\stopMPdefinitions
+\stopbuffer
+
+\typebuffer \getbuffer
+
+We use both as:
+
+\startbuffer[demo-1]
+\startMPcode{foo}
+ draw p ;
+ draw image (
+ draw fullcircle scaled 1cm withcolor "darkblue" ;
+ draw textext("3") withcolor "darkred" ;
+ ) ;
+ draw p ;
+\stopMPcode
+\stopbuffer
+
+\startbuffer[demo-2]
+\startMPcode{bar}
+ draw p ;
+ draw image (
+ draw fullcircle scaled 1cm withcolor "darkblue" ;
+ draw textext("4") withcolor "darkred" ;
+ ) ;
+\stopMPcode
+\stopbuffer
+
+\typebuffer[demo-1,demo-2]
+
+\startlinecorrection
+\startcombination[nx=2,ny=1]
+ {\getbuffer[demo-1]} {foo}
+ {\getbuffer[demo-2]} {bar}
+\stopcombination
+\stoplinecorrection
+
+\stopsection
+
\stopchapter
\stopcomponent
diff --git a/doc/context/sources/general/manuals/metafun/metafun-debugging.tex b/doc/context/sources/general/manuals/metafun/metafun-debugging.tex
index de863aea0..f8957b5d8 100644
--- a/doc/context/sources/general/manuals/metafun/metafun-debugging.tex
+++ b/doc/context/sources/general/manuals/metafun/metafun-debugging.tex
@@ -375,6 +375,80 @@ When we overlay these three we get. The envelope only returns the outer curve.
\stopoverlay
\stoplinecorrection
+We show a few more examples and leave it to the user to come up with applications
+for this feature. We start by defining a scaled pen that we apply to a simple
+path that has three points.
+
+\startbuffer
+pen whateverpen ; whateverpen := makepen(fullcircle xyscaled(1/20,1/40));
+
+path p ; p := (origin .. (1,1/2) .. (1,1/4)) scaled 10 ;
+path q ; q := envelope whateverpen of p ;
+
+draw p withpen pencircle xyscaled(1/2,1/4)
+ withcolor "darkgreen" withtransparency (1,.5) ;
+drawpoints p withpen pencircle scaled 1
+ withcolor "darkgray" withtransparency (1,.5) ;
+drawcontrolpoints p withpen pencircle scaled 1/2
+ withcolor "darkmagenta" withtransparency (1,.5) ;
+
+currentpicture := currentpicture shifted (-20,0) ;
+
+draw p withpen pencircle xyscaled(1/2,1/4)
+ withcolor "darkgreen" withtransparency (1,.5) ;
+draw q withpen pencircle scaled 1/5
+ withcolor "darkblue" withtransparency (1,.5) ;
+drawpoints q withpen pencircle scaled 1
+ withcolor "darkgray" withtransparency (1,.5) ;
+drawcontrolpoints q withpen pencircle scaled 1/2
+ withcolor "darkyellow" withtransparency (1,.5) ;
+draw q withpen pencircle scaled 1/50
+ withcolor "white" ;
+
+currentpicture := currentpicture ysized 2cm ;
+\stopbuffer
+
+\typebuffer
+
+On the left we see the path drawn with a circular pen. The points and control
+points are also shown. The path is repeated on the right but there it gets the
+envelope overlaid. We show the points and control points of the envelope: they
+nearly overlap, but that of course depends on the pen used.
+
+\startlinecorrection[blank]
+ \processMPbuffer
+\stoplinecorrection
+
+In order to illustrate that the envelope is an outline we blow up a piece of
+this image:
+
+\startlinecorrection[blank]
+ \scale[height=3cm]{\clip[nx=10,ny=20,x=9,y=7]{\processMPbuffer}}
+\stoplinecorrection
+
+Without further explanation we let you ponder the results of the following code.
+
+\startbuffer
+drawoptions(withcolor "darkred") ;
+draw (envelope pensquare of (up--left)) ;
+draw (up--left) shifted (4,0) withpen pensquare ;
+draw fullcircle shifted (8,0) ;
+draw (envelope pensquare of fullcircle) shifted (12,0) ;
+draw (fullcircle) shifted (16,0) withpen pensquare ;
+draw (fullcircle) shifted (20,0) withpen pencircle ;
+currentpicture := currentpicture xsized TextWidth ;
+\stopbuffer
+
+\typebuffer
+
+The efficiency of the output of each draw differs a lot because circles are made
+from eight points and because a transformed pen results in two paths. Normally
+that is not something you have to lose sleep over.
+
+\startlinecorrection[blank]
+ \processMPbuffer
+\stoplinecorrection
+
\stopsection
\startsection[title=Performance]
diff --git a/doc/context/sources/general/manuals/metafun/metafun-environment.tex b/doc/context/sources/general/manuals/metafun/metafun-environment.tex
index 20b6920f9..57875008d 100644
--- a/doc/context/sources/general/manuals/metafun/metafun-environment.tex
+++ b/doc/context/sources/general/manuals/metafun/metafun-environment.tex
@@ -342,7 +342,7 @@
\startbuffer[shape-a]
\startuniqueMPgraphic{meta:hash}{linewidth,linecolor,angle,gap}
- if unknown context_back : input mp-back ; fi ;
+ loadmodule("back") ;
some_hash ( OverlayWidth, OverlayHeight ,
\MPvar{linewidth}, \MPvar{linecolor} ,
\MPvar{angle}, \MPvar{gap} ) ;
@@ -369,7 +369,7 @@
\startbuffer[symb-a]
\startuniqueMPgraphic{meta:button}{type,size,linecolor,fillcolor}
- if unknown context_butt : input mp-butt ; fi ;
+ loadmodule("butt") ;
some_button ( \MPvar{type},
\MPvar{size},
\MPvar{linecolor},
diff --git a/doc/context/sources/general/manuals/metafun/metafun-text-lmtx.tex b/doc/context/sources/general/manuals/metafun/metafun-text-lmtx.tex
index bd84f08ec..abdca3c3a 100644
--- a/doc/context/sources/general/manuals/metafun/metafun-text-lmtx.tex
+++ b/doc/context/sources/general/manuals/metafun/metafun-text-lmtx.tex
@@ -1295,7 +1295,7 @@ being an invisible character with maximum dimensions. Variables like \type
can save variables.
\startbuffer
-\startuseMPgraphic{test 1}
+\startuseMPgraphic{shapetest-1}
begingroup ;
save p ; path p ; p := fullcircle scaled 6cm ;
lmt_parshape [
@@ -1319,7 +1319,7 @@ The second shape is a diamond. This is a rather useless shape, unless the text
suits the small lines at the top and bottom.
\startbuffer
-\startuseMPgraphic{test 2}
+\startuseMPgraphic{shapetest-2}
begingroup ;
save p ; path p ; p := fullsquare rotated 45 scaled 5cm ;
lmt_parshape [
@@ -1338,7 +1338,7 @@ The third and fourth shape demonstrate that providing a suitable offset is not
always trivial.
\startbuffer
-\startuseMPgraphic{test 3}
+\startuseMPgraphic{shapetest-3}
begingroup ;
save w, h, p ; path p ; w := h := 6cm ;
p := (.5w,h) -- ( 0, h) -- (0,0) -- (w,0) &
@@ -1360,14 +1360,15 @@ a path, \METAPOST\ is able to calculate the right dimensions and offsets. This i
needed, since we need these later on.
\startbuffer
-\startuseMPgraphic{test 4}
+\startuseMPgraphic{shapetest-4}
begingroup ;
- save d, p, q ; path p, q ; d := BodyFontSize/2;
+ save d, p, q, shape ; path p, q ; d := BodyFontSize/2;
vardef shape(expr w, h, o) =
(o,o) -- (w-o,o) & (w-o,o) .. (.75w-o,.5h) ..
(w-2o,h-o) & (w-2o,h-o) -- (o,h-o) -- cycle
enddef ;
- p := shape(6cm, 6cm, d) ; q := shape(6cm, 6cm, 0) ;
+ p := shape(6cm, 6cm, d) ;
+ q := shape(6cm, 6cm, 0) ;
lmt_parshape [
path = p,
offsetpath = q,
@@ -1386,10 +1387,10 @@ Since we also want these graphics as backgrounds, we define them as overlays. If
you don't want to show the graphic, you may omit this step.
\startbuffer
-\defineoverlay[test 1][\useMPgraphic{test 1}]
-\defineoverlay[test 2][\useMPgraphic{test 2}]
-\defineoverlay[test 3][\useMPgraphic{test 3}]
-\defineoverlay[test 4][\useMPgraphic{test 4}]
+\defineoverlay[shapetest-1][\useMPgraphic{shapetest-1}]
+\defineoverlay[shapetest-2][\useMPgraphic{shapetest-2}]
+\defineoverlay[shapetest-3][\useMPgraphic{shapetest-3}]
+\defineoverlay[shapetest-4][\useMPgraphic{shapetest-4}]
\stopbuffer
\typebuffer \getbuffer
@@ -1399,7 +1400,7 @@ Themas, Questing for the Essence of Mind and Pattern}. Watch how we pass a list
of shapes.
\startbuffer[text]
-\startshapetext[test 1,test 2,test 3,test 4]
+\startshapetext[shapetest-1,shapetest-2,shapetest-3,shapetest-4]
\forgetall % as it says
\setupalign[verytolerant,stretch,normal]%
\samplefile{douglas}% Douglas R. Hofstadter
@@ -1409,17 +1410,15 @@ of shapes.
\typebuffer[text]
Finally we combine text and shapes. Since we also want a background, we use \type
-{\framed}. The macros \type {\parwidth} and \type {\parheight} are automatically
-set to the current shape dimensions. The normal result is shown in \in {figure}
-[fig:shapes].
+{\framed}. The normal result is shown in \in {figure} [fig:shapes].
\startbuffer[shapes]
\startbuffer
\startcombination[2*2]
- {\framed[offset=overlay,frame=off,background=test 1]{\getshapetext}} {test 1}
- {\framed[offset=overlay,frame=off,background=test 2]{\getshapetext}} {test 2}
- {\framed[offset=overlay,frame=off,background=test 3]{\getshapetext}} {test 3}
- {\framed[offset=overlay,frame=off,background=test 4]{\getshapetext}} {test 4}
+ {\framed[offset=overlay,frame=off,background=shapetest-1]{\getshapetext}} {shapetest-1}
+ {\framed[offset=overlay,frame=off,background=shapetest-2]{\getshapetext}} {shapetest-2}
+ {\framed[offset=overlay,frame=off,background=shapetest-3]{\getshapetext}} {shapetest-3}
+ {\framed[offset=overlay,frame=off,background=shapetest-4]{\getshapetext}} {shapetest-4}
\stopcombination
\stopbuffer
\stopbuffer
diff --git a/metapost/context/base/mpiv/mp-back.mpiv b/metapost/context/base/mpiv/mp-back.mpiv
index f588adea9..e77b3f0d9 100644
--- a/metapost/context/base/mpiv/mp-back.mpiv
+++ b/metapost/context/base/mpiv/mp-back.mpiv
@@ -22,14 +22,15 @@ def some_hash ( expr hash_width ,
hash_angle ,
hash_gap ) =
- stripe_gap := hash_gap ;
- stripe_angle := hash_angle ;
- drawoptions (withpen pencircle scaled hash_linewidth
- withcolor hash_linecolor) ;
- path p ; p := unitsquare xscaled hash_width yscaled hash_height ;
- stripe_path_a () (draw) p ; % next we move it all to quadrant 1
- currentpicture := currentpicture shifted urcorner currentpicture ;
-
+ begingroup ;
+ stripe_gap := hash_gap ;
+ stripe_angle := hash_angle ;
+ drawoptions (withpen pencircle scaled hash_linewidth
+ withcolor hash_linecolor) ;
+ path p ; p := unitsquare xscaled hash_width yscaled hash_height ;
+ stripe_path_a () (draw) p ; % next we move it all to quadrant 1
+ currentpicture := currentpicture shifted urcorner currentpicture ;
+ endgroup ;
enddef ;
def some_double_back (expr back_type ,
@@ -44,162 +45,167 @@ def some_double_back (expr back_type ,
back_leftcolor ,
back_rightcolor ) =
- numeric ww ; ww := back_width ;
- numeric hh ; hh := back_height ;
- numeric dd ; dd := back_delta ;
-
- color back_nillcolor ; back_nillcolor := back_topcolor ;
-
- path p ; p := fullsquare xscaled ww yscaled hh ;
- path q ; q := fullsquare xscaled (ww-2dd) yscaled (hh-2dd) ;
- path r ; r := llcorner p --
- lrcorner p shifted (-3dd,0) .. controls lrcorner p ..
- lrcorner p shifted (0, 3dd) --
- urcorner p shifted (0,-3dd) .. controls urcorner p ..
- urcorner p shifted (-3dd,0) --
- ulcorner p -- cycle ;
- path s ; s := r xscaled ((ww-2dd)/ww) yscaled ((hh-2dd)/hh) ;
- path t ; t := llcorner p --
- lrcorner p --
- urcorner p shifted (0,-3dd) .. controls urcorner p ..
- urcorner p shifted (-3dd,0) --
- ulcorner p shifted ( 3dd,0) .. controls ulcorner p ..
- ulcorner p shifted (0,-3dd) --
- llcorner p -- cycle ;
- path u ; u := t xscaled ((ww-2dd)/ww) yscaled ((hh-2dd)/hh) ;
- path v ; v := llcorner p shifted ( 3dd,0) --
- lrcorner p shifted (-3dd,0) .. controls lrcorner p ..
- lrcorner p shifted (0, 3dd) --
- urcorner p shifted (0,-3dd) .. controls urcorner p ..
- urcorner p shifted (-3dd,0) --
- ulcorner p shifted ( 3dd,0) .. controls ulcorner p ..
- ulcorner p shifted (0,-3dd) ..
- llcorner p shifted (0, 3dd) .. controls llcorner p .. cycle ; % {down} .. cycle ;
- path w ; w := t xscaled ((ww-2dd)/ww) yscaled ((hh-2dd)/hh) ;
- path a ; a := llcorner p -- ulcorner p --
- ulcorner q -- llcorner q -- cycle ;
- path b ; b := llcorner p -- lrcorner p --
- lrcorner q -- llcorner q -- cycle ;
- path c ; c := lrcorner p -- urcorner p --
- urcorner q -- lrcorner q -- cycle ;
- path d ; d := ulcorner p -- urcorner p --
- urcorner q -- ulcorner q -- cycle ;
- path e ; e := llcorner p -- lrcorner p --
- urcorner p -- urcorner q --
- lrcorner q -- llcorner q -- cycle ;
- path f ; f := llcorner p -- ulcorner p --
- urcorner p -- urcorner q --
- ulcorner q -- llcorner q -- cycle ;
-
- linecap := butt ; pickup pencircle scaled back_linewidth ;
-
- if back_type=1 :
-
- fill p withcolor back_fillcolor ;
- fill a withcolor back_leftcolor ;
- fill b withcolor back_bottomcolor ;
- fill c withcolor back_rightcolor ;
- fill d withcolor back_topcolor ;
- draw a withcolor back_linecolor ;
- draw d withcolor back_linecolor ;
- draw b withcolor back_linecolor ;
- draw c withcolor back_linecolor ;
-
- elseif back_type=2 :
-
- fill p withcolor back_fillcolor ;
- fill e withcolor back_bottomcolor ;
- fill f withcolor back_topcolor ;
- draw e withcolor back_linecolor ;
- draw f withcolor back_linecolor ;
-
- elseif back_type=3 :
-
- fill v withcolor back_nillcolor ;
- fill w withcolor back_fillcolor ;
- draw v withcolor back_linecolor ;
- draw w withcolor back_linecolor ;
-
- elseif back_type=4 :
-
- fill t withcolor back_nillcolor ;
- fill u withcolor back_fillcolor ;
- draw t withcolor back_linecolor ;
- draw u withcolor back_linecolor ;
-
- elseif back_type=5 :
-
- t := t rotatedaround(center t,180) ;
- u := u rotatedaround(center u,180) ;
-
- fill t withcolor back_nillcolor ;
- fill u withcolor back_fillcolor ;
- draw t withcolor back_linecolor ;
- draw u withcolor back_linecolor ;
-
- elseif back_type=6 :
-
- r := r rotatedaround(center r,180) ;
- s := s rotatedaround(center s,180) ;
-
- fill r withcolor back_nillcolor ;
- fill s withcolor back_fillcolor ;
- draw r withcolor back_linecolor ;
- draw s withcolor back_linecolor ;
-
- elseif back_type=7 :
-
- fill r withcolor back_nillcolor ;
- fill s withcolor back_fillcolor ;
- draw r withcolor back_linecolor ;
- draw s withcolor back_linecolor ;
-
-fi ;
-
+ begingroup ;
+
+ save ww, hh, dd, p, q, r, s, t, u, v, w, a, c, d, e, f, back_nillcolor ;
+
+ numeric ww ; ww := back_width ;
+ numeric hh ; hh := back_height ;
+ numeric dd ; dd := back_delta ;
+
+ color back_nillcolor ; back_nillcolor := back_topcolor ;
+
+ path p ; p := fullsquare xscaled ww yscaled hh ;
+ path q ; q := fullsquare xscaled (ww-2dd) yscaled (hh-2dd) ;
+ path r ; r := llcorner p --
+ lrcorner p shifted (-3dd,0) .. controls lrcorner p ..
+ lrcorner p shifted (0, 3dd) --
+ urcorner p shifted (0,-3dd) .. controls urcorner p ..
+ urcorner p shifted (-3dd,0) --
+ ulcorner p -- cycle ;
+ path s ; s := r xscaled ((ww-2dd)/ww) yscaled ((hh-2dd)/hh) ;
+ path t ; t := llcorner p --
+ lrcorner p --
+ urcorner p shifted (0,-3dd) .. controls urcorner p ..
+ urcorner p shifted (-3dd,0) --
+ ulcorner p shifted ( 3dd,0) .. controls ulcorner p ..
+ ulcorner p shifted (0,-3dd) --
+ llcorner p -- cycle ;
+ path u ; u := t xscaled ((ww-2dd)/ww) yscaled ((hh-2dd)/hh) ;
+ path v ; v := llcorner p shifted ( 3dd,0) --
+ lrcorner p shifted (-3dd,0) .. controls lrcorner p ..
+ lrcorner p shifted (0, 3dd) --
+ urcorner p shifted (0,-3dd) .. controls urcorner p ..
+ urcorner p shifted (-3dd,0) --
+ ulcorner p shifted ( 3dd,0) .. controls ulcorner p ..
+ ulcorner p shifted (0,-3dd) ..
+ llcorner p shifted (0, 3dd) .. controls llcorner p .. cycle ; % {down} .. cycle ;
+ path w ; w := t xscaled ((ww-2dd)/ww) yscaled ((hh-2dd)/hh) ;
+ path a ; a := llcorner p -- ulcorner p --
+ ulcorner q -- llcorner q -- cycle ;
+ path b ; b := llcorner p -- lrcorner p --
+ lrcorner q -- llcorner q -- cycle ;
+ path c ; c := lrcorner p -- urcorner p --
+ urcorner q -- lrcorner q -- cycle ;
+ path d ; d := ulcorner p -- urcorner p --
+ urcorner q -- ulcorner q -- cycle ;
+ path e ; e := llcorner p -- lrcorner p --
+ urcorner p -- urcorner q --
+ lrcorner q -- llcorner q -- cycle ;
+ path f ; f := llcorner p -- ulcorner p --
+ urcorner p -- urcorner q --
+ ulcorner q -- llcorner q -- cycle ;
+
+ linecap := butt ; pickup pencircle scaled back_linewidth ;
+
+ if back_type = 1 :
+
+ fill p withcolor back_fillcolor ;
+ fill a withcolor back_leftcolor ;
+ fill b withcolor back_bottomcolor ;
+ fill c withcolor back_rightcolor ;
+ fill d withcolor back_topcolor ;
+ draw a withcolor back_linecolor ;
+ draw d withcolor back_linecolor ;
+ draw b withcolor back_linecolor ;
+ draw c withcolor back_linecolor ;
+
+ elseif back_type = 2 :
+
+ fill p withcolor back_fillcolor ;
+ fill e withcolor back_bottomcolor ;
+ fill f withcolor back_topcolor ;
+ draw e withcolor back_linecolor ;
+ draw f withcolor back_linecolor ;
+
+ elseif back_type = 3 :
+
+ fill v withcolor back_nillcolor ;
+ fill w withcolor back_fillcolor ;
+ draw v withcolor back_linecolor ;
+ draw w withcolor back_linecolor ;
+
+ elseif back_type = 4 :
+
+ fill t withcolor back_nillcolor ;
+ fill u withcolor back_fillcolor ;
+ draw t withcolor back_linecolor ;
+ draw u withcolor back_linecolor ;
+
+ elseif back_type = 5 :
+
+ t := t rotatedaround(center t,180) ;
+ u := u rotatedaround(center u,180) ;
+
+ fill t withcolor back_nillcolor ;
+ fill u withcolor back_fillcolor ;
+ draw t withcolor back_linecolor ;
+ draw u withcolor back_linecolor ;
+
+ elseif back_type = 6 :
+
+ r := r rotatedaround(center r,180) ;
+ s := s rotatedaround(center s,180) ;
+
+ fill r withcolor back_nillcolor ;
+ fill s withcolor back_fillcolor ;
+ draw r withcolor back_linecolor ;
+ draw s withcolor back_linecolor ;
+
+ elseif back_type = 7 :
+
+ fill r withcolor back_nillcolor ;
+ fill s withcolor back_fillcolor ;
+ draw r withcolor back_linecolor ;
+ draw s withcolor back_linecolor ;
+
+ fi ;
+
+ endgroup;
enddef ;
-endinput ;
-
-beginfig (1) ;
-
-some_double_back (1, 4.5cm, 1.5cm, .25cm, 1mm,
- .5white, .8white, .7white, .6white, .7white, .6white)
-
-currentpicture := currentpicture shifted (0,-3cm) ;
-
-some_double_back (2, 4.5cm, 1.5cm, .25cm, 1mm,
- .5white, .8white, .7white, .6white, white, white)
-
-currentpicture := currentpicture shifted (0,-3cm) ;
-
-some_double_back (3, 4.5cm, 1.5cm, .25cm, 1mm,
- .5white, .8white, .7white, white, white, white)
-
-currentpicture := currentpicture shifted (0,-3cm) ;
-
-some_double_back (4, 4.5cm, 1.5cm, .25cm, 1mm,
- .5white, .8white, .7white, white, white, white)
-
-currentpicture := currentpicture shifted (0,-3cm) ;
-
-some_double_back (5, 4.5cm, 1.5cm, .25cm, 1mm,
- .5white, .8white, .7white, white, white, white)
-
-currentpicture := currentpicture shifted (0,-3cm) ;
-
-some_double_back (6, 4.5cm, 1.5cm, .25cm, 1mm,
- .5white, .8white, .7white, white, white, white)
-
-currentpicture := currentpicture shifted (0,-3cm) ;
-
-some_double_back (7, 4.5cm, 1.5cm, .25cm, 1mm,
- .5white, .8white, .7white, white, white, white)
-
-currentpicture := currentpicture shifted (0,-3cm) ;
-
-some_double_back (8, 4.5cm, 1.5cm, .25cm, 1mm,
- .5white, .8white, .7white, white, white, white)
-
-endfig ;
-
-end .
+% endinput ;
+%
+% beginfig (1) ;
+%
+% some_double_back (1, 4.5cm, 1.5cm, .25cm, 1mm,
+% .5white, .8white, .7white, .6white, .7white, .6white)
+%
+% currentpicture := currentpicture shifted (0,-3cm) ;
+%
+% some_double_back (2, 4.5cm, 1.5cm, .25cm, 1mm,
+% .5white, .8white, .7white, .6white, white, white)
+%
+% currentpicture := currentpicture shifted (0,-3cm) ;
+%
+% some_double_back (3, 4.5cm, 1.5cm, .25cm, 1mm,
+% .5white, .8white, .7white, white, white, white)
+%
+% currentpicture := currentpicture shifted (0,-3cm) ;
+%
+% some_double_back (4, 4.5cm, 1.5cm, .25cm, 1mm,
+% .5white, .8white, .7white, white, white, white)
+%
+% currentpicture := currentpicture shifted (0,-3cm) ;
+%
+% some_double_back (5, 4.5cm, 1.5cm, .25cm, 1mm,
+% .5white, .8white, .7white, white, white, white)
+%
+% currentpicture := currentpicture shifted (0,-3cm) ;
+%
+% some_double_back (6, 4.5cm, 1.5cm, .25cm, 1mm,
+% .5white, .8white, .7white, white, white, white)
+%
+% currentpicture := currentpicture shifted (0,-3cm) ;
+%
+% some_double_back (7, 4.5cm, 1.5cm, .25cm, 1mm,
+% .5white, .8white, .7white, white, white, white)
+%
+% currentpicture := currentpicture shifted (0,-3cm) ;
+%
+% some_double_back (8, 4.5cm, 1.5cm, .25cm, 1mm,
+% .5white, .8white, .7white, white, white, white)
+%
+% endfig ;
+%
+% end .
diff --git a/metapost/context/base/mpiv/mp-tool.mpiv b/metapost/context/base/mpiv/mp-tool.mpiv
index e2f59acfd..c84d8dfbf 100644
--- a/metapost/context/base/mpiv/mp-tool.mpiv
+++ b/metapost/context/base/mpiv/mp-tool.mpiv
@@ -3356,7 +3356,7 @@ let dump = relax ;
def loadmodule expr name = % no vardef
% input can't be used directly in a macro
- if unknown scantokens("context_" & name) :
+ if (unknown scantokens("context_" & name)) and (unknown scantokens("metafun_loaded_" & name)) :
save s ; string s ;
% s := "mp-" & name & ".mpiv" ;
% message("loading module",s) ;
diff --git a/metapost/context/base/mpxl/mp-text.mpxl b/metapost/context/base/mpxl/mp-text.mpxl
index 02dbf4479..c18647baf 100644
--- a/metapost/context/base/mpxl/mp-text.mpxl
+++ b/metapost/context/base/mpxl/mp-text.mpxl
@@ -40,13 +40,14 @@ def lmt_parshape = applyparameters "parshape" "lmt_do_parshape" enddef ;
def lmt_do_parshape = % todo: check and improve this rather oldie
- begingroup ;
+ begingroup ; pushparameters "parshape" ;
save
p, q, l, r, line, tt, bb,
dx, dy, baselineskip, strutheight, strutdepth, topskip, bottomskip, offset, trace,
n, hsize, vsize, vvsize, voffset, hoffset, width, indent,
- ll, lll, rr, rrr, cp, cq, t, b ;
+ ll, lll, rr, rrr, cp, cq, t, b,
+ found_point ;
path
p, q, l, r, line, tt, bb ;
@@ -60,24 +61,24 @@ def lmt_do_parshape = % todo: check and improve this rather oldie
% specification:
- p := getparameterdefault "parshape" "path" fullsquare ;
- dx := getparameterdefault "parshape" "dx" 0 ;
- dy := getparameterdefault "parshape" "dy" 0 ;
- baselineskip := getparameterdefault "parshape" "baselineskip" LineHeight ;
- strutheight := getparameterdefault "parshape" "strutheight" StrutHeight ;
- strutdepth := getparameterdefault "parshape" "strutdepth" StrutDepth ;
- topskip := getparameterdefault "parshape" "topskip" StrutHeight ;
- bottomskip := getparameterdefault "parshape" "bottomskip" 0 ;
- offset := getparameterdefault "parshape" "offset" 0 ;
- trace := getparameterdefault "parshape" "trace" false ;
+ p := getparameterdefault "path" fullsquare ;
+ dx := getparameterdefault "dx" 0 ;
+ dy := getparameterdefault "dy" 0 ;
+ baselineskip := getparameterdefault "baselineskip" LineHeight ;
+ strutheight := getparameterdefault "strutheight" StrutHeight ;
+ strutdepth := getparameterdefault "strutdepth" StrutDepth ;
+ topskip := getparameterdefault "topskip" StrutHeight ;
+ bottomskip := getparameterdefault "bottomskip" 0 ;
+ offset := getparameterdefault "offset" 0 ;
+ trace := getparameterdefault "trace" false ;
%
n := 0 ;
cp := center p ;
- if hasparameter "parshape" "offsetpath" :
- q := getparameter "parshape" "offsetpath" ;
+ if hasparameter "offsetpath" :
+ q := getparameter "offsetpath" ;
voffset := dy ;
hoffset := dx ;
else :
@@ -98,7 +99,7 @@ def lmt_do_parshape = % todo: check and improve this rather oldie
runscript mfid_setparshapeproperty "width" hsize ;
runscript mfid_setparshapeproperty "height" vsize ;
- if not path offset_or_path :
+ if hasparameter "offsetpath" :
q := q xscaled ((hsize-2hoffset)/hsize) yscaled ((vsize-2voffset)/vsize) ;
fi ;
@@ -119,7 +120,7 @@ def lmt_do_parshape = % todo: check and improve this rather oldie
r := r if xpart point 0 of q > 0 : & q fi cutafter t ;
vardef found_point (expr lin, pat, sig) =
- pair a, b ;
+ save a, b; pair a, b ;
a := pat intersection_point (lin shifted (0,strutheight)) ;
if intersection_found :
a := a shifted (0,-strutheight) ;
@@ -183,7 +184,7 @@ def lmt_do_parshape = % todo: check and improve this rather oldie
runscript mfid_setparshapeproperty "line" i (indent[i]) (width[i]) ;
endfor ;
- endgroup ;
+ popparameters ; endgroup ;
enddef ;
diff --git a/metapost/context/base/mpxl/mp-tool.mpxl b/metapost/context/base/mpxl/mp-tool.mpxl
index d5da026f2..7ea222fb0 100644
--- a/metapost/context/base/mpxl/mp-tool.mpxl
+++ b/metapost/context/base/mpxl/mp-tool.mpxl
@@ -3464,8 +3464,8 @@ let dump = relax ;
def loadmodule expr name = % no vardef
% input can't be used directly in a macro
- if (unknown scantokens("context_" & name)) and (unknown scantokens("metafun_loaded" & name)) :
- save s ; string s ; s := "input " & ditto & "mp-" & name & ditto;
+ if (unknown scantokens("context_" & name)) and (unknown scantokens("metafun_loaded_" & name)) :
+ save s ; string s ; s := "input " & ditto & "mp-" & name & ditto & ";" ;
expandafter scantokens expandafter s
fi ;
enddef ;
diff --git a/scripts/context/lua/mtx-context.lua b/scripts/context/lua/mtx-context.lua
index 38cb363bf..5b159158c 100644
--- a/scripts/context/lua/mtx-context.lua
+++ b/scripts/context/lua/mtx-context.lua
@@ -11,7 +11,7 @@ if not modules then modules = { } end modules['mtx-context'] = {
local type, next, tostring, tonumber = type, next, tostring, tonumber
local format, gmatch, match, gsub, find = string.format, string.gmatch, string.match, string.gsub, string.find
-local quote, validstring = string.quote, string.valid
+local quote, validstring, splitstring = string.quote, string.valid, string.split
local sort, concat, insert, sortedhash, tohash = table.sort, table.concat, table.insert, table.sortedhash, table.tohash
local settings_to_array = utilities.parsers.settings_to_array
local appendtable = table.append
@@ -1435,6 +1435,67 @@ function scripts.context.touch()
end
end
+function scripts.context.pages()
+ local filename = environment.files[1]
+ if filename then
+ local u = table.load(file.addsuffix(filename,"tuc"))
+ if u then
+ local p = u.structures.pages.collected
+ local l = u.structures.lists.collected
+ local page = environment.arguments.page
+ local list = environment.arguments.list
+ if type(page) == "string" then
+ page = settings_to_array(page)
+ end
+ if type(list) == "string" then
+ list = settings_to_array(list)
+ end
+ if page or list then
+ if page then
+ for i=1,#p do
+ local pi = p[i]
+ local m = pi.marked
+ if m then
+ for j=1,#page do
+ local n = page[j]
+ if table.contains(m,n) then
+ report("page : %04i %s",i,n)
+ end
+ end
+ end
+ end
+ end
+ if list then
+ for i=1,#l do
+ local li = l[i]
+ local r = li.references
+ if r then
+ local rr = r.reference
+ if rr then
+ rr = splitstring(rr,",")
+ for j=1,#list do
+ local n = list[j]
+ if table.contains(rr,n) then
+ report("list : %04i %s",r.realpage,n)
+ end
+ end
+ end
+ end
+ end
+ end
+ else
+ for i=1,#p do
+ local pi = p[i]
+ local m = pi.marked
+ if m then
+ report("page : %04i % t",i,m)
+ end
+ end
+ end
+ end
+ end
+end
+
-- modules
local labels = { "title", "comment", "status" }
@@ -1661,6 +1722,8 @@ elseif getargument("version") then
scripts.context.version()
elseif getargument("touch") then
scripts.context.touch()
+elseif getargument("pages") then
+ scripts.context.pages()
elseif getargument("expert") then
application.help("expert", "special")
elseif getargument("showmodules") or getargument("modules") then
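The hunks above add the --pages action to the context runner: it loads the job's
.tuc utility file and reports the page numbers of named (marked) pages, optionally
filtered by --page=... or, for list references, by --list=... . Below is a minimal
standalone sketch of the same lookup, only as an illustration: it assumes a
hypothetical job "somefile" whose somefile.tuc was produced by an earlier run, and
that the ConTeXt Lua helpers (table.load from l-table, as used above) are loaded,
for instance when run under mtxrun.

    -- sketch only, not part of the commit: mirror what scripts.context.pages() reports
    -- assumptions: "somefile.tuc" exists and the ConTeXt helper table.load is available
    local u = table.load("somefile.tuc")
    if u then
        local pages = u.structures.pages.collected
        for i=1,#pages do
            local marked = pages[i].marked           -- names attached to this page, if any
            if marked then
                print(string.format("page %04d : %s",i,table.concat(marked,",")))
            end
        end
    end

From the command line the same information is requested with "context --pages
somefile", optionally narrowed down with --page=... or --list=... as documented in
the help texts earlier in this commit.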
diff --git a/scripts/context/lua/mtx-context.xml b/scripts/context/lua/mtx-context.xml
index 40751c613..98a75244b 100644
--- a/scripts/context/lua/mtx-context.xml
+++ b/scripts/context/lua/mtx-context.xml
@@ -112,6 +112,11 @@
</flag>
</subcategory>
<subcategory>
+ <flag name="pages">
+ <short>report pagenumbers of named pages and list references (<ref name="page"/>=... or <ref name="list"/>=...)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
<flag name="jit">
<short>use luajittex with jit turned off (only use the faster virtual machine)</short>
</flag>
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 4d176eea7..6fc17adb0 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -849,7 +849,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-package"] = package.loaded["l-package"] or true
--- original size: 11969, stripped down to: 8501
+-- original size: 12566, stripped down to: 8937
if not modules then modules={} end modules ['l-package']={
version=1.001,
@@ -858,7 +858,7 @@ if not modules then modules={} end modules ['l-package']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local type=type
+local type,unpack=type,unpack
local gsub,format,find=string.gsub,string.format,string.find
local insert,remove=table.insert,table.remove
local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
@@ -888,6 +888,7 @@ local helpers=package.helpers or {
},
methods={},
sequence={
+ "reset loaded",
"already loaded",
"preload table",
"qualified path",
@@ -904,6 +905,7 @@ local methods=helpers.methods
local builtin=helpers.builtin
local extraluapaths={}
local extralibpaths={}
+local checkedfiles={}
local luapaths=nil
local libpaths=nil
local oldluapath=nil
@@ -1037,10 +1039,16 @@ end
local function loadedaslib(resolved,rawname)
local base=gsub(rawname,"%.","_")
local init="luaopen_"..gsub(base,"%.","_")
+ local data={ resolved,init,"" }
+ checkedfiles[#checkedfiles+1]=data
if helpers.trace then
helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
end
- return package.loadlib(resolved,init)
+ local a,b,c=package.loadlib(resolved,init)
+ if not a and type(b)=="string" then
+ data[3]=string.fullstrip(b or "unknown error")
+ end
+ return a,b,c
end
helpers.loadedaslib=loadedaslib
local function loadedbypath(name,rawname,paths,islib,what)
@@ -1079,6 +1087,10 @@ local function loadedbyname(name,rawname)
end
end
helpers.loadedbyname=loadedbyname
+methods["reset loaded"]=function(name)
+ checkedfiles={}
+ return false
+end
methods["already loaded"]=function(name)
return package.loaded[name]
end
@@ -1120,6 +1132,9 @@ end
methods["not loaded"]=function(name)
if helpers.trace then
helpers.report("unable to locate '%s'",name or "?")
+ for i=1,#checkedfiles do
+ helpers.report("checked file '%s', initializer '%s', message '%s'",unpack(checkedfiles[i]))
+ end
end
return nil
end
@@ -11800,7 +11815,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-soc-imp-http"] = package.loaded["util-soc-imp-http"] or true
--- original size: 12577, stripped down to: 9577
+-- original size: 12624, stripped down to: 9598
local tostring,tonumber,setmetatable,next,type=tostring,tonumber,setmetatable,next,type
@@ -11845,7 +11860,7 @@ local function receiveheaders(sock,headers)
if not headers then
headers={}
end
- local line,err=sock:receive()
+ local line,err=sock:receive("*l")
if err then
return nil,err
end
@@ -11855,13 +11870,13 @@ local function receiveheaders(sock,headers)
return nil,"malformed reponse headers"
end
name=lower(name)
- line,err=sock:receive()
+ line,err=sock:receive("*l")
if err then
return nil,err
end
while find(line,"^%s") do
value=value..line
- line=sock:receive()
+ line=sock:receive("*l")
if err then
return nil,err
end
@@ -11881,7 +11896,7 @@ socket.sourcet["http-chunked"]=function(sock,headers)
dirty=function() return sock:dirty() end,
},{
__call=function()
- local line,err=sock:receive()
+ local line,err=sock:receive("*l")
if err then
return nil,err
end
@@ -11892,7 +11907,7 @@ socket.sourcet["http-chunked"]=function(sock,headers)
if size>0 then
local chunk,err,part=sock:receive(size)
if chunk then
- sock:receive()
+ sock:receive("*a")
end
return chunk,err
else
@@ -25827,8 +25842,8 @@ end -- of closure
-- used libraries : l-bit32.lua l-lua.lua l-macro.lua l-sandbox.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-sha.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua util-soc-imp-reset.lua util-soc-imp-socket.lua util-soc-imp-copas.lua util-soc-imp-ltn12.lua util-soc-imp-mime.lua util-soc-imp-url.lua util-soc-imp-headers.lua util-soc-imp-tp.lua util-soc-imp-http.lua util-soc-imp-ftp.lua util-soc-imp-smtp.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-tpl.lua util-sbx.lua util-mrg.lua util-env.lua luat-env.lua util-zip.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua libs-ini.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 1025303
--- stripped bytes : 405465
+-- original bytes : 1025947
+-- stripped bytes : 405652
-- end library merge
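The l-package changes merged above (and repeated in the mtxrun stubs below) make
library loading easier to debug: every candidate file passed to package.loadlib is
recorded in checkedfiles together with its luaopen_* initializer, and when a module
cannot be located the "not loaded" method reports that list, including the loadlib
error message. A hedged sketch of how the extra diagnostics would show up, assuming
the helpers table is reachable as package.helpers and using a deliberately
nonexistent module name:

    -- sketch only, not part of the commit: enable loader tracing to see the new reports
    -- assumption: the l-package helpers table is exposed as package.helpers
    package.helpers.trace = true
    local ok, err = pcall(require, "nosuchlibrary")  -- hypothetical, nonexistent module
    -- on failure, "unable to locate ..." is now followed by one line per checked file,
    -- giving the resolved path, the initializer name, and the loadlib message

This only affects tracing output; the loader sequence itself is unchanged apart from
the new "reset loaded" step that clears the bookkeeping at the start of each lookup.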
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 4d176eea7..6fc17adb0 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -849,7 +849,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-package"] = package.loaded["l-package"] or true
--- original size: 11969, stripped down to: 8501
+-- original size: 12566, stripped down to: 8937
if not modules then modules={} end modules ['l-package']={
version=1.001,
@@ -858,7 +858,7 @@ if not modules then modules={} end modules ['l-package']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local type=type
+local type,unpack=type,unpack
local gsub,format,find=string.gsub,string.format,string.find
local insert,remove=table.insert,table.remove
local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
@@ -888,6 +888,7 @@ local helpers=package.helpers or {
},
methods={},
sequence={
+ "reset loaded",
"already loaded",
"preload table",
"qualified path",
@@ -904,6 +905,7 @@ local methods=helpers.methods
local builtin=helpers.builtin
local extraluapaths={}
local extralibpaths={}
+local checkedfiles={}
local luapaths=nil
local libpaths=nil
local oldluapath=nil
@@ -1037,10 +1039,16 @@ end
local function loadedaslib(resolved,rawname)
local base=gsub(rawname,"%.","_")
local init="luaopen_"..gsub(base,"%.","_")
+ local data={ resolved,init,"" }
+ checkedfiles[#checkedfiles+1]=data
if helpers.trace then
helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
end
- return package.loadlib(resolved,init)
+ local a,b,c=package.loadlib(resolved,init)
+ if not a and type(b)=="string" then
+ data[3]=string.fullstrip(b or "unknown error")
+ end
+ return a,b,c
end
helpers.loadedaslib=loadedaslib
local function loadedbypath(name,rawname,paths,islib,what)
@@ -1079,6 +1087,10 @@ local function loadedbyname(name,rawname)
end
end
helpers.loadedbyname=loadedbyname
+methods["reset loaded"]=function(name)
+ checkedfiles={}
+ return false
+end
methods["already loaded"]=function(name)
return package.loaded[name]
end
@@ -1120,6 +1132,9 @@ end
methods["not loaded"]=function(name)
if helpers.trace then
helpers.report("unable to locate '%s'",name or "?")
+ for i=1,#checkedfiles do
+ helpers.report("checked file '%s', initializer '%s', message '%s'",unpack(checkedfiles[i]))
+ end
end
return nil
end
@@ -11800,7 +11815,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-soc-imp-http"] = package.loaded["util-soc-imp-http"] or true
--- original size: 12577, stripped down to: 9577
+-- original size: 12624, stripped down to: 9598
local tostring,tonumber,setmetatable,next,type=tostring,tonumber,setmetatable,next,type
@@ -11845,7 +11860,7 @@ local function receiveheaders(sock,headers)
if not headers then
headers={}
end
- local line,err=sock:receive()
+ local line,err=sock:receive("*l")
if err then
return nil,err
end
@@ -11855,13 +11870,13 @@ local function receiveheaders(sock,headers)
return nil,"malformed reponse headers"
end
name=lower(name)
- line,err=sock:receive()
+ line,err=sock:receive("*l")
if err then
return nil,err
end
while find(line,"^%s") do
value=value..line
- line=sock:receive()
+ line=sock:receive("*l")
if err then
return nil,err
end
@@ -11881,7 +11896,7 @@ socket.sourcet["http-chunked"]=function(sock,headers)
dirty=function() return sock:dirty() end,
},{
__call=function()
- local line,err=sock:receive()
+ local line,err=sock:receive("*l")
if err then
return nil,err
end
@@ -11892,7 +11907,7 @@ socket.sourcet["http-chunked"]=function(sock,headers)
if size>0 then
local chunk,err,part=sock:receive(size)
if chunk then
- sock:receive()
+ sock:receive("*a")
end
return chunk,err
else
@@ -25827,8 +25842,8 @@ end -- of closure
-- used libraries : l-bit32.lua l-lua.lua l-macro.lua l-sandbox.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-sha.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua util-soc-imp-reset.lua util-soc-imp-socket.lua util-soc-imp-copas.lua util-soc-imp-ltn12.lua util-soc-imp-mime.lua util-soc-imp-url.lua util-soc-imp-headers.lua util-soc-imp-tp.lua util-soc-imp-http.lua util-soc-imp-ftp.lua util-soc-imp-smtp.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-tpl.lua util-sbx.lua util-mrg.lua util-env.lua luat-env.lua util-zip.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua libs-ini.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 1025303
--- stripped bytes : 405465
+-- original bytes : 1025947
+-- stripped bytes : 405652
-- end library merge
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 4d176eea7..6fc17adb0 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -849,7 +849,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-package"] = package.loaded["l-package"] or true
--- original size: 11969, stripped down to: 8501
+-- original size: 12566, stripped down to: 8937
if not modules then modules={} end modules ['l-package']={
version=1.001,
@@ -858,7 +858,7 @@ if not modules then modules={} end modules ['l-package']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local type=type
+local type,unpack=type,unpack
local gsub,format,find=string.gsub,string.format,string.find
local insert,remove=table.insert,table.remove
local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
@@ -888,6 +888,7 @@ local helpers=package.helpers or {
},
methods={},
sequence={
+ "reset loaded",
"already loaded",
"preload table",
"qualified path",
@@ -904,6 +905,7 @@ local methods=helpers.methods
local builtin=helpers.builtin
local extraluapaths={}
local extralibpaths={}
+local checkedfiles={}
local luapaths=nil
local libpaths=nil
local oldluapath=nil
@@ -1037,10 +1039,16 @@ end
local function loadedaslib(resolved,rawname)
local base=gsub(rawname,"%.","_")
local init="luaopen_"..gsub(base,"%.","_")
+ local data={ resolved,init,"" }
+ checkedfiles[#checkedfiles+1]=data
if helpers.trace then
helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
end
- return package.loadlib(resolved,init)
+ local a,b,c=package.loadlib(resolved,init)
+ if not a and type(b)=="string" then
+ data[3]=string.fullstrip(b or "unknown error")
+ end
+ return a,b,c
end
helpers.loadedaslib=loadedaslib
local function loadedbypath(name,rawname,paths,islib,what)
@@ -1079,6 +1087,10 @@ local function loadedbyname(name,rawname)
end
end
helpers.loadedbyname=loadedbyname
+methods["reset loaded"]=function(name)
+ checkedfiles={}
+ return false
+end
methods["already loaded"]=function(name)
return package.loaded[name]
end
@@ -1120,6 +1132,9 @@ end
methods["not loaded"]=function(name)
if helpers.trace then
helpers.report("unable to locate '%s'",name or "?")
+ for i=1,#checkedfiles do
+ helpers.report("checked file '%s', initializer '%s', message '%s'",unpack(checkedfiles[i]))
+ end
end
return nil
end
@@ -11800,7 +11815,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-soc-imp-http"] = package.loaded["util-soc-imp-http"] or true
--- original size: 12577, stripped down to: 9577
+-- original size: 12624, stripped down to: 9598
local tostring,tonumber,setmetatable,next,type=tostring,tonumber,setmetatable,next,type
@@ -11845,7 +11860,7 @@ local function receiveheaders(sock,headers)
if not headers then
headers={}
end
- local line,err=sock:receive()
+ local line,err=sock:receive("*l")
if err then
return nil,err
end
@@ -11855,13 +11870,13 @@ local function receiveheaders(sock,headers)
return nil,"malformed reponse headers"
end
name=lower(name)
- line,err=sock:receive()
+ line,err=sock:receive("*l")
if err then
return nil,err
end
while find(line,"^%s") do
value=value..line
- line=sock:receive()
+ line=sock:receive("*l")
if err then
return nil,err
end
@@ -11881,7 +11896,7 @@ socket.sourcet["http-chunked"]=function(sock,headers)
dirty=function() return sock:dirty() end,
},{
__call=function()
- local line,err=sock:receive()
+ local line,err=sock:receive("*l")
if err then
return nil,err
end
@@ -11892,7 +11907,7 @@ socket.sourcet["http-chunked"]=function(sock,headers)
if size>0 then
local chunk,err,part=sock:receive(size)
if chunk then
- sock:receive()
+ sock:receive("*a")
end
return chunk,err
else
@@ -25827,8 +25842,8 @@ end -- of closure
-- used libraries : l-bit32.lua l-lua.lua l-macro.lua l-sandbox.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-sha.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua util-soc-imp-reset.lua util-soc-imp-socket.lua util-soc-imp-copas.lua util-soc-imp-ltn12.lua util-soc-imp-mime.lua util-soc-imp-url.lua util-soc-imp-headers.lua util-soc-imp-tp.lua util-soc-imp-http.lua util-soc-imp-ftp.lua util-soc-imp-smtp.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-tpl.lua util-sbx.lua util-mrg.lua util-env.lua luat-env.lua util-zip.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua libs-ini.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 1025303
--- stripped bytes : 405465
+-- original bytes : 1025947
+-- stripped bytes : 405652
-- end library merge
diff --git a/scripts/context/stubs/win64/mtxrun.lua b/scripts/context/stubs/win64/mtxrun.lua
index 4d176eea7..6fc17adb0 100644
--- a/scripts/context/stubs/win64/mtxrun.lua
+++ b/scripts/context/stubs/win64/mtxrun.lua
@@ -849,7 +849,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-package"] = package.loaded["l-package"] or true
--- original size: 11969, stripped down to: 8501
+-- original size: 12566, stripped down to: 8937
if not modules then modules={} end modules ['l-package']={
version=1.001,
@@ -858,7 +858,7 @@ if not modules then modules={} end modules ['l-package']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local type=type
+local type,unpack=type,unpack
local gsub,format,find=string.gsub,string.format,string.find
local insert,remove=table.insert,table.remove
local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
@@ -888,6 +888,7 @@ local helpers=package.helpers or {
},
methods={},
sequence={
+ "reset loaded",
"already loaded",
"preload table",
"qualified path",
@@ -904,6 +905,7 @@ local methods=helpers.methods
local builtin=helpers.builtin
local extraluapaths={}
local extralibpaths={}
+local checkedfiles={}
local luapaths=nil
local libpaths=nil
local oldluapath=nil
@@ -1037,10 +1039,16 @@ end
local function loadedaslib(resolved,rawname)
local base=gsub(rawname,"%.","_")
local init="luaopen_"..gsub(base,"%.","_")
+ local data={ resolved,init,"" }
+ checkedfiles[#checkedfiles+1]=data
if helpers.trace then
helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
end
- return package.loadlib(resolved,init)
+ local a,b,c=package.loadlib(resolved,init)
+ if not a and type(b)=="string" then
+ data[3]=string.fullstrip(b or "unknown error")
+ end
+ return a,b,c
end
helpers.loadedaslib=loadedaslib
local function loadedbypath(name,rawname,paths,islib,what)
@@ -1079,6 +1087,10 @@ local function loadedbyname(name,rawname)
end
end
helpers.loadedbyname=loadedbyname
+methods["reset loaded"]=function(name)
+ checkedfiles={}
+ return false
+end
methods["already loaded"]=function(name)
return package.loaded[name]
end
@@ -1120,6 +1132,9 @@ end
methods["not loaded"]=function(name)
if helpers.trace then
helpers.report("unable to locate '%s'",name or "?")
+ for i=1,#checkedfiles do
+ helpers.report("checked file '%s', initializer '%s', message '%s'",unpack(checkedfiles[i]))
+ end
end
return nil
end
@@ -11800,7 +11815,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-soc-imp-http"] = package.loaded["util-soc-imp-http"] or true
--- original size: 12577, stripped down to: 9577
+-- original size: 12624, stripped down to: 9598
local tostring,tonumber,setmetatable,next,type=tostring,tonumber,setmetatable,next,type
@@ -11845,7 +11860,7 @@ local function receiveheaders(sock,headers)
if not headers then
headers={}
end
- local line,err=sock:receive()
+ local line,err=sock:receive("*l")
if err then
return nil,err
end
@@ -11855,13 +11870,13 @@ local function receiveheaders(sock,headers)
return nil,"malformed reponse headers"
end
name=lower(name)
- line,err=sock:receive()
+ line,err=sock:receive("*l")
if err then
return nil,err
end
while find(line,"^%s") do
value=value..line
- line=sock:receive()
+ line=sock:receive("*l")
if err then
return nil,err
end
@@ -11881,7 +11896,7 @@ socket.sourcet["http-chunked"]=function(sock,headers)
dirty=function() return sock:dirty() end,
},{
__call=function()
- local line,err=sock:receive()
+ local line,err=sock:receive("*l")
if err then
return nil,err
end
@@ -11892,7 +11907,7 @@ socket.sourcet["http-chunked"]=function(sock,headers)
if size>0 then
local chunk,err,part=sock:receive(size)
if chunk then
- sock:receive()
+ sock:receive("*a")
end
return chunk,err
else
@@ -25827,8 +25842,8 @@ end -- of closure
-- used libraries : l-bit32.lua l-lua.lua l-macro.lua l-sandbox.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-sha.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua util-soc-imp-reset.lua util-soc-imp-socket.lua util-soc-imp-copas.lua util-soc-imp-ltn12.lua util-soc-imp-mime.lua util-soc-imp-url.lua util-soc-imp-headers.lua util-soc-imp-tp.lua util-soc-imp-http.lua util-soc-imp-ftp.lua util-soc-imp-smtp.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-tpl.lua util-sbx.lua util-mrg.lua util-env.lua luat-env.lua util-zip.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua libs-ini.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 1025303
--- stripped bytes : 405465
+-- original bytes : 1025947
+-- stripped bytes : 405652
-- end library merge
diff --git a/tex/context/base/mkii/cont-new.mkii b/tex/context/base/mkii/cont-new.mkii
index cbbe223a9..892644485 100644
--- a/tex/context/base/mkii/cont-new.mkii
+++ b/tex/context/base/mkii/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2021.03.02 19:17}
+\newcontextversion{2021.03.05 11:13}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/mkii/context.mkii b/tex/context/base/mkii/context.mkii
index 88620fc90..beba48148 100644
--- a/tex/context/base/mkii/context.mkii
+++ b/tex/context/base/mkii/context.mkii
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2021.03.02 19:17}
+\edef\contextversion{2021.03.05 11:13}
%D For those who want to use this:
diff --git a/tex/context/base/mkiv/back-exp.lua b/tex/context/base/mkiv/back-exp.lua
index 700c1f040..7a37cc948 100644
--- a/tex/context/base/mkiv/back-exp.lua
+++ b/tex/context/base/mkiv/back-exp.lua
@@ -2145,9 +2145,7 @@ do
function structurestags.setparagraph(align)
if align ~= "" then
usedparagraphs[locatedtag("paragraph")] = {
- dataset = dataset,
- tag = tag,
- align = align,
+ align = align,
}
end
end
diff --git a/tex/context/base/mkiv/cont-new.mkiv b/tex/context/base/mkiv/cont-new.mkiv
index cea147adc..f12dfde45 100644
--- a/tex/context/base/mkiv/cont-new.mkiv
+++ b/tex/context/base/mkiv/cont-new.mkiv
@@ -13,7 +13,7 @@
% \normalend % uncomment this to get the real base runtime
-\newcontextversion{2021.03.02 19:17}
+\newcontextversion{2021.03.05 11:13}
%D This file is loaded at runtime, thereby providing an excellent place for hacks,
%D patches, extensions and new features. There can be local overloads in cont-loc
diff --git a/tex/context/base/mkiv/context.mkiv b/tex/context/base/mkiv/context.mkiv
index abbe41be2..c149869df 100644
--- a/tex/context/base/mkiv/context.mkiv
+++ b/tex/context/base/mkiv/context.mkiv
@@ -45,7 +45,7 @@
%D {YYYY.MM.DD HH:MM} format.
\edef\contextformat {\jobname}
-\edef\contextversion{2021.03.02 19:17}
+\edef\contextversion{2021.03.05 11:13}
%D Kind of special:
diff --git a/tex/context/base/mkiv/core-con.mkiv b/tex/context/base/mkiv/core-con.mkiv
index 953a99395..616ed1996 100644
--- a/tex/context/base/mkiv/core-con.mkiv
+++ b/tex/context/base/mkiv/core-con.mkiv
@@ -467,11 +467,14 @@
\letdummyparameter\c!m\normalmonth
\letdummyparameter\c!y\normalyear
\getdummyparameters[#1]%
+ \edef\p_d{\directdummyparameter\c!d}%
+ \edef\p_m{\directdummyparameter\c!m}%
+ \edef\p_y{\directdummyparameter\c!y}%
\normalexpanded
{\endgroup
- \normalday \number\directdummyparameter\c!d\relax
- \normalmonth\number\directdummyparameter\c!m\relax
- \normalyear \number\directdummyparameter\c!y\relax}%
+ \ifx\p_d\empty\else\normalday \number\directdummyparameter\c!d\relax\fi
+ \ifx\p_m\empty\else\normalmonth\number\directdummyparameter\c!m\relax\fi
+ \ifx\p_y\empty\else\normalyear \number\directdummyparameter\c!y\relax\fi}%
\fi
\begingroup
\the\everycurrentdate
diff --git a/tex/context/base/mkiv/l-package.lua b/tex/context/base/mkiv/l-package.lua
index a35ec3e2a..ef37aebe7 100644
--- a/tex/context/base/mkiv/l-package.lua
+++ b/tex/context/base/mkiv/l-package.lua
@@ -16,7 +16,7 @@ if not modules then modules = { } end modules ['l-package'] = {
-- -- local mylib = require("libtest")
-- -- local mysql = require("luasql.mysql")
-local type = type
+local type, unpack = type, unpack
local gsub, format, find = string.gsub, string.format, string.find
local insert, remove = table.insert, table.remove
@@ -70,6 +70,7 @@ local helpers = package.helpers or {
methods = {
},
sequence = {
+ "reset loaded",
"already loaded",
"preload table",
"qualified path", -- beware, lua itself doesn't handle qualified paths (prepends ./)
@@ -91,6 +92,7 @@ local builtin = helpers.builtin
local extraluapaths = { }
local extralibpaths = { }
+local checkedfiles = { }
local luapaths = nil -- delayed
local libpaths = nil -- delayed
local oldluapath = nil
@@ -245,10 +247,17 @@ local function loadedaslib(resolved,rawname) -- todo: strip all before first -
-- so, we can do a require("foo/bar") and initialize bar
-- local base = gsub(file.basename(rawname),"%.","_")
local init = "luaopen_" .. gsub(base,"%.","_")
+ local data = { resolved, init, "" }
+ checkedfiles[#checkedfiles+1] = data
if helpers.trace then
helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
end
- return package.loadlib(resolved,init)
+ local a, b, c = package.loadlib(resolved,init)
+ if not a and type(b) == "string" then
+-- data[3] = gsub(b or "unknown error","[\n\r]","")
+ data[3] = string.fullstrip(b or "unknown error")
+ end
+ return a, b, c -- c can be 'init'
end
helpers.loadedaslib = loadedaslib
@@ -295,6 +304,12 @@ end
helpers.loadedbyname = loadedbyname
+methods["reset loaded"] = function(name)
+ checkedfiles = { }
+ return false
+end
+
+
methods["already loaded"] = function(name)
return package.loaded[name]
end
@@ -344,6 +359,9 @@ end
methods["not loaded"] = function(name)
if helpers.trace then
helpers.report("unable to locate '%s'",name or "?")
+ for i=1,#checkedfiles do
+ helpers.report("checked file '%s', initializer '%s', message '%s'",unpack(checkedfiles[i]))
+ end
end
return nil
end
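
The l-package.lua change records, for every package.loadlib attempt, the resolved file name, the initializer name and the error string returned on failure, so that the final "not loaded" method can list everything that was tried. A minimal standalone sketch of that pattern in plain Lua; the library path and module name in the usage comment are placeholders, not part of the patch:

    local checkedfiles = { }

    -- try to open a shared library and remember what happened
    local function tryloadlib(resolved, init)
        local opener, message = package.loadlib(resolved, init)
        checkedfiles[#checkedfiles+1] = {
            resolved,
            init,
            opener and "" or (message or "unknown error"),
        }
        return opener
    end

    -- when nothing could be loaded, report every attempt that was made
    local function reportchecked(report)
        for i = 1, #checkedfiles do
            local c = checkedfiles[i]
            report(string.format("checked file '%s', initializer '%s', message '%s'", c[1], c[2], c[3]))
        end
    end

    -- usage sketch:
    -- tryloadlib("/some/path/demo.so", "luaopen_demo") -- placeholder path
    -- reportchecked(print)
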
diff --git a/tex/context/base/mkiv/luat-log.lua b/tex/context/base/mkiv/luat-log.lua
index fd3b73d3d..8bfec8d21 100644
--- a/tex/context/base/mkiv/luat-log.lua
+++ b/tex/context/base/mkiv/luat-log.lua
@@ -716,7 +716,7 @@ do
if s then
report("start %s: %s",what,s)
else
- report("start %s",what)
+ report("start %s",what or "")
end
if target == "logfile" then
newline()
@@ -728,7 +728,7 @@ do
if target == "logfile" then
newline()
end
- report("stop %s",what)
+ report("stop %s",what or "")
if target == "logfile" then
newline()
end
diff --git a/tex/context/base/mkiv/mult-fmt.lua b/tex/context/base/mkiv/mult-fmt.lua
index 53dbff5b8..0d11a4253 100644
--- a/tex/context/base/mkiv/mult-fmt.lua
+++ b/tex/context/base/mkiv/mult-fmt.lua
@@ -271,24 +271,24 @@ function interfaces.setuserinterface(interface,response)
-- end
-- end)
-- end
-do
- local list = complete.commands -- forces the load
- local t = { }
- local n = 0
- local f = formatters["\\frozen\\protected\\def\\%s{\\%s}"] -- formatters["\\ui_m{%s}{%s}"]
- logs.startfilelogging(report,"translated commands")
- for given, command in sortedhash(list) do
- command = command[interface] or command.en or given
- if command ~= given then
- n = n + 1
- t[n] = f(command,given)
- report_command("%-40s: %s",given,command)
+ do
+ local list = complete.commands -- forces the load
+ local t = { }
+ local n = 0
+ local f = formatters["\\frozen\\protected\\def\\%s{\\%s}"] -- formatters["\\ui_m{%s}{%s}"]
+ logs.startfilelogging(report,"translated commands")
+ for given, command in sortedhash(list) do
+ command = command[interface] or command.en or given
+ if command ~= given then
+ n = n + 1
+ t[n] = f(command,given)
+ report_command("%-40s: %s",given,command)
+ end
+ nofcommands = nofcommands + 1
+ end
+ logs.stopfilelogging()
+ contextsprint(prtcatcodes,"\\toksapp\\everydump{"..concat(t).."}")
end
- nofcommands = nofcommands + 1
- end
- logs.stopfilelogging()
- contextsprint(prtcatcodes,"\\toksapp\\everydump{"..concat(t).."}")
-end
do
local list = complete.messages.formats
logs.startfilelogging(report,"translated message formats")
diff --git a/tex/context/base/mkiv/page-ini.lua b/tex/context/base/mkiv/page-ini.lua
index 17f4c44da..924e01b2a 100644
--- a/tex/context/base/mkiv/page-ini.lua
+++ b/tex/context/base/mkiv/page-ini.lua
@@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['page-ini'] = {
local tonumber, rawget, rawset, type, next = tonumber, rawget, rawset, type, next
local match = string.match
-local sort, tohash, insert, remove = table.sort, table.tohash, table.insert, table.remove
+local sort, tohash, insert, remove, sortedkeys = table.sort, table.tohash, table.insert, table.remove, table.sortedkeys
local settings_to_array, settings_to_hash = utilities.parsers.settings_to_array, utilities.parsers.settings_to_hash
local texgetcount = tex.getcount
@@ -87,10 +87,23 @@ function pages.mark(name,list)
end
end
+local tobemarked = { }
+
+function pages.markedlist(realpage)
+ if realpage then
+ local m = rawget(tobemarked,realpage) or rawget(data,realpage)
+ return m and next(m) and sortedkeys(m)
+ end
+end
+
local function marked(name)
local realpage = texgetcount("realpageno")
for i=last,realpage-1 do
- rawset(data,i,nil)
+ local di = data[i]
+ if di then
+ tobemarked[i] = di
+ rawset(data,i,nil)
+ end
end
local pagedata = rawget(data,realpage)
return pagedata and pagedata[name] and true or false
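
The page-ini.lua hunk moves the mark data of pages that were already flushed into a separate tobemarked table, so pages.markedlist can still return a sorted list of mark names for such a page. A reduced sketch of that lookup, assuming table.sortedkeys from ConTeXt's l-table helpers; the sample tables are invented:

    local sortedkeys = table.sortedkeys -- ConTeXt helper, not plain Lua

    -- marks of pages still pending, and of pages already flushed
    local data       = { [3] = { chapter = true } }
    local tobemarked = { [2] = { appendix = true, index = true } }

    local function markedlist(realpage)
        if realpage then
            local m = tobemarked[realpage] or data[realpage]
            return m and next(m) and sortedkeys(m) or nil
        end
    end

    -- markedlist(2) -> { "appendix", "index" }
    -- markedlist(9) -> nil
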
diff --git a/tex/context/base/mkiv/status-files.pdf b/tex/context/base/mkiv/status-files.pdf
index 4747b2d13..eccdf2ee0 100644
--- a/tex/context/base/mkiv/status-files.pdf
+++ b/tex/context/base/mkiv/status-files.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/status-lua.pdf b/tex/context/base/mkiv/status-lua.pdf
index 021bcf069..4ccd668fc 100644
--- a/tex/context/base/mkiv/status-lua.pdf
+++ b/tex/context/base/mkiv/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/strc-pag.lua b/tex/context/base/mkiv/strc-pag.lua
index 6f9cdd429..9c040104d 100644
--- a/tex/context/base/mkiv/strc-pag.lua
+++ b/tex/context/base/mkiv/strc-pag.lua
@@ -84,6 +84,7 @@ function pages.save(prefixdata,numberdata,extradata)
block = sections.currentblock(),
prefixdata = prefixdata and helpers.simplify(prefixdata),
numberdata = numberdata and helpers.simplify(numberdata),
+ marked = pages.markedlist(realpage), -- not yet defined
}
tobesaved[realpage] = data
if not collected[realpage] then
diff --git a/tex/context/base/mkiv/typo-bld.lua b/tex/context/base/mkiv/typo-bld.lua
index 269386e6c..305032772 100644
--- a/tex/context/base/mkiv/typo-bld.lua
+++ b/tex/context/base/mkiv/typo-bld.lua
@@ -242,65 +242,30 @@ end
-- this will be split into contribute_filter for these 4 so at some point
-- the check can go away
-if CONTEXTLMTXMODE > 0 then
-
- -- Todo: contrib_head can be any head (kind of) not per se the page one so maybe I will
- -- intercept that in the engine with page_contribute_head or so.
-
- function builders.buildpage_filter(groupcode)
- local head = texlists.contribute_head
- if head then
- local done = false
- -- called quite often ... maybe time to remove timing
- starttiming(builders)
- if trace_page_builder then
- report(groupcode,head)
- end
- head, done = pageactions(head,groupcode)
- stoptiming(builders)
- -- -- doesn't work here (not passed on?)
- -- texset("pagegoal,texget("vsize") - texgetdimen("d_page_floats_inserted_top") - texgetdimen("d_page_floats_inserted_bottom")
- texlists.contribute_head = head or nil -- needs checking
- -- tex.setlist("contribute_head",head,head and nodes.tail(head))
- return done and head or true -- no return value needed
- else
- -- happens quite often
- if trace_page_builder then
- report(groupcode)
- end
- -- return nil, false -- no return value needed
- return nil
+function builders.buildpage_filter(groupcode)
+ local head = texlists.contrib_head
+ if head then
+ local done = false
+ -- called quite often ... maybe time to remove timing
+ starttiming(builders)
+ if trace_page_builder then
+ report(groupcode,head)
end
- end
-
-else
-
- function builders.buildpage_filter(groupcode)
- local head = texlists.contrib_head
- if head then
- local done = false
- -- called quite often ... maybe time to remove timing
- starttiming(builders)
- if trace_page_builder then
- report(groupcode,head)
- end
- head, done = pageactions(head,groupcode)
- stoptiming(builders)
- -- -- doesn't work here (not passed on?)
- -- texset("pagegoal,texget("vsize") - texgetdimen("d_page_floats_inserted_top") - texgetdimen("d_page_floats_inserted_bottom")
- texlists.contrib_head = head or nil -- needs checking
- -- tex.setlist("contrib_head",head,head and nodes.tail(head))
- return done and head or true -- no return value needed
- else
- -- happens quite often
- if trace_page_builder then
- report(groupcode)
- end
- -- return nil, false -- no return value needed
- return nil
+ head, done = pageactions(head,groupcode)
+ stoptiming(builders)
+ -- -- doesn't work here (not passed on?)
+ -- texset("pagegoal,texget("vsize") - texgetdimen("d_page_floats_inserted_top") - texgetdimen("d_page_floats_inserted_bottom")
+ texlists.contrib_head = head or nil -- needs checking
+ -- tex.setlist("contrib_head",head,head and nodes.tail(head))
+ return done and head or true -- no return value needed
+ else
+ -- happens quite often
+ if trace_page_builder then
+ report(groupcode)
end
+-- return nil, false -- no return value needed
+ return nil
end
-
end
registercallback('vpack_filter', builders.vpack_filter, "vertical spacing etc")
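
The rewritten builders.buildpage_filter works on the pending contribution list (texlists.contrib_head) from inside LuaTeX's buildpage_filter callback. Outside ConTeXt the same hook can be registered directly; a minimal plain LuaTeX sketch that only logs how many nodes are waiting, shown for illustration and not as the way ConTeXt registers its callbacks:

    -- plain LuaTeX, e.g. in a \directlua block; ConTeXt itself goes through
    -- its own callback wrappers instead of calling callback.register
    callback.register("buildpage_filter", function(info)
        local head = tex.lists.contrib_head
        if head then
            local n = 0
            for _ in node.traverse(head) do
                n = n + 1
            end
            texio.write_nl("log", string.format("buildpage_filter (%s): %d nodes pending", tostring(info), n))
        end
    end)
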
diff --git a/tex/context/base/mkiv/util-lib.lua b/tex/context/base/mkiv/util-lib.lua
index a49507c60..be763d92e 100644
--- a/tex/context/base/mkiv/util-lib.lua
+++ b/tex/context/base/mkiv/util-lib.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['util-lib'] = {
license = "see context related readme files",
}
+-- not used in context any more
+
--[[
The problem with library bindings is manyfold. They are of course platform
diff --git a/tex/context/base/mkiv/util-soc-imp-http.lua b/tex/context/base/mkiv/util-soc-imp-http.lua
index c3a28be82..d8f45880e 100644
--- a/tex/context/base/mkiv/util-soc-imp-http.lua
+++ b/tex/context/base/mkiv/util-soc-imp-http.lua
@@ -59,7 +59,7 @@ local function receiveheaders(sock, headers)
headers = { }
end
-- get first line
- local line, err = sock:receive()
+ local line, err = sock:receive("*l") -- this seems to be wrong!
if err then
return nil, err
end
@@ -72,14 +72,14 @@ local function receiveheaders(sock, headers)
end
name = lower(name)
-- get next line (value might be folded)
- line, err = sock:receive()
+ line, err = sock:receive("*l")
if err then
return nil, err
end
-- unfold any folded values
while find(line, "^%s") do
value = value .. line
- line = sock:receive()
+ line = sock:receive("*l")
if err then
return nil, err
end
@@ -103,7 +103,7 @@ socket.sourcet["http-chunked"] = function(sock, headers)
dirty = function() return sock:dirty() end,
}, {
__call = function()
- local line, err = sock:receive()
+ local line, err = sock:receive("*l")
if err then
return nil, err
end
@@ -114,7 +114,7 @@ socket.sourcet["http-chunked"] = function(sock, headers)
if size > 0 then
local chunk, err, part = sock:receive(size)
if chunk then
- sock:receive()
+ sock:receive("*a")
end
return chunk, err
else
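
The util-soc-imp-http.lua change makes the LuaSocket read patterns explicit: receive("*l") returns one line with the line break stripped, receive(n) returns exactly n bytes, and receive("*a") reads until the peer closes the connection. A minimal plain LuaSocket sketch of those calls, independent of the ConTeXt wrapper; host and request are placeholders and error handling is omitted:

    local socket = require("socket")

    local sock = socket.tcp()
    sock:connect("example.org", 80)                          -- placeholder host
    sock:send("GET / HTTP/1.0\r\nHost: example.org\r\n\r\n")

    local status = sock:receive("*l")                        -- one line, no CR/LF

    -- header lines end at the first empty line
    local line = sock:receive("*l")
    while line and line ~= "" do
        line = sock:receive("*l")
    end

    local body = sock:receive("*a")                          -- everything until close
    sock:close()
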
diff --git a/tex/context/base/mkxl/back-exp-imp-mth.lmt b/tex/context/base/mkxl/back-exp-imp-mth.lmt
new file mode 100644
index 000000000..73c09d79e
--- /dev/null
+++ b/tex/context/base/mkxl/back-exp-imp-mth.lmt
@@ -0,0 +1,742 @@
+if not modules then modules = { } end modules ['back-exp-imp-mth'] = {
+ version = 1.001,
+ comment = "companion to back-exp.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local sub = string.sub
+local utfchar, utfvalues = utf.char, utf.values
+local setmetatableindex, concat = table.setmetatableindex, table.concat
+
+local structurestags = structures.tags
+local specifications = structurestags.specifications
+local locatedtag = structurestags.locatedtag
+
+local backend = structurestags.backend
+
+local setattribute = backend.setattribute
+local extras = backend.extras
+local checks = backend.checks
+local finalizers = backend.finalizers
+
+local implement = interfaces.implement
+
+do
+
+ local automathrows = true directives.register("export.math.autorows", function(v) automathrows = v end)
+ local automathapply = true directives.register("export.math.autoapply", function(v) automathapply = v end)
+ local automathnumber = true directives.register("export.math.autonumber", function(v) automathnumber = v end)
+ local automathstrip = true directives.register("export.math.autostrip", function(v) automathstrip = v end)
+
+ local functions = mathematics.categories.functions
+
+ local function collapse(di,i,data,ndata,detail,element)
+ local collapsing = di.data
+ if data then
+ di.element = element
+ di.detail = nil
+ i = i + 1
+ while i <= ndata do
+ local dn = data[i]
+ if dn.detail == detail then
+ collapsing[#collapsing+1] = dn.data[1]
+ dn.skip = "ignore"
+ i = i + 1
+ else
+ break
+ end
+ end
+ end
+ return i
+ end
+
+ local function collapse_mn(di,i,data,ndata)
+ -- this is tricky ... we need to make sure that we wrap in mrows if we want
+ -- to bypass this one
+ local collapsing = di.data
+ if data then
+ i = i + 1
+ while i <= ndata do
+ local dn = data[i]
+ local tg = dn.tg
+ if tg == "mn" then
+ collapsing[#collapsing+1] = dn.data[1]
+ dn.skip = "ignore"
+ i = i + 1
+ elseif tg == "mo" then
+ local d = dn.data[1]
+ if d == "." then
+ collapsing[#collapsing+1] = d
+ dn.skip = "ignore"
+ i = i + 1
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ return i
+ end
+
+ -- maybe delay __i__ till we need it
+
+ local apply_function = {
+ {
+ element = "mo",
+ -- comment = "apply function",
+ -- data = { utfchar(0x2061) },
+ data = { "&#x2061;" },
+ nature = "mixed",
+ }
+ }
+
+ local functioncontent = { }
+
+ setmetatableindex(functioncontent,function(t,k)
+ local v = { { content = k } }
+ t[k] = v
+ return v
+ end)
+
+ local dummy_nucleus = {
+ element = "mtext",
+ data = { content = "" },
+ nature = "inline",
+ comment = "dummy nucleus",
+ fulltag = "mtext>0"
+ }
+
+ local function accentchar(d)
+ for i=1,3 do
+ d = d.data
+ if not d then
+ return
+ end
+ d = d[1]
+ if not d then
+ return
+ end
+ local tg = d.tg
+ if tg == "mover" then
+ local s = specifications[d.fulltag]
+ local t = s.top
+ if t then
+ d = d.data[1]
+ local d1 = d.data[1]
+ d1.content = utfchar(t)
+ d.data = { d1 }
+ return d
+ end
+ elseif tg == "munder" then
+ local s = specifications[d.fulltag]
+ local b = s.bottom
+ if b then
+ d = d.data[1]
+ local d1 = d.data[1]
+ d1.content = utfchar(b)
+ d.data = { d1 }
+ return d
+ end
+ end
+ end
+ end
+
+ local no_mrow = {
+ mrow = true,
+ mfenced = true,
+ mfrac = true,
+ mroot = true,
+ msqrt = true,
+ mtable = true,
+ mi = true,
+ mo = true,
+ mn = true,
+ }
+
+ local function checkmath(root) -- we can provide utf.toentities as an option
+ local data = root.data
+ if data then
+ local ndata = #data
+ local roottg = root.tg
+ if roottg == "msubsup" then
+                -- kind of tricky: we have a different order in display mode
+ local nucleus, superscript, subscript
+ if ndata > 3 then
+ -- error
+ else
+ for i=1,ndata do
+ local di = data[i]
+ if not di then
+ -- weird
+ elseif di.content then
+ -- text
+ else
+ local s = specifications[di.fulltag]
+ if s.subscript then
+ subscript = i
+ elseif s.superscript then
+ superscript = i
+ else
+ nucleus = i
+ end
+ end
+ end
+ if superscript or subscript then
+ -- we probably always have 3 anyway ... needs checking
+ local nuc = nucleus and data[nucleus]
+ local sub = subscript and data[subscript]
+ local sup = superscript and data[superscript]
+ local n = 0 -- play safe
+ if nuc then n = n + 1 ; data[n] = nuc end
+ if sub then n = n + 1 ; data[n] = sub end
+ if sup then n = n + 1 ; data[n] = sup end
+ end
+ end
+ -- elseif roottg == "msup" or roottg == "msub" then
+ -- -- m$^2$
+ -- if ndata == 1 then
+ -- local d = data[1]
+ -- data[2] = d
+ -- d.__i__ = 2
+ -- data[1] = dummy_nucleus
+ -- end
+ elseif roottg == "mfenced" then
+ local s = specifications[root.fulltag]
+ local l, m, r = s.left, s.middle, s.right
+ if l then
+ l = utfchar(l)
+ end
+ if m then
+ local t = { }
+ for i=1,#m do
+ t[i] = utfchar(m[i])
+ end
+ m = concat(t)
+ end
+ if r then
+ r = utfchar(r)
+ end
+ root.attributes = {
+ open = l,
+ separators = m,
+ close = r,
+ }
+ end
+ if ndata == 0 then
+ root.skip = "comment" -- get rid of weird artefacts
+ root.nota = "weird"
+ return
+ elseif ndata == 1 then
+ local d = data[1]
+ if not d or d == "" then
+ root.skip = "comment"
+ return
+ elseif d.content then
+ return
+ else -- if ndata == 1 then
+ local tg = d.tg
+ if automathrows and (roottg == "mrow" or roottg == "mtext") then
+ -- maybe just always ! check spec first
+                        -- or we can have checks.* for each as we then can flatten
+ if no_mrow[tg] then
+ root.skip = "comment"
+ end
+ elseif roottg == "mo" then
+ if tg == "mo" then
+ root.skip = "comment"
+ end
+ end
+ end
+ end
+ local i = 1
+ while i <= ndata do -- -- -- TOO MUCH NESTED CHECKING -- -- --
+ local di = data[i]
+ if di and not di.content then
+ local tg = di.tg
+ if tg == "math" then
+ -- di.element = "mrow" -- when properties
+ di.skip = "comment"
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mover" then
+ local s = specifications[di.fulltag]
+ if s.accent then
+ local t = s.top
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ }
+ -- todo: p.topfixed
+ if t then
+ -- mover
+ d[1].data[1].content = utfchar(t)
+ di.data = { d[2], d[1] }
+ end
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "munder" then
+ local s = specifications[di.fulltag]
+ if s.accent then
+ local b = s.bottom
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ }
+ -- todo: p.bottomfixed
+ if b then
+ -- munder
+ d[2].data[1].content = utfchar(b)
+ end
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "munderover" then
+ local s = specifications[di.fulltag]
+ if s.accent then
+ local t = s.top
+ local b = s.bottom
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ -- todo: accentunder = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ accentunder = "true",
+ }
+ -- todo: p.topfixed
+ -- todo: p.bottomfixed
+ if t and b then
+ -- munderover
+ d[1].data[1].content = utfchar(t)
+ d[3].data[1].content = utfchar(b)
+ di.data = { d[2], d[3], d[1] }
+ else
+ -- can't happen
+ end
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mstacker" then
+ local d = di.data
+ local d1 = d[1]
+ local d2 = d[2]
+ local d3 = d[3]
+ local t1 = d1 and d1.tg
+ local t2 = d2 and d2.tg
+ local t3 = d3 and d3.tg
+ local m = nil -- d1.data[1]
+ local t = nil
+ local b = nil
+ -- only accent when top / bot have stretch
+ -- normally we flush [base under over] which is better for tagged pdf
+ if t1 == "mstackermid" then
+ m = accentchar(d1) -- or m
+ if t2 == "mstackertop" then
+ if t3 == "mstackerbot" then
+ t = accentchar(d2)
+ b = accentchar(d3)
+ di.element = "munderover"
+ di.data = { m or d1.data[1], b or d3.data[1], t or d2.data[1] }
+ else
+ t = accentchar(d2)
+ di.element = "mover"
+ di.data = { m or d1.data[1], t or d2.data[1] }
+ end
+ elseif t2 == "mstackerbot" then
+ if t3 == "mstackertop" then
+ b = accentchar(d2)
+ t = accentchar(d3)
+ di.element = "munderover"
+ di.data = { m or d1.data[1], t or d3.data[1], m, b or d2.data[1] }
+ else
+ b = accentchar(d2)
+ di.element = "munder"
+ di.data = { m or d1.data[1], b or d2.data[1] }
+ end
+ else
+ -- can't happen
+ end
+ else
+ -- can't happen
+ end
+ if t or b then
+ di.attributes = {
+ accent = t and "true" or nil,
+ accentunder = b and "true" or nil,
+ }
+ di.detail = nil
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mroot" then
+ local data = di.data
+ local size = #data
+ if size == 1 then
+ -- else firefox complains ... code in math-tag (for pdf tagging)
+ di.element = "msqrt"
+ elseif size == 2 then
+ data[1], data[2] = data[2], data[1]
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "break" then
+ di.skip = "comment"
+ i = i + 1
+ elseif tg == "mtext" then
+ -- this is only needed for unboxed mtexts ... all kind of special
+ -- tex border cases and optimizations ... trial and error
+ local data = di.data
+ if #data > 1 then
+ for i=1,#data do
+ local di = data[i]
+ local content = di.content
+ if content then
+ data[i] = {
+ element = "mtext",
+ nature = "inline",
+ data = { di },
+ n = 0,
+ }
+ elseif di.tg == "math" then
+ local di = di.data[1]
+ if di then
+ data[i] = di
+ checkmath(di)
+ end
+ end
+ end
+ di.element = "mrow"
+ -- di.tg = "mrow"
+ -- di.nature = "inline"
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mrow" and detail then -- hm, falls through
+ di.detail = nil
+ checkmath(di)
+ di = {
+ element = "maction",
+ nature = "display",
+ attributes = { actiontype = detail },
+ data = { di },
+ n = 0,
+ }
+ data[i] = di
+ i = i + 1
+ else
+ local category = di.mathcategory
+ if category then
+ -- no checkmath(di) here
+ if category == 1 then -- mo
+ i = collapse(di,i,data,ndata,detail,"mo")
+ elseif category == 2 then -- mi
+ i = collapse(di,i,data,ndata,detail,"mi")
+ elseif category == 3 then -- mn
+ i = collapse(di,i,data,ndata,detail,"mn")
+ elseif category == 4 then -- ms
+ i = collapse(di,i,data,ndata,detail,"ms")
+ elseif category >= 1000 then
+ local apply = category >= 2000
+ if apply then
+ category = category - 1000
+ end
+ if tg == "mi" then -- function
+ if roottg == "mrow" then
+ root.skip = "comment"
+ root.element = "function"
+ end
+ i = collapse(di,i,data,ndata,detail,"mi")
+ local tag = functions[category]
+ if tag then
+ di.data = functioncontent[tag]
+ end
+ if apply then
+ di.after = apply_function
+ elseif automathapply then -- make function
+ local following
+ if i <= ndata then
+ -- normally not the case
+ following = data[i]
+ else
+ local parent = di.__p__ -- == root
+ if parent.tg == "mrow" then
+ parent = parent.__p__
+ end
+ local index = parent.__i__
+ following = parent.data[index+1]
+ end
+ if following then
+ local tg = following.tg
+ if tg == "mrow" or tg == "mfenced" then -- we need to figure out the right condition
+ di.after = apply_function
+ end
+ end
+ end
+ else -- some problem
+ checkmath(di)
+ i = i + 1
+ end
+ else
+ checkmath(di)
+ i = i + 1
+ end
+ elseif automathnumber and tg == "mn" then
+ checkmath(di)
+ i = collapse_mn(di,i,data,ndata)
+ else
+ checkmath(di)
+ i = i + 1
+ end
+ end
+ else -- can be string or boolean
+ if parenttg ~= "mtext" and di == " " then
+ data[i] = false
+ end
+ i = i + 1
+ end
+ end
+ end
+ end
+
+ local function stripmath(di)
+ if not di then
+ --
+ elseif di.content then
+ return di
+ else
+ local tg = di.tg
+ if tg == "mtext" or tg == "ms" then
+ return di
+ else
+ local data = di.data
+ local ndata = #data
+ local n = 0
+ for i=1,ndata do
+ local d = data[i]
+ if d and not d.content then
+ d = stripmath(d)
+ end
+ if d then
+ local content = d.content
+ if not content then
+ n = n + 1
+ d.__i__ = n
+ data[n] = d
+ elseif content == " " or content == "" then
+ if di.tg == "mspace" then
+ -- we append or prepend a space to a preceding or following mtext
+ local parent = di.__p__
+ local index = di.__i__ -- == i
+ local data = parent.data
+ if index > 1 then
+ local d = data[index-1]
+ if d.tg == "mtext" then
+ local dd = d.data
+ local dn = dd[#dd]
+ local dc = dn.content
+ if dc then
+ dn.content = dc .. content
+ end
+ end
+ elseif index < ndata then
+ local d = data[index+1]
+ if d.tg == "mtext" then
+ local dd = d.data
+ local dn = dd[1]
+ local dc = dn.content
+ if dc then
+ dn.content = content .. dc
+ end
+ end
+ end
+ end
+ else
+ n = n + 1
+ data[n] = d
+ end
+ end
+ end
+ for i=ndata,n+1,-1 do
+ data[i] = nil
+ end
+ if #data > 0 then
+ return di
+ end
+ end
+ end
+ end
+
+ function checks.math(di)
+ if di.skip == "comment" then
+ -- already done, kind of weird, happens in mathmatrix, maybe some collapse
+ -- issue that i need to look into
+ else
+ local specification = specifications[di.fulltag]
+ local mode = specification and specification.mode == "display" and "block" or "inline"
+ di.attributes = {
+ ["display"] = mode,
+ ["xmlns:m"] = mathmlns,
+ }
+ -- can be option if needed:
+ if mode == "inline" then
+ -- di.nature = "mixed" -- else spacing problem (maybe inline)
+ di.nature = "inline" -- we need to catch x$X$x and x $X$ x
+ else
+ di.nature = "display"
+ end
+ if automathstrip then
+ stripmath(di)
+ end
+ checkmath(di)
+ end
+ end
+
+ -- this one can replace some of the previous code .. todo (test on mathmatrix)
+
+ -- ignore with no data can be removed
+
+ local function checked(d)
+ local n = #d
+ if n == 1 then
+ local di = d[1]
+ local tg = di.tg
+ if tg == "ignore" then
+ -- todo: we can move ignore's data one level up
+ return 1
+ elseif di.content then
+ return 1
+ else
+ local dd = di.data
+ if #dd > 0 and checked(dd) > 0 then
+ return 1
+ else
+ return 0
+ end
+ end
+ else
+ local m = 0
+ for i=1,n do
+ local di = d[i]
+ local tg = di.tg
+ if tg == "ignore" then
+ -- skip
+ elseif di.content then
+ m = m + 1
+ d[m] = di
+ else
+ local dd = di.data
+ if #dd > 0 and checked(dd) > 0 then
+ m = m + 1
+ d[m] = di
+ end
+ end
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ d[i] = nil
+ end
+ end
+ return m
+ end
+ end
+
+ function checks.mrow(di)
+ -- local d = di.data
+ -- if d then
+ -- checked(d)
+ -- end
+ end
+
+ -- we can move more checks here
+
+ local function flatten(di)
+ local r = di.__p__
+ while r do
+ local d = r.data
+ local n = #d
+ if d and n > 1 then
+ n = checked(d)
+ end
+ local tg = r.tg
+ if n == 1 and (tg == "mtext" or tg == "mrow") then
+ r.skip = "comment" -- weird error
+ r = r.__p__
+ else
+ break
+ end
+ end
+ end
+
+ function checks.mtable(di)
+ flatten(di)
+ local d = di.data
+ for i=1,#d do
+ local d = d[i]
+ if d.tg == "mtr" then
+ local d = d.data
+ for i=1,#d do
+ local d = d[i]
+ if d.tg == "mtd" then
+ -- okay
+ elseif d.content then
+ d.content = ""
+ else
+ d.skip = "comment" -- weird error
+ end
+ end
+ elseif d.content then
+ d.content = ""
+ else
+ d.skip = "comment" -- weird error
+ end
+ end
+ end
+
+ do
+
+ local a, z, A, Z = 0x61, 0x7A, 0x41, 0x5A
+
+ function extras.mi(di,element,n,fulltag) -- check with content
+ local str = di.data[1].content
+            if str and sub(str,1,1) ~= "&" then -- hack but good enough (maybe gsub on the first one)
+ for v in utfvalues(str) do
+ if (v >= a and v <= z) or (v >= A and v <= Z) then
+ local a = di.attributes
+ if a then
+ a.mathvariant = "normal"
+ else
+ di.attributes = { mathvariant = "normal" }
+ end
+ end
+ end
+ end
+ end
+
+ end
+
+ function extras.msub(di,element,n,fulltag)
+ -- m$^2$
+ local data = di.data
+ if #data == 1 then
+ local d = data[1]
+ data[2] = d
+ d.__i__ = 2
+ data[1] = dummy_nucleus
+ end
+ end
+
+ extras.msup = extras.msub
+
+end
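
The collapse_mn helper above merges a run of adjacent mn elements, allowing a lone "." mo in between, so a number that TeX typeset as separate tokens exports as a single mn. A reduced sketch of the same idea on a flat token list; the records only mimic the exporter's { tg, data } shape and are not the real tree:

    local tokens = {
        { tg = "mn", data = { "1" } },
        { tg = "mn", data = { "2" } },
        { tg = "mo", data = { "." } },
        { tg = "mn", data = { "5" } },
    }

    local function collapsenumbers(list)
        local result, i, n = { }, 1, #list
        while i <= n do
            local t = list[i]
            if t.tg == "mn" then
                local digits = { t.data[1] }
                i = i + 1
                while i <= n and (list[i].tg == "mn" or (list[i].tg == "mo" and list[i].data[1] == ".")) do
                    digits[#digits+1] = list[i].data[1]
                    i = i + 1
                end
                result[#result+1] = { tg = "mn", data = { table.concat(digits) } }
            else
                result[#result+1] = t
                i = i + 1
            end
        end
        return result
    end

    -- collapsenumbers(tokens) -> one record: { tg = "mn", data = { "12.5" } }

In the real exporter the merged siblings stay in place but get skip = "ignore", so indices remain valid and later passes simply drop them.
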
diff --git a/tex/context/base/mkxl/back-exp-imp-ref.lmt b/tex/context/base/mkxl/back-exp-imp-ref.lmt
new file mode 100644
index 000000000..25682f8ed
--- /dev/null
+++ b/tex/context/base/mkxl/back-exp-imp-ref.lmt
@@ -0,0 +1,261 @@
+if not modules then modules = { } end modules ['back-exp-imp-ref'] = {
+ version = 1.001,
+ comment = "companion to back-exp.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- quite some code deals with exporting references --
+
+-- links:
+--
+-- url :
+-- file :
+-- internal : automatic location
+-- location : named reference
+
+-- references:
+--
+-- implicit : automatic reference
+-- explicit : named reference
+
+local tonumber = tonumber
+local lpegmatch = lpeg.match
+local insert = table.insert
+
+local references = structures.references
+
+local structurestags = structures.tags
+local specifications = structurestags.specifications
+local locatedtag = structurestags.locatedtag
+
+local backend = structurestags.backend
+
+local setattribute = backend.setattribute
+local extras = backend.extras
+local fixes = backend.fixes
+local referencehash = backend.referencehash
+local destinationhash = backend.destinationhash
+
+local implement = interfaces.implement
+
+local evaluators = { }
+local specials = { }
+local explicits = { }
+
+evaluators.inner = function(di,var)
+ local inner = var.inner
+ if inner then
+ setattribute(di,"location",inner,true)
+ end
+end
+
+evaluators.outer = function(di,var)
+ local file, url = references.checkedfileorurl(var.outer,var.outer)
+ if url then
+ setattribute(di,"url",url,true)
+ elseif file then
+ setattribute(di,"file",file,true)
+ end
+end
+
+evaluators["outer with inner"] = function(di,var)
+ local file = references.checkedfile(var.f)
+ if file then
+ setattribute(di,"file",file,true)
+ end
+ local inner = var.inner
+ if inner then
+ setattribute(di,"inner",inner,true)
+ end
+end
+
+evaluators.special = function(di,var)
+ local handler = specials[var.special]
+ if handler then
+ handler(di,var)
+ end
+end
+
+do
+
+ evaluators["special outer with operation"] = evaluators.special
+ evaluators["special operation"] = evaluators.special
+ evaluators["special operation with arguments"] = evaluators.special
+
+ function specials.url(di,var)
+ local url = references.checkedurl(var.operation)
+ if url and url ~= "" then
+ setattribute(di,"url",url,true)
+ end
+ end
+
+ function specials.file(di,var)
+ local file = references.checkedfile(var.operation)
+ if file and file ~= "" then
+ setattribute(di,"file",file,true)
+ end
+ end
+
+ function specials.fileorurl(di,var)
+ local file, url = references.checkedfileorurl(var.operation,var.operation)
+ if url and url ~= "" then
+ setattribute(di,"url",url,true)
+ elseif file and file ~= "" then
+ setattribute(di,"file",file,true)
+ end
+ end
+
+ function specials.internal(di,var)
+ local internal = references.checkedurl(var.operation)
+ if internal then
+ setattribute(di,"location",internal)
+ end
+ end
+
+ local function adddestination(di,references) -- todo: specials -> exporters and then concat
+ if references then
+ local reference = references.reference
+ if reference and reference ~= "" then
+ local prefix = references.prefix
+ if prefix and prefix ~= "" then
+ setattribute(di,"prefix",prefix,true)
+ end
+ setattribute(di,"destination",reference,true)
+ for i=1,#references do
+ local r = references[i]
+ local e = evaluators[r.kind]
+ if e then
+ e(di,r)
+ end
+ end
+ end
+ end
+ end
+
+ function extras.addimplicit(di,references)
+ if references then
+ local internal = references.internal
+ if internal then
+ setattribute(di,"implicit",internal)
+ end
+ end
+ end
+
+ function extras.addinternal(di,references)
+ if references then
+ local internal = references.internal
+ if internal then
+ setattribute(di,"internal",internal)
+ end
+ end
+ end
+
+ local p_firstpart = lpeg.Cs((1-lpeg.P(","))^0)
+
+ local function addreference(di,references)
+ if references then
+ local reference = references.reference
+ if reference and reference ~= "" then
+ local prefix = references.prefix
+ if prefix and prefix ~= "" then
+ setattribute(di,"prefix",prefix)
+ end
+ setattribute(di,"reference",reference,true)
+ setattribute(di,"explicit",lpegmatch(p_firstpart,reference),true)
+ end
+ local internal = references.internal
+ if internal and internal ~= "" then
+ setattribute(di,"implicit",internal)
+ end
+ end
+ end
+
+ local function link(di,element,n,fulltag)
+ -- for instance in lists a link has nested elements and no own text
+ local reference = referencehash[fulltag]
+ if reference then
+ adddestination(di,structures.references.get(reference))
+ return true
+ else
+ local data = di.data
+ if data then
+ for i=1,#data do
+ local di = data[i]
+ if di then
+ local fulltag = di.fulltag
+ if fulltag and link(di,element,n,fulltag) then
+ return true
+ end
+ end
+ end
+ end
+ end
+ end
+
+ local function reference(di,element,n,fulltag)
+ local destination = destinationhash[fulltag]
+ if destination then
+ local d = structures.references.internals[destination]
+ if d then
+ addreference(di,d.references)
+ return true
+ else
+ return false
+ end
+ else
+ local data = di.data
+ if data then
+ for i=1,#data do
+ local di = data[i]
+ if di then
+ local fulltag = di.fulltag
+ if fulltag and reference(di,element,n,fulltag) then
+ return true
+ end
+ end
+ end
+ end
+ end
+ end
+
+ extras.adddestination = adddestination
+ extras.addreference = addreference
+
+ extras.link = link
+ extras.reference = reference
+
+end
+
+do
+
+ function fixes.linenumber(di,data,i)
+ local ni = data[i+1]
+ if ni then
+ if ni.data then
+ while true do
+ local d = ni.data[1]
+ if d then
+ local e = d.element
+ if e then
+ if e == "line" or e == "verbatimline" then
+ insert(d.data,1,di)
+ data[i] = false
+ return
+ else
+ ni = d
+ end
+ else
+ return
+ end
+ else
+ return
+ end
+ end
+ end
+ end
+ end
+
+end
+
diff --git a/tex/context/base/mkxl/back-exp-imp-tag.lmt b/tex/context/base/mkxl/back-exp-imp-tag.lmt
new file mode 100644
index 000000000..73b7b5b47
--- /dev/null
+++ b/tex/context/base/mkxl/back-exp-imp-tag.lmt
@@ -0,0 +1,846 @@
+if not modules then modules = { } end modules ['back-exp-imp-tag'] = {
+ version = 1.001,
+ comment = "companion to back-exp.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Because we run into the 200 locals limit we now split the file into smaller
+-- parts.
+
+local tonumber = tonumber
+local todimen = number.todimen
+local sortedhash, sortedkeys, setmetatableindex, concat, insert = table.sortedhash, table.sortedkeys, table.setmetatableindex, table.concat, table.insert
+local settings_to_hash = utilities.parsers.settings_to_hash
+local lpegmatch = lpeg.match
+local formatters = string.formatters
+
+local references = structures.references
+local structurestags = structures.tags
+local taglist = structurestags.taglist
+local specifications = structurestags.specifications
+local properties = structurestags.properties
+local locatedtag = structurestags.locatedtag
+
+local backend = structurestags.backend
+
+local setattribute = backend.setattribute
+local extras = backend.extras
+local checks = backend.checks
+local fixes = backend.fixes
+local listdata = backend.listdata
+local finalizers = backend.finalizers
+local usedstyles = backend.usedstyles -- for now
+local usedimages = backend.usedimages -- for now
+local referencehash = backend.referencehash
+local destinationhash = backend.destinationhash
+
+local implement = interfaces.implement
+
+do
+
+ local itemgroups = { }
+
+ local function setitemgroup(packed,level,symbol)
+ itemgroups[locatedtag("itemgroup")] = {
+ packed = packed,
+ symbol = symbol,
+ level = level,
+ }
+ end
+
+ local function setitem(kind)
+ itemgroups[locatedtag("item")] = {
+ kind = kind,
+ }
+ end
+
+ function extras.itemgroup(di,element,n,fulltag)
+ local hash = itemgroups[fulltag]
+ if hash then
+ setattribute(di,"packed",hash.packed and "yes" or nil)
+ setattribute(di,"symbol",hash.symbol)
+ setattribute(di,"level",hash.level)
+ end
+ end
+
+ function extras.item(di,element,n,fulltag)
+ local hash = itemgroups[fulltag]
+ if hash then
+ local kind = hash.kind
+ if kind and kind ~= "" then
+ setattribute(di,"kind",kind)
+ end
+ end
+ end
+
+ implement {
+ name = "settagitemgroup",
+ actions = setitemgroup,
+ arguments = { "boolean", "integer", "string" }
+ }
+
+ implement {
+ name = "settagitem",
+ actions = setitem,
+ arguments = "string"
+ }
+
+ structurestags.setitemgroup = setitemgroup
+ structurestags.setitem = setitem
+
+end
+
+do
+
+ local registered = structures.sections.registered
+
+ local function resolve(di,element,n,fulltag)
+ local data = listdata[fulltag]
+ if data then
+ extras.addreference(di,data.references)
+ return true
+ else
+ local data = di.data
+ if data then
+ for i=1,#data do
+ local di = data[i]
+ if di then
+ local ft = di.fulltag
+ if ft and resolve(di,element,n,ft) then
+ return true
+ end
+ end
+ end
+ end
+ end
+ end
+
+ function extras.section(di,element,n,fulltag)
+ local r = registered[specifications[fulltag].detail]
+ if r then
+ setattribute(di,"level",r.level)
+ end
+ resolve(di,element,n,fulltag)
+ end
+
+ local floats = { }
+
+ local function setfloat(options,method)
+ floats[locatedtag("float")] = {
+ options = options,
+ method = method,
+ }
+ end
+
+ function extras.float(di,element,n,fulltag)
+ local hash = floats[fulltag]
+ if hash then
+ local method = hash.method
+ if not method or method == "" then
+ method = "here"
+ end
+ setattribute(di,"method",method)
+ local options = hash.options
+ if options and options ~= "" then
+ options = settings_to_hash(options)
+ options[method] = nil
+ options = concat(sortedkeys(options),",")
+ if #options > 0 then
+ setattribute(di,"options",options)
+ end
+ end
+ end
+ resolve(di,element,n,fulltag)
+ end
+
+ implement {
+ name = "settagfloat",
+ actions = setfloat,
+ arguments = "2 strings",
+ }
+
+ structurestags.setfloat = setfloat
+
+end
+
+do
+
+ local registered = { }
+
+ local function setformulacontent(n)
+ registered[locatedtag("formulacontent")] = {
+ n = n,
+ }
+ end
+
+ function extras.formulacontent(di,element,n,fulltag)
+ local r = registered[fulltag]
+ if r then
+ setattribute(di,"n",r.n)
+ end
+ end
+
+ implement {
+ name = "settagformulacontent",
+ actions = setformulacontent,
+ arguments = "integer",
+ }
+
+ structurestags.setformulacontent = setformulacontent
+
+end
+
+do
+
+ local symbols = { }
+
+ local function settagdelimitedsymbol(symbol)
+ symbols[locatedtag("delimitedsymbol")] = {
+ symbol = symbol,
+ }
+ end
+
+ function extras.delimitedsymbol(di,element,n,fulltag)
+ local hash = symbols[fulltag]
+ if hash then
+ setattribute(di,"symbol",hash.symbol or nil)
+ end
+ end
+
+ implement {
+ name = "settagdelimitedsymbol",
+ actions = settagdelimitedsymbol,
+ arguments = "string"
+ }
+
+ structurestags.settagdelimitedsymbol = settagdelimitedsymbol
+
+end
+
+
+do
+
+ local symbols = { }
+
+ local function settagsubsentencesymbol(symbol)
+ symbols[locatedtag("subsentencesymbol")] = {
+ symbol = symbol,
+ }
+ end
+
+ function extras.subsentencesymbol(di,element,n,fulltag)
+ local hash = symbols[fulltag]
+ if hash then
+ setattribute(di,"symbol",hash.symbol or nil)
+ end
+ end
+
+ implement {
+ name = "settagsubsentencesymbol",
+ actions = settagsubsentencesymbol,
+ arguments = "string"
+ }
+
+ structurestags.settagsubsentencesymbol = settagsubsentencesymbol
+
+end
+
+do
+
+ local synonyms = { }
+ local sortings = { }
+
+ local function setsynonym(tag)
+ synonyms[locatedtag("synonym")] = tag
+ end
+
+ function extras.synonym(di,element,n,fulltag)
+ local tag = synonyms[fulltag]
+ if tag then
+ setattribute(di,"tag",tag)
+ end
+ end
+
+ local function setsorting(tag)
+ sortings[locatedtag("sorting")] = tag
+ end
+
+ function extras.sorting(di,element,n,fulltag)
+ local tag = sortings[fulltag]
+ if tag then
+ setattribute(di,"tag",tag)
+ end
+ end
+
+ implement {
+ name = "settagsynonym",
+ actions = setsynonym,
+ arguments = "string"
+ }
+
+ implement {
+ name = "settagsorting",
+ actions = setsorting,
+ arguments = "string"
+ }
+
+ structurestags.setsynonym = setsynonym
+ structurestags.setsorting = setsorting
+
+end
+
+do
+
+ local descriptions = { }
+ local symbols = { }
+ local linked = { }
+
+ -- we could move the notation itself to the first reference (can be an option)
+
+ local function setnotation(tag,n) -- needs checking (is tag needed)
+ -- we can also use the internals hash or list
+ local nd = structures.notes.get(tag,n)
+ if nd then
+ local references = nd.references
+ descriptions[references and references.internal] = locatedtag("description")
+ end
+ end
+
+ local function setnotationsymbol(tag,n) -- needs checking (is tag needed)
+ local nd = structures.notes.get(tag,n) -- todo: use listdata instead
+ if nd then
+ local references = nd.references
+ symbols[references and references.internal] = locatedtag("descriptionsymbol")
+ end
+ end
+
+ function finalizers.descriptions(tree)
+ local n = 0
+ for id, tag in sortedhash(descriptions) do
+ local sym = symbols[id]
+ if sym then
+ n = n + 1
+ linked[tag] = n
+ linked[sym] = n
+ end
+ end
+ end
+
+ function extras.description(di,element,n,fulltag)
+ local id = linked[fulltag]
+ if id then
+ setattribute(di,"insert",id)
+ end
+ end
+
+ function extras.descriptionsymbol(di,element,n,fulltag)
+ local id = linked[fulltag]
+ if id then
+ setattribute(di,"insert",id)
+ end
+ end
+
+ implement {
+ name = "settagnotation",
+ actions = setnotation,
+ arguments = { "string", "integer" }
+ }
+
+ implement {
+ name = "settagnotationsymbol",
+ actions = setnotationsymbol,
+ arguments = { "string", "integer" }
+ }
+
+ structurestags.setnotation = setnotation
+ structurestags.setnotationsymbol = setnotationsymbol
+
+end
+
+
+do
+
+ local strippedtag = structurestags.strip -- we assume global styles
+
+ local highlight = { }
+ local construct = { }
+
+ usedstyles.highlight = highlight
+ usedstyles.construct = construct
+
+ local function sethighlight(name,style,color,mode)
+ if not highlight[name] then
+ highlight[name] = {
+ style = style,
+ color = color,
+ mode = mode == 1 and "display" or nil,
+ }
+ end
+ end
+
+ local function setconstruct(name,style,color,mode)
+ if not construct[name] then
+ construct[name] = {
+ style = style,
+ color = color,
+ mode = mode == 1 and "display" or nil,
+ }
+ end
+ end
+
+ implement {
+ name = "settagconstruct",
+ actions = setconstruct,
+ arguments = { "string", "string", "integer", "integer" }
+ }
+
+ implement {
+ name = "settaghighlight",
+ actions = sethighlight,
+ arguments = { "string", "string", "integer", "integer" }
+ }
+
+ structurestags.sethighlight = sethighlight
+ structurestags.setconstruct = setconstruct
+
+end
+
+do
+
+ local f_id = formatters["%s-%s"]
+ local image = { }
+ usedimages.image = image
+
+ structurestags.usewithcare.images = image
+
+ local function setfigure(name,used,page,width,height,label)
+ local fulltag = locatedtag("image")
+ local spec = specifications[fulltag]
+ if spec then
+ local page = tonumber(page)
+ image[fulltag] = {
+ id = f_id(spec.tagname,spec.tagindex),
+ name = name,
+ used = used,
+ page = page and page > 1 and page or nil,
+ width = todimen(width, "cm","%0.3F%s"),
+ height = todimen(height,"cm","%0.3F%s"),
+ label = label,
+ }
+ else
+ -- we ignore images in layers in the background / pagebody
+ end
+ end
+
+ function extras.image(di,element,n,fulltag)
+ local data = image[fulltag]
+ if data then
+ setattribute(di,"name",data.name)
+ setattribute(di,"page",data.page)
+ setattribute(di,"id",data.id)
+ setattribute(di,"width",data.width)
+ setattribute(di,"height",data.height)
+            setattribute(di,"label",data.label)
+ end
+ end
+
+ implement {
+ name = "settagfigure",
+ actions = setfigure,
+ arguments = { "string", "string", "string", "dimen", "dimen", "string" }
+ }
+
+ structurestags.setfigure = setfigure
+
+end
+
+do
+
+ local combinations = { }
+
+ local function setcombination(nx,ny)
+ combinations[locatedtag("combination")] = {
+ nx = nx,
+ ny = ny,
+ }
+ end
+
+ function extras.combination(di,element,n,fulltag)
+ local data = combinations[fulltag]
+ if data then
+ setattribute(di,"nx",data.nx)
+ setattribute(di,"ny",data.ny)
+ end
+ end
+
+ implement {
+ name = "settagcombination",
+ actions = setcombination,
+ arguments = { "integer", "integer" }
+ }
+
+ structurestags.setcombination = setcombination
+
+end
+
+do
+
+ local function hascontent(data)
+ for i=1,#data do
+ local di = data[i]
+ if not di or di.tg == "ignore" then
+ --
+ else
+ local content = di.content
+ if content == " " then
+ --
+ elseif content then
+ return true
+ else
+ local d = di.data
+ if d and #d > 0 and hascontent(d) then
+ return true
+ end
+ end
+ end
+ end
+ end
+
+ local tabledata = { }
+
+ local function settablecell(rows,columns,align)
+ if align > 0 or rows > 1 or columns > 1 then -- or kind > 0
+ tabledata[locatedtag("tablecell")] = {
+ rows = rows,
+ columns = columns,
+ align = align,
+ }
+ end
+ end
+
+ local function gettablecell(fulltag)
+ return tabledata[fulltag]
+ end
+
+ function extras.tablecell(di,element,n,fulltag)
+ local hash = tabledata[fulltag]
+ if hash then
+ local columns = hash.columns
+ if columns and columns > 1 then
+ setattribute(di,"columns",columns)
+ end
+ local rows = hash.rows
+ if rows and rows > 1 then
+ setattribute(di,"rows",rows)
+ end
+ local align = hash.align
+ if not align or align == 0 then
+ -- normal
+ elseif align == 1 then -- use numbertoalign here
+ setattribute(di,"align","flushright")
+ elseif align == 2 then
+ setattribute(di,"align","middle")
+ elseif align == 3 then
+ setattribute(di,"align","flushleft")
+ end
+ end
+ end
+
+ local tabulatedata = { }
+
+ local function settabulatecell(align,kind)
+ if align > 0 or kind > 0 then
+ tabulatedata[locatedtag("tabulatecell")] = {
+ align = align,
+ kind = kind, -- 1 = bold head
+ }
+ end
+ end
+
+ local function gettabulatecell(fulltag)
+ return tabulatedata[fulltag]
+ end
+
+ function extras.tabulate(di,element,n,fulltag)
+ local data = di.data
+ for i=1,#data do
+ local di = data[i]
+ if di.tg == "tabulaterow" and not hascontent(di.data) then
+ di.element = "" -- or simply remove
+ end
+ end
+ end
+
+ function extras.tabulatecell(di,element,n,fulltag)
+ local hash = tabulatedata[fulltag]
+ if hash then
+ local align = hash.align
+ if not align or align == 0 then
+ -- normal
+ elseif align == 1 then
+ setattribute(di,"align","flushleft")
+ elseif align == 2 then
+ setattribute(di,"align","flushright")
+ elseif align == 3 then
+ setattribute(di,"align","middle")
+ end
+ local kind = hash.kind
+ if kind == 1 then
+ setattribute(di,"kind","strong")
+ elseif kind == 2 then
+ setattribute(di,"kind","equals")
+ end
+ end
+ end
+
+ implement {
+ name = "settagtablecell",
+ actions = settablecell,
+ arguments = { "integer", "integer", "integer" }
+ }
+
+ implement {
+ name = "settagtabulatecell",
+ actions = settabulatecell,
+ arguments = { "integer", "integer" },
+ }
+
+ structurestags.settablecell = settablecell
+ structurestags.gettablecell = gettablecell
+ structurestags.settabulatecell = settabulatecell
+ structurestags.gettabulatecell = gettabulatecell
+
+end
+
+do
+
+ -- todo: internal is already hashed
+
+ local p_stripper = lpeg.patterns.stripper
+
+ local function setregister(tag,n) -- check if tag is needed
+ local data = structures.registers.get(tag,n)
+ if data then
+ referencehash[locatedtag("registerlocation")] = data
+ end
+ end
+
+ function extras.registerlocation(di,element,n,fulltag)
+ local data = referencehash[fulltag]
+ if type(data) == "table" then
+ extras.addinternal(di,data.references)
+ return true
+ else
+ -- needs checking, probably bookmarks
+ end
+ end
+
+ function extras.registerpages(di,element,n,fulltag) -- ignorebreaks
+ local data = di.data
+ for i=1,#data do
+ local d = data[i]
+ if d.content == " " then
+ d.content = ""
+ end
+ end
+ end
+
+ function extras.registerseparator(di,element,n,fulltag) -- ignorespaces
+ local data = di.data
+ for i=1,#data do
+ local d = data[i]
+ local c = d.content
+ if type(c) == "string" then
+ d.content = lpegmatch(p_stripper,c)
+ end
+ end
+ end
+
+ implement {
+ name = "settagregister",
+ actions = setregister,
+ arguments = { "string", "integer" }
+ }
+
+ structurestags.setregister = setregister
+
+end
+
+do
+
+ -- todo: internal is already hashed
+
+ local function setlist(n)
+ local data = structures.lists.getresult(n)
+ if data then
+ referencehash[locatedtag("listitem")] = data
+ end
+ end
+
+ function extras.listitem(di,element,n,fulltag)
+ local data = referencehash[fulltag]
+ if data then
+ extras.addinternal(di,data.references)
+ return true
+ end
+ end
+
+ implement {
+ name = "settaglist",
+ actions = setlist,
+ arguments = "integer"
+ }
+
+ structurestags.setlist = setlist
+
+end
+
+do
+
+ local usedpublications = { }
+ local tagsindatasets = setmetatableindex("table")
+ local serialize = false
+
+ local function setpublication(dataset,tag,rendering)
+ usedpublications[locatedtag("publication")] = {
+ dataset = dataset,
+ tag = tag,
+ rendering = rendering
+ }
+ tagsindatasets[dataset][tag] = true
+ if not serialize then
+ structures.tags.registerextradata("btx",function()
+ local t = { "<btxdata>"}
+ for dataset, used in sortedhash(tagsindatasets) do
+ t[#t+1] = publications.converttoxml(dataset,true,false,true,false,true,true)
+ end
+ t[#t+1] = "</btxdata>"
+ return concat(t,"\n")
+ end)
+ end
+ end
+
+ function extras.publication(di,element,n,fulltag)
+ local hash = usedpublications[fulltag]
+ if hash then
+ setattribute(di,"dataset",hash.dataset)
+ setattribute(di,"tag",hash.tag)
+ end
+ end
+
+ implement {
+ name = "settagpublication",
+ actions = setpublication,
+ arguments = "2 strings"
+ }
+
+ structurestags.setpublication = setpublication
+
+end
+
+do
+
+ local usedparagraphs = { }
+
+ local function setparagraph(align)
+ if align ~= "" then
+ usedparagraphs[locatedtag("paragraph")] = {
+ align = align,
+ }
+ end
+ end
+
+ function extras.paragraph(di,element,n,fulltag)
+ local hash = usedparagraphs[fulltag]
+ if hash then
+ setattribute(di,"align",hash.align)
+ end
+ end
+
+ implement {
+ name = "settagparagraph",
+ actions = setparagraph,
+ arguments = "string"
+ }
+
+ structurestags.setparagraph = setparagraph
+
+end
+
+do
+
+ local marginanchors = { }
+ local margincontent = { }
+
+ function checks.margintext(di)
+ local i = marginanchors[di.fulltag]
+ margincontent[i] = di
+ end
+
+ function checks.marginanchor(di)
+ local i = marginanchors[di.fulltag]
+ local d = margincontent[i]
+ --
+ di.attribute = d.attribute
+ di.data = d.data
+ di.detail = d.detail
+ di.element = d.element
+ di.fulltag = d.fulltag
+ di.nature = d.nature
+ di.samepar = true
+ di.tg = d.tg
+ --
+ d.skip = "ignore"
+ end
+
+ implement {
+ name = "settagmargintext",
+ arguments = "integer",
+ actions = function(n)
+ marginanchors[locatedtag("margintext")] = n
+ end
+ }
+
+ implement {
+ name = "settagmarginanchor",
+ arguments = "integer",
+ actions = function(n)
+ marginanchors[locatedtag("marginanchor")] = n
+ end
+ }
+
+end
+
+do
+
+ function fixes.linenumber(di,data,i)
+ local ni = data[i+1]
+ if ni then
+ if ni.data then
+ while true do
+ local d = ni.data[1]
+ if d then
+ local e = d.element
+ if e then
+ if e == "line" or e == "verbatimline" then
+ insert(d.data,1,di)
+ data[i] = false
+ return
+ else
+ ni = d
+ end
+ else
+ return
+ end
+ else
+ return
+ end
+ end
+ end
+ end
+ end
+
+end
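+
+-- In other words: a linenumber entry that precedes a line (or verbatimline) element,
+-- possibly nested a few levels down, is moved to the front of that line's data and its
+-- original slot is cleared, so the number travels with the line it belongs to.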
+
diff --git a/tex/context/base/mkxl/back-exp.lmt b/tex/context/base/mkxl/back-exp.lmt
new file mode 100644
index 000000000..1375d2655
--- /dev/null
+++ b/tex/context/base/mkxl/back-exp.lmt
@@ -0,0 +1,2719 @@
+if not modules then modules = { } end modules ['back-exp'] = {
+ version = 1.001,
+ comment = "companion to back-exp.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Todo: share properties more with tagged pdf (or the reverse)
+
+-- Because we run into the 200 local limit we use quite a few do .. end wrappers .. not
+-- always that nice but it has to be.
+
+-- Experiments demonstrated that mapping to <div> and classes is messy because we have to
+-- package attributes (some 30) into one set of (space separated but prefixed classes)
+-- which only makes things worse .. so if you want something else, use xslt to get there.
+
+-- language -> only mainlanguage, local languages should happen through start/stoplanguage
+-- tocs/registers -> maybe add a stripper (i.e. just don't flush entries in final tree)
+-- footnotes -> css 3
+-- bodyfont -> in styles.css
+
+-- Because we need to look ahead we now always build a tree (this was optional in
+-- the beginning). The extra overhead in the frontend is negligible.
+--
+-- We can optimize the code ... currently the overhead is some 10% for xml + html so
+-- there is no hurry.
+
+-- todo: move critical formatters out of functions
+-- todo: delay loading (apart from basic tag stuff)
+
+-- problem : too many local variables
+
+-- check setting __i__
+
+local next, type, tonumber = next, type, tonumber
+local sub, gsub, match = string.sub, string.gsub, string.match
+local validstring = string.valid
+local lpegmatch = lpeg.match
+local utfchar, utfvalues, utflen = utf.char, utf.values, utf.len
+local concat, merge, sort, setmetatableindex = table.concat, table.merge, table.sort, table.setmetatableindex
+local sortedhash, sortedkeys = table.sortedhash, table.sortedkeys
+local formatters = string.formatters
+local todimen = number.todimen
+local replacetemplate = utilities.templates.replace
+local settings_to_array = utilities.parsers.settings_to_array
+
+local addsuffix, joinfile, nameonly, basename, filesuffix = file.addsuffix, file.join, file.nameonly, file.basename, file.suffix
+
+local trace_export = false trackers.register ("export.trace", function(v) trace_export = v end)
+local trace_spacing = false trackers.register ("export.trace.spacing", function(v) trace_spacing = v end)
+local trace_details = false trackers.register ("export.trace.details", function(v) trace_details = v end)
+
+local less_state = false directives.register("export.lessstate", function(v) less_state = v end)
+local show_comment = true directives.register("export.comment", function(v) show_comment = v end)
+
+-- maybe we will also support these:
+--
+-- local css_hyphens = false directives.register("export.css.hyphens", function(v) css_hyphens = v end)
+-- local css_textalign = false directives.register("export.css.textalign", function(v) css_textalign = v end)
+-- local css_bodyfontsize = false directives.register("export.css.bodyfontsize", function(v) css_bodyfontsize = v end)
+-- local css_textwidth = false directives.register("export.css.textwidth", function(v) css_textwidth = v end)
+
+local report_export = logs.reporter("backend","export")
+
+local nodes = nodes
+local attributes = attributes
+
+local variables = interfaces.variables
+local v_yes = variables.yes
+local v_no = variables.no
+local v_xml = variables.xml
+local v_hidden = variables.hidden
+
+local implement = interfaces.implement
+
+local included = backends.included
+
+local tasks = nodes.tasks
+local fontchar = fonts.hashes.characters
+local fontquads = fonts.hashes.quads
+local languagenames = languages.numbers
+
+local texgetcount = tex.getcount
+
+local references = structures.references
+local structurestags = structures.tags
+local taglist = structurestags.taglist
+local specifications = structurestags.specifications
+local properties = structurestags.properties
+local locatedtag = structurestags.locatedtag
+
+structurestags.usewithcare = { }
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+local characterdata = characters.data
+local overloads = fonts.mappings.overloads
+
+-- todo: more locals (and optimize)
+
+local exportversion = "0.35"
+local mathmlns = "http://www.w3.org/1998/Math/MathML"
+local contextns = "http://www.contextgarden.net/context/export" -- whatever suits
+local cssnamespaceurl = "@namespace context url('%namespace%') ;"
+local cssnamespace = "context|"
+----- cssnamespacenop = "/* no namespace */"
+
+local usecssnamespace = false
+
+local nofcurrentcontent = 0 -- so we don't free (less garbage collection)
+local currentcontent = { }
+local currentnesting = nil
+local currentattribute = nil
+local last = nil
+local currentparagraph = nil
+
+local noftextblocks = 0
+
+----- hyphencode = 0xAD
+local hyphen = utfchar(0xAD) -- todo: also emdash etc
+local tagsplitter = structurestags.patterns.splitter
+----- colonsplitter = lpeg.splitat(":")
+----- dashsplitter = lpeg.splitat("-")
+local threshold = 65536
+local indexing = false
+local keephyphens = false
+local exportproperties = false
+
+local finetuning = { }
+
+local treestack = { }
+local nesting = { }
+local currentdepth = 0
+
+local wrapups = { }
+
+local tree = { data = { }, fulltag = "root" } -- root
+local treeroot = tree
+local treehash = { }
+local extras = { }
+local checks = { }
+local fixes = { }
+local finalizers = { }
+local nofbreaks = 0
+local used = { }
+local exporting = false
+local restart = false
+local specialspaces = { [0x20] = " " } -- for conversion
+local somespace = { [0x20] = true, [" "] = true } -- for testing
+local entities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;" }
+local attribentities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;", ['"'] = "&quot;" }
+
+local p_entity = lpeg.replacer(entities) -- was: entityremapper = utf.remapper(entities)
+local p_attribute = lpeg.replacer(attribentities)
+local p_escaped = lpeg.patterns.xml.escaped
+
+local f_tagid = formatters["%s-%04i"]
+
+-- local alignmapping = {
+-- flushright = "right",
+-- middle = "center",
+-- flushleft = "left",
+-- }
+
+local defaultnature = "mixed" -- "inline"
+
+setmetatableindex(used, function(t,k)
+ if k then
+ local v = { }
+ t[k] = v
+ return v
+ end
+end)
+
+local f_entity = formatters["&#x%X;"]
+local f_attribute = formatters[" %s=%q"]
+local f_property = formatters[" %s%s=%q"]
+
+setmetatableindex(specialspaces, function(t,k)
+ local v = utfchar(k)
+ t[k] = v
+ entities[v] = f_entity(k)
+ somespace[k] = true
+ somespace[v] = true
+ return v
+end)
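+
+-- A small illustration: the first lookup of, say, specialspaces[0x00A0] registers that
+-- character as a space, so afterwards
+--
+--   specialspaces[0x00A0]        -- the nbsp character itself
+--   entities[utfchar(0x00A0)]    -- "&#xA0;"
+--   somespace[0x00A0]            -- true (and the same for the utf character)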
+
+
+local namespaced = {
+ -- filled on
+}
+
+local namespaces = {
+ msubsup = "m",
+ msub = "m",
+ msup = "m",
+ mn = "m",
+ mi = "m",
+ ms = "m",
+ mo = "m",
+ mtext = "m",
+ mrow = "m",
+ mfrac = "m",
+ mroot = "m",
+ msqrt = "m",
+ munderover = "m",
+ munder = "m",
+ mover = "m",
+ merror = "m",
+ math = "m",
+ mtable = "m",
+ mtr = "m",
+ mtd = "m",
+ mfenced = "m",
+ maction = "m",
+ mspace = "m",
+ -- only when testing
+ mstacker = "m",
+ mstackertop = "m",
+ mstackermid = "m",
+ mstackerbot = "m",
+}
+
+setmetatableindex(namespaced, function(t,k)
+ if k then
+ local namespace = namespaces[k]
+ local v = namespace and namespace .. ":" .. k or k
+ t[k] = v
+ return v
+ end
+end)
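+
+-- So namespaced.mi resolves (and caches) to "m:mi" while a non math tag like
+-- namespaced.paragraph just stays "paragraph".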
+
+local function attribute(key,value)
+ if value and value ~= "" then
+ return f_attribute(key,lpegmatch(p_attribute,value))
+ else
+ return ""
+ end
+end
+
+local function setattribute(di,key,value,escaped)
+ if value and value ~= "" then
+ local a = di.attributes
+ if escaped then
+ value = lpegmatch(p_escaped,value)
+ end
+ if not a then
+ di.attributes = { [key] = value }
+ else
+ a[key] = value
+ end
+ end
+end
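+
+-- Usage sketch: the extras.* handlers below call this to decorate the element at hand,
+-- for instance
+--
+--   setattribute(di,"align","flushright")
+--
+-- which the flusher later renders as align="flushright" on the opening tag.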
+
+local listdata = { } -- this has to be done otherwise: each element can just point back to ...
+
+function wrapups.hashlistdata()
+ local c = structures.lists.collected
+ for i=1,#c do
+ local ci = c[i]
+ local tag = ci.references.tag
+ if tag then
+ local m = ci.metadata
+ local t = m.kind .. ">" .. tag -- todo: use internal (see strc-lst.lua where it's set)
+ listdata[t] = ci
+ end
+ end
+end
+
+function structurestags.setattributehash(attr,key,value) -- public hash
+ local specification = taglist[attr]
+ if specification then
+ specification[key] = value
+ else
+ -- some kind of error
+ end
+end
+
+local usedstyles = { }
+local usedimages = { }
+local referencehash = { } -- move ?
+local destinationhash = { } -- move ?
+
+structurestags.backend = {
+ setattribute = setattribute,
+ extras = extras,
+ checks = checks,
+ fixes = fixes,
+ listdata = listdata,
+ finalizers = finalizers,
+ usedstyles = usedstyles,
+ usedimages = usedimages,
+ referencehash = referencehash,
+ destinationhash = destinationhash,
+}
+
+local namespacetemplate = [[
+/* %what% for file %filename% */
+
+%cssnamespaceurl%
+]]
+
+do
+
+ -- experiment: styles and images
+ --
+ -- officially we should convert to bp but we round anyway
+
+ -- /* padding : ; */
+ -- /* text-justify : inter-word ; */
+ -- /* text-align : justify ; */
+
+local documenttemplate = [[
+document,
+%namespace%div.document {
+ font-size : %size% !important ;
+ max-width : %width% !important ;
+ text-align : %align% !important ;
+ hyphens : %hyphens% !important ;
+}]]
+
+local styletemplate = [[
+%element%[detail="%detail%"],
+%namespace%div.%element%.%detail% {
+ display : inline ;
+ font-style : %style% ;
+ font-variant : %variant% ;
+ font-weight : %weight% ;
+ font-family : %family% ;
+ color : %color% ;
+}]]
+
+    local numbertoalign = {
+ [0] = "justify", ["0"] = "justify", [variables.normal ] = "justify",
+ [1] = "right", ["1"] = "right", [variables.flushright] = "right",
+ [2] = "center", ["2"] = "center", [variables.middle ] = "center",
+ [3] = "left", ["3"] = "left", [variables.flushleft ] = "left",
+ }
+
+ function wrapups.allusedstyles(filename)
+ local result = { replacetemplate(namespacetemplate, {
+ what = "styles",
+ filename = filename,
+ namespace = contextns,
+ -- cssnamespaceurl = usecssnamespace and cssnamespaceurl or cssnamespacenop,
+ cssnamespaceurl = cssnamespaceurl,
+ },false,true) }
+ --
+ local bodyfont = finetuning.bodyfont
+ local width = finetuning.width
+ local hyphen = finetuning.hyphen
+ local align = finetuning.align
+ --
+ if type(bodyfont) == "number" then
+ bodyfont = todimen(bodyfont)
+ else
+ bodyfont = "12pt"
+ end
+ if type(width) == "number" then
+ width = todimen(width) or "50em"
+ else
+ width = "50em"
+ end
+ if hyphen == v_yes then
+ hyphen = "manual"
+ else
+ hyphen = "inherited"
+ end
+ if align then
+            align = numbertoalign[align]
+ end
+ if not align then
+ align = hyphen and "justify" or "inherited"
+ end
+ --
+ result[#result+1] = replacetemplate(documenttemplate,{
+ size = bodyfont,
+ width = width,
+ align = align,
+ hyphens = hyphen
+ })
+ --
+ local colorspecification = xml.css.colorspecification
+ local fontspecification = xml.css.fontspecification
+ for element, details in sortedhash(usedstyles) do
+ for detail, data in sortedhash(details) do
+ local s = fontspecification(data.style)
+ local c = colorspecification(data.color)
+ detail = gsub(detail,"[^A-Za-z0-9]+","-")
+ result[#result+1] = replacetemplate(styletemplate,{
+ namespace = usecssnamespace and cssnamespace or "",
+ element = element,
+ detail = detail,
+ style = s.style or "inherit",
+ variant = s.variant or "inherit",
+ weight = s.weight or "inherit",
+ family = s.family or "inherit",
+ color = c or "inherit",
+ display = s.display and "block" or nil,
+ })
+ end
+ end
+ return concat(result,"\n\n")
+ end
+
+end
+
+do
+
+local imagetemplate = [[
+%element%[id="%id%"], %namespace%div.%element%[id="%id%"] {
+ display : block ;
+ background-image : url('%url%') ;
+ background-size : 100%% auto ;
+ background-repeat : no-repeat ;
+ width : %width% ;
+ height : %height% ;
+}]]
+
+ local f_svgname = formatters["%s.svg"]
+ local f_svgpage = formatters["%s-page-%s.svg"]
+ local collected = { }
+
+ local function usedname(name,page)
+ if filesuffix(name) == "pdf" then
+ -- temp hack .. we will have a remapper
+ if page and page > 1 then
+ name = f_svgpage(nameonly(name),page)
+ else
+ name = f_svgname(nameonly(name))
+ end
+ end
+ local scheme = url.hasscheme(name)
+ if not scheme or scheme == "file" then
+ -- or can we just use the name ?
+ return joinfile("../images",basename(url.filename(name)))
+ else
+ return name
+ end
+ end
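+
+    -- A few examples of what this remapping does (the file names are made up):
+    --
+    --   usedname("whatever.pdf",1)              -- "../images/whatever.svg"
+    --   usedname("whatever.pdf",3)              -- "../images/whatever-page-3.svg"
+    --   usedname("http://example.org/logo.png") -- left untouched (non file scheme)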
+
+ function wrapups.allusedimages(filename)
+ local result = { replacetemplate(namespacetemplate, {
+ what = "images",
+ filename = filename,
+ namespace = contextns,
+ -- cssnamespaceurl = usecssnamespace and cssnamespaceurl or "",
+ cssnamespaceurl = cssnamespaceurl,
+ },false,true) }
+ for element, details in sortedhash(usedimages) do
+ for detail, data in sortedhash(details) do
+ local name = data.name
+ local page = tonumber(data.page) or 1
+ local spec = {
+ element = element,
+ id = data.id,
+ name = name,
+ page = page,
+ url = usedname(name,page),
+ width = data.width,
+ height = data.height,
+ used = data.used,
+ namespace = usecssnamespace and cssnamespace or "",
+ }
+ result[#result+1] = replacetemplate(imagetemplate,spec)
+ collected[detail] = spec
+ end
+ end
+ return concat(result,"\n\n")
+ end
+
+ function wrapups.uniqueusedimages() -- todo: combine these two
+ return collected
+ end
+
+end
+
+--
+
+properties.vspace = { export = "break", nature = "display" }
+----------------- = { export = "pagebreak", nature = "display" }
+
+local function makebreaklist(list)
+ nofbreaks = nofbreaks + 1
+ local t = { }
+ local l = list and list.taglist
+ if l then
+        for i=1,#l do
+ t[i] = l[i]
+ end
+ end
+ t[#t+1] = "break>" .. nofbreaks -- maybe no number or 0
+ return { taglist = t }
+end
+
+local breakattributes = {
+ type = "collapse"
+}
+
+local function makebreaknode(attributes) -- maybe no fulltag
+ nofbreaks = nofbreaks + 1
+ return {
+ tg = "break",
+ fulltag = "break>" .. nofbreaks,
+ n = nofbreaks,
+ element = "break",
+ nature = "display",
+ attributes = attributes or nil,
+ -- data = { }, -- not needed
+ -- attribute = 0, -- not needed
+ -- parnumber = 0,
+ }
+end
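+
+-- For the record: a call like makebreaknode(breakattributes) gives a minimal display node
+--
+--   { tg = "break", element = "break", nature = "display", fulltag = "break>n", ... }
+--
+-- which the flusher turns into an empty <break type="collapse"/> element.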
+
+do
+
+ local fields = { "title", "subtitle", "author", "keywords", "url", "version" }
+
+ local ignoredelements = false
+
+ local function checkdocument(root)
+ local data = root.data
+ if data then
+ for i=1,#data do
+ local di = data[i]
+ local tg = di.tg
+ if tg == "noexport" then
+ local s = specifications[di.fulltag]
+ local u = s and s.userdata
+ if u then
+ local comment = u.comment
+ if comment then
+ di.element = "comment"
+ di.data = { { content = comment } }
+ u.comment = nil
+ else
+ data[i] = false
+ end
+ else
+ data[i] = false
+ end
+ elseif di.content then
+ -- okay
+ elseif tg == "ignore" then
+ di.element = ""
+ checkdocument(di)
+ elseif ignoredelements and ignoredelements[tg] then
+ di.element = ""
+ checkdocument(di)
+ else
+ checkdocument(di) -- new, else no noexport handling
+ end
+ end
+ end
+ end
+
+ function extras.document(di,element,n,fulltag)
+ setattribute(di,"language",languagenames[texgetcount("mainlanguagenumber")])
+ if not less_state then
+ setattribute(di,"file",tex.jobname)
+ if included.date then
+ setattribute(di,"date",os.fulltime())
+ end
+ setattribute(di,"context",environment.version)
+ setattribute(di,"version",exportversion)
+ setattribute(di,"xmlns:m",mathmlns)
+ local identity = interactions.general.getidentity()
+ for i=1,#fields do
+ local key = fields[i]
+ local value = identity[key]
+ if value and value ~= "" then
+ setattribute(di,key,value)
+ end
+ end
+ end
+ checkdocument(di)
+ end
+
+ implement {
+ name = "ignoretagsinexport",
+ arguments = "string",
+ actions = function(list)
+ for tag in string.gmatch(list,"[a-z]+") do
+ if ignoredelements then
+ ignoredelements[tag] = true
+ else
+ ignoredelements = { [tag] = true }
+ end
+ end
+ end,
+ }
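+
+    -- Usage sketch (example tag names): interfaces.implement exposes this at the tex end
+    -- as \clf_ignoretagsinexport, so something like
+    --
+    --   \clf_ignoretagsinexport{verbatim,sectionnumber}
+    --
+    -- makes checkdocument clear the element name for those tags so they get skipped when
+    -- the tree is flushed.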
+
+end
+
+-- flusher
+
+do
+
+ local f_detail = formatters[' detail="%s"']
+ local f_chain = formatters[' chain="%s"']
+ local f_index = formatters[' n="%s"']
+ local f_spacing = formatters['<c p="%s">%s</c>']
+
+ local f_empty_inline = formatters["<%s/>"]
+ local f_empty_mixed = formatters["%w<%s/>\n"]
+ local f_empty_display = formatters["\n%w<%s/>\n"]
+ local f_empty_inline_attr = formatters["<%s%s/>"]
+ local f_empty_mixed_attr = formatters["%w<%s%s/>"]
+ local f_empty_display_attr = formatters["\n%w<%s%s/>\n"]
+
+ local f_begin_inline = formatters["<%s>"]
+ local f_begin_mixed = formatters["%w<%s>"]
+ local f_begin_display = formatters["\n%w<%s>\n"]
+ local f_begin_inline_attr = formatters["<%s%s>"]
+ local f_begin_mixed_attr = formatters["%w<%s%s>"]
+ local f_begin_display_attr = formatters["\n%w<%s%s>\n"]
+
+ local f_end_inline = formatters["</%s>"]
+ local f_end_mixed = formatters["</%s>\n"]
+ local f_end_display = formatters["%w</%s>\n"]
+
+ local f_begin_inline_comment = formatters["<!-- %s --><%s>"]
+ local f_begin_mixed_comment = formatters["%w<!-- %s --><%s>"]
+ local f_begin_display_comment = formatters["\n%w<!-- %s -->\n%w<%s>\n"]
+ local f_begin_inline_attr_comment = formatters["<!-- %s --><%s%s>"]
+ local f_begin_mixed_attr_comment = formatters["%w<!-- %s --><%s%s>"]
+ local f_begin_display_attr_comment = formatters["\n%w<!-- %s -->\n%w<%s%s>\n"]
+
+ local f_comment_begin_inline = formatters["<!-- begin %s -->"]
+ local f_comment_begin_mixed = formatters["%w<!-- begin %s -->"]
+ local f_comment_begin_display = formatters["\n%w<!-- begin %s -->\n"]
+
+ local f_comment_end_inline = formatters["<!-- end %s -->"]
+ local f_comment_end_mixed = formatters["<!-- end %s -->\n"]
+ local f_comment_end_display = formatters["%w<!-- end %s -->\n"]
+
+ local f_metadata_begin = formatters["\n%w<metadata>\n"]
+ local f_metadata = formatters["%w<metavariable name=%q>%s</metavariable>\n"]
+ local f_metadata_end = formatters["%w</metadata>\n"]
+
+ local function attributes(a)
+ local r = { }
+ local n = 0
+ for k, v in next, a do
+ n = n + 1
+ r[n] = f_attribute(k,tostring(v)) -- tostring because of %q
+ end
+ sort(r)
+ return concat(r,"")
+ end
+
+ local function properties(a)
+ local r = { }
+ local n = 0
+ for k, v in next, a do
+ n = n + 1
+ r[n] = f_property(exportproperties,k,tostring(v)) -- tostring because of %q
+ end
+ sort(r)
+ return concat(r,"")
+ end
+
+ local depth = 0
+ local inline = 0
+
+ local function emptytag(result,element,nature,di) -- currently only break but at some point
+ local a = di.attributes -- we might add detail etc
+ if a then -- happens seldom
+ if nature == "display" then
+ result[#result+1] = f_empty_display_attr(depth,namespaced[element],attributes(a))
+ elseif nature == "mixed" then
+ result[#result+1] = f_empty_mixed_attr(depth,namespaced[element],attributes(a))
+ else
+ result[#result+1] = f_empty_inline_attr(namespaced[element],attributes(a))
+ end
+ else
+ if nature == "display" then
+ result[#result+1] = f_empty_display(depth,namespaced[element])
+ elseif nature == "mixed" then
+ result[#result+1] = f_empty_mixed(depth,namespaced[element])
+ else
+ result[#result+1] = f_empty_inline(namespaced[element])
+ end
+ end
+ end
+
+ -- local function stripspaces(di)
+ -- local d = di.data
+ -- local n = #d
+ -- local m = 0
+ -- for i=1,n do
+ -- local di = d[i]
+ -- if di.tg then
+ -- m = m + 1
+ -- d[m] = di
+ -- end
+ -- end
+ -- for i=n,m+1,-1 do
+ -- d[i] = nil
+ -- end
+ -- end
+ --
+ -- -- simpler:
+
+ local function stripspaces(di)
+ local d = di.data
+ for i=1,#d do
+ local di = d[i]
+ if not di.tg then
+ di.content = ""
+ end
+ end
+ end
+
+ local function begintag(result,element,nature,di,skip)
+ local index = di.n
+ local fulltag = di.fulltag
+ local specification = specifications[fulltag] or { } -- we can have a dummy
+ local comment = di.comment
+ local detail = specification.detail
+ if skip == "comment" then
+ if show_comment then
+ if nature == "inline" or inline > 0 then
+ result[#result+1] = f_comment_begin_inline(namespaced[element])
+ inline = inline + 1
+ elseif nature == "mixed" then
+ result[#result+1] = f_comment_begin_mixed(depth,namespaced[element])
+ depth = depth + 1
+ inline = 1
+ else
+ result[#result+1] = f_comment_begin_display(depth,namespaced[element])
+ depth = depth + 1
+ end
+ end
+ elseif skip then
+ -- ignore
+ else
+
+ local n = 0
+ local r = { } -- delay this
+ if detail then
+ detail = gsub(detail,"[^A-Za-z0-9]+","-")
+            specification.detail = detail -- we use it later on for the div
+ n = n + 1
+ r[n] = f_detail(detail)
+ end
+ local parents = specification.parents
+ if parents then
+ parents = gsub(parents,"[^A-Za-z0-9 ]+","-")
+            specification.parents = parents -- we use it later on for the div
+ n = n + 1
+ r[n] = f_chain(parents)
+ end
+ if indexing and index then
+ n = n + 1
+ r[n] = f_index(index)
+ end
+ --
+ local extra = extras[element]
+ if extra then
+ extra(di,element,index,fulltag)
+ end
+ --
+ if di.record then
+ stripspaces(di)
+ end
+ --
+ if exportproperties then
+ local p = specification.userdata
+ if not p then
+ -- skip
+ elseif exportproperties == v_yes then
+ n = n + 1
+ r[n] = attributes(p)
+ else
+ n = n + 1
+ r[n] = properties(p)
+ end
+ end
+ local a = di.attributes
+ if a then
+ if trace_spacing then
+ a.p = di.parnumber or 0
+ end
+ n = n + 1
+ r[n] = attributes(a)
+ elseif trace_spacing then
+ n = n + 1
+ r[n] = attributes { p = di.parnumber or 0 }
+ end
+ if n == 0 then
+ if nature == "inline" or inline > 0 then
+ if show_comment and comment then
+ result[#result+1] = f_begin_inline_comment(comment,namespaced[element])
+ else
+ result[#result+1] = f_begin_inline(namespaced[element])
+ end
+ inline = inline + 1
+ elseif nature == "mixed" then
+ if show_comment and comment then
+ result[#result+1] = f_begin_mixed_comment(depth,comment,namespaced[element])
+ else
+ result[#result+1] = f_begin_mixed(depth,namespaced[element])
+ end
+ depth = depth + 1
+ inline = 1
+ else
+ if show_comment and comment then
+ result[#result+1] = f_begin_display_comment(depth,comment,depth,namespaced[element])
+ else
+ result[#result+1] = f_begin_display(depth,namespaced[element])
+ end
+ depth = depth + 1
+ end
+ else
+ r = concat(r,"",1,n)
+ if nature == "inline" or inline > 0 then
+ if show_comment and comment then
+ result[#result+1] = f_begin_inline_attr_comment(comment,namespaced[element],r)
+ else
+ result[#result+1] = f_begin_inline_attr(namespaced[element],r)
+ end
+ inline = inline + 1
+ elseif nature == "mixed" then
+ if show_comment and comment then
+ result[#result+1] = f_begin_mixed_attr_comment(depth,comment,namespaced[element],r)
+ else
+ result[#result+1] = f_begin_mixed_attr(depth,namespaced[element],r)
+ end
+ depth = depth + 1
+ inline = 1
+ else
+ if show_comment and comment then
+ result[#result+1] = f_begin_display_attr_comment(depth,comment,depth,namespaced[element],r)
+ else
+ result[#result+1] = f_begin_display_attr(depth,namespaced[element],r)
+ end
+ depth = depth + 1
+ end
+ end
+ end
+ used[element][detail or ""] = { nature, specification.parents } -- for template css
+ -- also in last else ?
+ local metadata = specification.metadata
+ if metadata then
+ result[#result+1] = f_metadata_begin(depth)
+ for k, v in table.sortedpairs(metadata) do
+ if v ~= "" then
+ result[#result+1] = f_metadata(depth+1,k,lpegmatch(p_entity,v))
+ end
+ end
+ result[#result+1] = f_metadata_end(depth)
+ end
+ end
+
+ local function endtag(result,element,nature,di,skip)
+ if skip == "comment" then
+ if show_comment then
+ if nature == "display" and (inline == 0 or inline == 1) then
+ depth = depth - 1
+ result[#result+1] = f_comment_end_display(depth,namespaced[element])
+ inline = 0
+ elseif nature == "mixed" and (inline == 0 or inline == 1) then
+ depth = depth - 1
+ result[#result+1] = f_comment_end_mixed(namespaced[element])
+ inline = 0
+ else
+ inline = inline - 1
+ result[#result+1] = f_comment_end_inline(namespaced[element])
+ end
+ end
+ elseif skip then
+ -- ignore
+ else
+ if nature == "display" and (inline == 0 or inline == 1) then
+ depth = depth - 1
+ result[#result+1] = f_end_display(depth,namespaced[element])
+ inline = 0
+ elseif nature == "mixed" and (inline == 0 or inline == 1) then
+ depth = depth - 1
+ result[#result+1] = f_end_mixed(namespaced[element])
+ inline = 0
+ else
+ inline = inline - 1
+ result[#result+1] = f_end_inline(namespaced[element])
+ end
+ end
+ end
+
+ local function flushtree(result,data,nature)
+ local nofdata = #data
+ for i=1,nofdata do
+ local di = data[i]
+ if not di then -- hm, di can be string
+ -- whatever
+ else
+ local content = di.content
+ -- also optimize for content == "" : trace that first
+ if content then
+ -- already has breaks
+ local content = lpegmatch(p_entity,content)
+ if i == nofdata and sub(content,-1) == "\n" then -- move check
+ -- can be an end of line in par but can also be the last line
+ if trace_spacing then
+ result[#result+1] = f_spacing(di.parnumber or 0,sub(content,1,-2))
+ else
+ result[#result+1] = sub(content,1,-2)
+ end
+ result[#result+1] = " "
+ else
+ if trace_spacing then
+ result[#result+1] = f_spacing(di.parnumber or 0,content)
+ else
+ result[#result+1] = content
+ end
+ end
+ elseif not di.collapsed then -- ignore collapsed data (is appended, reconstructed par)
+ local element = di.element
+ if not element then
+ -- skip
+ elseif element == "break" then -- or element == "pagebreak"
+ emptytag(result,element,nature,di)
+ elseif element == "" or di.skip == "ignore" then
+ -- skip
+ else
+ if di.before then
+ flushtree(result,di.before,nature)
+ end
+ local natu = di.nature
+ local skip = di.skip
+ if di.breaknode then
+ emptytag(result,"break","display",di)
+ end
+ begintag(result,element,natu,di,skip)
+ flushtree(result,di.data,natu)
+ endtag(result,element,natu,di,skip)
+ if di.after then
+ flushtree(result,di.after,nature)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ local function breaktree(tree,parent,parentelement) -- also removes double breaks
+ local data = tree.data
+ if data then
+ local nofdata = #data
+ local prevelement
+ local prevnature
+ local prevparnumber
+ local newdata = { }
+ local nofnewdata = 0
+ for i=1,nofdata do
+ local di = data[i]
+ if not di then
+ -- skip
+ elseif di.skip == "ignore" then
+ -- skip (new)
+elseif di.tg == "ignore" then
+ -- skip (new)
+ elseif di.content then
+ if di.samepar then
+ prevparnumber = false
+ else
+ local parnumber = di.parnumber
+ if prevnature == "inline" and prevparnumber and prevparnumber ~= parnumber then
+ nofnewdata = nofnewdata + 1
+ if trace_spacing then
+ newdata[nofnewdata] = makebreaknode { type = "a", p = prevparnumber, n = parnumber }
+ else
+ newdata[nofnewdata] = makebreaknode()
+ end
+ end
+ prevelement = nil
+ prevparnumber = parnumber
+ end
+ prevnature = "inline"
+ nofnewdata = nofnewdata + 1
+ newdata[nofnewdata] = di
+ elseif not di.collapsed then
+ local element = di.element
+ if element == "break" then -- or element == "pagebreak"
+ if prevelement == "break" then
+ di.element = ""
+ end
+ prevelement = element
+ prevnature = "display"
+ nofnewdata = nofnewdata + 1
+ newdata[nofnewdata] = di
+ elseif element == "" or di.skip == "ignore" then
+ -- skip
+ else
+ if di.samepar then
+ prevnature = "inline"
+ prevparnumber = false
+ else
+ local nature = di.nature
+ local parnumber = di.parnumber
+ if prevnature == "inline" and nature == "inline" and prevparnumber and prevparnumber ~= parnumber then
+ nofnewdata = nofnewdata + 1
+ if trace_spacing then
+ newdata[nofnewdata] = makebreaknode { type = "b", p = prevparnumber, n = parnumber }
+ else
+ newdata[nofnewdata] = makebreaknode()
+ end
+ end
+ prevnature = nature
+ prevparnumber = parnumber
+ end
+ prevelement = element
+ breaktree(di,tree,element)
+ nofnewdata = nofnewdata + 1
+ newdata[nofnewdata] = di
+ end
+ else
+ if di.samepar then
+ prevnature = "inline"
+ prevparnumber = false
+ else
+ local nature = di.nature
+ local parnumber = di.parnumber
+ if prevnature == "inline" and nature == "inline" and prevparnumber and prevparnumber ~= parnumber then
+ nofnewdata = nofnewdata + 1
+ if trace_spacing then
+ newdata[nofnewdata] = makebreaknode { type = "c", p = prevparnumber, n = parnumber }
+ else
+ newdata[nofnewdata] = makebreaknode()
+ end
+ end
+ prevnature = nature
+ prevparnumber = parnumber
+ end
+ nofnewdata = nofnewdata + 1
+ newdata[nofnewdata] = di
+ end
+ end
+ tree.data = newdata
+ end
+ end
+
+ -- also tabulaterow reconstruction .. maybe better as a checker
+ -- i.e cell attribute
+
+ local function collapsetree(tree)
+-- for tag, trees in sortedhash(treehash) do
+ for tag, trees in next, treehash do
+ local d = trees[1].data
+-- print("!!!!!!!!",tag)
+-- inspect(trees)
+ if d then
+ local nd = #d
+ if nd > 0 then
+ for i=2,#trees do
+ local currenttree = trees[i]
+ local currentdata = currenttree.data
+ local currentpar = currenttree.parnumber
+ local previouspar = trees[i-1].parnumber
+ currenttree.collapsed = true
+ -- is the next ok?
+ if previouspar == 0 or not (di and di.content) then
+ previouspar = nil -- no need anyway so no further testing needed
+ end
+ for j=1,#currentdata do
+ local cd = currentdata[j]
+ if not cd or cd == "" then
+ -- skip
+ elseif cd.skip == "ignore" then
+ -- skip
+ elseif cd.content then
+ if not currentpar then
+ -- add space ?
+ elseif not previouspar then
+ -- add space ?
+ elseif currentpar ~= previouspar then
+ nd = nd + 1
+ if trace_spacing then
+ d[nd] = makebreaknode { type = "d", p = previouspar, n = currentpar }
+ else
+ d[nd] = makebreaknode()
+ end
+ end
+ previouspar = currentpar
+ nd = nd + 1
+ d[nd] = cd
+ else
+ nd = nd + 1
+ d[nd] = cd
+ end
+ currentdata[j] = false
+ end
+ end
+ end
+ end
+ end
+ end
+
+ local function finalizetree(tree)
+ for _, finalizer in next, finalizers do
+ finalizer(tree)
+ end
+ end
+
+ -- local function showtree(data,when,where)
+ -- if data then
+ -- for i=1,#data do
+ -- local d = data[i]
+ -- if type(d) == "table" and d.element then
+ -- print(when,where,i,d.element,d.parnumber or 0)
+ -- end
+ -- end
+ -- end
+ -- end
+
+ local function indextree(tree)
+ local data = tree.data
+ if data then
+ local n, new = 0, { }
+ -- showtree(data,"before","index")
+ for i=1,#data do
+ local d = data[i]
+ if not d then
+ -- skip
+ elseif d.content then
+ n = n + 1
+ new[n] = d
+ elseif not d.collapsed then
+ n = n + 1
+ d.__i__ = n
+ d.__p__ = tree
+ indextree(d)
+ new[n] = d
+ end
+ end
+ tree.data = new
+ -- showtree(new,"after","index")
+ end
+ end
+
+ local function checktree(tree)
+ local data = tree.data
+ if data then
+ -- showtree(data,"before","check")
+ for i=1,#data do
+ local d = data[i]
+ if type(d) == "table" then
+ local check = checks[d.tg]
+ if check then
+ check(d,data,i)
+ end
+ checktree(d) -- so parts can pass twice
+ end
+ end
+ -- showtree(data,"after","check")
+ end
+ end
+
+ local function fixtree(tree)
+ local data = tree.data
+ if data then
+ -- showtree(data,"before","fix")
+ for i=1,#data do
+ local d = data[i]
+ if type(d) == "table" then
+ local fix = fixes[d.tg]
+ if fix then
+ fix(d,data,i)
+ end
+ fixtree(d) -- so parts can pass twice
+ end
+ end
+ -- showtree(data,"after","fix")
+ end
+ end
+
+ wrapups.flushtree = flushtree
+ wrapups.breaktree = breaktree
+ wrapups.collapsetree = collapsetree
+ wrapups.finalizetree = finalizetree
+ wrapups.indextree = indextree
+ wrapups.checktree = checktree
+ wrapups.fixtree = fixtree
+
+end
+
+-- collector code
+
+local function push(fulltag,depth)
+ local tg, n, detail, element, nature, record
+ local specification = specifications[fulltag]
+ if specification then
+ tg = specification.tagname
+ n = specification.tagindex
+ detail = specification.detail
+ else
+ -- a break (more efficient if we don't store those in specifications)
+ tg, n = lpegmatch(tagsplitter,fulltag)
+ n = tonumber(n) -- to tonumber in tagsplitter
+ end
+ local p = properties[tg]
+ if p then
+ element = p.export or tg
+ nature = p.nature or "inline" -- defaultnature
+ record = p.record
+ end
+ local treedata = tree.data
+ local t = { -- maybe we can use the tag table
+ tg = tg,
+ fulltag = fulltag,
+ detail = detail,
+ n = n, -- already a number
+ element = element,
+ nature = nature,
+ data = { },
+ attribute = currentattribute,
+ parnumber = currentparagraph,
+ record = record, -- we can consider storing properties
+ }
+ treedata[#treedata+1] = t
+ currentdepth = currentdepth + 1
+ nesting[currentdepth] = fulltag
+ treestack[currentdepth] = tree
+ if trace_export then
+ if detail and detail ~= "" then
+ report_export("%w<%s trigger=%q n=%q paragraph=%q index=%q detail=%q>",currentdepth-1,tg,n,currentattribute or 0,currentparagraph or 0,#treedata,detail)
+ else
+ report_export("%w<%s trigger=%q n=%q paragraph=%q index=%q>",currentdepth-1,tg,n,currentattribute or 0,currentparagraph or 0,#treedata)
+ end
+ end
+ tree = t
+ if tg == "break" then
+ -- no need for this
+ else
+ local h = treehash[fulltag]
+ if h then
+ h[#h+1] = t
+ else
+ treehash[fulltag] = { t }
+ end
+ end
+end
+
+local function pop()
+ if currentdepth > 0 then
+ local top = nesting[currentdepth]
+ tree = treestack[currentdepth]
+ currentdepth = currentdepth - 1
+ if trace_export then
+ if top then
+ report_export("%w</%s>",currentdepth,match(top,"[^>]+"))
+ else
+ report_export("</BAD>")
+ end
+ end
+ else
+ report_export("%w<!-- too many pops -->",currentdepth)
+ end
+end
+
+local function continueexport()
+ if nofcurrentcontent > 0 then
+ if trace_export then
+ report_export("%w<!-- injecting pagebreak space -->",currentdepth)
+ end
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = " " -- pagebreak
+ end
+end
+
+local function pushentry(current)
+ if not current then
+ -- bad news
+ return
+ end
+ current = current.taglist
+ if not current then
+ -- even worse news
+ return
+ end
+ if restart then
+ continueexport()
+ restart = false
+ end
+ local newdepth = #current
+ local olddepth = currentdepth
+ if trace_export then
+ report_export("%w<!-- moving from depth %s to %s (%s) -->",currentdepth,olddepth,newdepth,current[newdepth])
+ end
+ if olddepth <= 0 then
+ for i=1,newdepth do
+ push(current[i],i)
+ end
+ else
+ local difference
+ if olddepth < newdepth then
+ for i=1,olddepth do
+ if current[i] ~= nesting[i] then
+ difference = i
+ break
+ end
+ end
+ else
+ for i=1,newdepth do
+ if current[i] ~= nesting[i] then
+ difference = i
+ break
+ end
+ end
+ end
+ if difference then
+ for i=olddepth,difference,-1 do
+ pop()
+ end
+ for i=difference,newdepth do
+ push(current[i],i)
+ end
+ elseif newdepth > olddepth then
+ for i=olddepth+1,newdepth do
+ push(current[i],i)
+ end
+ elseif newdepth < olddepth then
+ for i=olddepth,newdepth,-1 do
+ pop()
+ end
+ elseif trace_export then
+ report_export("%w<!-- staying at depth %s (%s) -->",currentdepth,newdepth,nesting[newdepth] or "?")
+ end
+ end
+ return olddepth, newdepth
+end
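+
+-- A worked example of the depth juggling above: with nesting = { A, B, C } on the stack
+-- and an incoming taglist { A, B, D }, the first mismatch is at position 3, so C gets
+-- popped and D gets pushed; when both lists are identical we simply stay at the current
+-- depth.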
+
+local function pushcontent(oldparagraph,newparagraph)
+ if nofcurrentcontent > 0 then
+ if oldparagraph then
+ if currentcontent[nofcurrentcontent] == "\n" then
+ if trace_export then
+ report_export("%w<!-- removing newline -->",currentdepth)
+ end
+ nofcurrentcontent = nofcurrentcontent - 1
+ end
+ end
+ local content = concat(currentcontent,"",1,nofcurrentcontent)
+ if content == "" then
+ -- omit; when oldparagraph we could push, remove spaces, pop
+ elseif somespace[content] and oldparagraph then
+ -- omit; when oldparagraph we could push, remove spaces, pop
+ else
+ local olddepth, newdepth
+ local list = taglist[currentattribute]
+ if list then
+ olddepth, newdepth = pushentry(list)
+ end
+ if tree then
+ local td = tree.data
+ local nd = #td
+ td[nd+1] = { parnumber = oldparagraph or currentparagraph, content = content }
+ if trace_export then
+ report_export("%w<!-- start content with length %s -->",currentdepth,utflen(content))
+ report_export("%w%s",currentdepth,(gsub(content,"\n","\\n")))
+ report_export("%w<!-- stop content -->",currentdepth)
+ end
+ if olddepth then
+ for i=newdepth-1,olddepth,-1 do
+ pop()
+ end
+ end
+ end
+ end
+ nofcurrentcontent = 0
+ end
+ if oldparagraph then
+ pushentry(makebreaklist(currentnesting))
+ if trace_export then
+ report_export("%w<!-- break added between paragraph %a and %a -->",currentdepth,oldparagraph,newparagraph)
+ end
+ end
+end
+
+local function finishexport()
+ if trace_export then
+ report_export("%w<!-- start finalizing -->",currentdepth)
+ end
+ if nofcurrentcontent > 0 then
+ if somespace[currentcontent[nofcurrentcontent]] then
+ if trace_export then
+ report_export("%w<!-- removing space -->",currentdepth)
+ end
+ nofcurrentcontent = nofcurrentcontent - 1
+ end
+ pushcontent()
+ end
+ for i=currentdepth,1,-1 do
+ pop()
+ end
+ currentcontent = { } -- we're nice and do a cleanup
+ if trace_export then
+ report_export("%w<!-- stop finalizing -->",currentdepth)
+ end
+end
+
+-- inserts ?
+
+local collectresults do -- too many locals otherwise
+
+ local nodecodes = nodes.nodecodes
+ local gluecodes = nodes.gluecodes
+ local listcodes = nodes.listcodes
+ local whatsitcodes = nodes.whatsitcodes
+
+ local subtypes = nodes.subtypes
+
+ local hlist_code = nodecodes.hlist
+ local vlist_code = nodecodes.vlist
+ local glyph_code = nodecodes.glyph
+ local glue_code = nodecodes.glue
+ local kern_code = nodecodes.kern
+ local disc_code = nodecodes.disc
+ local whatsit_code = nodecodes.whatsit
+ local par_code = nodecodes.par
+
+ local userskip_code = gluecodes.userskip
+ local rightskip_code = gluecodes.rightskip
+ local parfillskip_code = gluecodes.parfillskip
+ local spaceskip_code = gluecodes.spaceskip
+ local xspaceskip_code = gluecodes.xspaceskip
+
+ local linelist_code = listcodes.line
+
+ local userdefinedwhatsit_code = whatsitcodes.userdefined
+
+ local privateattribute = attributes.private
+ local a_image = privateattribute('image')
+ local a_reference = privateattribute('reference')
+ local a_destination = privateattribute('destination')
+ local a_characters = privateattribute('characters')
+ local a_exportstatus = privateattribute('exportstatus')
+ local a_tagged = privateattribute('tagged')
+ local a_taggedpar = privateattribute("taggedpar")
+ local a_textblock = privateattribute("textblock")
+
+ local inline_mark = nodes.pool.userids["margins.inline"]
+
+ local nuts = nodes.nuts
+
+ local getnext = nuts.getnext
+ local getdisc = nuts.getdisc
+ local getlist = nuts.getlist
+ local getid = nuts.getid
+ local getattr = nuts.getattr
+ local setattr = nuts.setattr -- maybe use properties
+ local isglyph = nuts.isglyph
+ local getkern = nuts.getkern
+ local getwidth = nuts.getwidth
+
+ local start_of_par = nuts.start_of_par
+
+ local nexthlist = nuts.traversers.hlist
+ local nextnode = nuts.traversers.node
+
+ local function addtomaybe(maybewrong,c,case)
+ if trace_export then
+ report_export("%w<!-- possible paragraph mixup at %C case %i -->",currentdepth,c,case)
+ else
+ local s = formatters["%C"](c)
+ if maybewrong then
+ maybewrong[#maybewrong+1] = s
+ else
+ maybewrong = { s }
+ end
+ return maybewrong
+ end
+ end
+
+ local function showmaybe(maybewrong)
+ if not trace_export then
+ report_export("fuzzy paragraph: % t",maybewrong)
+ end
+ end
+
+ local function showdetail(n,id,subtype)
+ local a = getattr(n,a_tagged)
+ local t = taglist[a]
+ local c = nodecodes[id]
+ local s = subtypes[id][subtype]
+ if a and t then
+ report_export("node %a, subtype %a, tag %a, element %a, tree '% t'",c,s,a,t.tagname,t.taglist)
+ else
+ report_export("node %a, subtype %a, untagged",c,s)
+ end
+ end
+
+ local function collectresults(head,list,pat,pap) -- is last used (we also have currentattribute)
+ local p
+ local paragraph
+ local maybewrong
+ local pid
+ for n, id, subtype in nextnode, head do
+ if trace_details then
+ showdetail(n,id,subtype)
+ end
+ if id == glyph_code then
+ local c, f = isglyph(n)
+ local at = getattr(n,a_tagged) or pat
+ if not at then
+ -- we need to tag the pagebody stuff as being valid skippable
+ --
+ -- report_export("skipping character: %C (no attribute)",n.char)
+ else
+ if last ~= at then
+ local tl = taglist[at]
+ local ap = getattr(n,a_taggedpar) or pap
+ if paragraph and (not ap or ap < paragraph) then
+ maybewrong = addtomaybe(maybewrong,c,1)
+ end
+ pushcontent()
+ currentnesting = tl
+ currentparagraph = ap
+ currentattribute = at
+ last = at
+ pushentry(currentnesting)
+ if trace_export then
+ report_export("%w<!-- processing glyph %C tagged %a -->",currentdepth,c,at)
+ end
+ -- We need to intercept this here; maybe I will also move this
+ -- to a regular setter at the tex end.
+ local r = getattr(n,a_reference)
+ if r then
+ local t = tl.taglist
+ referencehash[t[#t]] = r -- fulltag
+ end
+ local d = getattr(n,a_destination)
+ if d then
+ local t = tl.taglist
+ destinationhash[t[#t]] = d -- fulltag
+ end
+ --
+ elseif last then
+                    -- we can consider tagging the pars (lines) in the parbuilder but then we lose some
+ -- information unless we inject a special node (but even then we can run into nesting
+ -- issues)
+ local ap = getattr(n,a_taggedpar) or pap
+ if ap ~= currentparagraph then
+ pushcontent(currentparagraph,ap)
+ pushentry(currentnesting)
+ currentattribute = last
+ currentparagraph = ap
+ end
+ if paragraph and (not ap or ap < paragraph) then
+ maybewrong = addtomaybe(maybewrong,c,2)
+ end
+ if trace_export then
+ report_export("%w<!-- processing glyph %C tagged %a -->",currentdepth,c,last)
+ end
+ else
+ if trace_export then
+ report_export("%w<!-- processing glyph %C tagged %a -->",currentdepth,c,at)
+ end
+ end
+ local s = getattr(n,a_exportstatus)
+ if s then
+ c = s
+ end
+ if c == 0 then
+ if trace_export then
+ report_export("%w<!-- skipping last glyph -->",currentdepth)
+ end
+ elseif c == 0x20 then
+ local a = getattr(n,a_characters)
+ nofcurrentcontent = nofcurrentcontent + 1
+ if a then
+ if trace_export then
+ report_export("%w<!-- turning last space into special space %U -->",currentdepth,a)
+ end
+ currentcontent[nofcurrentcontent] = specialspaces[a] -- special space
+ else
+ currentcontent[nofcurrentcontent] = " "
+ end
+ else
+ local fc = fontchar[f]
+ if fc then
+ fc = fc and fc[c]
+ if fc then
+ local u = fc.unicode
+ if not u then
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = utfchar(c)
+ elseif type(u) == "table" then
+ for i=1,#u do
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = utfchar(u[i])
+ end
+ else
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = utfchar(u)
+ end
+ elseif c > 0 then
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = utfchar(c)
+ else
+ -- we can have -1 as side effect of an explicit hyphen (unless we expand)
+ end
+ elseif c > 0 then
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = utfchar(c)
+ else
+ -- we can have -1 as side effect of an explicit hyphen (unless we expand)
+ end
+ end
+ end
+ elseif id == glue_code then
+ -- we need to distinguish between hskips and vskips
+ local ca = getattr(n,a_characters)
+ if ca == 0 then
+ -- skip this one ... already converted special character (node-acc)
+ elseif ca then
+ local a = getattr(n,a_tagged) or pat
+ if a then
+ local c = specialspaces[ca]
+ if last ~= a then
+ local tl = taglist[a]
+ if trace_export then
+ report_export("%w<!-- processing space glyph %U tagged %a case 1 -->",currentdepth,ca,a)
+ end
+ pushcontent()
+ currentnesting = tl
+ currentparagraph = getattr(n,a_taggedpar) or pap
+ currentattribute = a
+ last = a
+ pushentry(currentnesting)
+ -- no reference check (see above)
+ elseif last then
+ local ap = getattr(n,a_taggedpar) or pap
+ if ap ~= currentparagraph then
+ pushcontent(currentparagraph,ap)
+ pushentry(currentnesting)
+ currentattribute = last
+ currentparagraph = ap
+ end
+ if trace_export then
+ report_export("%w<!-- processing space glyph %U tagged %a case 2 -->",currentdepth,ca,last)
+ end
+ end
+ -- if somespace[currentcontent[nofcurrentcontent]] then
+ -- if trace_export then
+ -- report_export("%w<!-- removing space -->",currentdepth)
+ -- end
+ -- nofcurrentcontent = nofcurrentcontent - 1
+ -- end
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = c
+ end
+ elseif subtype == userskip_code then
+ if getwidth(n) > threshold then
+ if last and not somespace[currentcontent[nofcurrentcontent]] then
+ local a = getattr(n,a_tagged) or pat
+ if a == last then
+ if trace_export then
+ report_export("%w<!-- injecting spacing 5a -->",currentdepth)
+ end
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = " "
+ elseif a then
+ -- e.g LOGO<space>LOGO
+ if trace_export then
+ report_export("%w<!-- processing glue > threshold tagged %s becomes %s -->",currentdepth,last,a)
+ end
+ pushcontent()
+ if trace_export then
+ report_export("%w<!-- injecting spacing 5b -->",currentdepth)
+ end
+ last = a
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = " "
+ currentnesting = taglist[last]
+ pushentry(currentnesting)
+ currentattribute = last
+ end
+ end
+ end
+ elseif subtype == spaceskip_code or subtype == xspaceskip_code then
+ if not somespace[currentcontent[nofcurrentcontent]] then
+ local a = getattr(n,a_tagged) or pat
+ if a == last then
+ if trace_export then
+ report_export("%w<!-- injecting spacing 7 (stay in element) -->",currentdepth)
+ end
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = " "
+ else
+ if trace_export then
+ report_export("%w<!-- injecting spacing 7 (end of element) -->",currentdepth)
+ end
+ last = a
+ pushcontent()
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = " "
+ currentnesting = taglist[last]
+ pushentry(currentnesting)
+ currentattribute = last
+ end
+ end
+ elseif subtype == rightskip_code then
+ -- a line
+ if nofcurrentcontent > 0 then
+ local r = currentcontent[nofcurrentcontent]
+ if r == hyphen then
+ if not keephyphens then
+ nofcurrentcontent = nofcurrentcontent - 1
+ end
+ elseif pid == disc_code then
+                    -- go on .. tricky: we should mark the glyphs as coming from a disc
+ elseif not somespace[r] then
+ local a = getattr(n,a_tagged) or pat
+ if a == last then
+ if trace_export then
+ report_export("%w<!-- injecting spacing 1 (end of line, stay in element) -->",currentdepth)
+ end
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = " "
+ else
+ if trace_export then
+ report_export("%w<!-- injecting spacing 1 (end of line, end of element) -->",currentdepth)
+ end
+ last = a
+ pushcontent()
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = " "
+ currentnesting = taglist[last]
+ pushentry(currentnesting)
+ currentattribute = last
+ end
+ end
+ end
+ elseif subtype == parfillskip_code then
+ -- deal with paragraph endings (crossings) elsewhere and we quit here
+ -- as we don't want the rightskip space addition
+ if maybewrong then
+ showmaybe(maybewrong)
+ end
+ return
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local ai = getattr(n,a_image)
+ if ai then
+ local at = getattr(n,a_tagged) or pat
+ if nofcurrentcontent > 0 then
+ pushcontent()
+ pushentry(currentnesting) -- ??
+ end
+ pushentry(taglist[at]) -- has an index, todo: flag empty element
+ if trace_export then
+ report_export("%w<!-- processing image tagged %a",currentdepth,last)
+ end
+ last = nil
+ currentparagraph = nil
+ else
+ -- we need to determine an end-of-line
+ local list = getlist(n)
+ if list then
+ -- todo: no par checking needed in math
+ local at = getattr(n,a_tagged) or pat
+ collectresults(list,n,at)
+ end
+ end
+ elseif id == kern_code then
+ local kern = getkern(n)
+ if kern > 0 then
+local a = getattr(n,a_tagged) or pat
+local t = taglist[a]
+if not t or t.tagname ~= "ignore" then -- maybe earlier on top
+ local limit = threshold
+ if p then
+ local c, f = isglyph(p)
+ if c then
+ limit = fontquads[f] / 4
+ end
+ end
+ if kern > limit then
+ if last and not somespace[currentcontent[nofcurrentcontent]] then
+-- local a = getattr(n,a_tagged) or pat
+ if a == last then
+ if not somespace[currentcontent[nofcurrentcontent]] then
+ if trace_export then
+ report_export("%w<!-- injecting spacing 8 (kern %p) -->",currentdepth,kern)
+ end
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = " "
+ end
+ elseif a then
+ -- e.g LOGO<space>LOGO
+ if trace_export then
+ report_export("%w<!-- processing kern, threshold %p, tag %s => %s -->",currentdepth,limit,last,a)
+ end
+ last = a
+ pushcontent()
+ if trace_export then
+ report_export("%w<!-- injecting spacing 9 (kern %p) -->",currentdepth,kern)
+ end
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = " "
+-- currentnesting = taglist[last]
+currentnesting = t
+ pushentry(currentnesting)
+ currentattribute = last
+ end
+ end
+ end
+end
+ end
+ elseif id == whatsit_code then
+ if subtype == userdefinedwhatsit_code then
+ -- similar to images, see above
+ local at = getattr(n,a_tagged)
+ if nofcurrentcontent > 0 then
+ pushcontent()
+ pushentry(currentnesting) -- ??
+ end
+ pushentry(taglist[at])
+ if trace_export then
+ report_export("%w<!-- processing anchor tagged %a",currentdepth,last)
+ end
+ last = nil
+ currentparagraph = nil
+ end
+ elseif not paragraph and id == par_code and start_of_par(n) then
+ paragraph = getattr(n,a_taggedpar)
+ elseif id == disc_code then
+ -- very unlikely because we stripped them
+ local pre, post, replace = getdisc(n)
+ if keephyphens then
+ if pre and not getnext(pre) and isglyph(pre) == 0xAD then -- hyphencode then
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = hyphen
+ end
+ end
+ if replace then
+ collectresults(replace,nil)
+ end
+ end
+ p = n
+ pid = id
+ end
+ if maybewrong then
+ showmaybe(maybewrong)
+ end
+ end
+
+ function nodes.handlers.export(head) -- hooks into the page builder
+ starttiming(treehash)
+ if trace_export then
+ report_export("%w<!-- start flushing page -->",currentdepth)
+ end
+ -- continueexport()
+ restart = true
+ collectresults(head)
+ if trace_export then
+ report_export("%w<!-- stop flushing page -->",currentdepth)
+ end
+ stoptiming(treehash)
+ return head
+ end
+
+ function nodes.handlers.checkparcounter(p)
+ setattr(p,a_taggedpar,texgetcount("tagparcounter") + 1)
+ return p
+ end
+
+ function builders.paragraphs.tag(head)
+ noftextblocks = noftextblocks + 1
+ for n, subtype in nexthlist, head do
+ if subtype == linelist_code then
+ setattr(n,a_textblock,noftextblocks)
+ elseif subtype == glue_code or subtype == kern_code then -- no need to set fontkerns
+ setattr(n,a_textblock,0)
+ end
+ end
+ return false
+ end
+
+end
+
+do
+
+ local xmlcollected = xml.collected
+ local xmlsetcomment = xml.setcomment
+
+local xmlpreamble = [[
+<?xml version="1.0" encoding="UTF-8" standalone="%standalone%" ?>
+
+<!--
+
+ input filename : %filename%
+ processing date : %date%
+ context version : %contextversion%
+ exporter version : %exportversion%
+
+-->
+
+]]
+
+ local flushtree = wrapups.flushtree
+
+ local function wholepreamble(standalone)
+ return replacetemplate(xmlpreamble, {
+ standalone = standalone and "yes" or "no",
+ filename = tex.jobname,
+ date = included.date and os.fulltime(),
+ contextversion = environment.version,
+ exportversion = exportversion,
+ })
+ end
+
+
+local csspreamble = [[
+<?xml-stylesheet type="text/css" href="%filename%" ?>
+]]
+
+local cssheadlink = [[
+<link type="text/css" rel="stylesheet" href="%filename%" />
+]]
+
+ local function allusedstylesheets(cssfiles,files,path)
+ local done = { }
+ local result = { }
+ local extras = { }
+ for i=1,#cssfiles do
+ local cssfile = cssfiles[i]
+ if type(cssfile) ~= "string" then
+ -- error
+ elseif cssfile == "export-example.css" then
+ -- ignore
+ elseif not done[cssfile] then
+ cssfile = joinfile(path,basename(cssfile))
+ report_export("adding css reference '%s'",cssfile)
+ files[#files+1] = cssfile
+ result[#result+1] = replacetemplate(csspreamble, { filename = cssfile })
+ extras[#extras+1] = replacetemplate(cssheadlink, { filename = cssfile })
+ done[cssfile] = true
+ end
+ end
+ return concat(result), concat(extras)
+ end
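+
+    -- Roughly: allusedstylesheets({ "whatever.css" },files,"styles") adds "styles/whatever.css"
+    -- to files and returns both the <?xml-stylesheet ... ?> processing instruction (for the
+    -- xml file) and the matching <link/> element (for the html variant); "whatever.css" is
+    -- just an example name here.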
+
+local elementtemplate = [[
+/* element="%element%" detail="%detail%" chain="%chain%" */
+
+%element%,
+%namespace%div.%element% {
+ display: %display% ;
+}]]
+
+local detailtemplate = [[
+/* element="%element%" detail="%detail%" chain="%chain%" */
+
+%element%[detail=%detail%],
+%namespace%div.%element%.%detail% {
+ display: %display% ;
+}]]
+
+-- <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1 plus MathML 2.0 plus SVG 1.1//EN" "http://www.w3.org/2002/04/xhtml-math-svg/xhtml-math-svg.dtd" >
+
+local htmltemplate = [[
+%preamble%
+
+<html xmlns="http://www.w3.org/1999/xhtml" xmlns:math="http://www.w3.org/1998/Math/MathML">
+
+ <head>
+
+ <meta charset="utf-8"/>
+
+ <title>%title%</title>
+
+%style%
+
+ </head>
+ <body>
+ <div class="document" xmlns="http://www.pragma-ade.com/context/export">
+
+<div class="warning">Rendering can be suboptimal because there is no default/fallback css loaded.</div>
+
+%body%
+
+ </div>
+ </body>
+</html>
+]]
+
+ local displaymapping = {
+ inline = "inline",
+ display = "block",
+ mixed = "inline",
+ }
+
+ local function allusedelements(filename)
+ local result = { replacetemplate(namespacetemplate, {
+ what = "template",
+ filename = filename,
+ namespace = contextns,
+ -- cssnamespaceurl = usecssnamespace and cssnamespaceurl or "",
+ cssnamespaceurl = cssnamespaceurl,
+ },false,true) }
+ for element, details in sortedhash(used) do
+ if namespaces[element] then
+ -- skip math
+ else
+ for detail, what in sortedhash(details) do
+ local nature = what[1] or "display"
+ local chain = what[2]
+ local display = displaymapping[nature] or "block"
+ if detail == "" then
+ result[#result+1] = replacetemplate(elementtemplate, {
+ element = element,
+ display = display,
+ chain = chain,
+                            namespace = usecssnamespace and cssnamespace or "",
+ })
+ else
+ result[#result+1] = replacetemplate(detailtemplate, {
+ element = element,
+ display = display,
+ detail = detail,
+ chain = chain,
+ namespace = usecssnamespace and cssnamespace or "",
+ })
+ end
+ end
+ end
+ end
+ return concat(result,"\n\n")
+ end
+
+ local function allcontent(tree,embed)
+ local result = { }
+ flushtree(result,tree.data,"display") -- we need to collect images
+ result = concat(result)
+ -- no need to lpeg .. fast enough
+ result = gsub(result,"\n *\n","\n")
+ result = gsub(result,"\n +([^< ])","\n%1")
+ return result
+ end
+
+ -- local xhtmlpreamble = [[
+ -- <!DOCTYPE html PUBLIC
+ -- "-//W3C//DTD XHTML 1.1 plus MathML 2.0 plus SVG 1.1//EN"
+ -- "http://www.w3.org/2002/04/xhtml-math-svg/xhtml-math-svg.dtd"
+ -- >
+ -- ]]
+
+ local function cleanxhtmltree(xmltree)
+ if xmltree then
+ local implicits = { }
+ local explicits = { }
+ local overloads = { }
+ for e in xmlcollected(xmltree,"*") do
+ local at = e.at
+ if at then
+ local explicit = at.explicit
+ local implicit = at.implicit
+ if explicit then
+ if not explicits[explicit] then
+ explicits[explicit] = true
+ at.id = explicit
+ if implicit then
+ overloads[implicit] = explicit
+ end
+ end
+ else
+ if implicit and not implicits[implicit] then
+ implicits[implicit] = true
+ at.id = "aut:" .. implicit
+ end
+ end
+ end
+ end
+ for e in xmlcollected(xmltree,"*") do
+ local at = e.at
+ if at then
+ local internal = at.internal
+ local location = at.location
+ if internal then
+ if location then
+ local explicit = overloads[location]
+ if explicit then
+ at.href = "#" .. explicit
+ else
+ at.href = "#aut:" .. internal
+ end
+ else
+ at.href = "#aut:" .. internal
+ end
+ else
+ if location then
+ at.href = "#" .. location
+ else
+ local url = at.url
+ if url then
+ at.href = url
+ else
+ local file = at.file
+ if file then
+ at.href = file
+ end
+ end
+ end
+ end
+ end
+ end
+ return xmltree
+ else
+ return xml.convert('<?xml version="1.0"?>\n<error>invalid xhtml tree</error>')
+ end
+ end
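+
+ -- For example (illustrative only): an element carrying implicit="intro" gets
+ -- id="aut:intro", while a reference carrying internal="42" ends up with
+ -- href="#aut:42", unless its location maps onto an explicit destination, in
+ -- which case that one wins.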
+
+ -- maybe the reverse: be explicit about what is permitted
+
+ local private = {
+ destination = true,
+ prefix = true,
+ reference = true,
+ --
+ id = true,
+ href = true,
+ --
+ implicit = true,
+ explicit = true,
+ --
+ url = true,
+ file = true,
+ internal = true,
+ location = true,
+ --
+ name = true, -- image name
+ used = true, -- image name
+ page = true, -- image name
+ width = true,
+ height = true,
+ --
+ }
+
+ local addclicks = true
+ local f_onclick = formatters[ [[location.href='%s']] ]
+
+ local p_cleanid = lpeg.replacer { [":"] = "-" }
+ local p_cleanhref = lpeg.Cs(lpeg.P("#") * p_cleanid)
+
+ local p_splitter = lpeg.Ct ( (
+ lpeg.Carg(1) * lpeg.C((1-lpeg.P(" "))^1) / function(d,s) if not d[s] then d[s] = true return s end end
+ * lpeg.P(" ")^0 )^1 )
+
+
+ local classes = table.setmetatableindex(function(t,k)
+ local v = concat(lpegmatch(p_splitter,k,1,{})," ")
+ t[k] = v
+ return v
+ end)
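+
+ -- A small sketch of the intended behaviour (for illustration): the splitter keeps
+ -- only the first occurrence of each space separated token, so classes["foo bar foo"]
+ -- resolves to "foo bar".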
+
+ local function makeclass(tg,at)
+ local detail = at.detail
+ local chain = at.chain
+ local extra = nil
+ local classes = { }
+ local nofclasses = 0
+ at.detail = nil
+ at.chain = nil
+ for k, v in next, at do
+ if not private[k] then
+ nofclasses = nofclasses + 1
+ classes[nofclasses] = k .. "-" .. v
+ end
+ end
+ if detail and detail ~= "" then
+ if chain and chain ~= "" then
+ if chain ~= detail then
+ extra = classes[tg .. " " .. chain .. " " .. detail]
+ elseif tg ~= detail then
+ extra = detail
+ end
+ elseif tg ~= detail then
+ extra = detail
+ end
+ elseif chain and chain ~= "" then
+ if tg ~= chain then
+ extra = chain
+ end
+ end
+ -- in this order
+ if nofclasses > 0 then
+ sort(classes)
+ classes = concat(classes," ")
+ if extra then
+ return tg .. " " .. extra .. " " .. classes
+ else
+ return tg .. " " .. classes
+ end
+ else
+ if extra then
+ return tg .. " " .. extra
+ else
+ return tg
+ end
+ end
+ end
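+
+ -- For example (illustrative only): makeclass("section", { detail = "summary" })
+ -- returns "section summary", and any non private attribute ends up as an extra
+ -- "key-value" class.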
+
+ -- Some elements are not supported (well) in css so we need to retain them. For
+ -- instance, tablecells have no colspan, so basically that renders css table div
+ -- elements quite useless. A side effect is that we now can have conflicts when
+ -- we mix in with other html (as there is no reset). Of course, when it eventually
+ -- gets added, there is a chance that those not using the div abstraction
+ -- will be ridiculed.
+ --
+ -- a table tr td th thead tbody tfoot
+ --
+
+ local crappycss = {
+ table = "table", tabulate = "table",
+ tablehead = "thead", tabulatehead = "thead",
+ tablebody = "tbody", tabulatebody = "tbody",
+ tablefoot = "tfoot", tabulatefoot = "tfoot",
+ tablerow = "tr", tabulaterow = "tr",
+ tablecell = "td", tabulatecell = "td",
+ }
+
+ local cssmapping = false
+
+ directives.register("export.nativetags", function(v)
+ cssmapping = v and crappycss or false
+ end)
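+
+ -- Usage sketch (assuming the usual directive interfaces): the native table tags can
+ -- be requested with \enabledirectives[export.nativetags] in the document source or
+ -- with --directives=export.nativetags on the command line.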
+
+ local function remap(specification,source,target)
+ local comment = nil -- share comments
+ for c in xmlcollected(source,"*") do
+ if not c.special then
+ local tg = c.tg
+ local ns = c.ns
+ if ns == "m" then
+ if false then -- yes or no
+ c.ns = ""
+ c.at["xmlns:m"] = nil
+ end
+ -- elseif tg == "a" then
+ -- c.ns = ""
+ else
+ local dt = c.dt
+ local nt = #dt
+ if nt == 0 or (nt == 1 and dt[1] == "") then
+ if comment then
+ c.dt = comment
+ else
+ xmlsetcomment(c,"empty")
+ comment = c.dt
+ end
+ end
+ local at = c.at
+ local class = nil
+ local label = nil
+ if tg == "document" then
+ at.href = nil
+ at.detail = nil
+ at.chain = nil
+ elseif tg == "metavariable" then
+ label = at.name
+ at.detail = "metaname-" .. label
+ class = makeclass(tg,at)
+ else
+ class = makeclass(tg,at)
+ end
+ local id = at.id
+ local href = at.href
+ local attr = nil
+ if id then
+ id = lpegmatch(p_cleanid, id) or id
+ if href then
+ href = lpegmatch(p_cleanhref,href) or href
+ attr = {
+ class = class,
+ id = id,
+ href = href,
+ onclick = addclicks and f_onclick(href) or nil,
+ }
+ else
+ attr = {
+ class = class,
+ id = id,
+ }
+ end
+ else
+ if href then
+ href = lpegmatch(p_cleanhref,href) or href
+ attr = {
+ class = class,
+ href = href,
+ onclick = addclicks and f_onclick(href) or nil,
+ }
+ else
+ attr = {
+ class = class,
+ }
+ end
+ end
+ c.at = attr
+ if label then
+ attr.label = label
+ end
+ c.tg = cssmapping and cssmapping[tg] or "div"
+ end
+ end
+ end
+ end
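+
+ -- Roughly speaking (an illustration, not output from a real run): an exported element
+ -- like <section detail="summary"> comes out as <div class="section summary"> in the
+ -- html variant, with id, href and an optional onclick carried over.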
+
+ -- local cssfile = nil directives.register("backend.export.css", function(v) cssfile = v end)
+
+ local embedfile = false directives.register("export.embed",function(v) embedfile = v end)
+
+ function structurestags.finishexport()
+
+ if exporting then
+ exporting = false
+ else
+ return
+ end
+
+ local onlyxml = finetuning.export == v_xml
+
+ starttiming(treehash)
+ --
+ finishexport()
+ --
+ report_export("")
+ if onlyxml then
+ report_export("exporting xml, no other files")
+ else
+ report_export("exporting xml, xhtml, html and css files")
+ end
+ report_export("")
+ --
+ wrapups.fixtree(tree)
+ wrapups.collapsetree(tree)
+ wrapups.indextree(tree)
+ wrapups.checktree(tree)
+ wrapups.breaktree(tree)
+ wrapups.finalizetree(tree)
+ --
+ wrapups.hashlistdata()
+ --
+ local askedname = finetuning.file
+ --
+ -- we use a dedicated subpath:
+ --
+ -- ./jobname-export
+ -- ./jobname-export/images
+ -- ./jobname-export/styles
+ -- ./jobname-export/jobname-export.xml
+ -- ./jobname-export/jobname-export.xhtml
+ -- ./jobname-export/jobname-export.html
+ -- ./jobname-export/jobname-specification.lua
+ -- ./jobname-export/styles/jobname-defaults.css
+ -- ./jobname-export/styles/jobname-styles.css
+ -- ./jobname-export/styles/jobname-images.css
+ -- ./jobname-export/styles/jobname-templates.css
+
+ if type(askedname) ~= "string" or askedname == "" then
+ askedname = tex.jobname
+ end
+
+ local usedname = nameonly(askedname)
+ local basepath = usedname .. "-export"
+ local imagepath = joinfile(basepath,"images")
+ local stylepath = joinfile(basepath,"styles")
+
+ local function validpath(what,pathname)
+ if lfs.isdir(pathname) then
+ report_export("using existing %s path %a",what,pathname)
+ return pathname
+ end
+ lfs.mkdir(pathname)
+ if lfs.isdir(pathname) then
+ report_export("using created %s path %a",what,pathname)
+ return pathname
+ else
+ report_export("unable to create %s path %a",what,pathname)
+ return false
+ end
+ end
+
+ if not (validpath("export",basepath) and validpath("images",imagepath) and validpath("styles",stylepath)) then
+ return
+ end
+
+ -- we're now on the dedicated export subpath so names can't clash
+ --
+ -- an xhtml suffix no longer seems to work well with browsers
+
+ local xmlfilebase = addsuffix(usedname .. "-raw","xml" )
+ local xhtmlfilebase = addsuffix(usedname .. "-tag","xhtml")
+ local htmlfilebase = addsuffix(usedname .. "-div","html")
+ local specificationfilebase = addsuffix(usedname .. "-pub","lua" )
+
+ local xmlfilename = joinfile(basepath, xmlfilebase )
+ local xhtmlfilename = joinfile(basepath, xhtmlfilebase )
+ local htmlfilename = joinfile(basepath, htmlfilebase )
+ local specificationfilename = joinfile(basepath, specificationfilebase)
+ --
+ local defaultfilebase = addsuffix(usedname .. "-defaults", "css")
+ local imagefilebase = addsuffix(usedname .. "-images", "css")
+ local stylefilebase = addsuffix(usedname .. "-styles", "css")
+ local templatefilebase = addsuffix(usedname .. "-templates","css")
+ --
+ local defaultfilename = joinfile(stylepath,defaultfilebase )
+ local imagefilename = joinfile(stylepath,imagefilebase )
+ local stylefilename = joinfile(stylepath,stylefilebase )
+ local templatefilename = joinfile(stylepath,templatefilebase)
+
+ local cssfile = finetuning.cssfile
+
+ -- we keep track of all used files
+
+ local files = {
+ }
+
+ -- we always load the defaults and optionally extra css files; we also copy the example
+ -- css file so that we always have the latest version
+
+ local cssfiles = {
+ defaultfilebase,
+ imagefilebase,
+ stylefilebase,
+ }
+
+ local cssextra = cssfile and table.unique(settings_to_array(cssfile)) or { }
+
+ -- at this point we're ready for the content; the collector also does some
+ -- housekeeping and data collecting; at this point we still have an xml
+ -- representation that uses verbose element names and carries information in
+ -- attributes
+
+ local data = tree.data
+ for i=1,#data do
+ if data[i].tg ~= "document" then
+ data[i] = { }
+ end
+ end
+
+ local result = allcontent(tree,embedmath) -- embedfile is for testing
+
+ -- ugly but so be it:
+
+ local extradata = structures.tags.getextradata()
+ if extradata then
+ local t = { "" }
+ t[#t+1] = "<extradata>"
+ for name, action in sortedhash(extradata) do
+ t[#t+1] = action()
+ end
+ t[#t+1] = "</extradata>"
+ t[#t+1] = "</document>"
+ -- we use a function because otherwise we can have a bad capture index
+ result = gsub(result,"</document>",function()
+ return concat(t,"\n")
+ end)
+ end
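+
+ -- (A plain replacement string would have any percent sequence in the collected
+ -- extradata interpreted by gsub as a capture reference, hence the "bad capture
+ -- index" mentioned above; values returned by a function are inserted literally.)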
+
+ -- done with ugly
+
+ if onlyxml then
+
+ os.remove(defaultfilename)
+ os.remove(imagefilename)
+ os.remove(stylefilename)
+ os.remove(templatefilename)
+
+ for i=1,#cssextra do
+ os.remove(joinfile(stylepath,basename(addsuffix(cssextra[i],"css"))))
+ end
+
+ -- os.remove(xmlfilename)
+
+ os.remove(xhtmlfilename)
+ os.remove(specificationfilename)
+ os.remove(htmlfilename)
+
+ result = concat {
+ wholepreamble(true),
+ "<!-- This export file is used for filtering runtime only! -->\n",
+ result,
+ }
+
+ report_export("saving xml data in %a",xmlfilename)
+ io.savedata(xmlfilename,result)
+
+ return
+
+ end
+
+ local examplefilename = resolvers.findfile("export-example.css")
+ if examplefilename then
+ local data = io.loaddata(examplefilename)
+ if not data or data == "" then
+ data = "/* missing css file */"
+ elseif not usecssnamespace then
+ data = gsub(data,cssnamespace,"")
+ end
+ io.savedata(defaultfilename,data)
+ end
+
+ if cssfile then
+ for i=1,#cssextra do
+ local source = addsuffix(cssextra[i],"css")
+ local target = joinfile(stylepath,basename(source))
+ cssfiles[#cssfiles+1] = source
+ if not lfs.isfile(source) then
+ source = joinfile("../",source)
+ end
+ if lfs.isfile(source) then
+ report_export("copying %s",source)
+ file.copy(source,target)
+ end
+ end
+ end
+
+ local x_styles, h_styles = allusedstylesheets(cssfiles,files,"styles")
+
+ local attach = backends.nodeinjections.attachfile
+
+ if embedfile and attach then
+ -- only for testing
+ attach {
+ data = concat{ wholepreamble(true), result },
+ name = basename(xmlfilename),
+ registered = "export",
+ title = "raw xml export",
+ method = v_hidden,
+ mimetype = "application/mathml+xml",
+ }
+ end
+
+ result = concat {
+ wholepreamble(true),
+ x_styles, -- adds to files
+ result,
+ }
+
+ cssfiles = table.unique(cssfiles)
+
+ -- we're now ready for saving the result in the xml file
+
+ report_export("saving xml data in %a",xmlfilename)
+ io.savedata(xmlfilename,result)
+
+ report_export("saving css image definitions in %a",imagefilename)
+ io.savedata(imagefilename,wrapups.allusedimages(usedname))
+
+ report_export("saving css style definitions in %a",stylefilename)
+ io.savedata(stylefilename,wrapups.allusedstyles(usedname))
+
+ report_export("saving css template in %a",templatefilename)
+ io.savedata(templatefilename,allusedelements(usedname))
+
+ -- additionally we save an xhtml file; for that we load the file as xml tree
+
+ report_export("saving xhtml variant in %a",xhtmlfilename)
+
+ local xmltree = cleanxhtmltree(xml.convert(result))
+
+ -- local xmltree = xml.convert(result)
+ -- for c in xml.collected(xmltree,"m:mtext[lastindex()=1]/m:mrow") do
+ -- print(c)
+ -- end
+ -- for c in xml.collected(xmltree,"mtext/mrow") do
+ -- print(c)
+ -- end
+ -- local xmltree = cleanxhtmltree(xmltree)
+
+ xml.save(xmltree,xhtmlfilename)
+
+ -- now we save a specification file that can be used for generating an epub file
+
+ -- looking at identity is somewhat redundant as we also inherit from interaction
+ -- at the tex end
+
+ local identity = interactions.general.getidentity()
+ local metadata = structures.tags.getmetadata()
+
+ local specification = {
+ name = usedname,
+ identifier = os.uuid(),
+ images = wrapups.uniqueusedimages(),
+ imagefile = joinfile("styles",imagefilebase),
+ imagepath = "images",
+ stylepath = "styles",
+ xmlfiles = { xmlfilebase },
+ xhtmlfiles = { xhtmlfilebase },
+ htmlfiles = { htmlfilebase },
+ styles = cssfiles,
+ htmlroot = htmlfilebase,
+ language = languagenames[texgetcount("mainlanguagenumber")],
+ title = validstring(finetuning.title) or validstring(identity.title),
+ subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle),
+ author = validstring(finetuning.author) or validstring(identity.author),
+ firstpage = validstring(finetuning.firstpage),
+ lastpage = validstring(finetuning.lastpage),
+ metadata = metadata,
+ }
+
+ report_export("saving specification in %a",specificationfilename)
+
+ xml.wipe(xmltree,"metadata") -- maybe optional
+
+ io.savedata(specificationfilename,table.serialize(specification,true))
+
+ -- the html export for epub is different in the sense that it uses div's instead of
+ -- specific tags
+
+ report_export("saving div based alternative in %a",htmlfilename)
+
+ remap(specification,xmltree)
+
+ -- believe it or not, but a <title/> can prevent viewing in browsers
+
+ local title = specification.title
+
+ if not title or title == "" then
+ title = metadata.title
+ if not title or title == "" then
+ title = usedname -- was: "no title"
+ end
+ end
+
+ local variables = {
+ style = h_styles,
+ body = xml.tostring(xml.first(xmltree,"/div")),
+ preamble = wholepreamble(false),
+ title = title,
+ }
+
+ io.savedata(htmlfilename,replacetemplate(htmltemplate,variables,"xml"))
+
+ -- finally we report how an epub file can be made (using the specification)
+
+ report_export("")
+ report_export('create epub with: mtxrun --script epub --make "%s" [--purge --rename --svgmath]',usedname)
+ report_export("")
+
+ stoptiming(treehash)
+ end
+
+ local enableaction = nodes.tasks.enableaction
+
+ function structurestags.initializeexport()
+ if not exporting then
+ report_export("enabling export to xml")
+ enableaction("shipouts","nodes.handlers.export")
+ enableaction("shipouts","nodes.handlers.accessibility")
+ enableaction("math", "noads.handlers.tags")
+ enableaction("everypar","nodes.handlers.checkparcounter")
+ luatex.registerstopactions(structurestags.finishexport)
+ exporting = true
+ end
+ end
+
+ function structurestags.setupexport(t)
+ merge(finetuning,t)
+ keephyphens = finetuning.hyphen == v_yes
+ exportproperties = finetuning.properties
+ if exportproperties == v_no then
+ exportproperties = false
+ end
+ end
+
+ statistics.register("xml exporting time", function()
+ if exporting then
+ return string.format("%s seconds, version %s", statistics.elapsedtime(treehash),exportversion)
+ end
+ end)
+
+end
+
+-- These are called at the tex end:
+
+implement {
+ name = "setupexport",
+ actions = structurestags.setupexport,
+ arguments = {
+ {
+ { "align" },
+ { "bodyfont", "dimen" },
+ { "width", "dimen" },
+ { "properties" },
+ { "hyphen" },
+ { "title" },
+ { "subtitle" },
+ { "author" },
+ { "firstpage" },
+ { "lastpage" },
+ { "svgstyle" },
+ { "cssfile" },
+ { "file" },
+ { "export" },
+ }
+ }
+}
+
+implement {
+ name = "finishexport",
+ actions = structurestags.finishexport,
+}
+
+implement {
+ name = "initializeexport",
+ actions = structurestags.initializeexport,
+}
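+
+-- Usage sketch (assuming the standard setup at the TeX end): the export machinery is
+-- normally activated with something like
+--
+--   \setupbackend[export=yes]
+--
+-- after which the finisher above produces the xml, xhtml, html and css files.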
diff --git a/tex/context/base/mkxl/back-exp.mkxl b/tex/context/base/mkxl/back-exp.mkxl
index f41a8d7ba..5aebd302f 100644
--- a/tex/context/base/mkxl/back-exp.mkxl
+++ b/tex/context/base/mkxl/back-exp.mkxl
@@ -15,7 +15,10 @@
\writestatus{loading}{ConTeXt Backend Macros / XML export}
-\registerctxluafile{back-exp}{}
+\registerctxluafile{back-exp} {autosuffix}
+\registerctxluafile{back-exp-imp-tag}{autosuffix}
+\registerctxluafile{back-exp-imp-mth}{autosuffix}
+\registerctxluafile{back-exp-imp-ref}{autosuffix}
%D This is an experimental exporter and a logical follow up on tagging. The
%D exporter assumes a properly tagged document. Some elements get a couple
diff --git a/tex/context/base/mkxl/cont-new.mkxl b/tex/context/base/mkxl/cont-new.mkxl
index dd4afb289..e1a59c4e4 100644
--- a/tex/context/base/mkxl/cont-new.mkxl
+++ b/tex/context/base/mkxl/cont-new.mkxl
@@ -13,7 +13,7 @@
% \normalend % uncomment this to get the real base runtime
-\newcontextversion{2021.03.02 19:17}
+\newcontextversion{2021.03.05 11:13}
%D This file is loaded at runtime, thereby providing an excellent place for hacks,
%D patches, extensions and new features. There can be local overloads in cont-loc
diff --git a/tex/context/base/mkxl/context.mkxl b/tex/context/base/mkxl/context.mkxl
index 5c1ff9888..b3633879c 100644
--- a/tex/context/base/mkxl/context.mkxl
+++ b/tex/context/base/mkxl/context.mkxl
@@ -29,7 +29,7 @@
%D {YYYY.MM.DD HH:MM} format.
\immutable\edef\contextformat {\jobname}
-\immutable\edef\contextversion{2021.03.02 19:17}
+\immutable\edef\contextversion{2021.03.05 11:13}
%overloadmode 1 % check frozen / warning
%overloadmode 2 % check frozen / error
diff --git a/tex/context/base/mkxl/core-con.mkxl b/tex/context/base/mkxl/core-con.mkxl
index 9a88e8f8f..df505cfee 100644
--- a/tex/context/base/mkxl/core-con.mkxl
+++ b/tex/context/base/mkxl/core-con.mkxl
@@ -448,9 +448,9 @@
\getdummyparameters[#1]%
\normalexpanded
{\endgroup
- \normalday \number\directdummyparameter\c!d\relax
- \normalmonth\number\directdummyparameter\c!m\relax
- \normalyear \number\directdummyparameter\c!y\relax}}
+ \iftok{\directdummyparameter\c!d}\emptytoks\else\normalday \number\directdummyparameter\c!d\relax\fi
+ \iftok{\directdummyparameter\c!m}\emptytoks\else\normalmonth\number\directdummyparameter\c!m\relax\fi
+ \iftok{\directdummyparameter\c!y}\emptytoks\else\normalyear \number\directdummyparameter\c!y\relax\fi}}
\permanent\tolerant\protected\def\date[#1]#*[#2]% sets the date !
{\dontleavehmode
diff --git a/tex/context/base/mkxl/luat-log.lmt b/tex/context/base/mkxl/luat-log.lmt
index 684b2ac8d..522f4ac4f 100644
--- a/tex/context/base/mkxl/luat-log.lmt
+++ b/tex/context/base/mkxl/luat-log.lmt
@@ -660,7 +660,7 @@ do
if s then
report("start %s: %s",what,s)
else
- report("start %s",what)
+ report("start %s",what or "")
end
if target == "logfile" then
newline()
@@ -672,7 +672,7 @@ do
if target == "logfile" then
newline()
end
- report("stop %s",what)
+ report("stop %s",what or "")
if target == "logfile" then
newline()
end
diff --git a/tex/context/base/mkxl/mlib-fio.lmt b/tex/context/base/mkxl/mlib-fio.lmt
index cdffbfcf1..9de61affe 100644
--- a/tex/context/base/mkxl/mlib-fio.lmt
+++ b/tex/context/base/mkxl/mlib-fio.lmt
@@ -73,7 +73,7 @@ local function findmpfile(name,ftype)
elseif suffix(name) == "" then
for i=1,#suffixlist do
fullname = findfile(addsuffix(name,suffixlist[i]),validtyp)
- if fullname and fulllname ~= "" then
+ if fullname and fullname ~= "" then
return fullname
end
end
diff --git a/tex/context/base/mkxl/typo-bld.lmt b/tex/context/base/mkxl/typo-bld.lmt
new file mode 100644
index 000000000..599106ccd
--- /dev/null
+++ b/tex/context/base/mkxl/typo-bld.lmt
@@ -0,0 +1,414 @@
+if modules then modules = { } end modules ['typo-bld'] = { -- was node-par
+ version = 1.001,
+ comment = "companion to typo-bld.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- no need for nuts in the one-line demo (that might move anyway)
+
+local insert, remove = table.insert, table.remove
+
+builders = builders or { }
+local builders = builders
+
+builders.paragraphs = builders.paragraphs or { }
+local parbuilders = builders.paragraphs
+
+parbuilders.constructors = parbuilders.constructors or { }
+local constructors = parbuilders.constructors
+
+constructors.names = constructors.names or { }
+local names = constructors.names
+
+constructors.numbers = constructors.numbers or { }
+local numbers = constructors.numbers
+
+constructors.methods = constructors.methods or { }
+local methods = constructors.methods
+
+local a_parbuilder = attributes.numbers['parbuilder'] or 999 -- why 999
+constructors.attribute = a_parbuilder
+
+local unsetvalue = attributes.unsetvalue
+local texsetattribute = tex.setattribute
+local texnest = tex.nest
+local texlists = tex.lists
+
+local texget = tex.get
+local texset = tex.set
+
+local texgetdimen = tex.getdimen
+
+local nodes = nodes
+local nodeidstostring = nodes.idstostring
+local nodepool = nodes.pool
+local new_baselineskip = nodepool.baselineskip
+local new_lineskip = nodepool.lineskip
+local insert_node_before = nodes.insert_before
+local hpack_node = nodes.hpack
+
+local nuts = nodes.nuts
+local tonode = nodes.tonode
+local tonut = nodes.tonut
+local count_nodes = nuts.countall
+local getattr = nuts.getattr
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+local registercallback = callbacks.register
+
+storage.register("builders/paragraphs/constructors/names", names, "builders.paragraphs.constructors.names")
+storage.register("builders/paragraphs/constructors/numbers", numbers, "builders.paragraphs.constructors.numbers")
+
+local trace_page_builder = false trackers.register("builders.page", function(v) trace_page_builder = v end)
+local trace_vbox_builder = false trackers.register("builders.vbox", function(v) trace_vbox_builder = v end)
+local trace_post_builder = false trackers.register("builders.post", function(v) trace_post_builder = v end)
+
+local report_page_builder = logs.reporter("builders","page")
+local report_vbox_builder = logs.reporter("builders","vbox")
+local report_par_builder = logs.reporter("builders","par")
+
+local mainconstructor = nil -- not stored in format
+local nofconstructors = 0
+local stack = { }
+
+function constructors.define(name)
+ nofconstructors = nofconstructors + 1
+ names[nofconstructors] = name
+ numbers[name] = nofconstructors
+end
+
+function constructors.set(name) --- will go
+ if name then
+ mainconstructor = numbers[name] or unsetvalue
+ else
+ mainconstructor = stack[#stack] or unsetvalue
+ end
+ texsetattribute(a_parbuilder,mainconstructor)
+ if mainconstructor ~= unsetvalue then
+ constructors.enable()
+ end
+end
+
+function constructors.start(name)
+ local number = numbers[name]
+ insert(stack,number)
+ mainconstructor = number or unsetvalue
+ texsetattribute(a_parbuilder,mainconstructor)
+ if mainconstructor ~= unsetvalue then
+ constructors.enable()
+ end
+ -- report_par_builder("start %a",name)
+end
+
+function constructors.stop()
+ remove(stack)
+ mainconstructor = stack[#stack] or unsetvalue
+ texsetattribute(a_parbuilder,mainconstructor)
+ if mainconstructor == unsetvalue then
+ constructors.disable()
+ end
+ -- report_par_builder("stop")
+end
+
+-- return values:
+--
+-- true : tex will break itself
+-- false : idem but dangerous
+-- head : list of valid vmode nodes with last being hlist
+
+function constructors.handler(head,followed_by_display)
+ if type(head) == "boolean" then
+ return head
+ else
+ local attribute = getattr(head,a_parbuilder) -- or mainconstructor
+ if attribute then
+ local method = names[attribute]
+ if method then
+ local handler = methods[method]
+ if handler then
+ return handler(head,followed_by_display)
+ else
+ report_par_builder("constructor method %a is not defined",tostring(method))
+ return true -- let tex break
+ end
+ end
+ end
+ return true -- let tex break
+ end
+end
+
+-- just for testing
+
+function constructors.methods.default(head,followed_by_display)
+ return true -- let tex break
+end
+
+-- also for testing (now also surrounding spacing done)
+
+function parbuilders.constructors.methods.oneline(head,followed_by_display)
+ -- when needed we will turn this into a helper
+ local t = texnest[texnest.ptr]
+ local h = hpack_node(head)
+ local d = texget("baselineskip",false) - t.prevdepth - h.height
+ t.prevdepth = h.depth
+ t.prevgraf = 1
+ if d < texget("lineskiplimit") then
+ return insert_node_before(h,h,new_lineskip(texget("lineskip",false))) -- no stretch etc
+ else
+ return insert_node_before(h,h,new_baselineskip(d))
+ end
+end
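+
+-- Usage sketch at the TeX end (assuming the interfaces set up in typo-bld.mkxl):
+--
+--   \defineparbuilder[oneline]
+--   \startparbuilder[oneline]
+--     a single line of text \par
+--   \stopparbuilder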
+
+-- It makes no sense to have a sequence here as we already have
+-- pre and post hooks and only one parbuilder makes sense, so no:
+--
+-- local actions = nodes.tasks.actions("parbuilders")
+--
+-- yet ... maybe some day.
+
+local actions = constructors.handler
+local enabled = false
+
+local function processor(head,followed_by_display)
+ -- todo: not again in otr so we need to flag
+ if enabled then
+ starttiming(parbuilders)
+ head = tonut(head)
+ head = actions(head,followed_by_display)
+ head = tonode(head)
+ stoptiming(parbuilders)
+ return head
+ else
+ return true -- let tex do the work
+ end
+end
+
+function constructors.enable () enabled = true end
+function constructors.disable() enabled = false end
+
+registercallback('linebreak_filter', processor, "breaking paragraphs into lines")
+
+statistics.register("linebreak processing time", function()
+ return statistics.elapsedseconds(parbuilders)
+end)
+
+-- todo: move from nodes.builders to builders
+
+nodes.builders = nodes.builders or { }
+local builders = nodes.builders
+
+local vboxactions = nodes.tasks.actions("vboxbuilders")
+
+function builders.vpack_filter(head,groupcode,size,packtype,maxdepth,direction)
+ local done = false
+ if head then
+ starttiming(builders)
+ head = tonut(head)
+ if trace_vbox_builder then
+ local before = count_nodes(head)
+ head, done = vboxactions(head,groupcode,size,packtype,maxdepth,direction)
+ local after = count_nodes(head)
+ nodes.processors.tracer("vpack",head,groupcode,before,after,done)
+ else
+ head, done = vboxactions(head,groupcode)
+ end
+ head = tonode(head)
+ stoptiming(builders)
+ end
+ return head, done
+end
+
+-- This one is special in the sense that it has no head and we operate on the mvl. Also,
+-- we need to do the vspacing last as it removes items from the mvl.
+
+local pageactions = nodes.tasks.actions("mvlbuilders")
+----- lineactions = nodes.tasks.actions("linebuilders")
+
+local function report(groupcode,head)
+ report_page_builder("trigger: %s",groupcode)
+ report_page_builder(" vsize : %p",texget("vsize"))
+ report_page_builder(" pagegoal : %p",texget("pagegoal"))
+ report_page_builder(" pagetotal: %p",texget("pagetotal"))
+ report_page_builder(" list : %s",head and nodeidstostring(head) or "<empty>")
+end
+
+-- check why box is called before after_linebreak .. maybe make categories and
+-- call 'm less
+
+-- this will be split into contribute_filter for these 4 so at some point
+-- the check can go away
+
+-- Todo: contrib_head can be any head (kind of) not per se the page one so maybe I will
+-- intercept that in the engine with page_contribute_head or so.
+
+function builders.buildpage_filter(groupcode)
+ local head = texlists.contribute_head
+ if head then
+ local done = false
+ -- called quite often ... maybe time to remove timing
+ starttiming(builders)
+ if trace_page_builder then
+ report(groupcode,head)
+ end
+ head, done = pageactions(head,groupcode)
+ stoptiming(builders)
+ -- -- doesn't work here (not passed on?)
+ -- texset("pagegoal", texget("vsize") - texgetdimen("d_page_floats_inserted_top") - texgetdimen("d_page_floats_inserted_bottom"))
+ texlists.contribute_head = head or nil -- needs checking
+ -- tex.setlist("contribute_head",head,head and nodes.tail(head))
+ return done and head or true -- no return value needed
+ else
+ -- happens quite often
+ if trace_page_builder then
+ report(groupcode)
+ end
+-- return nil, false -- no return value needed
+ return nil
+ end
+end
+
+registercallback('vpack_filter', builders.vpack_filter, "vertical spacing etc")
+registercallback('buildpage_filter', builders.buildpage_filter, "vertical spacing etc (mvl)")
+
+statistics.register("v-node processing time", function()
+ return statistics.elapsedseconds(builders)
+end)
+
+local implement = interfaces.implement
+
+implement { name = "defineparbuilder", actions = constructors.define, arguments = "string" }
+implement { name = "setparbuilder", actions = constructors.set, arguments = "string" }
+implement { name = "startparbuilder", actions = constructors.start, arguments = "string" }
+implement { name = "stopparbuilder", actions = constructors.stop }
+implement { name = "enableparbuilder", actions = constructors.enable }
+implement { name = "disableparbuilder", actions = constructors.disable }
+
+-- Here are some tracers:
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local setcolor = nodes.tracers.colors.set
+local listtoutf = nodes.listtoutf
+local new_kern = nuts.pool.kern
+local new_rule = nuts.pool.rule
+local hpack = nuts.hpack
+local getheight = nuts.getheight
+local getdepth = nuts.getdepth
+local getdirection = nuts.getdirection
+local getlist = nuts.getlist
+local setwidth = nuts.setwidth
+local setdirection = nuts.setdirection
+local setlink = nuts.setlink
+local tonode = nuts.tonode
+
+local list = { }
+
+local report_quality = logs.reporter("pack quality")
+
+-- overflow|badness w h d dir
+
+local function vpack_quality(how,n,detail,first,last,filename)
+ if last <= 0 then
+ report_quality("%s vbox",how)
+ elseif first > 0 and first < last then
+ report_quality("%s vbox at line %i - %i in file %a",how,first,last,filename or "?")
+ else
+ report_quality("%s vbox at line %i in file %a",how,last,filename or "?")
+ end
+ list[#list+1] = { "vbox", how, filename, first, last, how }
+end
+
+trackers.register("builders.vpack.quality",function(v)
+ registercallback("vpack_quality",v and vpack_quality or nil,"check vpack quality")
+end)
+
+local report = false
+local show = false
+
+local function hpack_quality(how,detail,n,first,last,filename)
+ n = tonut(n)
+ if report then
+ local str = listtoutf(getlist(n),"",true,nil,true)
+ if last <= 0 then
+ report_quality("%s hbox: %s",how,str)
+ elseif first > 0 and first < last then
+ report_quality("%s hbox at line %i - %i in file %a: %s",how,first,last,filename or "?",str)
+ else
+ report_quality("%s hbox at line %i in file %a: %s",how,last,filename or "?",str)
+ end
+ list[#list+1] = { "hbox", how, filename, first, last, str }
+ end
+ if show then
+ local width = 2*65536
+ local height = getheight(n)
+ local depth = getdepth(n)
+ local direction = getdirection(n)
+ if height < 4*65536 then
+ height = 4*65536
+ end
+ if depth < 2*65536 then
+ depth = 2*65536
+ end
+ local rule = new_rule(width,height,depth)
+ setdirection(rule,direction)
+ if how == "overfull" then
+ setcolor(rule,"red")
+ local kern = new_kern(-detail)
+ setlink(kern,rule)
+ rule = kern
+ elseif how == "underfull" then
+ setcolor(rule,"blue")
+ elseif how == "loose" then
+ setcolor(rule,"magenta")
+ elseif how == "tight" then
+ setcolor(rule,"cyan")
+ end
+ rule = hpack(rule)
+ setwidth(rule,0)
+ setdirection(rule,direction)
+ return tonode(rule) -- can be a nut
+ end
+end
+
+trackers.register("builders.hpack.quality",function(v)
+ report = v
+ registercallback("hpack_quality",(report or show) and hpack_quality or nil,"check hpack quality")
+end)
+
+trackers.register("builders.hpack.overflow",function(v)
+ show = v
+ registercallback("hpack_quality",(report or show) and hpack_quality or nil,"check hpack quality")
+end)
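+
+-- Usage sketch (relying on the regular tracker interface): reporting and the visual
+-- overflow markers can be activated with
+--
+--   \enabletrackers[builders.hpack.quality]
+--   \enabletrackers[builders.hpack.overflow]
+--
+-- or with --trackers=builders.hpack.quality on the command line.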
+
+statistics.register("quality reports", function()
+ local n = #list
+ if n > 0 then
+ local t = table.setmetatableindex("number")
+ local fw = 0
+ local hw = 0
+ for i=1,n do
+ local f = list[i][1]
+ local h = list[i][2]
+ if #f > fw then
+ fw = #f
+ end
+ if #h > hw then
+ hw = #h
+ end
+ t[h] = t[h] + 1
+ end
+ logs.startfilelogging(report_quality)
+ for i=1,n do
+ local l = list[i]
+ report_quality("%-" .. fw .. "s [%04i - %04i] : %-" .. hw .. "s %s : %s",file.basename(l[3]),l[4],l[5],l[2],l[1],l[6])
+ end
+ logs.stopfilelogging()
+ report_quality()
+ report_quality("%i entries added to the log file : %s",n,table.sequenced(t))
+ report_quality()
+ end
+end)
diff --git a/tex/context/base/mkxl/typo-bld.mkxl b/tex/context/base/mkxl/typo-bld.mkxl
index d170397fc..ff1f569cb 100644
--- a/tex/context/base/mkxl/typo-bld.mkxl
+++ b/tex/context/base/mkxl/typo-bld.mkxl
@@ -35,7 +35,7 @@
\unprotect
-\registerctxluafile{typo-bld}{}
+\registerctxluafile{typo-bld}{autosuffix}
\definesystemattribute[parbuilder][public]
diff --git a/tex/context/base/mkxl/typo-shp.lmt b/tex/context/base/mkxl/typo-shp.lmt
index ffd9e556d..30719c40e 100644
--- a/tex/context/base/mkxl/typo-shp.lmt
+++ b/tex/context/base/mkxl/typo-shp.lmt
@@ -114,7 +114,7 @@ implement {
if type(value) == "boolean" then
value = value and 1 or 0
end
- context(value)
+ context(value or 0) -- so the first check, for "lines" is always ok
end
}
diff --git a/tex/context/base/mkxl/typo-shp.mkxl b/tex/context/base/mkxl/typo-shp.mkxl
index 21df41bd4..b74003068 100644
--- a/tex/context/base/mkxl/typo-shp.mkxl
+++ b/tex/context/base/mkxl/typo-shp.mkxl
@@ -49,7 +49,7 @@
\aliased\let\stopparagraphshape\relax
\permanent\protected\def\startparagraphshape[#1]#2\stopparagraphshape
- {\defcsname\??parshapes#1\endcsname{#2}}
+ {\gdefcsname\??parshapes#1\endcsname{#2}} % global
\permanent\protected\def\rawparagraphshape#1%
{\begincsname\??parshapes#1\endcsname}
@@ -107,20 +107,24 @@
\dontcomplain
\global\advance\shapetextindex\plusone
\scratchcounter\getshapeparameter{lines}\relax
- \scratchwidth \getshapeparameter{width}\scaledpoint\relax
- \scratchheight \getshapeparameter{height}\scaledpoint\relax
- \setbox\scratchbox\vpack to \scratchheight
- {\splittopskip\strutheight
- \vskip\dimexpr\getshapeparameter{voffset}\scaledpoint\relax
- \ifcase\numexpr\getshapeparameter{first}\relax\else
- \vskip\lineheight
- \fi
- \hskip\dimexpr\getshapeparameter{hoffset}\scaledpoint\relax
- \hpack{\vsplit\shapetextbox to \scratchcounter\lineheight}}%
- \wd\scratchbox\scratchwidth
- \ht\scratchbox\scratchheight
- \dp\scratchbox\zeropoint
- \box\scratchbox
+ \ifnum\scratchcounter>\zerocount
+ \scratchwidth \getshapeparameter{width}\scaledpoint\relax
+ \scratchheight \getshapeparameter{height}\scaledpoint\relax
+ \setbox\scratchbox\vpack to \scratchheight
+ {\splittopskip\strutheight
+ \vskip\dimexpr\getshapeparameter{voffset}\scaledpoint\relax
+ \ifcase\numexpr\getshapeparameter{first}\relax\else
+ \vskip\lineheight
+ \fi
+ \hskip\dimexpr\getshapeparameter{hoffset}\scaledpoint\relax
+ \hpack{\vsplit\shapetextbox to \scratchcounter\lineheight}}%
+ \wd\scratchbox\scratchwidth
+ \ht\scratchbox\scratchheight
+ \dp\scratchbox\zeropoint
+ \box\scratchbox
+ \else
+ % what now
+ \fi
\egroup}
\protect
diff --git a/tex/context/fonts/mkiv/type-imp-dejavu.mkiv b/tex/context/fonts/mkiv/type-imp-dejavu.mkiv
index ee1e9f383..6da2daa89 100644
--- a/tex/context/fonts/mkiv/type-imp-dejavu.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-dejavu.mkiv
@@ -11,6 +11,9 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% \definefontfallback[Serif] [file:notoserif-regular.ttf] [0x0000-0xFFFF] [check=yes,rscale=1.02]
+% \definefontfallback[SerifBold][file:notoserif-bold.ttf] [0x0000-0xFFFF] [check=yes,rscale=1.02]
+
\definefontfeature[dejavu-condensed-mono][extend=.8]
\starttypescriptcollection[dejavu]
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 89650fcbc..faecb07c3 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : c:/data/develop/context/sources/luatex-fonts-merged.lua
-- parent file : c:/data/develop/context/sources/luatex-fonts.lua
--- merge date : 2021-03-02 19:17
+-- merge date : 2021-03-05 11:13
do -- begin closure to overcome local limits and interference