From 059fc69b2c7853b937ddb4cfc9d36304dee07893 Mon Sep 17 00:00:00 2001
From: Hans Hagen <pragma@wxs.nl>
Date: Sat, 1 Apr 2023 10:50:35 +0200
Subject: 2023-04-01 09:31:00

---
 tex/context/base/mkii/cont-new.mkii                |   2 +-
 tex/context/base/mkii/context.mkii                 |   2 +-
 tex/context/base/mkiv/anch-pos.lua                 |   9 +-
 tex/context/base/mkiv/attr-ini.lua                 |  18 +-
 tex/context/base/mkiv/bibl-bib.lua                 |   8 +-
 tex/context/base/mkiv/char-def.lua                 |   1 +
 tex/context/base/mkiv/char-ini.lua                 |  44 +-
 tex/context/base/mkiv/char-tex.lua                 |  44 +-
 tex/context/base/mkiv/char-utf.lua                 |  34 +-
 tex/context/base/mkiv/chem-ini.lua                 |   8 +-
 tex/context/base/mkiv/cont-new.mkiv                |   2 +-
 tex/context/base/mkiv/context.mkiv                 |   2 +-
 tex/context/base/mkiv/core-con.lua                 |  10 +-
 tex/context/base/mkiv/core-dat.lua                 |  18 +-
 tex/context/base/mkiv/core-two.lua                 |   9 +-
 tex/context/base/mkiv/core-uti.lua                 |  28 +-
 tex/context/base/mkiv/data-con.lua                 |  24 +-
 tex/context/base/mkiv/data-res.lua                 |  33 +-
 tex/context/base/mkiv/data-tar.lua                 |  12 +-
 tex/context/base/mkiv/data-tmp.lua                 |  23 +-
 tex/context/base/mkiv/data-zip.lua                 |  18 +-
 tex/context/base/mkiv/file-ini.lua                 |   7 +-
 tex/context/base/mkiv/file-mod.lua                 |  16 +-
 tex/context/base/mkiv/font-afk.lua                 |   8 +-
 tex/context/base/mkiv/font-con.lua                 |  65 ++-
 tex/context/base/mkiv/font-ctx.lua                 |  39 +-
 tex/context/base/mkiv/font-def.lua                 |  92 ++--
 tex/context/base/mkiv/font-enc.lua                 |  42 +-
 tex/context/base/mkiv/font-fbk.lua                 |   4 -
 tex/context/base/mkiv/font-imp-tex.lua             |  47 +-
 tex/context/base/mkiv/font-ini.lua                 |   4 +-
 tex/context/base/mkiv/font-log.lua                 |   9 +-
 tex/context/base/mkiv/font-nod.lua                 |   5 -
 tex/context/base/mkiv/font-one.lua                 |  90 ++--
 tex/context/base/mkiv/font-onr.lua                 |  40 +-
 tex/context/base/mkiv/font-ota.lua                 |   6 +-
 tex/context/base/mkiv/font-ots.lua                 | 225 +++++-----
 tex/context/base/mkiv/font-syn.lua                 |  31 +-
 tex/context/base/mkiv/font-tfm.lua                 |  29 +-
 tex/context/base/mkiv/font-trt.lua                 |   8 +-
 tex/context/base/mkiv/font-vir.lua                 |  11 +-
 tex/context/base/mkiv/l-dir.lua                    |  17 +-
 tex/context/base/mkiv/lang-url.lua                 |  10 +-
 tex/context/base/mkiv/luat-cbk.lua                 | 132 +++---
 tex/context/base/mkiv/luat-ini.lua                 |   8 +-
 tex/context/base/mkiv/lxml-aux.lua                 |  18 +-
 tex/context/base/mkiv/lxml-ent.lua                 |  12 +-
 tex/context/base/mkiv/lxml-lpt.lua                 |  75 ++--
 tex/context/base/mkiv/lxml-mis.lua                 |  11 +-
 tex/context/base/mkiv/lxml-tab.lua                 | 254 +++++------
 tex/context/base/mkiv/math-map.lua                 |  32 +-
 tex/context/base/mkiv/meta-fun.lua                 |  22 +-
 tex/context/base/mkiv/mlib-fio.lua                 |  12 +-
 tex/context/base/mkiv/mlib-run.lua                 |  28 +-
 tex/context/base/mkiv/mult-mps.lua                 |   2 +-
 tex/context/base/mkiv/node-ini.lua                 |  64 ++-
 tex/context/base/mkiv/node-res.lua                 |   5 -
 tex/context/base/mkiv/node-tra.lua                 |   6 +-
 tex/context/base/mkiv/pack-obj.lua                 |   6 +-
 tex/context/base/mkiv/pack-rul.lua                 |   4 -
 tex/context/base/mkiv/publ-dat.lua                 |   6 -
 tex/context/base/mkiv/publ-ini.lua                 |   3 +-
 tex/context/base/mkiv/publ-ini.mkiv                |   2 +-
 tex/context/base/mkiv/regi-ini.lua                 |  11 +-
 tex/context/base/mkiv/sort-ini.lua                 |  82 ++--
 tex/context/base/mkiv/status-files.pdf             | Bin 24657 -> 24625 bytes
 tex/context/base/mkiv/status-lua.pdf               | Bin 267358 -> 267345 bytes
 tex/context/base/mkiv/syst-con.lua                 |   7 +-
 tex/context/base/mkiv/syst-ini.mkiv                |   3 +
 tex/context/base/mkiv/tabl-tbl.mkiv                |   3 +-
 tex/context/base/mkiv/trac-lmx.lua                 |   3 +-
 tex/context/base/mkiv/util-dim.lua                 | 234 ++++------
 tex/context/base/mkiv/util-fmt.lua                 |  70 ++-
 tex/context/base/mkiv/util-seq.lua                 |  14 +-
 tex/context/base/mkxl/attr-ini.lmt                 |  18 +-
 tex/context/base/mkxl/char-tex.lmt                 |  68 +--
 tex/context/base/mkxl/cont-new.mkxl                |   2 +-
 tex/context/base/mkxl/context.mkxl                 | 106 +++--
 tex/context/base/mkxl/core-dat.lmt                 | 225 ++++++++++
 tex/context/base/mkxl/core-dat.mkxl                |  50 +--
 tex/context/base/mkxl/core-pag.lmt                 | 160 +++++++
 tex/context/base/mkxl/core-pag.mkxl                |  68 +++
 tex/context/base/mkxl/core-two.lmt                 | 210 +++++++++
 tex/context/base/mkxl/core-two.mkxl                | 194 +++++----
 tex/context/base/mkxl/core-uti.lmt                 |  34 +-
 tex/context/base/mkxl/file-mod.lmt                 |  16 +-
 tex/context/base/mkxl/font-con.lmt                 |  75 ++--
 tex/context/base/mkxl/font-ctx.lmt                 |  26 +-
 tex/context/base/mkxl/font-def.lmt                 |  88 ++--
 tex/context/base/mkxl/font-fbk.lmt                 |   4 -
 tex/context/base/mkxl/font-fil.mklx                |   2 +-
 tex/context/base/mkxl/font-ini.lmt                 |   4 -
 tex/context/base/mkxl/font-ini.mklx                |  10 +
 tex/context/base/mkxl/font-mat.mklx                |  20 +-
 tex/context/base/mkxl/font-one.lmt                 |  90 ++--
 tex/context/base/mkxl/font-onr.lmt                 |  42 +-
 tex/context/base/mkxl/font-ota.lmt                 |   6 +-
 tex/context/base/mkxl/font-ots.lmt                 | 223 +++++-----
 tex/context/base/mkxl/font-tfm.lmt                 |  30 +-
 tex/context/base/mkxl/lang-url.lmt                 |  10 +-
 tex/context/base/mkxl/lpdf-ano.lmt                 |   1 +
 tex/context/base/mkxl/lpdf-pde.lmt                 |   6 +-
 tex/context/base/mkxl/luat-cbk.lmt                 |  29 +-
 tex/context/base/mkxl/luat-cod.mkxl                |   2 +-
 tex/context/base/mkxl/luat-ini.lmt                 |   8 +-
 tex/context/base/mkxl/math-act.lmt                 | 267 +++++++-----
 tex/context/base/mkxl/math-ali.mkxl                |  78 +++-
 tex/context/base/mkxl/math-fnt.lmt                 |   8 +-
 tex/context/base/mkxl/math-frc.mkxl                |   8 +
 tex/context/base/mkxl/math-ini.mkxl                | 123 ++++--
 tex/context/base/mkxl/math-map.lmt                 |  32 +-
 tex/context/base/mkxl/math-noa.lmt                 |  64 +--
 tex/context/base/mkxl/math-rad.mklx                |   6 +
 tex/context/base/mkxl/math-spa.lmt                 |  28 +-
 tex/context/base/mkxl/math-stc.mklx                |  22 +-
 tex/context/base/mkxl/math-twk.mkxl                |   7 +
 tex/context/base/mkxl/math-vfu.lmt                 | 120 +++--
 tex/context/base/mkxl/meta-imp-newmath.mkxl        |  76 ++++
 tex/context/base/mkxl/mlib-run.lmt                 |  32 +-
 tex/context/base/mkxl/node-ini.lmt                 |  10 +-
 tex/context/base/mkxl/node-res.lmt                 |   5 -
 tex/context/base/mkxl/node-tra.lmt                 |   6 +-
 tex/context/base/mkxl/pack-obj.lmt                 |   6 +-
 tex/context/base/mkxl/pack-rul.lmt                 |   4 -
 tex/context/base/mkxl/publ-ini.mkxl                |   2 +-
 tex/context/base/mkxl/regi-ini.lmt                 |   7 +-
 tex/context/base/mkxl/scrn-wid.lmt                 |  38 +-
 tex/context/base/mkxl/spac-pag.mkxl                |   1 -
 tex/context/base/mkxl/strc-itm.lmt                 |  59 ++-
 tex/context/base/mkxl/strc-lst.lmt                 |   2 +-
 tex/context/base/mkxl/strc-ref.lmt                 |   2 +-
 tex/context/base/mkxl/strc-reg.lmt                 |  59 ++-
 tex/context/base/mkxl/strc-reg.mkxl                |  31 +-
 tex/context/base/mkxl/tabl-ntb.mkxl                |  17 +-
 tex/context/base/mkxl/tabl-tbl.mkxl                |  51 ++-
 tex/context/base/mkxl/trac-vis.lmt                 |   2 +-
 tex/context/base/mkxl/typo-cln.lmt                 | 109 +++++
 tex/context/base/mkxl/typo-cln.mkxl                |   2 +-
 tex/context/base/mkxl/typo-dha.lmt                 | 481 +++++++++++++++++++++
 tex/context/base/mkxl/typo-dir.mkxl                |   4 +-
 tex/context/base/mkxl/typo-prc.mklx                |   2 +
 tex/context/fonts/mkiv/bonum-math.lfg              |   7 +
 tex/context/fonts/mkiv/cambria-math.lfg            |  20 +
 tex/context/fonts/mkiv/common-math-jmn.lfg         |  15 +-
 tex/context/fonts/mkiv/concrete-math.lfg           |   7 +
 tex/context/fonts/mkiv/dejavu-math.lfg             |   7 +
 tex/context/fonts/mkiv/ebgaramond-math.lfg         |  25 +-
 tex/context/fonts/mkiv/erewhon-math.lfg            |  22 +
 tex/context/fonts/mkiv/kpfonts-math.lfg            |  22 +
 tex/context/fonts/mkiv/libertinus-math.lfg         |  23 +
 tex/context/fonts/mkiv/lucida-math.lfg             |  37 +-
 tex/context/fonts/mkiv/modern-math.lfg             |   9 +
 tex/context/fonts/mkiv/newcomputermodern-math.lfg  |   9 +-
 tex/context/fonts/mkiv/pagella-math.lfg            |   9 +-
 tex/context/fonts/mkiv/schola-math.lfg             |   7 +
 tex/context/fonts/mkiv/stixtwo-math.lfg            |  23 +
 tex/context/fonts/mkiv/termes-math.lfg             |   7 +
 tex/context/fonts/mkiv/type-imp-antykwa.mkiv       |   9 +-
 tex/context/fonts/mkiv/type-imp-concrete.mkiv      |   2 +-
 tex/context/fonts/mkiv/type-imp-ebgaramond.mkiv    |  10 +-
 tex/context/fonts/mkiv/type-imp-iwona.mkiv         |   8 +-
 tex/context/fonts/mkiv/type-imp-kurier.mkiv        |   8 +-
 tex/context/fonts/mkiv/xcharter-math.lfg           |  22 +
 tex/context/modules/mkiv/m-tikz.mkiv               |   1 +
 tex/context/modules/mkiv/s-abbreviations-logos.tex |  15 +-
 tex/context/modules/mkiv/x-asciimath.lua           |  17 +-
 tex/context/modules/mkxl/m-tikz.mkxl               |   1 +
 tex/generic/context/luatex/luatex-fonts-merged.lua |  30 +-
 tex/generic/context/luatex/luatex-mplib.lua        |  70 ++-
 tex/generic/context/luatex/luatex-preprocessor.lua |  12 +-
 tex/latex/context/ppchtex/m-ch-de.sty              |  19 -
 tex/latex/context/ppchtex/m-ch-en.sty              |  19 -
 tex/latex/context/ppchtex/m-ch-nl.sty              |  19 -
 tex/latex/context/ppchtex/m-pictex.sty             |   5 -
 174 files changed, 3808 insertions(+), 2589 deletions(-)
 create mode 100644 tex/context/base/mkxl/core-dat.lmt
 create mode 100644 tex/context/base/mkxl/core-pag.lmt
 create mode 100644 tex/context/base/mkxl/core-pag.mkxl
 create mode 100644 tex/context/base/mkxl/core-two.lmt
 create mode 100644 tex/context/base/mkxl/meta-imp-newmath.mkxl
 create mode 100644 tex/context/base/mkxl/typo-cln.lmt
 create mode 100644 tex/context/base/mkxl/typo-dha.lmt
 delete mode 100644 tex/latex/context/ppchtex/m-ch-de.sty
 delete mode 100644 tex/latex/context/ppchtex/m-ch-en.sty
 delete mode 100644 tex/latex/context/ppchtex/m-ch-nl.sty
 delete mode 100644 tex/latex/context/ppchtex/m-pictex.sty

diff --git a/tex/context/base/mkii/cont-new.mkii b/tex/context/base/mkii/cont-new.mkii
index ec168c472..4eb526804 100644
--- a/tex/context/base/mkii/cont-new.mkii
+++ b/tex/context/base/mkii/cont-new.mkii
@@ -11,7 +11,7 @@
 %C therefore copyrighted by \PRAGMA. See mreadme.pdf for
 %C details.
 
-\newcontextversion{2023.03.20 15:42}
+\newcontextversion{2023.04.01 09:28}
 
 %D This file is loaded at runtime, thereby providing an
 %D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/mkii/context.mkii b/tex/context/base/mkii/context.mkii
index 9e6c53624..a09091440 100644
--- a/tex/context/base/mkii/context.mkii
+++ b/tex/context/base/mkii/context.mkii
@@ -20,7 +20,7 @@
 %D your styles an modules.
 
 \edef\contextformat {\jobname}
-\edef\contextversion{2023.03.20 15:42}
+\edef\contextversion{2023.04.01 09:28}
 
 %D For those who want to use this:
 
diff --git a/tex/context/base/mkiv/anch-pos.lua b/tex/context/base/mkiv/anch-pos.lua
index 77f55964f..cf3ed87fc 100644
--- a/tex/context/base/mkiv/anch-pos.lua
+++ b/tex/context/base/mkiv/anch-pos.lua
@@ -6,12 +6,9 @@ if not modules then modules = { } end modules ['anch-pos'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>We save positional information in the main utility table. Not only
-can we store much more information in <l n='lua'/> but it's also
-more efficient.</p>
---ldx]]--
-
+-- We save positional information in the main utility table. Not only can we store
+-- much more information in Lua but it's also more efficient.
+--
 -- plus (extra) is obsolete but we will keep it for a while
 --
 -- maybe replace texsp by our own converter (stay at the lua end)
diff --git a/tex/context/base/mkiv/attr-ini.lua b/tex/context/base/mkiv/attr-ini.lua
index b05c343e5..cd1a0c549 100644
--- a/tex/context/base/mkiv/attr-ini.lua
+++ b/tex/context/base/mkiv/attr-ini.lua
@@ -9,10 +9,8 @@ if not modules then modules = { } end modules ['attr-ini'] = {
 local next, type = next, type
 local osexit = os.exit
 
---[[ldx--
-<p>We start with a registration system for atributes so that we can use the
-symbolic names later on.</p>
---ldx]]--
+-- We start with a registration system for attributes so that we can use the symbolic
+-- names later on.
 
 local nodes             = nodes
 local context           = context
@@ -54,17 +52,13 @@ storage.register("attributes/list",    list,    "attributes.list")
 --     end
 -- end
 
---[[ldx--
-<p>We reserve this one as we really want it to be always set (faster).</p>
---ldx]]--
+-- We reserve this one as we really want it to be always set (faster).
 
 names[0], numbers["fontdynamic"] = "fontdynamic", 0
 
---[[ldx--
-<p>private attributes are used by the system and public ones are for users. We use dedicated
-ranges of numbers for them. Of course a the <l n='context'/> end a private attribute can be
-accessible too, so a private attribute can have a public appearance.</p>
---ldx]]--
+-- Private attributes are used by the system and public ones are for users. We use
+-- dedicated ranges of numbers for them. Of course at the TeX end a private attribute
+-- can be accessible too, so a private attribute can have a public appearance.
 
 sharedstorage.attributes_last_private = sharedstorage.attributes_last_private or   15 -- very private
 sharedstorage.attributes_last_public  = sharedstorage.attributes_last_public  or 1024 -- less private
diff --git a/tex/context/base/mkiv/bibl-bib.lua b/tex/context/base/mkiv/bibl-bib.lua
index baeb3d2f9..b7e478004 100644
--- a/tex/context/base/mkiv/bibl-bib.lua
+++ b/tex/context/base/mkiv/bibl-bib.lua
@@ -6,11 +6,9 @@ if not modules then modules = { } end modules ['bibl-bib'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>This is a prelude to integrated bibliography support. This file just loads
-bibtex files and converts them to xml so that the we access the content
-in a convenient way. Actually handling the data takes place elsewhere.</p>
---ldx]]--
+-- This is a prelude to integrated bibliography support. This file just loads bibtex
+-- files and converts them to xml so that we can access the content in a convenient
+-- way. Actually handling the data takes place elsewhere.
 
 local lower, format, gsub, concat = string.lower, string.format, string.gsub, table.concat
 local next = next
diff --git a/tex/context/base/mkiv/char-def.lua b/tex/context/base/mkiv/char-def.lua
index 5e9d7d05a..1d4c130e9 100644
--- a/tex/context/base/mkiv/char-def.lua
+++ b/tex/context/base/mkiv/char-def.lua
@@ -67155,6 +67155,7 @@ characters.data={
   description="MINUS SIGN",
   direction="es",
   linebreak="pr",
+  mathextensible="h",
   mathgroup="binary arithmetic",
   mathspec={
    {
diff --git a/tex/context/base/mkiv/char-ini.lua b/tex/context/base/mkiv/char-ini.lua
index db1b85cc5..627ba072c 100644
--- a/tex/context/base/mkiv/char-ini.lua
+++ b/tex/context/base/mkiv/char-ini.lua
@@ -36,20 +36,16 @@ local trace_defining        = false  trackers.register("characters.defining", fu
 
 local report_defining       = logs.reporter("characters")
 
---[[ldx--
-<p>This module implements some methods and creates additional datastructured
-from the big character table that we use for all kind of purposes:
-<type>char-def.lua</type>.</p>
-
-<p>We assume that at this point <type>characters.data</type> is already
-loaded!</p>
---ldx]]--
-
+-- This module implements some methods and creates additional data structures from
+-- the big character table that we use for all kinds of purposes: 'char-def.lua'.
+--
+-- We assume that at this point 'characters.data' is already populated!
+--
 -- todo: in 'char-def.lua' assume defaults:
 --
--- directions = l
--- cjkwd      = a
--- linebreak  = al
+--   directions = l
+--   cjkwd      = a
+--   linebreak  = al
 
 characters       = characters or { }
 local characters = characters
@@ -62,9 +58,7 @@ else
     os.exit()
 end
 
---[[ldx--
-Extending the table.
---ldx]]--
+-- Extending the table.
 
 if context and CONTEXTLMTXMODE == 0 then
 
@@ -84,9 +78,7 @@ if context and CONTEXTLMTXMODE == 0 then
 
 end
 
---[[ldx--
-<p>This converts a string (if given) into a number.</p>
---ldx]]--
+-- This converts a string (if given) into a number.
 
 local pattern = (P("0x") + P("U+")) * ((R("09","AF")^1 * P(-1)) / function(s) return tonumber(s,16) end)
 
@@ -957,10 +949,8 @@ characters.bidi = allocate {
     on  = "Other Neutrals",
 }
 
---[[ldx--
-<p>At this point we assume that the big data table is loaded. From this
-table we derive a few more.</p>
---ldx]]--
+-- At this point we assume that the big data table is loaded. From this table we
+-- derive a few more.
 
 if not characters.fallbacks then
 
@@ -1037,10 +1027,8 @@ setmetatableindex(characters.textclasses,function(t,k)
     return false
 end)
 
---[[ldx--
-<p>Next comes a whole series of helper methods. These are (will be) part
-of the official <l n='api'/>.</p>
---ldx]]--
+-- Next comes a whole series of helper methods. These are (will be) part of the
+-- official API.
 
 -- we could make them virtual: characters.contextnames[n]
 
@@ -1433,9 +1421,7 @@ function characters.lettered(str,spacing)
     return concat(new)
 end
 
---[[ldx--
-<p>Requesting lower and uppercase codes:</p>
---ldx]]--
+-- Requesting lower and uppercase codes:
 
 function characters.uccode(n) return uccodes[n] end -- obsolete
 function characters.lccode(n) return lccodes[n] end -- obsolete
diff --git a/tex/context/base/mkiv/char-tex.lua b/tex/context/base/mkiv/char-tex.lua
index 7f544b147..09547d005 100644
--- a/tex/context/base/mkiv/char-tex.lua
+++ b/tex/context/base/mkiv/char-tex.lua
@@ -42,17 +42,14 @@ local trace_defining        = false  trackers.register("characters.defining", fu
 
 local report_defining       = logs.reporter("characters")
 
---[[ldx--
-<p>In order to deal with 8-bit output, we need to find a way to go from <l n='utf'/> to
-8-bit. This is handled in the <l n='luatex'/> engine itself.</p>
-
-<p>This leaves us problems with characters that are specific to <l n='tex'/> like
-<type>{}</type>, <type>$</type> and alike. We can remap some chars that tex input files
-are sensitive for to a private area (while writing to a utility file) and revert then
-to their original slot when we read in such a file. Instead of reverting, we can (when
-we resolve characters to glyphs) map them to their right glyph there. For this purpose
-we can use the private planes 0x0F0000 and 0x100000.</p>
---ldx]]--
+-- In order to deal with 8-bit output, we need to find a way to go from UTF to
+-- 8-bit. This is handled in the 32 bit engine itself. This leaves us with problems
+-- for characters that are specific to TeX, like curly braces and dollars. We can
+-- remap some chars that tex input files are sensitive to into a private area (while
+-- writing to a utility file) and revert them to their original slot when we read in
+-- such a file. Instead of reverting, we can (when we resolve characters to glyphs)
+-- map them to their right glyph there. For this purpose we can use the private
+-- planes 0x0F0000 and 0x100000.
 
 local low     = allocate()
 local high    = allocate()
@@ -102,21 +99,6 @@ private.escape  = utf.remapper(escapes) -- maybe: ,"dynamic"
 private.replace = utf.remapper(low)     -- maybe: ,"dynamic"
 private.revert  = utf.remapper(high)    -- maybe: ,"dynamic"
 
---[[ldx--
-<p>We get a more efficient variant of this when we integrate
-replacements in collapser. This more or less renders the previous
-private code redundant. The following code is equivalent but the
-first snippet uses the relocated dollars.</p>
-
-<typing>
-[󰀤x󰀤] [$x$]
-</typing>
---ldx]]--
-
--- using the tree-lpeg-mapper would be nice but we also need to deal with end-of-string
--- cases: "\"\i" and don't want "\relax" to be seen as \r e lax" (for which we need to mess
--- with spaces
-
 local accentmapping = allocate {
     ['"'] = { [""] = "¨",
         A = "Ä", a = "ä",
@@ -452,10 +434,8 @@ implement { -- a waste of scanner but consistent
     actions = texcharacters.defineaccents
 }
 
---[[ldx--
-<p>Instead of using a <l n='tex'/> file to define the named glyphs, we
-use the table. After all, we have this information available anyway.</p>
---ldx]]--
+-- Instead of using a TeX file to define the named glyphs, we use the table. After
+-- all, we have this information available anyway.
 
 function commands.makeactive(n,name) -- not used
     contextsprint(ctxcatcodes,format("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name))
@@ -747,9 +727,7 @@ function characters.setactivecatcodes(cct)
     tex.catcodetable = saved
 end
 
---[[ldx--
-<p>Setting the lccodes is also done in a loop over the data table.</p>
---ldx]]--
+-- -- Setting the lccodes is also done in a loop over the data table.
 
 -- function characters.setcodes() -- we could loop over csletters
 --     if trace_defining then
diff --git a/tex/context/base/mkiv/char-utf.lua b/tex/context/base/mkiv/char-utf.lua
index e230370b5..f9cba36ca 100644
--- a/tex/context/base/mkiv/char-utf.lua
+++ b/tex/context/base/mkiv/char-utf.lua
@@ -6,21 +6,19 @@ if not modules then modules = { } end modules ['char-utf'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>When a sequence of <l n='utf'/> characters enters the application, it may be
-neccessary to collapse subsequences into their composed variant.</p>
-
-<p>This module implements methods for collapsing and expanding <l n='utf'/>
-sequences. We also provide means to deal with characters that are special to
-<l n='tex'/> as well as 8-bit characters that need to end up in special kinds
-of output (for instance <l n='pdf'/>).</p>
-
-<p>We implement these manipulations as filters. One can run multiple filters
-over a string.</p>
-
-<p>The old code has now been moved to char-obs.lua which we keep around for
-educational purposes.</p>
---ldx]]--
+-- When a sequence of UTF characters enters the application, it may be
+-- necessary to collapse subsequences into their composed variant.
+--
+-- This module implements methods for collapsing and expanding UTF sequences. We
+-- also provide means to deal with characters that are special to TeX as well as
+-- 8-bit characters that need to end up in special kinds of output (for instance
+-- PDF).
+--
+-- We implement these manipulations as filters. One can run multiple filters over a
+-- string.
+--
+-- The old code has now been moved to char-obs.lua which we keep around for
+-- educational purposes.
 
 local next, type = next, type
 local gsub, find = string.gsub, string.find
@@ -55,10 +53,8 @@ characters.filters.utf      = utffilters
 
 local data                  = characters.data
 
---[[ldx--
-<p>It only makes sense to collapse at runtime, since we don't expect source code
-to depend on collapsing.</p>
---ldx]]--
+-- It only makes sense to collapse at runtime, since we don't expect source code to
+-- depend on collapsing.
 
 -- for the moment, will be entries in char-def.lua .. this is just a subset that for
 -- typographic (font) reasons we want to have split ... if we decompose all, we get
diff --git a/tex/context/base/mkiv/chem-ini.lua b/tex/context/base/mkiv/chem-ini.lua
index f7d10ffa2..06049807a 100644
--- a/tex/context/base/mkiv/chem-ini.lua
+++ b/tex/context/base/mkiv/chem-ini.lua
@@ -19,11 +19,9 @@ local cpatterns = patterns.context
 chemistry       = chemistry or { }
 local chemistry = chemistry
 
---[[
-<p>The next code started out as adaptation of code from Wolfgang Schuster as
-posted on the mailing list. The current version supports nested braces and
-unbraced integers as scripts.</p>
-]]--
+-- The next code started out as an adaptation of code from Wolfgang Schuster as posted
+-- on the mailing list. The current version supports nested braces and unbraced
+-- integers as scripts.
 
 local moleculeparser     = cpatterns.scripted
 chemistry.moleculeparser = moleculeparser
diff --git a/tex/context/base/mkiv/cont-new.mkiv b/tex/context/base/mkiv/cont-new.mkiv
index 684cf24c8..f0fd15f3b 100644
--- a/tex/context/base/mkiv/cont-new.mkiv
+++ b/tex/context/base/mkiv/cont-new.mkiv
@@ -13,7 +13,7 @@
 
 % \normalend % uncomment this to get the real base runtime
 
-\newcontextversion{2023.03.20 15:42}
+\newcontextversion{2023.04.01 09:28}
 
 %D This file is loaded at runtime, thereby providing an excellent place for hacks,
 %D patches, extensions and new features. There can be local overloads in cont-loc
diff --git a/tex/context/base/mkiv/context.mkiv b/tex/context/base/mkiv/context.mkiv
index 9b89b9bdf..c2735fa5e 100644
--- a/tex/context/base/mkiv/context.mkiv
+++ b/tex/context/base/mkiv/context.mkiv
@@ -49,7 +49,7 @@
 %D {YYYY.MM.DD HH:MM} format.
 
 \edef\contextformat {\jobname}
-\edef\contextversion{2023.03.20 15:42}
+\edef\contextversion{2023.04.01 09:28}
 
 %D Kind of special:
 
diff --git a/tex/context/base/mkiv/core-con.lua b/tex/context/base/mkiv/core-con.lua
index f57eb6ef8..d3e108a7a 100644
--- a/tex/context/base/mkiv/core-con.lua
+++ b/tex/context/base/mkiv/core-con.lua
@@ -8,13 +8,9 @@ if not modules then modules = { } end modules ['core-con'] = {
 
 -- todo: split into lang-con.lua and core-con.lua
 
---[[ldx--
-<p>This module implements a bunch of conversions. Some are more
-efficient than their <l n='tex'/> counterpart, some are even
-slower but look nicer this way.</p>
-
-<p>Some code may move to a module in the language namespace.</p>
---ldx]]--
+-- This module implements a bunch of conversions. Some are more efficient than their
+-- TeX counterpart, some are even slower but look nicer this way. Some code may move
+-- to a module in the language namespace.
 
 local floor = math.floor
 local osdate, ostime, ostimezone = os.date, os.time, os.timezone
diff --git a/tex/context/base/mkiv/core-dat.lua b/tex/context/base/mkiv/core-dat.lua
index b58a801d9..89521b185 100644
--- a/tex/context/base/mkiv/core-dat.lua
+++ b/tex/context/base/mkiv/core-dat.lua
@@ -6,10 +6,8 @@ if not modules then modules = { } end modules ['core-dat'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>This module provides a (multipass) container for arbitrary data. It
-replaces the twopass data mechanism.</p>
---ldx]]--
+-- This module provides a (multipass) container for arbitrary data. It replaces the
+-- twopass data mechanism.
 
 local tonumber, tostring, type = tonumber, tostring, type
 
@@ -231,9 +229,7 @@ implement {
     actions   = datasetvariablefromjob
 }
 
---[[ldx--
-<p>We also provide an efficient variant for page states.</p>
---ldx]]--
+-- We also provide an efficient variant for page states.
 
 local collected = allocate()
 local tobesaved = allocate()
@@ -250,13 +246,9 @@ local function initializer()
     tobesaved = pagestates.tobesaved
 end
 
-job.register('job.pagestates.collected', tobesaved, initializer, nil)
+job.register("job.pagestates.collected", tobesaved, initializer, nil)
 
-table.setmetatableindex(tobesaved, function(t,k)
-    local v = { }
-    t[k] = v
-    return v
-end)
+table.setmetatableindex(tobesaved, "table")
 
 local function setstate(settings)
     local name = settings.name
diff --git a/tex/context/base/mkiv/core-two.lua b/tex/context/base/mkiv/core-two.lua
index 3ab2112b9..da37a6170 100644
--- a/tex/context/base/mkiv/core-two.lua
+++ b/tex/context/base/mkiv/core-two.lua
@@ -6,15 +6,14 @@ if not modules then modules = { } end modules ['core-two'] = {
     license   = "see context related readme files"
 }
 
+-- This is actually one of the oldest MkIV files and basically a port of MkII but
+-- the old usage has long been phased out. Also, the public part is now handled by
+-- datasets which makes this a more private store.
+
 local next = next
 local remove, concat = table.remove, table.concat
 local allocate = utilities.storage.allocate
 
---[[ldx--
-<p>We save multi-pass information in the main utility table. This is a
-bit of a mess because we support old and new methods.</p>
---ldx]]--
-
 local collected = allocate()
 local tobesaved = allocate()
 
diff --git a/tex/context/base/mkiv/core-uti.lua b/tex/context/base/mkiv/core-uti.lua
index 887ef9a75..e8a28c187 100644
--- a/tex/context/base/mkiv/core-uti.lua
+++ b/tex/context/base/mkiv/core-uti.lua
@@ -6,16 +6,13 @@ if not modules then modules = { } end modules ['core-uti'] = {
     license   = "see context related readme files"
 }
 
--- todo: keep track of changes here (hm, track access, and only true when
--- accessed and changed)
-
---[[ldx--
-<p>A utility file has always been part of <l n='context'/> and with
-the move to <l n='luatex'/> we also moved a lot of multi-pass info
-to a <l n='lua'/> table. Instead of loading a <l n='tex'/> based
-utility file under different setups, we now load a table once. This
-saves much runtime but at the cost of more memory usage.</p>
---ldx]]--
+-- A utility file has always been part of ConTeXt and with the move to LuaTeX we
+-- also moved a lot of multi-pass info to a Lua table. Instead of loading a TeX
+-- based utility file under different setups, we now load a table once. This saves
+-- much runtime but at the cost of more memory usage.
+--
+-- In the meantime the overhead is a bit more due to the amount of data being saved
+-- and more aggressive compacting.
 
 local math = math
 local format, match = string.format, string.match
@@ -46,14 +43,9 @@ local job            = job
 job.version          = 1.32
 job.packversion      = 1.02
 
--- some day we will implement loading of other jobs and then we need
--- job.jobs
-
---[[ldx--
-<p>Variables are saved using in the previously defined table and passed
-onto <l n='tex'/> using the following method. Of course one can also
-directly access the variable using a <l n='lua'/> call.</p>
---ldx]]--
+-- Variables are saved in the previously defined table and passed on to TeX
+-- using the following method. Of course one can also directly access the variable
+-- using a Lua call.
 
 local savelist, comment = { }, { }
 
diff --git a/tex/context/base/mkiv/data-con.lua b/tex/context/base/mkiv/data-con.lua
index 51e0ce856..d7d3c7d46 100644
--- a/tex/context/base/mkiv/data-con.lua
+++ b/tex/context/base/mkiv/data-con.lua
@@ -13,19 +13,17 @@ local trace_cache      = false  trackers.register("resolvers.cache",      functi
 local trace_containers = false  trackers.register("resolvers.containers", function(v) trace_containers = v end)
 local trace_storage    = false  trackers.register("resolvers.storage",    function(v) trace_storage    = v end)
 
---[[ldx--
-<p>Once we found ourselves defining similar cache constructs several times,
-containers were introduced. Containers are used to collect tables in memory and
-reuse them when possible based on (unique) hashes (to be provided by the calling
-function).</p>
-
-<p>Caching to disk is disabled by default. Version numbers are stored in the
-saved table which makes it possible to change the table structures without
-bothering about the disk cache.</p>
-
-<p>Examples of usage can be found in the font related code. This code is not
-ideal but we need it in generic too so we compromise.</p>
---ldx]]--
+-- Once we found ourselves defining similar cache constructs several times,
+-- containers were introduced. Containers are used to collect tables in memory and
+-- reuse them when possible based on (unique) hashes (to be provided by the calling
+-- function).
+--
+-- Caching to disk is disabled by default. Version numbers are stored in the saved
+-- table which makes it possible to change the table structures without bothering
+-- about the disk cache.
+--
+-- Examples of usage can be found in the font related code. This code is not ideal
+-- but we need it in generic too so we compromise.
 
 containers              = containers or { }
 local containers        = containers
diff --git a/tex/context/base/mkiv/data-res.lua b/tex/context/base/mkiv/data-res.lua
index 8afc09b97..11e67f785 100644
--- a/tex/context/base/mkiv/data-res.lua
+++ b/tex/context/base/mkiv/data-res.lua
@@ -135,16 +135,35 @@ local criticalvars = {
 -- we also report weird ones, with weird being: (1) duplicate /texmf or (2) no /web2c in
 -- the names.
 
+-- if environment.default_texmfcnf then
+--     resolvers.luacnfspec = "home:texmf/web2c;" .. environment.default_texmfcnf -- texlive + home: for taco etc
+-- else
+--     resolvers.luacnfspec = concat ( {
+--         "home:texmf/web2c",
+--         "selfautoparent:/texmf-local/web2c",
+--         "selfautoparent:/texmf-context/web2c",
+--         "selfautoparent:/texmf-dist/web2c",
+--         "selfautoparent:/texmf/web2c",
+--     }, ";")
+-- end
+
 if environment.default_texmfcnf then
+    -- this will go away (but then also no more checking in mtxrun.lua itself)
     resolvers.luacnfspec = "home:texmf/web2c;" .. environment.default_texmfcnf -- texlive + home: for taco etc
 else
-    resolvers.luacnfspec = concat ( {
-        "home:texmf/web2c",
-        "selfautoparent:/texmf-local/web2c",
-        "selfautoparent:/texmf-context/web2c",
-        "selfautoparent:/texmf-dist/web2c",
-        "selfautoparent:/texmf/web2c",
-    }, ";")
+    local texroot = environment.texroot
+    resolvers.luacnfspec = "home:texmf/web2c;selfautoparent:/texmf-local/web2c;selfautoparent:/texmf-context/web2c;selfautoparent:/texmf/web2c"
+    if texroot and isdir(texroot .. "/texmf-context") then
+        -- we're okay and run the lean and mean installation
+    elseif texroot and isdir(texroot .. "/texmf-dist") then
+        -- we're in texlive where texmf-dist is leading
+        resolvers.luacnfspec = "home:texmf/web2c;selfautoparent:/texmf-local/web2c;selfautoparent:/texmf-dist/web2c;selfautoparent:/texmf/web2c"
+    elseif ostype ~= "windows" and isdir("/etc/texmf/web2c") then
+        -- we have some linux distribution that does it its own way
+        resolvers.luacnfspec = "home:texmf/web2c;/etc/texmf/web2c;selfautodir:/share/texmf/web2c"
+    else
+        -- we stick to the reference specification
+    end
 end
 
 local unset_variable = "unset"
diff --git a/tex/context/base/mkiv/data-tar.lua b/tex/context/base/mkiv/data-tar.lua
index 45de749b6..b2416330f 100644
--- a/tex/context/base/mkiv/data-tar.lua
+++ b/tex/context/base/mkiv/data-tar.lua
@@ -12,14 +12,10 @@ local trace_locating = false  trackers.register("resolvers.locating", function(v
 
 local report_tar = logs.reporter("resolvers","tar")
 
---[[ldx--
-<p>We use a url syntax for accessing the tar file itself and file in it:</p>
-
-<typing>
-tar:///oeps.tar?name=bla/bla.tex
-tar:///oeps.tar?tree=tex/texmf-local
-</typing>
---ldx]]--
+-- We use a url syntax for accessing the tar file itself and file in it:
+--
+--   tar:///oeps.tar?name=bla/bla.tex
+--   tar:///oeps.tar?tree=tex/texmf-local
 
 local resolvers    = resolvers
 local findfile     = resolvers.findfile
diff --git a/tex/context/base/mkiv/data-tmp.lua b/tex/context/base/mkiv/data-tmp.lua
index 1948f1ea5..21e0d1f4f 100644
--- a/tex/context/base/mkiv/data-tmp.lua
+++ b/tex/context/base/mkiv/data-tmp.lua
@@ -6,20 +6,15 @@ if not modules then modules = { } end modules ['data-tmp'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>This module deals with caching data. It sets up the paths and implements
-loaders and savers for tables. Best is to set the following variable. When not
-set, the usual paths will be checked. Personally I prefer the (users) temporary
-path.</p>
-
-</code>
-TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
-</code>
-
-<p>Currently we do no locking when we write files. This is no real problem
-because most caching involves fonts and the chance of them being written at the
-same time is small. We also need to extend luatools with a recache feature.</p>
---ldx]]--
+-- This module deals with caching data. It sets up the paths and implements loaders
+-- and savers for tables. Best is to set the following variable. When not set, the
+-- usual paths will be checked. Personally I prefer the (users) temporary path.
+--
+--   TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
+--
+-- Currently we do no locking when we write files. This is no real problem because
+-- most caching involves fonts and the chance of them being written at the same time
+-- is small. We also need to extend luatools with a recache feature.
 
 local next, type = next, type
 local pcall, loadfile, collectgarbage = pcall, loadfile, collectgarbage
diff --git a/tex/context/base/mkiv/data-zip.lua b/tex/context/base/mkiv/data-zip.lua
index 1a9310f17..40f38c855 100644
--- a/tex/context/base/mkiv/data-zip.lua
+++ b/tex/context/base/mkiv/data-zip.lua
@@ -14,17 +14,13 @@ local trace_locating = false  trackers.register("resolvers.locating", function(v
 
 local report_zip = logs.reporter("resolvers","zip")
 
---[[ldx--
-<p>We use a url syntax for accessing the zip file itself and file in it:</p>
-
-<typing>
-zip:///oeps.zip?name=bla/bla.tex
-zip:///oeps.zip?tree=tex/texmf-local
-zip:///texmf.zip?tree=/tex/texmf
-zip:///texmf.zip?tree=/tex/texmf-local
-zip:///texmf-mine.zip?tree=/tex/texmf-projects
-</typing>
---ldx]]--
+-- We use a url syntax for accessing the zip file itself and file in it:
+--
+--   zip:///oeps.zip?name=bla/bla.tex
+--   zip:///oeps.zip?tree=tex/texmf-local
+--   zip:///texmf.zip?tree=/tex/texmf
+--   zip:///texmf.zip?tree=/tex/texmf-local
+--   zip:///texmf-mine.zip?tree=/tex/texmf-projects
 
 local resolvers    = resolvers
 local findfile     = resolvers.findfile
diff --git a/tex/context/base/mkiv/file-ini.lua b/tex/context/base/mkiv/file-ini.lua
index 2a0271a9d..01bedeeeb 100644
--- a/tex/context/base/mkiv/file-ini.lua
+++ b/tex/context/base/mkiv/file-ini.lua
@@ -6,11 +6,8 @@ if not modules then modules = { } end modules ['file-ini'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>It's more convenient to manipulate filenames (paths) in <l n='lua'/> than in
-<l n='tex'/>. These methods have counterparts at the <l n='tex'/> end.</p>
---ldx]]--
-
+-- It's more convenient to manipulate filenames (paths) in Lua than in TeX. These
+-- methods have counterparts at the TeX end.
 
 local implement         = interfaces.implement
 local setmacro          = interfaces.setmacro
diff --git a/tex/context/base/mkiv/file-mod.lua b/tex/context/base/mkiv/file-mod.lua
index d392887ec..ac9ad938a 100644
--- a/tex/context/base/mkiv/file-mod.lua
+++ b/tex/context/base/mkiv/file-mod.lua
@@ -6,17 +6,11 @@ if not modules then modules = { } end modules ['file-mod'] = {
     license   = "see context related readme files"
 }
 
--- This module will be redone! For instance, the prefixes will move to data-*
--- as they arr sort of generic along with home:// etc/.
-
--- context is not defined yet! todo! (we need to load tupp-fil after cld)
--- todo: move startreadingfile to lua and push regime there
-
---[[ldx--
-<p>It's more convenient to manipulate filenames (paths) in
-<l n='lua'/> than in <l n='tex'/>. These methods have counterparts
-at the <l n='tex'/> side.</p>
---ldx]]--
+-- This module will be redone! For instance, the prefixes will move to data-* as
+-- they are sort of generic along with home:// etc.
+--
+-- It is more convenient to manipulate filenames (paths) in Lua than in TeX. The
+-- methods below have counterparts at the TeX end.
 
 local format, find, concat, tonumber = string.format, string.find, table.concat, tonumber
 local sortedhash = table.sortedhash
diff --git a/tex/context/base/mkiv/font-afk.lua b/tex/context/base/mkiv/font-afk.lua
index 761016d34..250c17e77 100644
--- a/tex/context/base/mkiv/font-afk.lua
+++ b/tex/context/base/mkiv/font-afk.lua
@@ -7,11 +7,9 @@ if not modules then modules = { } end modules ['font-afk'] = {
     dataonly  = true,
 }
 
---[[ldx--
-<p>For ligatures, only characters with a code smaller than 128 make sense,
-anything larger is encoding dependent. An interesting complication is that a
-character can be in an encoding twice but is hashed once.</p>
---ldx]]--
+-- For ligatures, only characters with a code smaller than 128 make sense, anything
+-- larger is encoding dependent. An interesting complication is that a character can
+-- be in an encoding twice but is hashed once.
 
 local allocate = utilities.storage.allocate
 
diff --git a/tex/context/base/mkiv/font-con.lua b/tex/context/base/mkiv/font-con.lua
index 066ea33ed..77708ee08 100644
--- a/tex/context/base/mkiv/font-con.lua
+++ b/tex/context/base/mkiv/font-con.lua
@@ -22,11 +22,9 @@ local trace_scaling   = false  trackers.register("fonts.scaling",   function(v)
 
 local report_defining = logs.reporter("fonts","defining")
 
--- watch out: no negative depths and negative eights permitted in regular fonts
-
---[[ldx--
-<p>Here we only implement a few helper functions.</p>
---ldx]]--
+-- Watch out: no negative depths or negative heights are permitted in regular
+-- fonts. Also, the code in LMTX is a bit different. Here we only implement a
+-- few helper functions.
 
 local fonts                  = fonts
 local constructors           = fonts.constructors or { }
@@ -59,11 +57,9 @@ constructors.designsizes    = designsizes
 local loadedfonts           = allocate()
 constructors.loadedfonts    = loadedfonts
 
---[[ldx--
-<p>We need to normalize the scale factor (in scaled points). This has to
-do with the fact that <l n='tex'/> uses a negative multiple of 1000 as
-a signal for a font scaled based on the design size.</p>
---ldx]]--
+-- We need to normalize the scale factor (in scaled points). This has to do with the
+-- fact that TeX uses a negative multiple of 1000 as a signal for a font scaled
+-- based on the design size.
 
 local factors = {
     pt = 65536.0,
@@ -118,22 +114,18 @@ function constructors.getmathparameter(tfmdata,name)
     end
 end
 
---[[ldx--
-<p>Beware, the boundingbox is passed as reference so we may not overwrite it
-in the process; numbers are of course copies. Here 65536 equals 1pt. (Due to
-excessive memory usage in CJK fonts, we no longer pass the boundingbox.)</p>
---ldx]]--
-
--- The scaler is only used for otf and afm and virtual fonts. If a virtual font has italic
--- correction make sure to set the hasitalics flag. Some more flags will be added in the
--- future.
-
---[[ldx--
-<p>The reason why the scaler was originally split, is that for a while we experimented
-with a helper function. However, in practice the <l n='api'/> calls are too slow to
-make this profitable and the <l n='lua'/> based variant was just faster. A days
-wasted day but an experience richer.</p>
---ldx]]--
+-- Beware, the boundingbox is passed as reference so we may not overwrite it in the
+-- process; numbers are of course copies. Here 65536 equals 1pt. (Due to excessive
+-- memory usage in CJK fonts, we no longer pass the boundingbox.)
+--
+-- The scaler is only used for OTF and AFM and virtual fonts. If a virtual font has
+-- italic correction make sure to set the hasitalics flag. Some more flags will be
+-- added in the future.
+--
+-- The reason why the scaler was originally split is that for a while we
+-- experimented with a helper function. However, in practice the API calls are too
+-- slow to make this profitable and the Lua based variant was just faster. A day
+-- wasted but an experience richer.
 
 function constructors.cleanuptable(tfmdata)
     -- This no longer makes sense because the addition of font.getcopy and its
@@ -1093,9 +1085,7 @@ function constructors.finalize(tfmdata)
     return tfmdata
 end
 
---[[ldx--
-<p>A unique hash value is generated by:</p>
---ldx]]--
+-- A unique hash value is generated by:
 
 local hashmethods        = { }
 constructors.hashmethods = hashmethods
@@ -1154,13 +1144,11 @@ hashmethods.normal = function(list)
     end
 end
 
---[[ldx--
-<p>In principle we can share tfm tables when we are in need for a font, but then
-we need to define a font switch as an id/attr switch which is no fun, so in that
-case users can best use dynamic features ... so, we will not use that speedup. Okay,
-when we get rid of base mode we can optimize even further by sharing, but then we
-loose our testcases for <l n='luatex'/>.</p>
---ldx]]--
+-- In principle we can share tfm tables when we are in need of a font, but then we
+-- need to define a font switch as an id/attr switch which is no fun, so in that
+-- case users can best use dynamic features ... so, we will not use that speedup.
+-- Okay, when we get rid of base mode we can optimize even further by sharing, but
+-- then we lose our testcases for LuaTeX.
 
 function constructors.hashinstance(specification,force)
     local hash      = specification.hash
@@ -1516,10 +1504,7 @@ do
 
 end
 
---[[ldx--
-<p>We need to check for default features. For this we provide
-a helper function.</p>
---ldx]]--
+-- We need to check for default features. For this we provide a helper function.
 
 function constructors.checkedfeatures(what,features)
     local defaults = handlers[what].features.defaults
diff --git a/tex/context/base/mkiv/font-ctx.lua b/tex/context/base/mkiv/font-ctx.lua
index 2c56b5613..f9ad475ac 100644
--- a/tex/context/base/mkiv/font-ctx.lua
+++ b/tex/context/base/mkiv/font-ctx.lua
@@ -528,26 +528,19 @@ do
 
 end
 
---[[ldx--
-<p>So far we haven't really dealt with features (or whatever we want
-to pass along with the font definition. We distinguish the following
-situations:</p>
-situations:</p>
-
-<code>
-name:xetex like specs
-name@virtual font spec
-name*context specification
-</code>
---ldx]]--
-
--- currently fonts are scaled while constructing the font, so we
--- have to do scaling of commands in the vf at that point using e.g.
--- "local scale = g.parameters.factor or 1" after all, we need to
--- work with copies anyway and scaling needs to be done at some point;
--- however, when virtual tricks are used as feature (makes more
--- sense) we scale the commands in fonts.constructors.scale (and set the
--- factor there)
+-- So far we haven't really dealt with features (or whatever we want to pass along
+-- with the font definition). We distinguish the following situations:
+--
+--   name:xetex like specs
+--   name@virtual font spec
+--   name*context specification
+--
+-- Currently fonts are scaled while constructing the font, so we have to do scaling
+-- of commands in the vf at that point using e.g. "local scale = g.parameters.factor
+-- or 1" after all, we need to work with copies anyway and scaling needs to be done
+-- at some point; however, when virtual tricks are used as feature (makes more
+-- sense) we scale the commands in fonts.constructors.scale (and set the factor
+-- there).
 
 local loadfont = definers.loadfont
 
@@ -2385,10 +2378,8 @@ dimenfactors.em   = nil
 dimenfactors["%"] = nil
 dimenfactors.pct  = nil
 
---[[ldx--
-<p>Before a font is passed to <l n='tex'/> we scale it. Here we also need
-to scale virtual characters.</p>
---ldx]]--
+-- Before a font is passed to TeX we scale it. Here we also need to scale virtual
+-- characters.
 
 do
 
diff --git a/tex/context/base/mkiv/font-def.lua b/tex/context/base/mkiv/font-def.lua
index b752b3258..01bced6d1 100644
--- a/tex/context/base/mkiv/font-def.lua
+++ b/tex/context/base/mkiv/font-def.lua
@@ -24,10 +24,9 @@ trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading
 
 local report_defining = logs.reporter("fonts","defining")
 
---[[ldx--
-<p>Here we deal with defining fonts. We do so by intercepting the
-default loader that only handles <l n='tfm'/>.</p>
---ldx]]--
+-- Here we deal with defining fonts. We do so by intercepting the default loader
+-- that only handles TFM files. Although we started out that way, in the meantime
+-- we can hardly speak of TFM any more.
 
 local fonts         = fonts
 local fontdata      = fonts.hashes.identifiers
@@ -53,25 +52,18 @@ local designsizes   = constructors.designsizes
 
 local resolvefile   = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
 
---[[ldx--
-<p>We hardly gain anything when we cache the final (pre scaled)
-<l n='tfm'/> table. But it can be handy for debugging, so we no
-longer carry this code along. Also, we now have quite some reference
-to other tables so we would end up with lots of catches.</p>
---ldx]]--
-
---[[ldx--
-<p>We can prefix a font specification by <type>name:</type> or
-<type>file:</type>. The first case will result in a lookup in the
-synonym table.</p>
-
-<typing>
-[ name: | file: ] identifier [ separator [ specification ] ]
-</typing>
-
-<p>The following function split the font specification into components
-and prepares a table that will move along as we proceed.</p>
---ldx]]--
+-- We hardly gain anything when we cache the final (pre scaled) TFM table. But it
+-- can be handy for debugging, so we no longer carry this code along. Also, we now
+-- have quite some references to other tables so we would end up with lots of
+-- catches.
+--
+-- We can prefix a font specification by "name:" or "file:". The first case will
+-- result in a lookup in the synonym table.
+--
+--   [ name: | file: ] identifier [ separator [ specification ] ]
+--
+-- The following function splits the font specification into components and prepares
+-- a table that will move along as we proceed.
 
 -- beware, we discard additional specs
 --
@@ -164,9 +156,7 @@ if context then
 
 end
 
---[[ldx--
-<p>We can resolve the filename using the next function:</p>
---ldx]]--
+-- We can resolve the filename using the next function:
 
 definers.resolvers = definers.resolvers or { }
 local resolvers    = definers.resolvers
@@ -258,23 +248,17 @@ function definers.resolve(specification)
     return specification
 end
 
---[[ldx--
-<p>The main read function either uses a forced reader (as determined by
-a lookup) or tries to resolve the name using the list of readers.</p>
-
-<p>We need to cache when possible. We do cache raw tfm data (from <l
-n='tfm'/>, <l n='afm'/> or <l n='otf'/>). After that we can cache based
-on specificstion (name) and size, that is, <l n='tex'/> only needs a number
-for an already loaded fonts. However, it may make sense to cache fonts
-before they're scaled as well (store <l n='tfm'/>'s with applied methods
-and features). However, there may be a relation between the size and
-features (esp in virtual fonts) so let's not do that now.</p>
-
-<p>Watch out, here we do load a font, but we don't prepare the
-specification yet.</p>
---ldx]]--
-
--- very experimental:
+-- The main read function either uses a forced reader (as determined by a lookup) or
+-- tries to resolve the name using the list of readers.
+--
+-- We need to cache when possible. We do cache raw tfm data (from TFM, AFM or OTF).
+-- After that we can cache based on specification (name) and size, that is, TeX only
+-- needs a number for an already loaded font. However, it may make sense to cache
+-- fonts before they're scaled as well (store TFM's with applied methods and
+-- features). However, there may be a relation between the size and features (esp in
+-- virtual fonts) so let's not do that now.
+--
+-- Watch out, here we do load a font, but we don't prepare the specification yet.
 
 function definers.applypostprocessors(tfmdata)
     local postprocessors = tfmdata.postprocessors
@@ -439,17 +423,13 @@ function constructors.readanddefine(name,size) -- no id -- maybe a dummy first
     return fontdata[id], id
 end
 
---[[ldx--
-<p>So far the specifiers. Now comes the real definer. Here we cache
-based on id's. Here we also intercept the virtual font handler. Since
-it evolved stepwise I may rewrite this bit (combine code).</p>
-
-In the previously defined reader (the one resulting in a <l n='tfm'/>
-table) we cached the (scaled) instances. Here we cache them again, but
-this time based on id. We could combine this in one cache but this does
-not gain much. By the way, passing id's back to in the callback was
-introduced later in the development.</p>
---ldx]]--
+-- So far the specifiers. Now comes the real definer. Here we cache based on id's.
+-- Here we also intercept the virtual font handler.
+--
+-- In the previously defined reader (the one resulting in a TFM table) we cached the
+-- (scaled) instances. Here we cache them again, but this time based on id. We could
+-- combine this in one cache but this does not gain much. By the way, passing id's
+-- back in the callback was introduced later in the development.
 
 function definers.registered(hash)
     local id = internalized[hash]
@@ -522,9 +502,7 @@ function font.getfont(id)
     return fontdata[id] -- otherwise issues
 end
 
---[[ldx--
-<p>We overload the <l n='tfm'/> reader.</p>
---ldx]]--
+-- We overload the TFM reader.
 
 if not context then
     callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)")
diff --git a/tex/context/base/mkiv/font-enc.lua b/tex/context/base/mkiv/font-enc.lua
index f2f0595dd..732bc8907 100644
--- a/tex/context/base/mkiv/font-enc.lua
+++ b/tex/context/base/mkiv/font-enc.lua
@@ -16,10 +16,8 @@ local setmetatableindex = table.setmetatableindex
 local allocate          = utilities.storage.allocate
 local mark              = utilities.storage.mark
 
---[[ldx--
-<p>Because encodings are going to disappear, we don't bother defining
-them in tables. But we may do so some day, for consistency.</p>
---ldx]]--
+-- Because encodings are going to disappear, we don't bother defining them in
+-- tables. But we may do so some day, for consistency.
 
 local report_encoding = logs.reporter("fonts","encoding")
 
@@ -43,24 +41,19 @@ function encodings.is_known(encoding)
     return containers.is_valid(encodings.cache,encoding)
 end
 
---[[ldx--
-<p>An encoding file looks like this:</p>
-
-<typing>
-/TeXnANSIEncoding [
-/.notdef
-/Euro
-...
-/ydieresis
-] def
-</typing>
-
-<p>Beware! The generic encoding files don't always apply to the ones that
-ship with fonts. This has to do with the fact that names follow (slightly)
-different standards. However, the fonts where this applies to (for instance
-Latin Modern or <l n='tex'> Gyre) come in OpenType variants too, so these
-will be used.</p>
---ldx]]--
+-- An encoding file looks like this:
+--
+--   /TeXnANSIEncoding [
+--   /.notdef
+--   /Euro
+--   ...
+--   /ydieresis
+--   ] def
+--
+-- Beware! The generic encoding files don't always apply to the ones that ship with
+-- fonts. This has to do with the fact that names follow (slightly) different
+-- standards. However, the fonts this applies to (for instance Latin Modern or
+-- TeXGyre) come in OpenType variants too, so these will be used.
 
 local enccodes = characters.enccodes or { }
 
@@ -120,10 +113,7 @@ function encodings.load(filename)
     return containers.write(encodings.cache, name, data)
 end
 
---[[ldx--
-<p>There is no unicode encoding but for practical purposes we define
-one.</p>
---ldx]]--
+-- There is no unicode encoding but for practical purposes we define one.
 
 -- maybe make this a function:
 
diff --git a/tex/context/base/mkiv/font-fbk.lua b/tex/context/base/mkiv/font-fbk.lua
index b6c9a430d..da04b50a8 100644
--- a/tex/context/base/mkiv/font-fbk.lua
+++ b/tex/context/base/mkiv/font-fbk.lua
@@ -10,10 +10,6 @@ local cos, tan, rad, format = math.cos, math.tan, math.rad, string.format
 local utfbyte, utfchar = utf.byte, utf.char
 local next = next
 
---[[ldx--
-<p>This is very experimental code!</p>
---ldx]]--
-
 local trace_visualize    = false  trackers.register("fonts.composing.visualize", function(v) trace_visualize = v end)
 local trace_define       = false  trackers.register("fonts.composing.define",    function(v) trace_define    = v end)
 
diff --git a/tex/context/base/mkiv/font-imp-tex.lua b/tex/context/base/mkiv/font-imp-tex.lua
index b4b9a7b69..87a1ae3aa 100644
--- a/tex/context/base/mkiv/font-imp-tex.lua
+++ b/tex/context/base/mkiv/font-imp-tex.lua
@@ -13,36 +13,31 @@ local otf                = fonts.handlers.otf
 local registerotffeature = otf.features.register
 local addotffeature      = otf.addfeature
 
--- tlig (we need numbers for some fonts so ...)
+-- We provide a few old and obsolete compatibility input features. We need numbers
+-- for some fonts so no names here. Do we also need them for afm fonts?
 
-local specification = {
+local tlig = {
     type     = "ligature",
     order    = { "tlig" },
     prepend  = true,
     data     = {
-     -- endash        = "hyphen hyphen",
-     -- emdash        = "hyphen hyphen hyphen",
-        [0x2013]      = { 0x002D, 0x002D },
-        [0x2014]      = { 0x002D, 0x002D, 0x002D },
-     -- quotedblleft  = "quoteleft quoteleft",
-     -- quotedblright = "quoteright quoteright",
-     -- quotedblleft  = "grave grave",
-     -- quotedblright = "quotesingle quotesingle",
-     -- quotedblbase  = "comma comma",
+        [0x2013] = { 0x002D, 0x002D },
+        [0x2014] = { 0x002D, 0x002D, 0x002D },
     },
 }
 
-addotffeature("tlig",specification)
-
-registerotffeature {
-    -- this makes it a known feature (in tables)
-    name        = "tlig",
-    description = "tex ligatures",
+local tquo = {
+    type     = "ligature",
+    order    = { "tquo" },
+    prepend  = true,
+    data     = {
+        [0x201C] = { 0x0060, 0x0060 },
+        [0x201D] = { 0x0027, 0x0027 },
+        [0x201E] = { 0x002C, 0x002C },
+    },
 }
 
--- trep
-
-local specification = {
+local trep = {
     type      = "substitution",
     order     = { "trep" },
     prepend   = true,
@@ -53,13 +48,13 @@ local specification = {
     },
 }
 
-addotffeature("trep",specification)
+addotffeature("trep",trep) -- last
+addotffeature("tlig",tlig)
+addotffeature("tquo",tquo) -- first
 
-registerotffeature {
-    -- this makes it a known feature (in tables)
-    name        = "trep",
-    description = "tex replacements",
-}
+registerotffeature { name = "tlig", description = "tex ligatures" }
+registerotffeature { name = "tquo", description = "tex quotes" }
+registerotffeature { name = "trep", description = "tex replacements" }
 
 -- some day this will be moved to font-imp-scripts.lua
 
diff --git a/tex/context/base/mkiv/font-ini.lua b/tex/context/base/mkiv/font-ini.lua
index 8bab6d902..201cc69f4 100644
--- a/tex/context/base/mkiv/font-ini.lua
+++ b/tex/context/base/mkiv/font-ini.lua
@@ -6,9 +6,7 @@ if not modules then modules = { } end modules ['font-ini'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Not much is happening here.</p>
---ldx]]--
+-- Not much is happening here.
 
 local allocate    = utilities.storage.allocate
 local sortedhash  = table.sortedhash
diff --git a/tex/context/base/mkiv/font-log.lua b/tex/context/base/mkiv/font-log.lua
index 092b5a62e..96b5864fd 100644
--- a/tex/context/base/mkiv/font-log.lua
+++ b/tex/context/base/mkiv/font-log.lua
@@ -19,12 +19,9 @@ fonts.loggers     = loggers
 local usedfonts   = utilities.storage.allocate()
 ----- loadedfonts = utilities.storage.allocate()
 
---[[ldx--
-<p>The following functions are used for reporting about the fonts
-used. The message itself is not that useful in regular runs but since
-we now have several readers it may be handy to know what reader is
-used for which font.</p>
---ldx]]--
+-- The following functions are used for reporting about the fonts used. The message
+-- itself is not that useful in regular runs but since we now have several readers
+-- it may be handy to know what reader is used for which font.
 
 function loggers.onetimemessage(font,char,message,reporter)
     local tfmdata = fonts.hashes.identifiers[font]
diff --git a/tex/context/base/mkiv/font-nod.lua b/tex/context/base/mkiv/font-nod.lua
index a7dcfd9b0..1e39784d9 100644
--- a/tex/context/base/mkiv/font-nod.lua
+++ b/tex/context/base/mkiv/font-nod.lua
@@ -7,11 +7,6 @@ if not modules then modules = { } end modules ['font-nod'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>This is rather experimental. We need more control and some of this
-might become a runtime module instead. This module will be cleaned up!</p>
---ldx]]--
-
 local utfchar = utf.char
 local concat, fastcopy = table.concat, table.fastcopy
 local match, rep = string.match, string.rep
diff --git a/tex/context/base/mkiv/font-one.lua b/tex/context/base/mkiv/font-one.lua
index 829f52ea0..25efc2a04 100644
--- a/tex/context/base/mkiv/font-one.lua
+++ b/tex/context/base/mkiv/font-one.lua
@@ -7,18 +7,16 @@ if not modules then modules = { } end modules ['font-one'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Some code may look a bit obscure but this has to do with the fact that we also use
-this code for testing and much code evolved in the transition from <l n='tfm'/> to
-<l n='afm'/> to <l n='otf'/>.</p>
-
-<p>The following code still has traces of intermediate font support where we handles
-font encodings. Eventually font encoding went away but we kept some code around in
-other modules.</p>
-
-<p>This version implements a node mode approach so that users can also more easily
-add features.</p>
---ldx]]--
+-- Some code may look a bit obscure but this has to do with the fact that we also
+-- use this code for testing and much code evolved in the transition from TFM to AFM
+-- to OTF.
+--
+-- The following code still has traces of intermediate font support where we handled
+-- font encodings. Eventually font encoding went away but we kept some code around
+-- in other modules.
+--
+-- This version implements a node mode approach so that users can also more easily
+-- add features.
 
 local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers
 
@@ -71,15 +69,13 @@ local overloads           = fonts.mappings.overloads
 
 local applyruntimefixes   = fonts.treatments and fonts.treatments.applyfixes
 
---[[ldx--
-<p>We cache files. Caching is taken care of in the loader. We cheat a bit by adding
-ligatures and kern information to the afm derived data. That way we can set them faster
-when defining a font.</p>
-
-<p>We still keep the loading two phased: first we load the data in a traditional
-fashion and later we transform it to sequences. Then we apply some methods also
-used in opentype fonts (like <t>tlig</t>).</p>
---ldx]]--
+-- We cache files. Caching is taken care of in the loader. We cheat a bit by adding
+-- ligatures and kern information to the afm derived data. That way we can set them
+-- faster when defining a font.
+--
+-- We still keep the loading two phased: first we load the data in a traditional
+-- fashion and later we transform it to sequences. Then we apply some methods also
+-- used in opentype fonts (like tlig).
 
 function afm.load(filename)
     filename = resolvers.findfile(filename,'afm') or ""
@@ -312,10 +308,8 @@ local function enhance_fix_names(data)
     end
 end
 
---[[ldx--
-<p>These helpers extend the basic table with extra ligatures, texligatures
-and extra kerns. This saves quite some lookups later.</p>
---ldx]]--
+-- These helpers extend the basic table with extra ligatures, texligatures and extra
+-- kerns. This saves quite some lookups later.
 
 local addthem = function(rawdata,ligatures)
     if ligatures then
@@ -349,17 +343,14 @@ local function enhance_add_ligatures(rawdata)
     addthem(rawdata,afm.helpdata.ligatures)
 end
 
---[[ldx--
-<p>We keep the extra kerns in separate kerning tables so that we can use
-them selectively.</p>
---ldx]]--
-
--- This is rather old code (from the beginning when we had only tfm). If
--- we unify the afm data (now we have names all over the place) then
--- we can use shcodes but there will be many more looping then. But we
--- could get rid of the tables in char-cmp then. Als, in the generic version
--- we don't use the character database. (Ok, we can have a context specific
--- variant).
+-- We keep the extra kerns in separate kerning tables so that we can use them
+-- selectively.
+--
+-- This is rather old code (from the beginning when we had only tfm). If we unify
+-- the afm data (now we have names all over the place) then we can use shcodes but
+-- there will be much more looping then. But we could get rid of the tables in
+-- char-cmp then. Also, in the generic version we don't use the character database.
+-- (Ok, we can have a context specific variant.)
 
 local function enhance_add_extra_kerns(rawdata) -- using shcodes is not robust here
     local descriptions = rawdata.descriptions
@@ -440,9 +431,7 @@ local function enhance_add_extra_kerns(rawdata) -- using shcodes is not robust h
     do_it_copy(afm.helpdata.rightkerned)
 end
 
---[[ldx--
-<p>The copying routine looks messy (and is indeed a bit messy).</p>
---ldx]]--
+-- The copying routine looks messy (and is indeed a bit messy).
 
 local function adddimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name
     if data then
@@ -619,11 +608,9 @@ end
     return nil
 end
 
---[[ldx--
-<p>Originally we had features kind of hard coded for <l n='afm'/> files but since I
-expect to support more font formats, I decided to treat this fontformat like any
-other and handle features in a more configurable way.</p>
---ldx]]--
+-- Originally we had features kind of hard coded for AFM files but since I expect to
+-- support more font formats, I decided to treat this font format like any other and
+-- handle features in a more configurable way.
 
 function afm.setfeatures(tfmdata,features)
     local okay = constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
@@ -715,13 +702,10 @@ local function afmtotfm(specification)
     end
 end
 
---[[ldx--
-<p>As soon as we could intercept the <l n='tfm'/> reader, I implemented an
-<l n='afm'/> reader. Since traditional <l n='pdftex'/> could use <l n='opentype'/>
-fonts with <l n='afm'/> companions, the following method also could handle
-those cases, but now that we can handle <l n='opentype'/> directly we no longer
-need this features.</p>
---ldx]]--
+-- As soon as we could intercept the TFM reader, I implemented an AFM reader. Since
+-- traditional pdfTeX could use OpenType fonts with AFM companions, the following
+-- method also could handle those cases, but now that we can handle OpenType
+-- directly we no longer need this feature.
 
 local function read_from_afm(specification)
     local tfmdata = afmtotfm(specification)
@@ -736,9 +720,7 @@ local function read_from_afm(specification)
     return tfmdata
 end
 
---[[ldx--
-<p>We have the usual two modes and related features initializers and processors.</p>
---ldx]]--
+-- We have the usual two modes and related features initializers and processors.
 
 registerafmfeature {
     name         = "mode",
diff --git a/tex/context/base/mkiv/font-onr.lua b/tex/context/base/mkiv/font-onr.lua
index 9e5a012bd..6234742a3 100644
--- a/tex/context/base/mkiv/font-onr.lua
+++ b/tex/context/base/mkiv/font-onr.lua
@@ -7,18 +7,16 @@ if not modules then modules = { } end modules ['font-onr'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Some code may look a bit obscure but this has to do with the fact that we also use
-this code for testing and much code evolved in the transition from <l n='tfm'/> to
-<l n='afm'/> to <l n='otf'/>.</p>
-
-<p>The following code still has traces of intermediate font support where we handles
-font encodings. Eventually font encoding went away but we kept some code around in
-other modules.</p>
-
-<p>This version implements a node mode approach so that users can also more easily
-add features.</p>
---ldx]]--
+-- Some code may look a bit obscure but this has to do with the fact that we also
+-- use this code for testing and much code evolved in the transition from TFM to AFM
+-- to OTF.
+--
+-- The following code still has traces of intermediate font support where we handled
+-- font encodings. Eventually font encoding went away but we kept some code around
+-- in other modules.
+--
+-- This version implements a node mode approach so that users can also more easily
+-- add features.
 
 local fonts, logs, trackers, resolvers = fonts, logs, trackers, resolvers
 
@@ -44,12 +42,9 @@ afm.readers              = readers
 
 afm.version              = 1.513 -- incrementing this number one up will force a re-cache
 
---[[ldx--
-<p>We start with the basic reader which we give a name similar to the built in <l n='tfm'/>
-and <l n='otf'/> reader.</p>
-<p>We use a new (unfinished) pfb loader but I see no differences between the old
-and new vectors (we actually had one bad vector with the old loader).</p>
---ldx]]--
+-- We start with the basic reader which we give a name similar to the built in TFM
+-- and OTF reader. We use a PFB loader but I see no differences between the old and
+-- new vectors (we actually had one bad vector with the old loader).
 
 local get_indexes, get_shapes
 
@@ -305,11 +300,10 @@ do
 
 end
 
---[[ldx--
-<p>We start with the basic reader which we give a name similar to the built in <l n='tfm'/>
-and <l n='otf'/> reader. We only need data that is relevant for our use. We don't support
-more complex arrangements like multiple master (obsolete), direction specific kerning, etc.</p>
---ldx]]--
+-- We start with the basic reader which we give a name similar to the built in TFM
+-- and OTF reader. We only need data that is relevant for our use. We don't support
+-- more complex arrangements like multiple master (obsolete), direction specific
+-- kerning, etc.
 
 local spacer     = patterns.spacer
 local whitespace = patterns.whitespace
diff --git a/tex/context/base/mkiv/font-ota.lua b/tex/context/base/mkiv/font-ota.lua
index a8f9f0047..160d0d0ed 100644
--- a/tex/context/base/mkiv/font-ota.lua
+++ b/tex/context/base/mkiv/font-ota.lua
@@ -54,10 +54,8 @@ local chardata            = characters and characters.data
 local otffeatures         = fonts.constructors.features.otf
 local registerotffeature  = otffeatures.register
 
---[[ldx--
-<p>Analyzers run per script and/or language and are needed in order to
-process features right.</p>
---ldx]]--
+-- Analyzers run per script and/or language and are needed in order to process
+-- features right.
 
 local setstate = nuts.setstate
 local getstate = nuts.getstate
diff --git a/tex/context/base/mkiv/font-ots.lua b/tex/context/base/mkiv/font-ots.lua
index 6d7c5fb25..48f85c365 100644
--- a/tex/context/base/mkiv/font-ots.lua
+++ b/tex/context/base/mkiv/font-ots.lua
@@ -7,92 +7,90 @@ if not modules then modules = { } end modules ['font-ots'] = { -- sequences
     license   = "see context related readme files",
 }
 
---[[ldx--
-<p>I need to check the description at the microsoft site ... it has been improved
-so maybe there are some interesting details there. Most below is based on old and
-incomplete documentation and involved quite a bit of guesswork (checking with the
-abstract uniscribe of those days. But changing things is tricky!</p>
-
-<p>This module is a bit more split up that I'd like but since we also want to test
-with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
-and discussion about improvements and functionality mostly happens on the
-<l n='context'/> mailing list.</p>
-
-<p>The specification of OpenType is (or at least decades ago was) kind of vague.
-Apart from a lack of a proper free specifications there's also the problem that
-Microsoft and Adobe may have their own interpretation of how and in what order to
-apply features. In general the Microsoft website has more detailed specifications
-and is a better reference. There is also some information in the FontForge help
-files. In the end we rely most on the Microsoft specification.</p>
-
-<p>Because there is so much possible, fonts might contain bugs and/or be made to
-work with certain rederers. These may evolve over time which may have the side
-effect that suddenly fonts behave differently. We don't want to catch all font
-issues.</p>
-
-<p>After a lot of experiments (mostly by Taco, me and Idris) the first implementation
-was already quite useful. When it did most of what we wanted, a more optimized version
-evolved. Of course all errors are mine and of course the code can be improved. There
-are quite some optimizations going on here and processing speed is currently quite
-acceptable and has been improved over time. Many complex scripts are not yet supported
-yet, but I will look into them as soon as <l n='context'/> users ask for it.</p>
-
-<p>The specification leaves room for interpretation. In case of doubt the Microsoft
-implementation is the reference as it is the most complete one. As they deal with
-lots of scripts and fonts, Kai and Ivo did a lot of testing of the generic code and
-their suggestions help improve the code. I'm aware that not all border cases can be
-taken care of, unless we accept excessive runtime, and even then the interference
-with other mechanisms (like hyphenation) are not trivial.</p>
-
-<p>Especially discretionary handling has been improved much by Kai Eigner who uses complex
-(latin) fonts. The current implementation is a compromis between his patches and my code
-and in the meantime performance is quite ok. We cannot check all border cases without
-compromising speed but so far we're okay. Given good test cases we can probably improve
-it here and there. Especially chain lookups are non trivial with discretionaries but
-things got much better over time thanks to Kai.</p>
-
-<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
-relationship with unicode at all, apart from the fact that a font might cover certain
-ranges of characters. One character can have multiple shapes. However, at the
-<l n='tex'/> end we use unicode so and all extra glyphs are mapped into a private
-space. This is needed because we need to access them and <l n='tex'/> has to include
-then in the output eventually.</p>
-
-<p>The initial data table is rather close to the open type specification and also not
-that different from the one produced by <l n='fontforge'/> but we uses hashes instead.
-In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
-so that successive runs can use the optimized table (after loading the table is
-unpacked).</p>
-
-<p>This module is sparsely documented because it is has been a moving target. The
-table format of the reader changed a bit over time and we experiment a lot with
-different methods for supporting features. By now the structures are quite stable</p>
-
-<p>Incrementing the version number will force a re-cache. We jump the number by one
-when there's a fix in the reader or processing code that can result in different
-results.</p>
-
-<p>This code is also used outside context but in context it has to work with other
-mechanisms. Both put some constraints on the code here.</p>
-
---ldx]]--
-
--- Remark: We assume that cursives don't cross discretionaries which is okay because it
--- is only used in semitic scripts.
+-- I need to check the description at the Microsoft site ... it has been improved so
+-- maybe there are some interesting details there. Most below is based on old and
+-- incomplete documentation and involved quite a bit of guesswork (checking with the
+-- abstract uniscribe of those days). But changing things is tricky!
+--
+-- This module is a bit more split up than I'd like but since we also want to test
+-- with plain TeX it has to be so. This module is part of ConTeXt and discussion
+-- about improvements and functionality mostly happens on the ConTeXt mailing list.
+--
+-- The specification of OpenType is (or at least decades ago was) kind of vague.
+-- Apart from a lack of a proper free specifications there's also the problem that
+-- Microsoft and Adobe may have their own interpretation of how and in what order to
+-- apply features. In general the Microsoft website has more detailed specifications
+-- and is a better reference. There is also some information in the FontForge help
+-- files. In the end we rely most on the Microsoft specification.
+--
+-- Because there is so much possible, fonts might contain bugs and/or be made to
+-- work with certain renderers. These may evolve over time which may have the side
+-- effect that suddenly fonts behave differently. We don't want to catch all font
+-- issues.
+--
+-- After a lot of experiments (mostly by Taco, me and Idris) the first
+-- implementation was already quite useful. When it did most of what we wanted, a
+-- more optimized version evolved. Of course all errors are mine and of course the
+-- code can be improved. There are quite some optimizations going on here and
+-- processing speed is currently quite acceptable and has been improved over time.
+-- Many complex scripts are not supported yet, but I will look into them as soon
+-- as ConTeXt users ask for it.
+--
+-- The specification leaves room for interpretation. In case of doubt the Microsoft
+-- implementation is the reference as it is the most complete one. As they deal with
+-- lots of scripts and fonts, Kai and Ivo did a lot of testing of the generic code
+-- and their suggestions help improve the code. I'm aware that not all border cases
+-- can be taken care of, unless we accept excessive runtime, and even then the
+-- interference with other mechanisms (like hyphenation) is not trivial.
+--
+-- Especially discretionary handling has been improved much by Kai Eigner who uses
+-- complex (latin) fonts. The current implementation is a compromise between his
+-- patches and my code and in the meantime performance is quite ok. We cannot check
+-- all border cases without compromising speed but so far we're okay. Given good
+-- test cases we can probably improve it here and there. Especially chain lookups
+-- are non trivial with discretionaries but things got much better over time thanks
+-- to Kai.
+--
+-- Glyphs are indexed not by unicode but in their own way. This is because there is
+-- no relationship with unicode at all, apart from the fact that a font might cover
+-- certain ranges of characters. One character can have multiple shapes. However, at
+-- the TeX end we use unicode, so all extra glyphs are mapped into a private
+-- space. This is needed because we need to access them and TeX has to include them
+-- in the output eventually.
+--
+-- The initial data table is rather close to the OpenType specification and also
+-- not that different from the one produced by FontForge but we use hashes instead.
+-- In ConTeXt that table is packed (similar tables are shared) and cached on disk so
+-- that successive runs can use the optimized table (after loading the table is
+-- unpacked).
+--
+-- This module is sparsely documented because it has been a moving target. The
+-- table format of the reader changed a bit over time and we experiment a lot with
+-- different methods for supporting features. By now the structures are quite stable.
+--
+-- Incrementing the version number will force a re-cache. We jump the number by one
+-- when there's a fix in the reader or processing code that can result in different
+-- results.
+--
+-- This code is also used outside ConTeXt but in ConTeXt it has to work with other
+-- mechanisms. Both put some constraints on the code here.
+--
+-- Remark: We assume that cursives don't cross discretionaries which is okay because
+-- it is only used in semitic scripts.
 --
 -- Remark: We assume that marks precede base characters.
 --
--- Remark: When complex ligatures extend into discs nodes we can get side effects. Normally
--- this doesn't happen; ff\d{l}{l}{l} in lm works but ff\d{f}{f}{f}.
+-- Remark: When complex ligatures extend into disc nodes we can get side effects.
+-- Normally this doesn't happen; ff\d{l}{l}{l} in lm works but ff\d{f}{f}{f} doesn't.
 --
 -- Todo: check if we copy attributes to disc nodes if needed.
 --
--- Todo: it would be nice if we could get rid of components. In other places we can use
--- the unicode properties. We can just keep a lua table.
+-- Todo: it would be nice if we could get rid of components. In other places we can
+-- use the unicode properties. We can just keep a lua table.
 --
--- Remark: We do some disc juggling where we need to keep in mind that the pre, post and
--- replace fields can have prev pointers to a nesting node ... I wonder if that is still
--- needed.
+-- Remark: We do some disc juggling where we need to keep in mind that the pre, post
+-- and replace fields can have prev pointers to a nesting node ... I wonder if that
+-- is still needed.
 --
 -- Remark: This is not possible:
 --
@@ -1038,10 +1036,8 @@ function handlers.gpos_pair(head,start,dataset,sequence,kerns,rlmode,skiphash,st
     end
 end
 
---[[ldx--
-<p>We get hits on a mark, but we're not sure if the it has to be applied so
-we need to explicitly test for basechar, baselig and basemark entries.</p>
---ldx]]--
+-- We get hits on a mark, but we're not sure if it has to be applied so we need
+-- to explicitly test for basechar, baselig and basemark entries.
 
 function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode,skiphash)
     local markchar = getchar(start)
@@ -1236,10 +1232,8 @@ function handlers.gpos_cursive(head,start,dataset,sequence,exitanchors,rlmode,sk
     return head, start, false
 end
 
---[[ldx--
-<p>I will implement multiple chain replacements once I run into a font that uses
-it. It's not that complex to handle.</p>
---ldx]]--
+-- I will implement multiple chain replacements once I run into a font that uses it.
+-- It's not that complex to handle.
 
 local chainprocs = { }
 
@@ -1292,29 +1286,22 @@ end
 
 chainprocs.reversesub = reversesub
 
---[[ldx--
-<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
-applied: single, alternate, multiple or ligature where ligature can be an invalid
-one in the sense that it will replace multiple by one but not neccessary one that
-looks like the combination (i.e. it is the counterpart of multiple then). For
-example, the following is valid:</p>
-
-<typing>
-<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
-</typing>
-
-<p>Therefore we we don't really do the replacement here already unless we have the
-single lookup case. The efficiency of the replacements can be improved by deleting
-as less as needed but that would also make the code even more messy.</p>
---ldx]]--
-
---[[ldx--
-<p>Here we replace start by a single variant.</p>
---ldx]]--
-
--- To be done (example needed): what if > 1 steps
-
--- this is messy: do we need this disc checking also in alternates?
+-- This chain stuff is somewhat tricky since we can have a sequence of actions to be
+-- applied: single, alternate, multiple or ligature where ligature can be an invalid
+-- one in the sense that it will replace multiple by one but not necessarily one that
+-- looks like the combination (i.e. it is the counterpart of multiple then). For
+-- example, the following is valid:
+--
+--   xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx
+--
+-- Therefore we don't really do the replacement here already unless we have the
+-- single lookup case. The efficiency of the replacements can be improved by
+-- deleting as little as needed but that would also make the code even more messy.
+--
+-- Here we replace start by a single variant.
+--
+-- To be done   : what if > 1 steps (example needed)
+-- This is messy: do we need this disc checking also in alternates?
 
 local function reportzerosteps(dataset,sequence)
     logwarning("%s: no steps",cref(dataset,sequence))
@@ -1390,9 +1377,7 @@ function chainprocs.gsub_single(head,start,stop,dataset,sequence,currentlookup,r
     return head, start, false
 end
 
---[[ldx--
-<p>Here we replace start by new glyph. First we delete the rest of the match.</p>
---ldx]]--
+-- Here we replace start by new glyph. First we delete the rest of the match.
 
 -- char_1 mark_1 -> char_x mark_1 (ignore marks)
 -- char_1 mark_1 -> char_x
@@ -1444,9 +1429,7 @@ function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlooku
     return head, start, false
 end
 
---[[ldx--
-<p>Here we replace start by a sequence of new glyphs.</p>
---ldx]]--
+-- Here we replace start by a sequence of new glyphs.
 
 function chainprocs.gsub_multiple(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
     local mapping = currentlookup.mapping
@@ -1470,11 +1453,9 @@ function chainprocs.gsub_multiple(head,start,stop,dataset,sequence,currentlookup
     return head, start, false
 end
 
---[[ldx--
-<p>When we replace ligatures we use a helper that handles the marks. I might change
-this function (move code inline and handle the marks by a separate function). We
-assume rather stupid ligatures (no complex disc nodes).</p>
---ldx]]--
+-- When we replace ligatures we use a helper that handles the marks. I might change
+-- this function (move code inline and handle the marks by a separate function). We
+-- assume rather stupid ligatures (no complex disc nodes).
 
 -- compare to handlers.gsub_ligature which is more complex ... why
 
@@ -2532,7 +2513,7 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode,s
     -- fonts can have many steps (each doing one check) or many contexts
 
     -- todo: make a per-char cache so that we have small contexts (when we have a context
-    -- n == 1 and otherwise it can be more so we can even distingish n == 1 or more)
+    -- n == 1 and otherwise it can be more so we can even distinguish n == 1 or more)
 
     local nofcontexts = contexts.n -- #contexts
 
diff --git a/tex/context/base/mkiv/font-syn.lua b/tex/context/base/mkiv/font-syn.lua
index e80d57f41..9fba3d8d4 100644
--- a/tex/context/base/mkiv/font-syn.lua
+++ b/tex/context/base/mkiv/font-syn.lua
@@ -56,10 +56,8 @@ local trace_rejections     = false  trackers.register("fonts.rejections",     fu
 
 local report_names         = logs.reporter("fonts","names")
 
---[[ldx--
-<p>This module implements a name to filename resolver. Names are resolved
-using a table that has keys filtered from the font related files.</p>
---ldx]]--
+-- This module implements a name to filename resolver. Names are resolved using a
+-- table that has keys filtered from the font related files.
 
 fonts                      = fonts or { } -- also used elsewhere
 
@@ -88,10 +86,6 @@ local autoreload           = true
 directives.register("fonts.autoreload",     function(v) autoreload     = toboolean(v) end)
 directives.register("fonts.usesystemfonts", function(v) usesystemfonts = toboolean(v) end)
 
---[[ldx--
-<p>A few helpers.</p>
---ldx]]--
-
 -- -- what to do with these -- --
 --
 -- thin -> thin
@@ -305,10 +299,8 @@ local function analyzespec(somename)
     end
 end
 
---[[ldx--
-<p>It would make sense to implement the filters in the related modules,
-but to keep the overview, we define them here.</p>
---ldx]]--
+-- It would make sense to implement the filters in the related modules, but to keep
+-- the overview, we define them here.
 
 filters.afm = fonts.handlers.afm.readers.getinfo
 filters.otf = fonts.handlers.otf.readers.getinfo
@@ -412,11 +404,9 @@ filters.ttc = filters.otf
 --     end
 -- end
 
---[[ldx--
-<p>The scanner loops over the filters using the information stored in
-the file databases. Watch how we check not only for the names, but also
-for combination with the weight of a font.</p>
---ldx]]--
+-- The scanner loops over the filters using the information stored in the file
+-- databases. Watch how we check not only for the names, but also for combination
+-- with the weight of a font.
 
 filters.list = {
     "otf", "ttf", "ttc", "afm", -- no longer dfont support (for now)
@@ -1402,11 +1392,8 @@ local function is_reloaded()
     end
 end
 
---[[ldx--
-<p>The resolver also checks if the cached names are loaded. Being clever
-here is for testing purposes only (it deals with names prefixed by an
-encoding name).</p>
---ldx]]--
+-- The resolver also checks if the cached names are loaded. Being clever here is for
+-- testing purposes only (it deals with names prefixed by an encoding name).
 
 local function fuzzy(mapping,sorted,name,sub) -- no need for reverse sorted here
     local condensed = gsub(name,"[^%a%d]","")
diff --git a/tex/context/base/mkiv/font-tfm.lua b/tex/context/base/mkiv/font-tfm.lua
index 945421a42..81f94532b 100644
--- a/tex/context/base/mkiv/font-tfm.lua
+++ b/tex/context/base/mkiv/font-tfm.lua
@@ -50,21 +50,18 @@ constructors.resolvevirtualtoo = false -- wil be set in font-ctx.lua
 fonts.formats.tfm              = "type1" -- we need to have at least a value here
 fonts.formats.ofm              = "type1" -- we need to have at least a value here
 
---[[ldx--
-<p>The next function encapsulates the standard <l n='tfm'/> loader as
-supplied by <l n='luatex'/>.</p>
---ldx]]--
-
--- this might change: not scaling and then apply features and do scaling in the
--- usual way with dummy descriptions but on the other hand .. we no longer use
--- tfm so why bother
-
--- ofm directive blocks local path search unless set; btw, in context we
--- don't support ofm files anyway as this format is obsolete
-
--- we need to deal with nested virtual fonts, but because we load in the
--- frontend we also need to make sure we don't nest too deep (esp when sizes
--- get large)
+-- The next function encapsulates the standard TFM loader as supplied by LuaTeX.
+--
+-- This might change: not scaling and then applying features and doing the scaling
+-- in the usual way with dummy descriptions, but on the other hand we no longer
+-- use TFM (except for the JMN math fonts) so why bother.
+--
+-- The ofm directive blocks a local path search unless set. Actually, in ConTeXt we
+-- never had to deal with OFM files anyway as this format is obsolete (there are
+-- hardly any fonts in that format that are of use).
+--
+-- We need to deal with nested virtual fonts, but because we load in the frontend we
+-- also need to make sure we don't nest too deep (especially when sizes get large).
 --
 -- (VTITLE Example of a recursion)
 -- (MAPFONT D 0 (FONTNAME recurse)(FONTAT D 2))
@@ -72,7 +69,7 @@ supplied by <l n='luatex'/>.</p>
 -- (CHARACTER C B (CHARWD D 2)(CHARHT D 2)(MAP (SETCHAR C A)))
 -- (CHARACTER C C (CHARWD D 4)(CHARHT D 4)(MAP (SETCHAR C B)))
 --
--- we added the same checks as below to the luatex engine
+-- We added the same checks as below to the LuaTeX engine.
 
 function tfm.setfeatures(tfmdata,features)
     local okay = constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm)
diff --git a/tex/context/base/mkiv/font-trt.lua b/tex/context/base/mkiv/font-trt.lua
index abc92ba52..893534078 100644
--- a/tex/context/base/mkiv/font-trt.lua
+++ b/tex/context/base/mkiv/font-trt.lua
@@ -12,11 +12,9 @@ local cleanfilename = fonts.names.cleanfilename
 local splitbase     = file.splitbase
 local lower         = string.lower
 
---[[ldx--
-<p>We provide a simple treatment mechanism (mostly because I want to demonstrate
-something in a manual). It's one of the few places where an lfg file gets loaded
-outside the goodies manager.</p>
---ldx]]--
+-- We provide a simple treatment mechanism (mostly because I want to demonstrate
+-- something in a manual). It's one of the few places where an lfg file gets loaded
+-- outside the goodies manager.
 
 local treatments       = fonts.treatments or { }
 fonts.treatments       = treatments
diff --git a/tex/context/base/mkiv/font-vir.lua b/tex/context/base/mkiv/font-vir.lua
index c3071cac0..6142ddafd 100644
--- a/tex/context/base/mkiv/font-vir.lua
+++ b/tex/context/base/mkiv/font-vir.lua
@@ -6,9 +6,8 @@ if not modules then modules = { } end modules ['font-vir'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>This is very experimental code! Not yet adapted to recent changes. This will change.</p>
---ldx]]--
+-- This is very experimental code! Not yet adapted to recent changes. This will
+-- change. Actually we moved on.
 
 -- present in the backend but unspecified:
 --
@@ -25,10 +24,8 @@ local constructors      = fonts.constructors
 local vf                = constructors.handlers.vf
 vf.version              = 1.000 -- same as tfm
 
---[[ldx--
-<p>We overload the <l n='vf'/> reader.</p>
---ldx]]--
-
+-- -- We overload the VF reader:
+--
 -- general code / already frozen
 --
 -- function vf.find(name)
diff --git a/tex/context/base/mkiv/l-dir.lua b/tex/context/base/mkiv/l-dir.lua
index 3e24e4e2a..316406850 100644
--- a/tex/context/base/mkiv/l-dir.lua
+++ b/tex/context/base/mkiv/l-dir.lua
@@ -21,7 +21,8 @@ local dir = dir
 local lfs = lfs
 
 local attributes = lfs.attributes
-local walkdir    = lfs.dir
+----- walkdir    = lfs.dir
+local scandir    = lfs.dir
 local isdir      = lfs.isdir  -- not robust, will be overloaded anyway
 local isfile     = lfs.isfile -- not robust, will be overloaded anyway
 local currentdir = lfs.currentdir
@@ -69,6 +70,20 @@ else
 
 end
 
+-- safeguard
+
+local isreadable = file.isreadable
+
+local walkdir = function(p,...)
+    if isreadable(p.."/.") then
+        return scandir(p,...)
+    else
+        return function() end
+    end
+end
+
+lfs.walkdir = walkdir
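A small usage sketch of the safeguarded walker defined above; the directory name is arbitrary and, as with lfs.dir, the dot entries are skipped explicitly:

for name in walkdir(".") do
    if name ~= "." and name ~= ".." then
        print(name)
    end
end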
+
 -- handy
 
 function dir.current()
diff --git a/tex/context/base/mkiv/lang-url.lua b/tex/context/base/mkiv/lang-url.lua
index 7a8b7ca86..7cd666df5 100644
--- a/tex/context/base/mkiv/lang-url.lua
+++ b/tex/context/base/mkiv/lang-url.lua
@@ -21,12 +21,10 @@ local v_after   = variables.after
 
 local is_letter = characters.is_letter
 
---[[
-<p>Hyphenating <l n='url'/>'s is somewhat tricky and a matter of taste. I did
-consider using a dedicated hyphenation pattern or dealing with it by node
-parsing, but the following solution suits as well. After all, we're mostly
-dealing with <l n='ascii'/> characters.</p>
-]]--
+-- Hyphenating URL's is somewhat tricky and a matter of taste. I did consider using
+-- a dedicated hyphenation pattern or dealing with it by node parsing, but the
+-- following solution suits as well. After all, we're mostly dealing with ASCII
+-- characters.
 
 local urls     = { }
 languages.urls = urls
diff --git a/tex/context/base/mkiv/luat-cbk.lua b/tex/context/base/mkiv/luat-cbk.lua
index 9fd55f3ec..9e35283c1 100644
--- a/tex/context/base/mkiv/luat-cbk.lua
+++ b/tex/context/base/mkiv/luat-cbk.lua
@@ -12,20 +12,16 @@ local collectgarbage, type, next = collectgarbage, type, next
 local round = math.round
 local sortedhash, sortedkeys, tohash = table.sortedhash, table.sortedkeys, table.tohash
 
---[[ldx--
-<p>Callbacks are the real asset of <l n='luatex'/>. They permit you to hook
-your own code into the <l n='tex'/> engine. Here we implement a few handy
-auxiliary functions.</p>
---ldx]]--
+-- Callbacks are the real asset of LuaTeX. They permit you to hook your own code
+-- into the TeX engine. Here we implement a few handy auxiliary functions. Watch
+-- out, there are differences between LuaTeX and LuaMetaTeX.
 
 callbacks       = callbacks or { }
 local callbacks = callbacks
 
---[[ldx--
-<p>When you (temporarily) want to install a callback function, and after a
-while wants to revert to the original one, you can use the following two
-functions. This only works for non-frozen ones.</p>
---ldx]]--
+-- When you (temporarily) want to install a callback function, and after a while
+-- want to revert to the original one, you can use the following two functions.
+-- This only works for non-frozen ones.
 
 local trace_callbacks   = false  trackers.register("system.callbacks", function(v) trace_callbacks = v end)
 local trace_calls       = false  -- only used when analyzing performance and initializations
@@ -47,13 +43,12 @@ local list              = callbacks.list
 local permit_overloads  = false
 local block_overloads   = false
 
---[[ldx--
-<p>By now most callbacks are frozen and most provide a way to plug in your own code. For instance
-all node list handlers provide before/after namespaces and the file handling code can be extended
-by adding schemes and if needed I can add more hooks. So there is no real need to overload a core
-callback function. It might be ok for quick and dirty testing but anyway you're on your own if
-you permanently overload callback functions.</p>
---ldx]]--
+-- By now most callbacks are frozen and most provide a way to plug in your own code.
+-- For instance all node list handlers provide before/after namespaces and the file
+-- handling code can be extended by adding schemes and if needed I can add more
+-- hooks. So there is no real need to overload a core callback function. It might be
+-- ok for quick and dirty testing but anyway you're on your own if you permanently
+-- overload callback functions.
 
 -- This might become a configuration file only option when it gets abused too much.
 
@@ -279,65 +274,50 @@ end)
 -- callbacks.freeze("read_.*_file","reading file")
 -- callbacks.freeze("open_.*_file","opening file")
 
---[[ldx--
-<p>The simple case is to remove the callback:</p>
-
-<code>
-callbacks.push('linebreak_filter')
-... some actions ...
-callbacks.pop('linebreak_filter')
-</code>
-
-<p>Often, in such case, another callback or a macro call will pop
-the original.</p>
-
-<p>In practice one will install a new handler, like in:</p>
-
-<code>
-callbacks.push('linebreak_filter', function(...)
-    return something_done(...)
-end)
-</code>
-
-<p>Even more interesting is:</p>
-
-<code>
-callbacks.push('linebreak_filter', function(...)
-    callbacks.pop('linebreak_filter')
-    return something_done(...)
-end)
-</code>
-
-<p>This does a one-shot.</p>
---ldx]]--
-
---[[ldx--
-<p>Callbacks may result in <l n='lua'/> doing some hard work
-which takes time and above all resourses. Sometimes it makes
-sense to disable or tune the garbage collector in order to
-keep the use of resources acceptable.</p>
-
-<p>At some point in the development we did some tests with counting
-nodes (in this case 121049).</p>
-
-<table>
-<tr><td>setstepmul</td><td>seconds</td><td>megabytes</td></tr>
-<tr><td>200</td><td>24.0</td><td>80.5</td></tr>
-<tr><td>175</td><td>21.0</td><td>78.2</td></tr>
-<tr><td>150</td><td>22.0</td><td>74.6</td></tr>
-<tr><td>160</td><td>22.0</td><td>74.6</td></tr>
-<tr><td>165</td><td>21.0</td><td>77.6</td></tr>
-<tr><td>125</td><td>21.5</td><td>89.2</td></tr>
-<tr><td>100</td><td>21.5</td><td>88.4</td></tr>
-</table>
-
-<p>The following code is kind of experimental. In the documents
-that describe the development of <l n='luatex'/> we report
-on speed tests. One observation is that it sometimes helps to
-restart the collector. Okay, experimental code has been removed,
-because messing aroudn with the gc is too unpredictable.</p>
---ldx]]--
-
+-- The simple case is to remove the callback:
+--
+--   callbacks.push('linebreak_filter')
+--   ... some actions ...
+--   callbacks.pop('linebreak_filter')
+--
+-- Often, in such case, another callback or a macro call will pop the original.
+--
+-- In practice one will install a new handler, like in:
+--
+--   callbacks.push('linebreak_filter', function(...)
+--       return something_done(...)
+--   end)
+--
+-- Even more interesting is:
+--
+--   callbacks.push('linebreak_filter', function(...)
+--       callbacks.pop('linebreak_filter')
+--       return something_done(...)
+--   end)
+--
+-- This does a one-shot.
+--
+-- Callbacks may result in Lua doing some hard work which takes time and above all
+-- resources. Sometimes it makes sense to disable or tune the garbage collector in
+-- order to keep the use of resources acceptable.
+--
+-- At some point in the development we did some tests with counting nodes (in this
+-- case 121049).
+--
+--   setstepmul  seconds  megabytes
+--      200       24.0      80.5
+--      175       21.0      78.2
+--      150       22.0      74.6
+--      160       22.0      74.6
+--      165       21.0      77.6
+--      125       21.5      89.2
+--      100       21.5      88.4
+--
+-- The following code is kind of experimental. In the documents that describe the
+-- development of LuaTeX we report on speed tests. One observation is that it
+-- sometimes helps to restart the collector. Okay, experimental code has been
+-- removed, because messing around with the gc is too unpredictable.
+--
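Purely as an illustration of the kind of collector tuning measured in the table above (standard Lua calls; the stepmul value is taken from the table, the pause value is just a common default), not code from this patch:

collectgarbage("setstepmul",165) -- make each incremental step do more work
collectgarbage("setpause",200)   -- wait until memory doubles before a new cycle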
 -- For the moment we keep this here and not in util-gbc.lua or so.
 
 utilities                  = utilities or { }
diff --git a/tex/context/base/mkiv/luat-ini.lua b/tex/context/base/mkiv/luat-ini.lua
index dcca8cec7..83fe0713d 100644
--- a/tex/context/base/mkiv/luat-ini.lua
+++ b/tex/context/base/mkiv/luat-ini.lua
@@ -6,11 +6,9 @@ if not modules then modules = { } end modules ['luat-ini'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>We cannot load anything yet. However what we will do us reserve a few tables.
-These can be used for runtime user data or third party modules and will not be
-cluttered by macro package code.</p>
---ldx]]--
+-- We cannot load anything yet. However what we will do is reserve a few tables.
+-- These can be used for runtime user data or third party modules and will not be
+-- cluttered by macro package code.
 
 userdata      = userdata      or { } -- for users (e.g. functions etc)
 thirddata     = thirddata     or { } -- only for third party modules
diff --git a/tex/context/base/mkiv/lxml-aux.lua b/tex/context/base/mkiv/lxml-aux.lua
index fc17371e5..217f81c13 100644
--- a/tex/context/base/mkiv/lxml-aux.lua
+++ b/tex/context/base/mkiv/lxml-aux.lua
@@ -110,11 +110,7 @@ function xml.processattributes(root,pattern,handle)
     return collected
 end
 
---[[ldx--
-<p>The following functions collect elements and texts.</p>
---ldx]]--
-
--- are these still needed -> lxml-cmp.lua
+-- The following functions collect elements and texts.
 
 function xml.collect(root, pattern)
     return xmlapplylpath(root,pattern)
@@ -153,9 +149,7 @@ function xml.collect_tags(root, pattern, nonamespace)
     end
 end
 
---[[ldx--
-<p>We've now arrived at the functions that manipulate the tree.</p>
---ldx]]--
+-- We've now arrived at the functions that manipulate the tree.
 
 local no_root = { no_root = true }
 
@@ -780,9 +774,7 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
     end
 end
 
---[[ldx--
-<p>Helper (for q2p).</p>
---ldx]]--
+-- Helper (for q2p).
 
 function xml.cdatatotext(e)
     local dt = e.dt
@@ -879,9 +871,7 @@ end
 -- xml.addentitiesdoctype(x,"hexadecimal")
 -- print(x)
 
---[[ldx--
-<p>Here are a few synonyms.</p>
---ldx]]--
+-- Here are a few synonyms:
 
 xml.all     = xml.each
 xml.insert  = xml.insertafter
diff --git a/tex/context/base/mkiv/lxml-ent.lua b/tex/context/base/mkiv/lxml-ent.lua
index df80a7985..1d6d058b6 100644
--- a/tex/context/base/mkiv/lxml-ent.lua
+++ b/tex/context/base/mkiv/lxml-ent.lua
@@ -10,14 +10,10 @@ local next = next
 local byte, format = string.byte, string.format
 local setmetatableindex = table.setmetatableindex
 
---[[ldx--
-<p>We provide (at least here) two entity handlers. The more extensive
-resolver consults a hash first, tries to convert to <l n='utf'/> next,
-and finaly calls a handler when defines. When this all fails, the
-original entity is returned.</p>
-
-<p>We do things different now but it's still somewhat experimental</p>
---ldx]]--
+-- We provide (at least here) two entity handlers. The more extensive resolver
+-- consults a hash first, tries to convert to UTF next, and finally calls a handler
+-- when defined. When this all fails, the original entity is returned. We do things
+-- differently now but it's still somewhat experimental.
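A minimal sketch of the resolving order described above (hash, then numeric conversion, then a handler, then the original); the table and function names are illustrative and utf8.char from standard Lua stands in for the UTF helper used in ConTeXt:

local entities = { amp = "&", lt = "<", gt = ">" }

local function resolveentity(name,handler)
    local e = entities[name]
    if e then
        return e
    end
    local hex = name:match("^#x(%x+)$")
    local dec = name:match("^#(%d+)$")
    local n   = (hex and tonumber(hex,16)) or (dec and tonumber(dec))
    if n then
        return utf8.char(n) -- numeric character references become UTF
    end
    if handler then
        return handler(name)
    end
    return "&" .. name .. ";" -- fall back to the original entity
end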
 
 local trace_entities = false  trackers.register("xml.entities", function(v) trace_entities = v end)
 
diff --git a/tex/context/base/mkiv/lxml-lpt.lua b/tex/context/base/mkiv/lxml-lpt.lua
index 78a9fca2e..d242b07de 100644
--- a/tex/context/base/mkiv/lxml-lpt.lua
+++ b/tex/context/base/mkiv/lxml-lpt.lua
@@ -20,28 +20,21 @@ local formatters = string.formatters -- no need (yet) as paths are cached anyway
 -- beware, this is not xpath ... e.g. position is different (currently) and
 -- we have reverse-sibling as reversed preceding sibling
 
---[[ldx--
-<p>This module can be used stand alone but also inside <l n='mkiv'/> in
-which case it hooks into the tracker code. Therefore we provide a few
-functions that set the tracers. Here we overload a previously defined
-function.</p>
-<p>If I can get in the mood I will make a variant that is XSLT compliant
-but I wonder if it makes sense.</P>
---ldx]]--
-
---[[ldx--
-<p>Expecially the lpath code is experimental, we will support some of xpath, but
-only things that make sense for us; as compensation it is possible to hook in your
-own functions. Apart from preprocessing content for <l n='context'/> we also need
-this module for process management, like handling <l n='ctx'/> and <l n='rlx'/>
-files.</p>
-
-<typing>
-a/b/c /*/c
-a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
-a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
-</typing>
---ldx]]--
+-- This module can be used stand alone but also inside ConTeXt in which case it
+-- hooks into the tracker code. Therefore we provide a few functions that set the
+-- tracers. Here we overload a previously defined function.
+--
+-- If I can get in the mood I will make a variant that is XSLT compliant but I
+-- wonder if it makes sense.
+--
+-- Especially the lpath code is experimental, we will support some of xpath, but
+-- only things that make sense for us; as compensation it is possible to hook in
+-- your own functions. Apart from preprocessing content for ConTeXt we also need
+-- this module for process management, like handling CTX and RLX files.
+--
+--   a/b/c /*/c
+--   a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
+--   a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
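A hedged usage sketch of the path syntax listed above, using only calls that also appear in this module's comments (the XML snippet is made up):

local root = xml.convert("<a><b><c>one</c><c>two</c></b></a>")
for e in xml.collected(root,"a/b/c") do
    print(e) -- each matched element, as in the iterator example further on
end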
 
 local trace_lpath    = false
 local trace_lparse   = false
@@ -62,11 +55,9 @@ if trackers then
     end)
 end
 
---[[ldx--
-<p>We've now arrived at an interesting part: accessing the tree using a subset
-of <l n='xpath'/> and since we're not compatible we call it <l n='lpath'/>. We
-will explain more about its usage in other documents.</p>
---ldx]]--
+-- We've now arrived at an interesting part: accessing the tree using a subset of
+-- XPATH and since we're not compatible we call it LPATH. We will explain more about
+-- its usage in other documents.
 
 local xml = xml
 
@@ -1273,9 +1264,8 @@ do
 end
 
 local applylpath = xml.applylpath
---[[ldx--
-<p>This is the main filter function. It returns whatever is asked for.</p>
---ldx]]--
+
+-- This is the main filter function. It returns whatever is asked for.
 
 function xml.filter(root,pattern) -- no longer funny attribute handling here
     return applylpath(root,pattern)
@@ -1525,21 +1515,16 @@ expressions.tag = function(e,n) -- only tg
     end
 end
 
---[[ldx--
-<p>Often using an iterators looks nicer in the code than passing handler
-functions. The <l n='lua'/> book describes how to use coroutines for that
-purpose (<url href='http://www.lua.org/pil/9.3.html'/>). This permits
-code like:</p>
-
-<typing>
-for r, d, k in xml.elements(xml.load('text.xml'),"title") do
-    print(d[k]) -- old method
-end
-for e in xml.collected(xml.load('text.xml'),"title") do
-    print(e) -- new one
-end
-</typing>
---ldx]]--
+-- Often using an iterator looks nicer in the code than passing handler functions.
+-- The Lua book describes how to use coroutines for that purpose (see
+-- http://www.lua.org/pil/9.3.html). This permits code like:
+--
+--   for r, d, k in xml.elements(xml.load('text.xml'),"title") do
+--       print(d[k]) -- old method
+--   end
+--   for e in xml.collected(xml.load('text.xml'),"title") do
+--       print(e) -- new one
+--   end
 
 -- local wrap, yield = coroutine.wrap, coroutine.yield
 -- local dummy = function() end
diff --git a/tex/context/base/mkiv/lxml-mis.lua b/tex/context/base/mkiv/lxml-mis.lua
index 04ba7b35c..ea62550bb 100644
--- a/tex/context/base/mkiv/lxml-mis.lua
+++ b/tex/context/base/mkiv/lxml-mis.lua
@@ -17,13 +17,10 @@ local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, l
 lpegpatterns.xml  = lpegpatterns.xml or { }
 local xmlpatterns = lpegpatterns.xml
 
---[[ldx--
-<p>The following helper functions best belong to the <t>lxml-ini</t>
-module. Some are here because we need then in the <t>mk</t>
-document and other manuals, others came up when playing with
-this module. Since this module is also used in <l n='mtxrun'/> we've
-put them here instead of loading mode modules there then needed.</p>
---ldx]]--
+-- The following helper functions best belong to the 'lxml-ini' module. Some are
+-- here because we need them in the 'mk' document and other manuals, others came up
+-- when playing with this module. Since this module is also used in 'mtxrun' we've
+-- put them here instead of loading more modules than needed.
 
 local function xmlgsub(t,old,new) -- will be replaced
     local dt = t.dt
diff --git a/tex/context/base/mkiv/lxml-tab.lua b/tex/context/base/mkiv/lxml-tab.lua
index e18362bd8..a06b59065 100644
--- a/tex/context/base/mkiv/lxml-tab.lua
+++ b/tex/context/base/mkiv/lxml-tab.lua
@@ -18,13 +18,12 @@ local trace_entities = false  trackers.register("xml.entities", function(v) trac
 
 local report_xml = logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
 
---[[ldx--
-<p>The parser used here is inspired by the variant discussed in the lua book, but
-handles comment and processing instructions, has a different structure, provides
-parent access; a first version used different trickery but was less optimized to we
-went this route. First we had a find based parser, now we have an <l n='lpeg'/> based one.
-The find based parser can be found in l-xml-edu.lua along with other older code.</p>
---ldx]]--
+-- The parser used here is inspired by the variant discussed in the lua book, but
+-- handles comment and processing instructions, has a different structure, provides
+-- parent access; a first version used different trickery but was less optimized so
+-- we went this route. First we had a find based parser, now we have an LPEG based
+-- one. The find based parser can be found in l-xml-edu.lua along with other older
+-- code.
 
 if lpeg.setmaxstack then lpeg.setmaxstack(1000) end -- deeply nested xml files
 
@@ -42,26 +41,19 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
 local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
 local formatters = string.formatters
 
---[[ldx--
-<p>First a hack to enable namespace resolving. A namespace is characterized by
-a <l n='url'/>. The following function associates a namespace prefix with a
-pattern. We use <l n='lpeg'/>, which in this case is more than twice as fast as a
-find based solution where we loop over an array of patterns. Less code and
-much cleaner.</p>
---ldx]]--
+-- First a hack to enable namespace resolving. A namespace is characterized by a
+-- URL. The following function associates a namespace prefix with a pattern. We use
+-- LPEG, which in this case is more than twice as fast as a find based solution
+-- where we loop over an array of patterns. Less code and much cleaner.
 
 do -- begin of namespace closure (we ran out of locals)
 
 xml.xmlns = xml.xmlns or { }
 
---[[ldx--
-<p>The next function associates a namespace prefix with an <l n='url'/>. This
-normally happens independent of parsing.</p>
-
-<typing>
-xml.registerns("mml","mathml")
-</typing>
---ldx]]--
+-- The next function associates a namespace prefix with a URL. This normally
+-- happens independently of parsing.
+--
+--   xml.registerns("mml","mathml")
 
 local check = P(false)
 local parse = check
@@ -71,15 +63,11 @@ function xml.registerns(namespace, pattern) -- pattern can be an lpeg
     parse = P { P(check) + 1 * V(1) }
 end
 
---[[ldx--
-<p>The next function also registers a namespace, but this time we map a
-given namespace prefix onto a registered one, using the given
-<l n='url'/>. This used for attributes like <t>xmlns:m</t>.</p>
-
-<typing>
-xml.checkns("m","http://www.w3.org/mathml")
-</typing>
---ldx]]--
+-- The next function also registers a namespace, but this time we map a given
+-- namespace prefix onto a registered one, using the given URL. This is used for
+-- attributes like 'xmlns:m'.
+--
+--   xml.checkns("m","http://www.w3.org/mathml")
 
 function xml.checkns(namespace,url)
     local ns = lpegmatch(parse,lower(url))
@@ -88,68 +76,54 @@ function xml.checkns(namespace,url)
     end
 end
 
---[[ldx--
-<p>Next we provide a way to turn an <l n='url'/> into a registered
-namespace. This used for the <t>xmlns</t> attribute.</p>
-
-<typing>
-resolvedns = xml.resolvens("http://www.w3.org/mathml")
-</typing>
-
-This returns <t>mml</t>.
---ldx]]--
+-- Next we provide a way to turn a URL into a registered namespace. This is used
+-- for the 'xmlns' attribute.
+--
+--  resolvedns = xml.resolvens("http://www.w3.org/mathml")
+--
+-- This returns 'mml'.
 
 function xml.resolvens(url)
      return lpegmatch(parse,lower(url)) or ""
 end
 
---[[ldx--
-<p>A namespace in an element can be remapped onto the registered
-one efficiently by using the <t>xml.xmlns</t> table.</p>
---ldx]]--
+-- A namespace in an element can be remapped onto the registered one efficiently by
+-- using the 'xml.xmlns' table.
 
 end -- end of namespace closure
 
---[[ldx--
-<p>This version uses <l n='lpeg'/>. We follow the same approach as before, stack and top and
-such. This version is about twice as fast which is mostly due to the fact that
-we don't have to prepare the stream for cdata, doctype etc etc. This variant is
-is dedicated to Luigi Scarso, who challenged me with 40 megabyte <l n='xml'/> files that
-took 12.5 seconds to load (1.5 for file io and the rest for tree building). With
-the <l n='lpeg'/> implementation we got that down to less 7.3 seconds. Loading the 14
-<l n='context'/> interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.</p>
-
-<p>Next comes the parser. The rather messy doctype definition comes in many
-disguises so it is no surprice that later on have to dedicate quite some
-<l n='lpeg'/> code to it.</p>
-
-<typing>
-<!DOCTYPE Something PUBLIC "... ..." "..." [ ... ] >
-<!DOCTYPE Something PUBLIC "... ..." "..." >
-<!DOCTYPE Something SYSTEM "... ..." [ ... ] >
-<!DOCTYPE Something SYSTEM "... ..." >
-<!DOCTYPE Something [ ... ] >
-<!DOCTYPE Something >
-</typing>
-
-<p>The code may look a bit complex but this is mostly due to the fact that we
-resolve namespaces and attach metatables. There is only one public function:</p>
-
-<typing>
-local x = xml.convert(somestring)
-</typing>
-
-<p>An optional second boolean argument tells this function not to create a root
-element.</p>
-
-<p>Valid entities are:</p>
-
-<typing>
-<!ENTITY xxxx SYSTEM "yyyy" NDATA zzzz>
-<!ENTITY xxxx PUBLIC "yyyy" >
-<!ENTITY xxxx "yyyy" >
-</typing>
---ldx]]--
+-- This version uses LPEG. We follow the same approach as before, stack and top and
+-- such. This version is about twice as fast, which is mostly due to the fact that
+-- we don't have to prepare the stream for cdata, doctype etc. This variant is
+-- dedicated to Luigi Scarso, who challenged me with 40 megabyte XML files that took
+-- 12.5 seconds to load (1.5 for file io and the rest for tree building). With the
+-- LPEG implementation we got that down to less than 7.3 seconds. Loading the 14
+-- ConTeXt interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.
+--
+-- Next comes the parser. The rather messy doctype definition comes in many
+-- disguises so it is no surprise that later on we have to dedicate quite some LPEG
+-- code to it.
+--
+--  <!DOCTYPE Something PUBLIC "... ..." "..." [ ... ] >
+--  <!DOCTYPE Something PUBLIC "... ..." "..." >
+--  <!DOCTYPE Something SYSTEM "... ..." [ ... ] >
+--  <!DOCTYPE Something SYSTEM "... ..." >
+--  <!DOCTYPE Something [ ... ] >
+--  <!DOCTYPE Something >
+--
+-- The code may look a bit complex but this is mostly due to the fact that we
+-- resolve namespaces and attach metatables. There is only one public function:
+--
+--   local x = xml.convert(somestring)
+--
+-- An optional second boolean argument tells this function not to create a root
+-- element.
+--
+-- Valid entities are:
+--
+--   <!ENTITY xxxx SYSTEM "yyyy" NDATA zzzz>
+--   <!ENTITY xxxx PUBLIC "yyyy" >
+--   <!ENTITY xxxx "yyyy" >
 
 -- not just one big nested table capture (lpeg overflow)
 
@@ -1332,10 +1306,8 @@ function xml.inheritedconvert(data,xmldata,cleanup) -- xmldata is parent
     return xc
 end
 
---[[ldx--
-<p>Packaging data in an xml like table is done with the following
-function. Maybe it will go away (when not used).</p>
---ldx]]--
+-- Packaging data in an xml like table is done with the following function. Maybe it
+-- will go away (when not used).
 
 function xml.is_valid(root)
     return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
@@ -1354,11 +1326,8 @@ end
 
 xml.errorhandler = report_xml
 
---[[ldx--
-<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
-the whole file first. The function accepts a string representing
-a filename or a file handle.</p>
---ldx]]--
+-- We cannot feed an LPEG parser from a filehandle so we need to load the whole
+-- file first. The function accepts a string representing a filename or a file handle.
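+--
+-- For instance (a sketch):
+--
+--   local root = xml.load("somefile.xml")
+--   -- or: xml.load(io.open("somefile.xml","rb"))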
 
 function xml.load(filename,settings)
     local data = ""
@@ -1382,10 +1351,8 @@ function xml.load(filename,settings)
     end
 end
 
---[[ldx--
-<p>When we inject new elements, we need to convert strings to
-valid trees, which is what the next function does.</p>
---ldx]]--
+-- When we inject new elements, we need to convert strings to valid trees, which is
+-- what the next function does.
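+--
+-- For instance (a sketch):
+--
+--   local t = xml.toxml("<em>new</em>")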
 
 local no_root = { no_root = true }
 
@@ -1398,11 +1365,9 @@ function xml.toxml(data)
     end
 end
 
---[[ldx--
-<p>For copying a tree we use a dedicated function instead of the
-generic table copier. Since we know what we're dealing with we
-can speed up things a bit. The second argument is not to be used!</p>
---ldx]]--
+-- For copying a tree we use a dedicated function instead of the generic table
+-- copier. Since we know what we're dealing with we can speed up things a bit. The
+-- second argument is not to be used!
 
 -- local function copy(old)
 --     if old then
@@ -1466,13 +1431,10 @@ end
 
 xml.copy = copy
 
---[[ldx--
-<p>In <l n='context'/> serializing the tree or parts of the tree is a major
-actitivity which is why the following function is pretty optimized resulting
-in a few more lines of code than needed. The variant that uses the formatting
-function for all components is about 15% slower than the concatinating
-alternative.</p>
---ldx]]--
+-- In ConTeXt serializing the tree or parts of the tree is a major activity which
+-- is why the following function is pretty optimized, resulting in a few more lines
+-- of code than needed. The variant that uses the formatting function for all
+-- components is about 15% slower than the concatenating alternative.
 
 -- todo: add <?xml version='1.0' standalone='yes'?> when not present
 
@@ -1490,10 +1452,8 @@ function xml.checkbom(root) -- can be made faster
     end
 end
 
---[[ldx--
-<p>At the cost of some 25% runtime overhead you can first convert the tree to a string
-and then handle the lot.</p>
---ldx]]--
+-- At the cost of some 25% runtime overhead you can first convert the tree to a
+-- string and then handle the lot.
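+--
+-- For instance (a sketch):
+--
+--   local s = xml.tostring(root)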
 
 -- new experimental reorganized serialize
 
@@ -1711,21 +1671,18 @@ newhandlers {
     }
 }
 
---[[ldx--
-<p>How you deal with saving data depends on your preferences. For a 40 MB database
-file the timing on a 2.3 Core Duo are as follows (time in seconds):</p>
-
-<lines>
-1.3 : load data from file to string
-6.1 : convert string into tree
-5.3 : saving in file using xmlsave
-6.8 : converting to string using xml.tostring
-3.6 : saving converted string in file
-</lines>
 
-<p>Beware, these were timing with the old routine but measurements will not be that
-much different I guess.</p>
---ldx]]--
+-- How you deal with saving data depends on your preferences. For a 40 MB database
+-- file the timings on a 2.3 Core Duo are as follows (time in seconds):
+--
+-- 1.3 : load data from file to string
+-- 6.1 : convert string into tree
+-- 5.3 : saving in file using xmlsave
+-- 6.8 : converting to string using xml.tostring
+-- 3.6 : saving converted string in file
+--
+-- Beware, these were timings with the old routine, but measurements will not be
+-- that much different, I guess.
 
 -- maybe this will move to lxml-xml
 
@@ -1827,10 +1784,8 @@ xml.newhandlers     = newhandlers
 xml.serialize       = serialize
 xml.tostring        = xmltostring
 
---[[ldx--
-<p>The next function operated on the content only and needs a handle function
-that accepts a string.</p>
---ldx]]--
+-- The next function operates on the content only and needs a handle function that
+-- accepts a string.
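+--
+-- For instance (a sketch, with 'print' as the handler):
+--
+--   xml.string(root,print)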
 
 local function xmlstring(e,handle)
     if not handle or (e.special and e.tg ~= "@rt@") then
@@ -1849,9 +1804,7 @@ end
 
 xml.string = xmlstring
 
---[[ldx--
-<p>A few helpers:</p>
---ldx]]--
+-- A few helpers:
 
 --~ xmlsetproperty(root,"settings",settings)
 
@@ -1899,11 +1852,9 @@ function xml.name(root)
     end
 end
 
---[[ldx--
-<p>The next helper erases an element but keeps the table as it is,
-and since empty strings are not serialized (effectively) it does
-not harm. Copying the table would take more time. Usage:</p>
---ldx]]--
+-- The next helper erases an element but keeps the table as it is, and since empty
+-- strings are (effectively) not serialized it does no harm. Copying the table
+-- would take more time.
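+--
+-- For instance (a sketch):
+--
+--   xml.erase(dt,k)   -- the slot at dt[k] now serializes to nothing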
 
 function xml.erase(dt,k)
     if dt then
@@ -1915,13 +1866,9 @@ function xml.erase(dt,k)
     end
 end
 
---[[ldx--
-<p>The next helper assigns a tree (or string). Usage:</p>
-
-<typing>
-dt[k] = xml.assign(root) or xml.assign(dt,k,root)
-</typing>
---ldx]]--
+-- The next helper assigns a tree (or string). Usage:
+--
+--   dt[k] = xml.assign(root) or xml.assign(dt,k,root)
 
 function xml.assign(dt,k,root)
     if dt and k then
@@ -1932,15 +1879,10 @@ function xml.assign(dt,k,root)
     end
 end
 
--- the following helpers may move
-
---[[ldx--
-<p>The next helper assigns a tree (or string). Usage:</p>
-<typing>
-xml.tocdata(e)
-xml.tocdata(e,"error")
-</typing>
---ldx]]--
+-- The next helper assigns a tree (or string). Usage:
+--
+--   xml.tocdata(e)
+--   xml.tocdata(e,"error")
 
 function xml.tocdata(e,wrapper) -- a few more in the aux module
     local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
diff --git a/tex/context/base/mkiv/math-map.lua b/tex/context/base/mkiv/math-map.lua
index 5f93b43fc..153dde852 100644
--- a/tex/context/base/mkiv/math-map.lua
+++ b/tex/context/base/mkiv/math-map.lua
@@ -7,31 +7,13 @@ if not modules then modules = { } end modules ['math-map'] = {
     license   = "see context related readme files"
 }
 
--- todo: make sparse .. if self
-
---[[ldx--
-<p>Remapping mathematics alphabets.</p>
---ldx]]--
-
--- oldstyle: not really mathematics but happened to be part of
--- the mathematics fonts in cmr
---
--- persian: we will also provide mappers for other
--- scripts
-
--- todo: alphabets namespace
--- maybe: script/scriptscript dynamic,
-
--- superscripped primes get unscripted !
-
--- to be looked into once the fonts are ready (will become font
--- goodie):
---
--- (U+2202,U+1D715) : upright
--- (U+2202,U+1D715) : italic
--- (U+2202,U+1D715) : upright
---
--- plus add them to the regular vectors below so that they honor \it etc
+-- persian: we will also provide mappers for other scripts
+-- todo   : alphabets namespace
+-- maybe  : script/scriptscript dynamic,
+-- check  : (U+2202,U+1D715) : upright
+--          (U+2202,U+1D715) : italic
+--          (U+2202,U+1D715) : upright
+--          add them to the regular vectors below so that they honor \it etc
 
 local type, next = type, next
 local merged, sortedhash = table.merged, table.sortedhash
diff --git a/tex/context/base/mkiv/meta-fun.lua b/tex/context/base/mkiv/meta-fun.lua
index ddbbd9a52..aa388b0ca 100644
--- a/tex/context/base/mkiv/meta-fun.lua
+++ b/tex/context/base/mkiv/meta-fun.lua
@@ -13,15 +13,18 @@ local format, load, type = string.format, load, type
 local context    = context
 local metapost   = metapost
 
-metapost.metafun = metapost.metafun or { }
-local metafun    = metapost.metafun
+local metafun    = metapost.metafun or { }
+metapost.metafun = metafun
 
 function metafun.topath(t,connector)
     context("(")
     if #t > 0 then
+        if not connector then
+            connector = ".."
+        end
         for i=1,#t do
             if i > 1 then
-                context(connector or "..")
+                context(connector)
             end
             local ti = t[i]
             if type(ti) == "string" then
@@ -39,12 +42,15 @@ end
 function metafun.interpolate(f,b,e,s,c)
     local done = false
     context("(")
-    for i=b,e,(e-b)/s do
-        local d = load(format("return function(x) return %s end",f))
-        if d then
-            d = d()
+    local d = load(format("return function(x) return %s end",f))
+    if d then
+        d = d()
+        if not c then
+            c = "..."
+        end
+        for i=b,e,(e-b)/s do
             if done then
-                context(c or "...")
+                context(c)
             else
                 done = true
             end
diff --git a/tex/context/base/mkiv/mlib-fio.lua b/tex/context/base/mkiv/mlib-fio.lua
index 51c88eb22..39a709505 100644
--- a/tex/context/base/mkiv/mlib-fio.lua
+++ b/tex/context/base/mkiv/mlib-fio.lua
@@ -54,8 +54,18 @@ local function validftype(ftype)
     end
 end
 
+local remapped = {
+    -- We don't yet have an interface for adding more here but when needed
+    -- there will be one.
+    ["hatching.mp"] = "mp-remapped-hatching.mp",
+    ["boxes.mp"]    = "mp-remapped-boxes.mp",
+    ["hatching"]    = "mp-remapped-hatching.mp",
+    ["boxes"]       = "mp-remapped-boxes.mp",
+}
+
 finders.file = function(specification,name,mode,ftype)
-    return resolvers.findfile(name,validftype(ftype))
+    local usedname = remapped[name] or name
+    return resolvers.findfile(usedname,validftype(ftype))
 end
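+
+-- So a lookup along these lines (a sketch, arguments simplified) now transparently
+-- resolves the remapped file:
+--
+--   finders.file(nil,"boxes.mp","r","mp")   -- finds "mp-remapped-boxes.mp"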
 
 local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi
diff --git a/tex/context/base/mkiv/mlib-run.lua b/tex/context/base/mkiv/mlib-run.lua
index 602d6f36c..82426668f 100644
--- a/tex/context/base/mkiv/mlib-run.lua
+++ b/tex/context/base/mkiv/mlib-run.lua
@@ -6,28 +6,12 @@ if not modules then modules = { } end modules ['mlib-run'] = {
     license   = "see context related readme files",
 }
 
--- cmyk       -> done, native
--- spot       -> done, but needs reworking (simpler)
--- multitone  ->
--- shade      -> partly done, todo: cm
--- figure     -> done
--- hyperlink  -> low priority, easy
-
--- new * run
--- or
--- new * execute^1 * finish
-
--- a*[b,c] == b + a * (c-b)
-
---[[ldx--
-<p>The directional helpers and pen analysis are more or less translated from the
-<l n='c'/> code. It really helps that Taco know that source so well. Taco and I spent
-quite some time on speeding up the <l n='lua'/> and <l n='c'/> code. There is not
-much to gain, especially if one keeps in mind that when integrated in <l n='tex'/>
-only a part of the time is spent in <l n='metapost'/>. Of course an integrated
-approach is way faster than an external <l n='metapost'/> and processing time
-nears zero.</p>
---ldx]]--
+-- The directional helpers and pen analysis are more or less translated from the C
+-- code. It really helps that Taco knows that source so well. Taco and I spent quite
+-- some time on speeding up the Lua and C code. There is not much to gain,
+-- especially if one keeps in mind that when integrated in TeX only a part of the
+-- time is spent in MetaPost. Of course an integrated approach is way faster than an
+-- external MetaPost and processing time nears zero.
 
 local type, tostring, tonumber, next = type, tostring, tonumber, next
 local find, striplines = string.find, utilities.strings.striplines
diff --git a/tex/context/base/mkiv/mult-mps.lua b/tex/context/base/mkiv/mult-mps.lua
index 008bcbb9f..cfa821517 100644
--- a/tex/context/base/mkiv/mult-mps.lua
+++ b/tex/context/base/mkiv/mult-mps.lua
@@ -127,7 +127,7 @@ return {
         --
         "red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background",
         --
-        "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in", "dk",
+        "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in", "dk", "es", "ts",
         --
         "triplet", "quadruplet", "totransform", "bymatrix", "closedcurve", "closedlines",
         --
diff --git a/tex/context/base/mkiv/node-ini.lua b/tex/context/base/mkiv/node-ini.lua
index ef7d4afed..ea726ff3d 100644
--- a/tex/context/base/mkiv/node-ini.lua
+++ b/tex/context/base/mkiv/node-ini.lua
@@ -6,50 +6,38 @@ if not modules then modules = { } end modules ['node-ini'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Most of the code that had accumulated here is now separated in modules.</p>
---ldx]]--
-
--- I need to clean up this module as it's a bit of a mess now. The latest luatex
--- has most tables but we have a few more in luametatex. Also, some are different
--- between these engines. We started out with hardcoded tables, that then ended
--- up as comments and are now gone (as they differ per engine anyway).
+-- Most of the code that had accumulated here is now separated into modules.
 
 local next, type, tostring = next, type, tostring
 local gsub = string.gsub
 local concat, remove = table.concat, table.remove
 local sortedhash, sortedkeys, swapped = table.sortedhash, table.sortedkeys, table.swapped
 
---[[ldx--
-<p>Access to nodes is what gives <l n='luatex'/> its power. Here we implement a
-few helper functions. These functions are rather optimized.</p>
---ldx]]--
-
---[[ldx--
-<p>When manipulating node lists in <l n='context'/>, we will remove nodes and
-insert new ones. While node access was implemented, we did quite some experiments
-in order to find out if manipulating nodes in <l n='lua'/> was feasible from the
-perspective of performance.</p>
-
-<p>First of all, we noticed that the bottleneck is more with excessive callbacks
-(some gets called very often) and the conversion from and to <l n='tex'/>'s
-datastructures. However, at the <l n='lua'/> end, we found that inserting and
-deleting nodes in a table could become a bottleneck.</p>
-
-<p>This resulted in two special situations in passing nodes back to <l n='tex'/>:
-a table entry with value <type>false</type> is ignored, and when instead of a
-table <type>true</type> is returned, the original table is used.</p>
-
-<p>Insertion is handled (at least in <l n='context'/> as follows. When we need to
-insert a node at a certain position, we change the node at that position by a
-dummy node, tagged <type>inline</type> which itself has_attribute the original
-node and one or more new nodes. Before we pass back the list we collapse the
-list. Of course collapsing could be built into the <l n='tex'/> engine, but this
-is a not so natural extension.</p>
-
-<p>When we collapse (something that we only do when really needed), we also
-ignore the empty nodes. [This is obsolete!]</p>
---ldx]]--
+-- Access to nodes is what gives LuaTeX its power. Here we implement a few helper
+-- functions. These functions are rather optimized.
+--
+-- When manipulating node lists in ConTeXt, we will remove nodes and insert new
+-- ones. While node access was implemented, we did quite some experiments in order
+-- to find out if manipulating nodes in Lua was feasible from the perspective of
+-- performance.
+--
+-- First of all, we noticed that the bottleneck is more with excessive callbacks
+-- (some get called very often) and the conversion from and to TeX's
+-- datastructures. However, at the Lua end, we found that inserting and deleting
+-- nodes in a table could become a bottleneck.
+--
+-- This resulted in two special situations in passing nodes back to TeX: a table
+-- entry with value 'false' is ignored, and when instead of a table 'true' is
+-- returned, the original table is used.
+--
+-- Insertion is handled (at least in ConTeXt) as follows. When we need to insert a
+-- node at a certain position, we replace the node at that position by a dummy node,
+-- tagged 'inline', which itself holds the original node and one or more new nodes.
+-- Before we pass back the list we collapse the list. Of course collapsing could be
+-- built into the TeX engine, but this is a not so natural extension.
+
+-- When we collapse (something that we only do when really needed), we also ignore
+-- the empty nodes. [This is obsolete!]
 
 -- local gf = node.direct.getfield
 -- local n = table.setmetatableindex("number")
diff --git a/tex/context/base/mkiv/node-res.lua b/tex/context/base/mkiv/node-res.lua
index 5c669f9da..f2c6e97e9 100644
--- a/tex/context/base/mkiv/node-res.lua
+++ b/tex/context/base/mkiv/node-res.lua
@@ -9,11 +9,6 @@ if not modules then modules = { } end modules ['node-res'] = {
 local type, next = type, next
 local gmatch, format = string.gmatch, string.format
 
---[[ldx--
-<p>The next function is not that much needed but in <l n='context'/> we use
-for debugging <l n='luatex'/> node management.</p>
---ldx]]--
-
 local nodes, node = nodes, node
 
 local report_nodes   = logs.reporter("nodes","housekeeping")
diff --git a/tex/context/base/mkiv/node-tra.lua b/tex/context/base/mkiv/node-tra.lua
index 67435f1c7..20e354392 100644
--- a/tex/context/base/mkiv/node-tra.lua
+++ b/tex/context/base/mkiv/node-tra.lua
@@ -6,10 +6,8 @@ if not modules then modules = { } end modules ['node-tra'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>This is rather experimental. We need more control and some of this
-might become a runtime module instead. This module will be cleaned up!</p>
---ldx]]--
+-- Some of the code here might become a runtime module instead. This old module will
+-- be cleaned up anyway!
 
 local next = next
 local utfchar = utf.char
diff --git a/tex/context/base/mkiv/pack-obj.lua b/tex/context/base/mkiv/pack-obj.lua
index 445085776..dda828749 100644
--- a/tex/context/base/mkiv/pack-obj.lua
+++ b/tex/context/base/mkiv/pack-obj.lua
@@ -6,10 +6,8 @@ if not modules then modules = { } end modules ['pack-obj'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>We save object references in the main utility table. jobobjects are
-reusable components.</p>
---ldx]]--
+-- We save object references in the main utility table; job objects are reusable
+-- components.
 
 local context         = context
 local codeinjections  = backends.codeinjections
diff --git a/tex/context/base/mkiv/pack-rul.lua b/tex/context/base/mkiv/pack-rul.lua
index 98117867c..20db028ec 100644
--- a/tex/context/base/mkiv/pack-rul.lua
+++ b/tex/context/base/mkiv/pack-rul.lua
@@ -7,10 +7,6 @@ if not modules then modules = { } end modules ['pack-rul'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>An explanation is given in the history document <t>mk</t>.</p>
---ldx]]--
-
 -- we need to be careful with display math as it uses shifts
 
 -- \framed[align={lohi,middle}]{$x$}
diff --git a/tex/context/base/mkiv/publ-dat.lua b/tex/context/base/mkiv/publ-dat.lua
index 64aaaf460..2e5f07f05 100644
--- a/tex/context/base/mkiv/publ-dat.lua
+++ b/tex/context/base/mkiv/publ-dat.lua
@@ -11,12 +11,6 @@ if not modules then modules = { } end modules ['publ-dat'] = {
 -- todo: dataset = datasets[dataset] => current = datasets[dataset]
 -- todo: maybe split this file
 
---[[ldx--
-<p>This is a prelude to integrated bibliography support. This file just loads
-bibtex files and converts them to xml so that the we access the content
-in a convenient way. Actually handling the data takes place elsewhere.</p>
---ldx]]--
-
 if not characters then
     dofile(resolvers.findfile("char-utf.lua"))
     dofile(resolvers.findfile("char-tex.lua"))
diff --git a/tex/context/base/mkiv/publ-ini.lua b/tex/context/base/mkiv/publ-ini.lua
index dac0ab441..aa96dd8bc 100644
--- a/tex/context/base/mkiv/publ-ini.lua
+++ b/tex/context/base/mkiv/publ-ini.lua
@@ -296,7 +296,8 @@ do
             local checksum = nil
             local username = file.addsuffix(file.robustname(formatters["%s-btx-%s"](prefix,name)),"lua")
             if userdata and next(userdata) then
-                if job.passes.first then
+                if environment.currentrun == 1 then
+             -- if job.passes.first then
                     local newdata = serialize(userdata)
                     checksum = md5.HEX(newdata)
                     io.savedata(username,newdata)
diff --git a/tex/context/base/mkiv/publ-ini.mkiv b/tex/context/base/mkiv/publ-ini.mkiv
index 6e34d3ab5..05d93ef85 100644
--- a/tex/context/base/mkiv/publ-ini.mkiv
+++ b/tex/context/base/mkiv/publ-ini.mkiv
@@ -342,7 +342,7 @@
 \newtoks\t_btx_cmd
 \newbox \b_btx_cmd
 
-\t_btx_cmd{\global\setbox\b_btx_cmd\hpack{\clf_btxcmdstring}}
+\t_btx_cmd{\global\setbox\b_btx_cmd\hbox{\clf_btxcmdstring}} % no \hpack, otherwise prerolling --- doesn't work
 
 \let\btxcmd\btxcommand
 
diff --git a/tex/context/base/mkiv/regi-ini.lua b/tex/context/base/mkiv/regi-ini.lua
index 2a3b2caaf..460d97d5e 100644
--- a/tex/context/base/mkiv/regi-ini.lua
+++ b/tex/context/base/mkiv/regi-ini.lua
@@ -6,11 +6,8 @@ if not modules then modules = { } end modules ['regi-ini'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Regimes take care of converting the input characters into
-<l n='utf'/> sequences. The conversion tables are loaded at
-runtime.</p>
---ldx]]--
+-- Regimes take care of converting the input characters into UTF sequences. The
+-- conversion tables are loaded at runtime.
 
 -- Todo: use regi-imp*.lua instead
 
@@ -30,9 +27,7 @@ local sequencers        = utilities.sequencers
 local textlineactions   = resolvers.openers.helpers.textlineactions
 local setmetatableindex = table.setmetatableindex
 
---[[ldx--
-<p>We will hook regime handling code into the input methods.</p>
---ldx]]--
+-- We will hook regime handling code into the input methods.
 
 local trace_translating = false  trackers.register("regimes.translating", function(v) trace_translating = v end)
 
diff --git a/tex/context/base/mkiv/sort-ini.lua b/tex/context/base/mkiv/sort-ini.lua
index 98f516c22..a375d7057 100644
--- a/tex/context/base/mkiv/sort-ini.lua
+++ b/tex/context/base/mkiv/sort-ini.lua
@@ -6,49 +6,45 @@ if not modules then modules = { } end modules ['sort-ini'] = {
     license   = "see context related readme files"
 }
 
--- It took a while to get there, but with Fleetwood Mac's "Don't Stop"
--- playing in the background we sort of got it done.
-
---[[<p>The code here evolved from the rather old mkii approach. There
-we concatinate the key and (raw) entry into a new string. Numbers and
-special characters get some treatment so that they sort ok. In
-addition some normalization (lowercasing, accent stripping) takes
-place and again data is appended ror prepended. Eventually these
-strings are sorted using a regular string sorter. The relative order
-of character is dealt with by weighting them. It took a while to
-figure this all out but eventually it worked ok for most languages,
-given that the right datatables were provided.</p>
-
-<p>Here we do follow a similar approach but this time we don't append
-the manipulated keys and entries but create tables for each of them
-with entries being tables themselves having different properties. In
-these tables characters are represented by numbers and sorting takes
-place using these numbers. Strings are simplified using lowercasing
-as well as shape codes. Numbers are filtered and after getting an offset
-they end up at the right end of the spectrum (more clever parser will
-be added some day). There are definitely more solutions to the problem
-and it is a nice puzzle to solve.</p>
-
-<p>In the future more methods can be added, as there is practically no
-limit to what goes into the tables. For that we will provide hooks.</p>
-
-<p>Todo: decomposition with specific order of accents, this is
-relatively easy to do.</p>
-
-<p>Todo: investigate what standards and conventions there are and see
-how they map onto this mechanism. I've learned that users can come up
-with any demand so nothing here is frozen.</p>
-
-<p>Todo: I ran into the Unicode Collation document and noticed that
-there are some similarities (like the weights) but using that method
-would still demand extra code for language specifics. One option is
-to use the allkeys.txt file for the uc vectors but then we would also
-use the collapsed key (sq, code is now commented). In fact, we could
-just hook those into the replacer code that we reun beforehand.</p>
-
-<p>In the future index entries will become more clever, i.e. they will
-have language etc properties that then can be used.</p>
-]]--
+-- It took a while to get there, but with Fleetwood Mac's "Don't Stop" playing in
+-- the background we sort of got it done.
+--
+-- The code here evolved from the rather old mkii approach. There we concatenate the
+-- key and (raw) entry into a new string. Numbers and special characters get some
+-- treatment so that they sort ok. In addition some normalization (lowercasing,
+-- accent stripping) takes place and again data is appended or prepended.
+-- Eventually these strings are sorted using a regular string sorter. The relative
+-- order of characters is dealt with by weighting them. It took a while to figure
+-- this all out but eventually it worked ok for most languages, given that the right
+-- datatables were provided.
+--
+-- Here we do follow a similar approach but this time we don't append the
+-- manipulated keys and entries but create tables for each of them with entries
+-- being tables themselves having different properties. In these tables characters
+-- are represented by numbers and sorting takes place using these numbers. Strings
+-- are simplified using lowercasing as well as shape codes. Numbers are filtered and
+-- after getting an offset they end up at the right end of the spectrum (a more
+-- clever parser will be added some day). There are definitely more solutions to the
+-- problem and it is a nice puzzle to solve.
+--
+-- In the future more methods can be added, as there is practically no limit to what
+-- goes into the tables. For that we will provide hooks.
+--
+-- Todo: decomposition with specific order of accents, this is relatively easy to
+-- do.
+--
+-- Todo: investigate what standards and conventions there are and see how they map
+-- onto this mechanism. I've learned that users can come up with any demand so
+-- nothing here is frozen.
+--
+-- Todo: I ran into the Unicode Collation document and noticed that there are some
+-- similarities (like the weights) but using that method would still demand extra
+-- code for language specifics. One option is to use the allkeys.txt file for the uc
+-- vectors but then we would also use the collapsed key (sq, code is now commented).
+-- In fact, we could just hook those into the replacer code that we run beforehand.
+--
+-- In the future index entries will become more clever, i.e. they will have language
+-- etc properties that then can be used.
 
 local gsub, find, rep, sub, sort, concat, tohash, format = string.gsub, string.find, string.rep, string.sub, table.sort, table.concat, table.tohash, string.format
 local utfbyte, utfchar, utfcharacters = utf.byte, utf.char, utf.characters
diff --git a/tex/context/base/mkiv/status-files.pdf b/tex/context/base/mkiv/status-files.pdf
index de994239b..476b1642f 100644
Binary files a/tex/context/base/mkiv/status-files.pdf and b/tex/context/base/mkiv/status-files.pdf differ
diff --git a/tex/context/base/mkiv/status-lua.pdf b/tex/context/base/mkiv/status-lua.pdf
index e6773acf4..734e7705c 100644
Binary files a/tex/context/base/mkiv/status-lua.pdf and b/tex/context/base/mkiv/status-lua.pdf differ
diff --git a/tex/context/base/mkiv/syst-con.lua b/tex/context/base/mkiv/syst-con.lua
index 6a11fa8d3..f0ea8546a 100644
--- a/tex/context/base/mkiv/syst-con.lua
+++ b/tex/context/base/mkiv/syst-con.lua
@@ -20,10 +20,9 @@ local implement  = interfaces.implement
 
 local formatters = string.formatters
 
---[[ldx--
-<p>For raw 8 bit characters, the offset is 0x110000 (bottom of plane 18) at
-the top of <l n='luatex'/>'s char range but outside the unicode range.</p>
---ldx]]--
+-- For raw 8 bit characters, the offset is 0x110000 (bottom of plane 18) at the top
+-- of LuaTeX's char range but outside the unicode range. This is no longer the case
+-- in LuaMetaTeX.
 
 function converters.hexstringtonumber(n) tonumber(n,16) end
 function converters.octstringtonumber(n) tonumber(n, 8) end
diff --git a/tex/context/base/mkiv/syst-ini.mkiv b/tex/context/base/mkiv/syst-ini.mkiv
index ae1978eb6..5f226958b 100644
--- a/tex/context/base/mkiv/syst-ini.mkiv
+++ b/tex/context/base/mkiv/syst-ini.mkiv
@@ -253,6 +253,9 @@
 
 \let\newfam\newfamily
 
+\let\newinteger  \newcount % just in case
+\let\newdimension\newdimen % just in case
+
 \firstvalidlanguage\plusone
 
 % Watch out, for the moment we disable the check for already being defined
diff --git a/tex/context/base/mkiv/tabl-tbl.mkiv b/tex/context/base/mkiv/tabl-tbl.mkiv
index 2ed104adf..8b6afb956 100644
--- a/tex/context/base/mkiv/tabl-tbl.mkiv
+++ b/tex/context/base/mkiv/tabl-tbl.mkiv
@@ -1551,7 +1551,8 @@
    \fi}
 
 \def\tabl_tabulate_vrule_reset_indeed
-  {\dofastloopcs\c_tabl_tabulate_max_vrulecolumn\tabl_tabulate_vrule_reset_step
+  {\gletcsname\??tabulatevrule0\endcsname\undefined
+   \dofastloopcs\c_tabl_tabulate_max_vrulecolumn\tabl_tabulate_vrule_reset_step
    \global\c_tabl_tabulate_max_vrulecolumn\zerocount}
 
 \def\tabl_tabulate_vrule_reset_step % undefined or relax
diff --git a/tex/context/base/mkiv/trac-lmx.lua b/tex/context/base/mkiv/trac-lmx.lua
index a531a76d6..56522e1e7 100644
--- a/tex/context/base/mkiv/trac-lmx.lua
+++ b/tex/context/base/mkiv/trac-lmx.lua
@@ -6,7 +6,8 @@ if not modules then modules = { } end modules ['trac-lmx'] = {
     license   = "see context related readme files"
 }
 
--- this one will be adpated to the latest helpers
+-- This one will be adapted to the latest helpers. It might even become a
+-- module instead.
 
 local type, tostring, rawget, loadstring, pcall = type, tostring, rawget, loadstring, pcall
 local format, sub, gsub = string.format, string.sub, string.gsub
diff --git a/tex/context/base/mkiv/util-dim.lua b/tex/context/base/mkiv/util-dim.lua
index bb9eca966..6462f3e49 100644
--- a/tex/context/base/mkiv/util-dim.lua
+++ b/tex/context/base/mkiv/util-dim.lua
@@ -6,14 +6,10 @@ if not modules then modules = { } end modules ['util-dim'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Internally <l n='luatex'/> work with scaled point, which are
-represented by integers. However, in practice, at east at the
-<l n='tex'/> end we work with more generic units like points (pt). Going
-from scaled points (numbers) to one of those units can be
-done by using the conversion factors collected in the following
-table.</p>
---ldx]]--
+-- Internally LuaTeX works with scaled points, which are represented by integers.
+-- However, in practice, at least at the TeX end we work with more generic units like
+-- points (pt). Going from scaled points (numbers) to one of those units can be done
+-- by using the conversion factors collected in the following table.
 
 local format, match, gsub, type, setmetatable = string.format, string.match, string.gsub, type, setmetatable
 local P, S, R, Cc, C, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.Cc, lpeg.C, lpeg.match
@@ -45,7 +41,9 @@ local dimenfactors = allocate {
     ["dd"] = ( 1157/ 1238)/65536,
     ["cc"] = ( 1157/14856)/65536,
  -- ["nd"] = (20320/21681)/65536,
- -- ["nc"] = ( 5080/65043)/65536
+ -- ["nc"] = ( 5080/65043)/65536,
+    ["es"] = ( 9176/  129)/65536,
+    ["ts"] = ( 4588/  645)/65536,
 }
 
 -- print(table.serialize(dimenfactors))
@@ -86,10 +84,8 @@ local dimenfactors = allocate {
 --   ["sp"]=1,
 --  }
 
---[[ldx--
-<p>A conversion function that takes a number, unit (string) and optional
-format (string) is implemented using this table.</p>
---ldx]]--
+-- A conversion function that takes a number, unit (string) and optional format
+-- (string) is implemented using this table.
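+--
+-- For instance (a sketch; the exact rendering of the number depends on the
+-- optional format):
+--
+--   numbertodimen(3*65536,"pt")   -- a string like "3pt"
+--   numbertodimen(3*65536,"bp")   -- the same amount expressed in big points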
 
 local f_none = formatters["%s%s"]
 local f_true = formatters["%0.5F%s"]
@@ -110,9 +106,7 @@ local function numbertodimen(n,unit,fmt) -- will be redefined later !
     end
 end
 
---[[ldx--
-<p>We collect a bunch of converters in the <type>number</type> namespace.</p>
---ldx]]--
+-- We collect a bunch of converters in the 'number' namespace.
 
 number.maxdimen     = 1073741823
 number.todimen      = numbertodimen
@@ -122,7 +116,7 @@ function number.topoints      (n,fmt) return numbertodimen(n,"pt",fmt) end
 function number.toinches      (n,fmt) return numbertodimen(n,"in",fmt) end
 function number.tocentimeters (n,fmt) return numbertodimen(n,"cm",fmt) end
 function number.tomillimeters (n,fmt) return numbertodimen(n,"mm",fmt) end
-function number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end
+-------- number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end
 function number.toscaledpoints(n)     return            n .. "sp"      end
 function number.tobasepoints  (n,fmt) return numbertodimen(n,"bp",fmt) end
 function number.topicas       (n,fmt) return numbertodimen(n "pc",fmt) end
@@ -130,14 +124,13 @@ function number.todidots      (n,fmt) return numbertodimen(n,"dd",fmt) end
 function number.tociceros     (n,fmt) return numbertodimen(n,"cc",fmt) end
 -------- number.tonewdidots   (n,fmt) return numbertodimen(n,"nd",fmt) end
 -------- number.tonewciceros  (n,fmt) return numbertodimen(n,"nc",fmt) end
+function number.toediths      (n,fmt) return numbertodimen(n,"es",fmt) end
+function number.totoves       (n,fmt) return numbertodimen(n,"ts",fmt) end
 
---[[ldx--
-<p>More interesting it to implement a (sort of) dimen datatype, one
-that permits calculations too. First we define a function that
-converts a string to scaledpoints. We use <l n='lpeg'/>. We capture
-a number and optionally a unit. When no unit is given a constant
-capture takes place.</p>
---ldx]]--
+-- More interesting is to implement a (sort of) dimen datatype, one that permits
+-- calculations too. First we define a function that converts a string to
+-- scaledpoints. We use LPEG. We capture a number and optionally a unit. When no
+-- unit is given a constant capture takes place.
 
 local amount = (S("+-")^0 * R("09")^0 * P(".")^0 * R("09")^0) + Cc("0")
 local unit   = R("az")^1 + P("%")
@@ -152,21 +145,16 @@ function number.splitdimen(str)
     return lpegmatch(splitter,str)
 end
 
---[[ldx--
-<p>We use a metatable to intercept errors. When no key is found in
-the table with factors, the metatable will be consulted for an
-alternative index function.</p>
---ldx]]--
+-- We use a metatable to intercept errors. When no key is found in the table with
+-- factors, the metatable will be consulted for an alternative index function.
 
 setmetatableindex(dimenfactors, function(t,s)
  -- error("wrong dimension: " .. (s or "?")) -- better a message
     return false
 end)
 
---[[ldx--
-<p>We redefine the following function later on, so we comment it
-here (which saves us bytecodes.</p>
---ldx]]--
+-- We redefine the following function later on, so we comment it here (which saves
+-- us bytecodes).
 
 -- function string.todimen(str)
 --     if type(str) == "number" then
@@ -182,44 +170,38 @@ here (which saves us bytecodes.</p>
 local stringtodimen -- assigned later (commenting saves bytecode)
 
 local amount = S("+-")^0 * R("09")^0 * S(".,")^0 * R("09")^0
-local unit   = P("pt") + P("cm") + P("mm") + P("sp") + P("bp") + P("in")  +
-               P("pc") + P("dd") + P("cc") + P("nd") + P("nc")
+local unit   = P("pt") + P("cm") + P("mm") + P("sp") + P("bp")
+             + P("es") + P("ts") + P("pc") + P("dd") + P("cc")
+             + P("in")
+          -- + P("nd") + P("nc")
 
 local validdimen = amount * unit
 
 lpeg.patterns.validdimen = validdimen
 
---[[ldx--
-<p>This converter accepts calls like:</p>
-
-<typing>
-string.todimen("10")
-string.todimen(".10")
-string.todimen("10.0")
-string.todimen("10.0pt")
-string.todimen("10pt")
-string.todimen("10.0pt")
-</typing>
-
-<p>With this in place, we can now implement a proper datatype for dimensions, one
-that permits us to do this:</p>
-
-<typing>
-s = dimen "10pt" + dimen "20pt" + dimen "200pt"
-        - dimen "100sp" / 10 + "20pt" + "0pt"
-</typing>
-
-<p>We create a local metatable for this new type:</p>
---ldx]]--
+-- This converter accepts calls like:
+--
+--   string.todimen("10")
+--   string.todimen(".10")
+--   string.todimen("10.0")
+--   string.todimen("10.0pt")
+--   string.todimen("10pt")
+--   string.todimen("10.0pt")
+--
+-- With this in place, we can now implement a proper datatype for dimensions, one
+-- that permits us to do this:
+--
+--   s = dimen "10pt" + dimen "20pt" + dimen "200pt"
+--           - dimen "100sp" / 10 + "20pt" + "0pt"
+--
+-- We create a local metatable for this new type:
 
 local dimensions = { }
 
---[[ldx--
-<p>The main (and globally) visible representation of a dimen is defined next: it is
-a one-element table. The unit that is returned from the match is normally a number
-(one of the previously defined factors) but we also accept functions. Later we will
-see why. This function is redefined later.</p>
---ldx]]--
+-- The main (and globally) visible representation of a dimen is defined next: it is
+-- a one-element table. The unit that is returned from the match is normally a
+-- number (one of the previously defined factors) but we also accept functions.
+-- Later we will see why. This function is redefined later.
 
 -- function dimen(a)
 --     if a then
@@ -241,11 +223,9 @@ see why. This function is redefined later.</p>
 --     end
 -- end
 
---[[ldx--
-<p>This function return a small hash with a metatable attached. It is
-through this metatable that we can do the calculations. We could have
-shared some of the code but for reasons of speed we don't.</p>
---ldx]]--
+-- This function returns a small hash with a metatable attached. It is through this
+-- metatable that we can do the calculations. We could have shared some of the code
+-- but for reasons of speed we don't.
 
 function dimensions.__add(a, b)
     local ta, tb = type(a), type(b)
@@ -281,20 +261,16 @@ function dimensions.__unm(a)
     return setmetatable({ - a }, dimensions)
 end
 
---[[ldx--
-<p>It makes no sense to implement the power and modulo function but
-the next two do make sense because they permits is code like:</p>
-
-<typing>
-local a, b = dimen "10pt", dimen "11pt"
-...
-if a > b then
-    ...
-end
-</typing>
---ldx]]--
-
--- makes no sense: dimensions.__pow and dimensions.__mod
+-- It makes no sense to implement the power and modulo functions but
+-- the next two do make sense because they permit code like:
+--
+--   local a, b = dimen "10pt", dimen "11pt"
+--   ...
+--   if a > b then
+--       ...
+--   end
+--
+-- This also makes no sense: dimensions.__pow and dimensions.__mod.
 
 function dimensions.__lt(a, b)
     return a[1] < b[1]
@@ -304,24 +280,17 @@ function dimensions.__eq(a, b)
     return a[1] == b[1]
 end
 
---[[ldx--
-<p>We also need to provide a function for conversion to string (so that
-we can print dimensions). We print them as points, just like <l n='tex'/>.</p>
---ldx]]--
+-- We also need to provide a function for conversion to string (so that we can print
+-- dimensions). We print them as points, just like TeX.
 
 function dimensions.__tostring(a)
     return a[1]/65536 .. "pt" -- instead of todimen(a[1])
 end
 
---[[ldx--
-<p>Since it does not take much code, we also provide a way to access
-a few accessors</p>
-
-<typing>
-print(dimen().pt)
-print(dimen().sp)
-</typing>
---ldx]]--
+-- Since it does not take much code, we also provide a few accessors:
+--
+--   print(dimen().pt)
+--   print(dimen().sp)
 
 function dimensions.__index(tab,key)
     local d = dimenfactors[key]
@@ -332,41 +301,34 @@ function dimensions.__index(tab,key)
     return 1/d
 end
 
---[[ldx--
-<p>In the converter from string to dimension we support functions as
-factors. This is because in <l n='tex'/> we have a few more units:
-<type>ex</type> and <type>em</type>. These are not constant factors but
-depend on the current font. They are not defined by default, but need
-an explicit function call. This is because at the moment that this code
-is loaded, the relevant tables that hold the functions needed may not
-yet be available.</p>
---ldx]]--
-
-   dimenfactors["ex"] =  4 * 1/65536 --   4pt
-   dimenfactors["em"] = 10 * 1/65536 --  10pt
--- dimenfactors["%"]  =  4 * 1/65536 -- 400pt/100
-
---[[ldx--
-<p>The previous code is rather efficient (also thanks to <l n='lpeg'/>) but we
-can speed it up by caching converted dimensions. On my machine (2008) the following
-loop takes about 25.5 seconds.</p>
-
-<typing>
-for i=1,1000000 do
-    local s = dimen "10pt" + dimen "20pt" + dimen "200pt"
-        - dimen "100sp" / 10 + "20pt" + "0pt"
-end
-</typing>
-
-<p>When we cache converted strings this becomes 16.3 seconds. In order not
-to waste too much memory on it, we tag the values of the cache as being
-week which mean that the garbage collector will collect them in a next
-sweep. This means that in most cases the speed up is mostly affecting the
-current couple of calculations and as such the speed penalty is small.</p>
-
-<p>We redefine two previous defined functions that can benefit from
-this:</p>
---ldx]]--
+-- In the converter from string to dimension we support functions as factors. This
+-- is because in TeX we have a few more units: 'ex' and 'em'. These are not constant
+-- factors but depend on the current font. They are not defined by default, but need
+-- an explicit function call. This is because at the moment that this code is
+-- loaded, the relevant tables that hold the functions needed may not yet be
+-- available.
+
+   dimenfactors["ex"] =     4     /65536 --   4pt
+   dimenfactors["em"] =    10     /65536 --  10pt
+-- dimenfactors["%"]  =     4     /65536 -- 400pt/100
+   dimenfactors["eu"] = (9176/129)/65536 --  1es
+
+-- The previous code is rather efficient (also thanks to LPEG) but we can speed it
+-- up by caching converted dimensions. On my machine (2008) the following loop takes
+-- about 25.5 seconds.
+--
+--   for i=1,1000000 do
+--       local s = dimen "10pt" + dimen "20pt" + dimen "200pt"
+--           - dimen "100sp" / 10 + "20pt" + "0pt"
+--   end
+--
+-- When we cache converted strings this becomes 16.3 seconds. In order not to waste
+-- too much memory on it, we tag the values of the cache as being weak, which means
+-- that the garbage collector will collect them in a next sweep. This means that in
+-- most cases the speed up is mostly affecting the current couple of calculations
+-- and as such the speed penalty is small.
+--
+-- We redefine two previously defined functions that can benefit from this:
 
 local known = { } setmetatable(known, { __mode = "v" })
 
@@ -436,14 +398,10 @@ function number.toscaled(d)
     return format("%0.5f",d/0x10000) -- 2^16
 end
 
---[[ldx--
-<p>In a similar fashion we can define a glue datatype. In that case we
-probably use a hash instead of a one-element table.</p>
---ldx]]--
-
---[[ldx--
-<p>Goodie:s</p>
---ldx]]--
+-- In a similar fashion we can define a glue datatype. In that case we probably use
+-- a hash instead of a one-element table.
+--
+-- A goodie:
 
 function number.percent(n,d) -- will be cleaned up once luatex 0.30 is out
     d = d or texget("hsize")
diff --git a/tex/context/base/mkiv/util-fmt.lua b/tex/context/base/mkiv/util-fmt.lua
index fe80c6420..4da4ef985 100644
--- a/tex/context/base/mkiv/util-fmt.lua
+++ b/tex/context/base/mkiv/util-fmt.lua
@@ -11,7 +11,7 @@ utilities.formatters = utilities.formatters or { }
 local formatters     = utilities.formatters
 
 local concat, format = table.concat, string.format
-local tostring, type = tostring, type
+local tostring, type, unpack = tostring, type, unpack
 local strip = string.strip
 
 local lpegmatch = lpeg.match
@@ -21,12 +21,15 @@ function formatters.stripzeros(str)
     return lpegmatch(stripper,str)
 end
 
-function formatters.formatcolumns(result,between)
+function formatters.formatcolumns(result,between,header)
     if result and #result > 0 then
-        between = between or "   "
-        local widths, numbers = { }, { }
-        local first = result[1]
-        local n = #first
+        local widths    = { }
+        local numbers   = { }
+        local templates = { }
+        local first     = result[1]
+        local n         = #first
+              between   = between or "   "
+        --
         for i=1,n do
             widths[i] = 0
         end
@@ -35,13 +38,6 @@ function formatters.formatcolumns(result,between)
             for j=1,n do
                 local rj = r[j]
                 local tj = type(rj)
---                 if tj == "number" then
---                     numbers[j] = true
---                 end
---                 if tj ~= "string" then
---                     rj = tostring(rj)
---                     r[j] = rj
---                 end
                 if tj == "number" then
                     numbers[j] = true
                     rj = tostring(rj)
@@ -55,29 +51,59 @@ function formatters.formatcolumns(result,between)
                 end
             end
         end
+        if header then
+            for i=1,#header do
+                local h = header[i]
+                for j=1,n do
+                    local hj = tostring(h[j])
+                    h[j] = hj
+                    local w = #hj
+                    if w > widths[j] then
+                        widths[j] = w
+                    end
+                end
+            end
+        end
         for i=1,n do
             local w = widths[i]
             if numbers[i] then
                 if w > 80 then
-                    widths[i] = "%s" .. between
-                 else
-                    widths[i] = "%0" .. w .. "i" .. between
+                    templates[i] = "%s" .. between
+                else
+                    templates[i] = "% " .. w .. "i" .. between
                 end
             else
                 if w > 80 then
-                    widths[i] = "%s" .. between
-                 elseif w > 0 then
-                    widths[i] = "%-" .. w .. "s" .. between
+                    templates[i] = "%s" .. between
+                elseif w > 0 then
+                    templates[i] = "%-" .. w .. "s" .. between
                 else
-                    widths[i] = "%s"
+                    templates[i] = "%s"
                 end
             end
         end
-        local template = strip(concat(widths))
+        local template = strip(concat(templates))
         for i=1,#result do
             local str = format(template,unpack(result[i]))
             result[i] = strip(str)
         end
+        if header then
+            for i=1,n do
+                local w = widths[i]
+                if w > 80 then
+                    templates[i] = "%s" .. between
+                elseif w > 0 then
+                    templates[i] = "%-" .. w .. "s" .. between
+                else
+                    templates[i] = "%s"
+                end
+            end
+            local template = strip(concat(templates))
+            for i=1,#header do
+                local str = format(template,unpack(header[i]))
+                header[i] = strip(str)
+            end
+        end
     end
-    return result
+    return result, header
 end
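+
+-- A usage sketch (the optional 'header' argument is the new third parameter):
+--
+--   local rows   = { { "alpha", 1 }, { "beta", 123 } }
+--   local header = { { "name", "n" } }
+--   rows, header = formatters.formatcolumns(rows,"  ",header)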
diff --git a/tex/context/base/mkiv/util-seq.lua b/tex/context/base/mkiv/util-seq.lua
index 35839f230..49952dd98 100644
--- a/tex/context/base/mkiv/util-seq.lua
+++ b/tex/context/base/mkiv/util-seq.lua
@@ -6,15 +6,13 @@ if not modules then modules = { } end modules ['util-seq'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Here we implement a mechanism for chaining the special functions
-that we use in <l n="context"> to deal with mode list processing. We
-assume that namespaces for the functions are used, but for speed we
-use locals to refer to them when compiling the chain.</p>
---ldx]]--
-
+-- Here we implement a mechanism for chaining the special functions that we use in
+-- ConTeXt to deal with mode list processing. We assume that namespaces for the
+-- functions are used, but for speed we use locals to refer to them when compiling
+-- the chain.
+--
 -- todo: delayed: i.e. we register them in the right order already but delay usage
-
+--
 -- todo: protect groups (as in tasks)
 
 local gsub, gmatch = string.gsub, string.gmatch
diff --git a/tex/context/base/mkxl/attr-ini.lmt b/tex/context/base/mkxl/attr-ini.lmt
index 8b2ec8911..32fc36cdd 100644
--- a/tex/context/base/mkxl/attr-ini.lmt
+++ b/tex/context/base/mkxl/attr-ini.lmt
@@ -10,10 +10,8 @@ local next, type = next, type
 local osexit = os.exit
 local sortedhash = table.sortedhash
 
---[[ldx--
-<p>We start with a registration system for atributes so that we can use the
-symbolic names later on.</p>
---ldx]]--
+-- We start with a registration system for attributes so that we can use the symbolic
+-- names later on.
 
 local nodes             = nodes
 local context           = context
@@ -71,17 +69,13 @@ trackers.register("attributes.values", function(v) trace_values = v end)
 --     end
 -- end
 
---[[ldx--
-<p>We reserve this one as we really want it to be always set (faster).</p>
---ldx]]--
+-- We reserve this one as we really want it to be always set (faster).
 
 names[0], numbers["fontdynamic"] = "fontdynamic", 0
 
---[[ldx--
-<p>private attributes are used by the system and public ones are for users. We use dedicated
-ranges of numbers for them. Of course a the <l n='context'/> end a private attribute can be
-accessible too, so a private attribute can have a public appearance.</p>
---ldx]]--
+-- Private attributes are used by the system and public ones are for users. We use
+-- dedicated ranges of numbers for them. Of course at the TeX end a private attribute
+-- can be accessible too, so a private attribute can have a public appearance.
 
 sharedstorage.attributes_last_private = sharedstorage.attributes_last_private or   15 -- very private
 sharedstorage.attributes_last_public  = sharedstorage.attributes_last_public  or 1024 -- less private
diff --git a/tex/context/base/mkxl/char-tex.lmt b/tex/context/base/mkxl/char-tex.lmt
index 31023136d..0ac297d59 100644
--- a/tex/context/base/mkxl/char-tex.lmt
+++ b/tex/context/base/mkxl/char-tex.lmt
@@ -46,17 +46,14 @@ local trace_defining        = false  trackers.register("characters.defining", fu
 
 local report_defining       = logs.reporter("characters")
 
---[[ldx--
-<p>In order to deal with 8-bit output, we need to find a way to go from <l n='utf'/> to
-8-bit. This is handled in the <l n='luatex'/> engine itself.</p>
-
-<p>This leaves us problems with characters that are specific to <l n='tex'/> like
-<type>{}</type>, <type>$</type> and alike. We can remap some chars that tex input files
-are sensitive for to a private area (while writing to a utility file) and revert then
-to their original slot when we read in such a file. Instead of reverting, we can (when
-we resolve characters to glyphs) map them to their right glyph there. For this purpose
-we can use the private planes 0x0F0000 and 0x100000.</p>
---ldx]]--
+-- In order to deal with 8-bit output, we need to find a way to go from UTF to
+-- 8-bit. This is handled in the 32 bit engine itself. That leaves us with problems
+-- for characters that are specific to TeX, like curly braces and dollars. We can remap
+-- some chars that tex input files are sensitive for to a private area (while
+-- writing to a utility file) and revert them to their original slot when we read in
+-- such a file. Instead of reverting, we can (when we resolve characters to glyphs)
+-- map them to their right glyph there. For this purpose we can use the private
+-- planes 0x0F0000 and 0x100000.
 
 local low     = allocate()
 local high    = allocate()
@@ -106,21 +103,6 @@ private.escape  = utf.remapper(escapes) -- maybe: ,"dynamic"
 private.replace = utf.remapper(low)     -- maybe: ,"dynamic"
 private.revert  = utf.remapper(high)    -- maybe: ,"dynamic"
 
---[[ldx--
-<p>We get a more efficient variant of this when we integrate
-replacements in collapser. This more or less renders the previous
-private code redundant. The following code is equivalent but the
-first snippet uses the relocated dollars.</p>
-
-<typing>
-[󰀤x󰀤] [$x$]
-</typing>
---ldx]]--
-
--- using the tree-lpeg-mapper would be nice but we also need to deal with end-of-string
--- cases: "\"\i" and don't want "\relax" to be seen as \r e lax" (for which we need to mess
--- with spaces
-
 local accentmapping = allocate {
     ['"'] = { [""] = "¨",
         A = "Ä", a = "ä",
@@ -288,12 +270,12 @@ local commandmapping = allocate {
 
 texcharacters.commandmapping = commandmapping
 
-local ligaturemapping = allocate {
-    ["''"]  = "”",
-    ["``"]  = "“",
-    ["--"]  = "–",
-    ["---"] = "—",
-}
+-- local ligaturemapping = allocate {
+--     ["''"]  = "”",
+--     ["``"]  = "“",
+--     ["--"]  = "–",
+--     ["---"] = "—",
+-- }
 
 -- Older accent handling code can be found in char-def.lua but in the meantime
 -- we moved on. First the one with commands:
@@ -321,9 +303,9 @@ local function toutfpattern()
             hash["{\\"..k.."}"] = v
             hash["{\\"..k.." }"] = v
         end
-        for k, v in next, ligaturemapping do
-            hash[k] = v
-        end
+     -- for k, v in next, ligaturemapping do
+     --     hash[k] = v
+     -- end
         untex = utfchartabletopattern(hash) / hash
     end
     return untex
@@ -376,9 +358,9 @@ local function toutfpattern()
         for k, v in next, commandmapping do
             hash[k] = v
         end
-        for k, v in next, ligaturemapping do
-            hash[k] = v
-        end
+     -- for k, v in next, ligaturemapping do
+     --     hash[k] = v
+     -- end
         untex = utfchartabletopattern(hash) / hash
     end
     return untex
@@ -580,10 +562,8 @@ implement { -- a waste of scanner but consistent
     actions = texcharacters.defineaccents
 }
 
---[[ldx--
-<p>Instead of using a <l n='tex'/> file to define the named glyphs, we
-use the table. After all, we have this information available anyway.</p>
---ldx]]--
+-- Instead of using a TeX file to define the named glyphs, we use the table. After
+-- all, we have this information available anyway.
 
 local function to_number(s)
     local n = tonumber(s)
@@ -878,10 +858,6 @@ function characters.setactivecatcodes(cct)
     tex.catcodetable = saved
 end
 
---[[ldx--
-<p>Setting the lccodes is also done in a loop over the data table.</p>
---ldx]]--
-
 implement {
     name      = "chardescription",
     arguments = "integer",
diff --git a/tex/context/base/mkxl/cont-new.mkxl b/tex/context/base/mkxl/cont-new.mkxl
index 9a6fc93da..53ccef0b6 100644
--- a/tex/context/base/mkxl/cont-new.mkxl
+++ b/tex/context/base/mkxl/cont-new.mkxl
@@ -13,7 +13,7 @@
 
 % \normalend % uncomment this to get the real base runtime
 
-\newcontextversion{2023.03.20 15:42}
+\newcontextversion{2023.04.01 09:28}
 
 %D This file is loaded at runtime, thereby providing an excellent place for hacks,
 %D patches, extensions and new features. There can be local overloads in cont-loc
diff --git a/tex/context/base/mkxl/context.mkxl b/tex/context/base/mkxl/context.mkxl
index 1a07772eb..6f4b7d052 100644
--- a/tex/context/base/mkxl/context.mkxl
+++ b/tex/context/base/mkxl/context.mkxl
@@ -29,7 +29,7 @@
 %D {YYYY.MM.DD HH:MM} format.
 
 \immutable\edef\contextformat {\jobname}
-\immutable\edef\contextversion{2023.03.20 15:42}
+\immutable\edef\contextversion{2023.04.01 09:28}
 
 %overloadmode 1 % check frozen / warning
 %overloadmode 2 % check frozen / error
@@ -215,8 +215,9 @@
 
 \loadmkxlfile{unic-ini}
 
-\loadmkxlfile{core-two}
+%loadmkxlfile{core-two} % retired, not in testsuite, not on garden, not in styles
 \loadmkxlfile{core-dat}
+\loadmkxlfile{core-pag}
 
 \loadmkxlfile{colo-ini}
 \loadmkxlfile{colo-nod}
@@ -647,26 +648,26 @@
 % we will definitely freeze mkiv and then use lmt files for futher development
 % of lmtx. We also no longer use the macro feature to replace 5.3 compatible
 % function calls by native 5.4 features as lmt files assume 5.4 anyway. This
-% makes format generation a little faster (not that it's that slow). It might \
+% makes format generation a little faster (not that it's that slow). It might
 % take a while before we dealt with all of them because I'll also clean them
-% up a bit when doing.
+% up a bit when doing so. Some will probably always be shared, like char-def.lua.
 %
 % % luat-bas.mkxl l-macro-imp-optimize % this is no longer used
 
-% c:/data/develop/context/sources/buff-imp-default.lua
-% c:/data/develop/context/sources/buff-imp-escaped.lua
-% c:/data/develop/context/sources/buff-imp-lua.lua
-% c:/data/develop/context/sources/buff-imp-mp.lua
-% c:/data/develop/context/sources/buff-imp-nested.lua
-% c:/data/develop/context/sources/buff-imp-parsed-xml.lua
-% c:/data/develop/context/sources/buff-imp-tex.lua
-% c:/data/develop/context/sources/buff-imp-xml.lua
-
 % c:/data/develop/context/sources/buff-par.lua
 % c:/data/develop/context/sources/buff-ver.lua
+%
+% c:/data/develop/context/sources/buff-imp-default.lua    % shared
+% c:/data/develop/context/sources/buff-imp-escaped.lua    % shared
+% c:/data/develop/context/sources/buff-imp-lua.lua        % shared
+% c:/data/develop/context/sources/buff-imp-mp.lua         % shared
+% c:/data/develop/context/sources/buff-imp-nested.lua     % shared
+% c:/data/develop/context/sources/buff-imp-parsed-xml.lua % shared
+% c:/data/develop/context/sources/buff-imp-tex.lua        % shared
+% c:/data/develop/context/sources/buff-imp-xml.lua        % shared
 
 % c:/data/develop/context/sources/char-cjk.lua
-% c:/data/develop/context/sources/char-def.lua
+% c:/data/develop/context/sources/char-def.lua % shared data file, a real big one
 % c:/data/develop/context/sources/char-enc.lua
 % c:/data/develop/context/sources/char-ent.lua
 % c:/data/develop/context/sources/char-fio.lua
@@ -680,7 +681,7 @@
 
 % c:/data/develop/context/sources/cldf-com.lua
 % c:/data/develop/context/sources/cldf-ini.lua
-% c:/data/develop/context/sources/cldf-prs.lua % use in chemistry
+% c:/data/develop/context/sources/cldf-prs.lua % used in chemistry
 % c:/data/develop/context/sources/cldf-scn.lua
 % c:/data/develop/context/sources/cldf-stp.lua
 % c:/data/develop/context/sources/cldf-ver.lua
@@ -690,8 +691,6 @@
 
 % c:/data/develop/context/sources/core-con.lua
 % c:/data/develop/context/sources/core-ctx.lua
-% c:/data/develop/context/sources/core-dat.lua
-% c:/data/develop/context/sources/core-two.lua
 
 % data...
 
@@ -700,7 +699,7 @@
 % c:/data/develop/context/sources/file-res.lua
 
 % c:/data/develop/context/sources/font-afk.lua
-% c:/data/develop/context/sources/font-agl.lua
+% c:/data/develop/context/sources/font-agl.lua % shared data file
 % c:/data/develop/context/sources/font-aux.lua
 % c:/data/develop/context/sources/font-cid.lua
 % c:/data/develop/context/sources/font-enc.lua
@@ -724,16 +723,16 @@
 % c:/data/develop/context/sources/font-trt.lua
 % c:/data/develop/context/sources/font-web.lua % proof of concept, never used
 
-% c:/data/develop/context/sources/font-imp-combining.lua  % shared, like typescript
-% c:/data/develop/context/sources/font-imp-dimensions.lua % idem
-% c:/data/develop/context/sources/font-imp-italics.lua    % idem
-% c:/data/develop/context/sources/font-imp-notused.lua    % idem
-% c:/data/develop/context/sources/font-imp-properties.lua % idem
-% c:/data/develop/context/sources/font-imp-reorder.lua    % idem
-% c:/data/develop/context/sources/font-imp-spacekerns.lua % idem
-% c:/data/develop/context/sources/font-imp-tex.lua        % idem
-% c:/data/develop/context/sources/font-imp-tweaks.lua     % idem
-% c:/data/develop/context/sources/font-imp-unicode.lua    % idem
+% c:/data/develop/context/sources/font-imp-combining.lua  % shared
+% c:/data/develop/context/sources/font-imp-dimensions.lua % shared
+% c:/data/develop/context/sources/font-imp-italics.lua    % shared
+% c:/data/develop/context/sources/font-imp-notused.lua    % shared
+% c:/data/develop/context/sources/font-imp-properties.lua % shared
+% c:/data/develop/context/sources/font-imp-reorder.lua    % shared
+% c:/data/develop/context/sources/font-imp-spacekerns.lua % shared
+% c:/data/develop/context/sources/font-imp-tex.lua        % shared
+% c:/data/develop/context/sources/font-imp-tweaks.lua     % shared
+% c:/data/develop/context/sources/font-imp-unicode.lua    % shared
 
 % c:/data/develop/context/sources/good-ctx.lua
 % c:/data/develop/context/sources/good-ini.lua
@@ -749,26 +748,26 @@
 
 % c:/data/develop/context/sources/java-ini.lua
 
-% c:/data/develop/context/sources/lang-cnt.lua
-% c:/data/develop/context/sources/lang-def.lua % these are data files
-% c:/data/develop/context/sources/lang-txt.lua % these are data files
+% c:/data/develop/context/sources/lang-cnt.lua % shared data file
+% c:/data/develop/context/sources/lang-def.lua % shared data file
+% c:/data/develop/context/sources/lang-txt.lua % shared data file
 % c:/data/develop/context/sources/lang-wrd.lua
 
 % c:/data/develop/context/sources/luat-exe.lua
 % c:/data/develop/context/sources/luat-iop.lua
 % c:/data/develop/context/sources/luat-mac.lua % will become lmt
 
-% c:/data/develop/context/sources/lxml-aux.lua
-% c:/data/develop/context/sources/lxml-css.lua
-% c:/data/develop/context/sources/lxml-dir.lua
-% c:/data/develop/context/sources/lxml-ent.lua
-% c:/data/develop/context/sources/lxml-ini.lua
-% c:/data/develop/context/sources/lxml-lpt.lua
-% c:/data/develop/context/sources/lxml-mis.lua
-% c:/data/develop/context/sources/lxml-sor.lua
-% c:/data/develop/context/sources/lxml-tab.lua
-% c:/data/develop/context/sources/lxml-tex.lua
-% c:/data/develop/context/sources/lxml-xml.lua
+% c:/data/develop/context/sources/lxml-aux.lua % the xml interface is rather stable
+% c:/data/develop/context/sources/lxml-css.lua % and is also provided/used in lua so
+% c:/data/develop/context/sources/lxml-dir.lua % might as well share these because they
+% c:/data/develop/context/sources/lxml-ent.lua % are unlikely to change
+% c:/data/develop/context/sources/lxml-ini.lua %
+% c:/data/develop/context/sources/lxml-lpt.lua %
+% c:/data/develop/context/sources/lxml-mis.lua %
+% c:/data/develop/context/sources/lxml-sor.lua %
+% c:/data/develop/context/sources/lxml-tab.lua %
+% c:/data/develop/context/sources/lxml-tex.lua %
+% c:/data/develop/context/sources/lxml-xml.lua %
 
 % c:/data/develop/context/sources/meta-blb.lua
 % c:/data/develop/context/sources/meta-fun.lua
@@ -788,16 +787,16 @@
 % c:/data/develop/context/sources/page-pst.lua
 
 % c:/data/develop/context/sources/publ-aut.lua % shared
-% c:/data/develop/context/sources/publ-dat.lua
-% c:/data/develop/context/sources/publ-fnd.lua
-% c:/data/develop/context/sources/publ-inc.lua
-% c:/data/develop/context/sources/publ-ini.lua
-% c:/data/develop/context/sources/publ-jrn.lua
-% c:/data/develop/context/sources/publ-oth.lua
-% c:/data/develop/context/sources/publ-reg.lua
-% c:/data/develop/context/sources/publ-sor.lua
-% c:/data/develop/context/sources/publ-tra.lua
-% c:/data/develop/context/sources/publ-usr.lua
+% c:/data/develop/context/sources/publ-dat.lua % shared
+% c:/data/develop/context/sources/publ-fnd.lua % shared
+% c:/data/develop/context/sources/publ-inc.lua % shared
+% c:/data/develop/context/sources/publ-ini.lua % shared
+% c:/data/develop/context/sources/publ-jrn.lua % shared
+% c:/data/develop/context/sources/publ-oth.lua % shared
+% c:/data/develop/context/sources/publ-reg.lua % shared
+% c:/data/develop/context/sources/publ-sor.lua % shared
+% c:/data/develop/context/sources/publ-tra.lua % shared
+% c:/data/develop/context/sources/publ-usr.lua % shared
 
 % c:/data/develop/context/sources/scrn-but.lua
 % c:/data/develop/context/sources/scrn-fld.lua
@@ -828,6 +827,3 @@
 % c:/data/develop/context/sources/trac-lmx.lua
 % c:/data/develop/context/sources/trac-par.lua
 % c:/data/develop/context/sources/trac-tex.lua
-
-% c:/data/develop/context/sources/typo-cln.lua -- wrong name for what it does
-% c:/data/develop/context/sources/typo-dha.lua
diff --git a/tex/context/base/mkxl/core-dat.lmt b/tex/context/base/mkxl/core-dat.lmt
new file mode 100644
index 000000000..fd8aa0fb6
--- /dev/null
+++ b/tex/context/base/mkxl/core-dat.lmt
@@ -0,0 +1,225 @@
+if not modules then modules = { } end modules ['core-dat'] = {
+    version   = 1.001,
+    comment   = "companion to core-dat.mkiv",
+    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+    copyright = "PRAGMA ADE / ConTeXt Development Team",
+    license   = "see context related readme files"
+}
+
+-- This module provides a (multipass) container for arbitrary data. It replaces the
+-- twopass data mechanism.
+
+local tonumber, tostring, type = tonumber, tostring, type
+
+local context           = context
+
+local trace_datasets    = false  trackers.register("job.datasets" ,  function(v) trace_datasets   = v end)
+
+local report_dataset    = logs.reporter("dataset")
+
+local allocate          = utilities.storage.allocate
+local settings_to_hash  = utilities.parsers.settings_to_hash
+
+local texgetcount       = tex.getcount
+local texsetcount       = tex.setcount
+
+local v_yes             = interfaces.variables.yes
+
+local new_latelua       = nodes.pool.latelua
+
+local implement         = interfaces.implement
+
+local c_realpageno      = tex.iscount("realpageno")
+
+local collected = allocate()
+local tobesaved = allocate()
+
+local datasets = {
+    collected = collected,
+    tobesaved = tobesaved,
+}
+
+job.datasets = datasets
+
+local function initializer()
+    collected = datasets.collected
+    tobesaved = datasets.tobesaved
+end
+
+job.register('job.datasets.collected', tobesaved, initializer, nil)
+
+local sets = { }
+
+table.setmetatableindex(tobesaved, function(t,k)
+    local v = { }
+    t[k] = v
+    return v
+end)
+
+table.setmetatableindex(sets, function(t,k)
+    local v = {
+        index = 0,
+        order = 0,
+    }
+    t[k] = v
+    return v
+end)
+
+local function setdata(settings)
+    local name = settings.name
+    local tag  = settings.tag
+    local data = settings.data
+    local list = tobesaved[name]
+    if settings.convert and type(data) == "string" then
+        data = settings_to_hash(data)
+    end
+    if type(data) ~= "table" then
+        data = { data = data }
+    end
+    if not tag then
+        tag = #list + 1
+    else
+        tag = tonumber(tag) or tag -- autonumber saves keys
+    end
+    list[tag] = data
+    if settings.delay == v_yes then
+        local set = sets[name]
+        local index = set.index + 1
+        set.index = index
+        data.index = index
+        data.order = index
+        data.realpage = texgetcount(c_realpageno)
+        if trace_datasets then
+            report_dataset("action %a, name %a, tag %a, index %a","assign delayed",name,tag,index)
+        end
+    elseif trace_datasets then
+        report_dataset("action %a, name %a, tag %a","assign immediate",name,tag)
+    end
+    return name, tag, data
+end
+
+datasets.setdata = setdata
+
+function datasets.extend(name,tag)
+    if type(name) == "table" then
+        name, tag = name.name, name.tag
+    end
+    local set = sets[name]
+    local order = set.order + 1
+    local realpage = texgetcount(c_realpageno)
+    set.order = order
+    local t = tobesaved[name][tag]
+    t.realpage = realpage
+    t.order = order
+    if trace_datasets then
+        report_dataset("action %a, name %a, tag %a, page %a, index %a","flush by order",name,tag,t.index or 0,order,realpage)
+    end
+end
+
+function datasets.getdata(name,tag,key,default)
+    local t = collected[name]
+    if t == nil then
+        if trace_datasets then
+            report_dataset("error: unknown dataset, name %a",name)
+        end
+    elseif type(t) ~= "table" then
+        return t
+    else
+        t = t[tag] or t[tonumber(tag)]
+        if not t then
+            if trace_datasets then
+                report_dataset("error: unknown dataset, name %a, tag %a",name,tag)
+            end
+        elseif key then
+            return t[key] or default
+        else
+            return t
+        end
+    end
+    return default
+end
+
+local function setdataset(settings)
+    settings.convert = true
+    local name, tag = setdata(settings)
+    if settings.delay ~= v_yes then
+        --
+    else
+        context(new_latelua { action = job.datasets.extend, name = name, tag = tag })
+    end
+end
+
+local cache = table.setmetatableindex(function(t,k)
+    local v = table.load(k..".tuc")
+    if v then
+        v = v.job
+        if v then
+            v = v.datasets
+            if v then
+                v = v.collected
+            end
+        end
+    end
+    if not v then
+        v = { }
+        if trace_datasets then
+            report_dataset("error: unknown dataset job %a",k)
+        end
+    end
+    t[k] = v
+    return v
+end)
+
+local function datasetvariable(name,tag,key,cache)
+    local t = (cache or collected)[name]
+    if t == nil then
+        if trace_datasets then
+            report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name) -- no tag
+        end
+    elseif type(t) ~= "table" then
+        context(tostring(t))
+    else
+        t = t and (t[tag] or t[tonumber(tag)])
+        if not t then
+            if trace_datasets then
+                report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name,tag)
+            end
+        elseif type(t) == "table" then
+            local s = t[key]
+            if type(s) ~= "table" then
+                context(tostring(s))
+            elseif trace_datasets then
+                report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name,tag)
+            end
+        end
+    end
+end
+
+local function datasetvariablefromjob(jobname,name,tag,key)
+    datasetvariable(name,tag,key,cache[jobname])
+end
+
+implement {
+    name      = "setdataset",
+    actions   = setdataset,
+    arguments = {
+        {
+            { "name" },
+            { "tag" },
+            { "delay" },
+            { "data" },
+        }
+    }
+}
+
+implement {
+    name      = "datasetvariable",
+    actions   = datasetvariable,
+    arguments = "3 strings",
+}
+
+implement {
+    name      = "datasetvariablefromjob",
+    arguments = { "string", "string", "string", "string" },
+    actions   = datasetvariablefromjob
+}
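
A rough standalone model of the multipass flow above (the .tuc round trip and the
index/order bookkeeping are left out): pass one only fills tobesaved, the saved table
comes back as collected on the next run, and only then can getdata answer queries.

    -- simplified stand-in for the tobesaved/collected pair in core-dat.lmt
    local tobesaved = { }
    local collected = { }

    local function setdata(name,tag,data)
        local list = tobesaved[name] or { }
        tobesaved[name] = list
        list[tag or (#list + 1)] = data
    end

    local function getdata(name,tag,key,default)
        local t = collected[name]
        t = t and t[tag]
        if t == nil then
            return default
        elseif key ~= nil then
            return t[key] or default
        else
            return t
        end
    end

    -- pass 1: nothing has been collected yet
    setdata("chapters","intro",{ realpage = 3 })
    assert(getdata("chapters","intro","realpage",0) == 0)

    -- pass 2: pretend the utility file brought the data back
    collected = tobesaved
    assert(getdata("chapters","intro","realpage",0) == 3)
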
diff --git a/tex/context/base/mkxl/core-dat.mkxl b/tex/context/base/mkxl/core-dat.mkxl
index ab40d874c..6d7d1bd14 100644
--- a/tex/context/base/mkxl/core-dat.mkxl
+++ b/tex/context/base/mkxl/core-dat.mkxl
@@ -1,6 +1,6 @@
 %D \module
 %D   [       file=core-dat,
-%D        version=20122.04.17, % replaces core-two from 1997.03.31,
+%D        version=2021.04.17, % replaces core-two from 1997.03.31,
 %D          title=\CONTEXT\ Core Macros,
 %D       subtitle=Multipass Datasets,
 %D         author=Hans Hagen,
@@ -42,7 +42,7 @@
 
 \unprotect
 
-\registerctxluafile{core-dat}{}
+\registerctxluafile{core-dat}{autosuffix}
 
 \installcorenamespace{dataset}
 
@@ -78,50 +78,4 @@
      \expandafter\clf_datasetvariable
    \fi}
 
-\installcorenamespace{pagestate}
-\installcorenamespace{pagestatecounter}
-
-\installcommandhandler \??pagestate {pagestate} \??pagestate
-
-\def\syst_pagestates_allocate
-  {\expandafter\newinteger\csname\??pagestatecounter\currentpagestate\endcsname}
-
-\appendtoks
-    \syst_pagestates_allocate
-\to \everydefinepagestate
-
-\setuppagestate
-  [\c!delay=\v!yes]
-
-\permanent\tolerant\protected\def\setpagestate[#1]#*[#2]%
-  {\begingroup
-   \edef\currentpagestate{#1}%
-   \ifcsname\??pagestatecounter\currentpagestate\endcsname
-     \scratchcounter\lastnamedcs
-     \advanceby\scratchcounter\plusone
-   \else
-     \scratchcounter\plusone
-     \syst_pagestates_allocate
-   \fi
-   \global\csname\??pagestatecounter\currentpagestate\endcsname\scratchcounter
-   \clf_setpagestate
-      name  {\currentpagestate}%
-      tag   {\ifparameter#2\or#2\else\number\scratchcounter\fi}%
-      delay {\pagestateparameter\c!delay}%
-   \relax
-   \endgroup}
-
-\permanent\protected\def\autosetpagestate#1%
-  {\setpagestate[#1]\relax}
-
-\permanent\def\autopagestatenumber#1{\begincsname\??pagestatecounter#1\endcsname}
-
-\permanent\def\pagestaterealpage     #1#2{\clf_pagestaterealpage     {#1}{#2}}
-\permanent\def\setpagestaterealpageno#1#2{\clf_setpagestaterealpageno{#1}{#2}}
-\permanent\def\pagestaterealpageorder#1#2{\clf_pagestaterealpageorder{#1}#2\relax}
-
-\permanent\def\autopagestaterealpage     #1{\clf_pagestaterealpage     {#1}{\number\autopagestatenumber{#1}}}
-\permanent\def\setautopagestaterealpageno#1{\clf_setpagestaterealpageno{#1}{\number\autopagestatenumber{#1}}}
-\permanent\def\autopagestaterealpageorder#1{\clf_pagestaterealpageorder{#1}\numexpr\autopagestatenumber{#1}\relax}
-
 \protect
diff --git a/tex/context/base/mkxl/core-pag.lmt b/tex/context/base/mkxl/core-pag.lmt
new file mode 100644
index 000000000..219171d42
--- /dev/null
+++ b/tex/context/base/mkxl/core-pag.lmt
@@ -0,0 +1,160 @@
+if not modules then modules = { } end modules ['core-pag'] = {
+    version   = 1.001,
+    comment   = "companion to core-pag.mkxl",
+    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+    copyright = "PRAGMA ADE / ConTeXt Development Team",
+    license   = "see context related readme files"
+}
+
+-- This module keeps track of (real) page states over multiple passes. It was
+-- split off from the dataset code in core-dat.
+
+local tonumber = tonumber
+
+local context           = context
+local ctx_latelua       = context.latelua
+
+local trace_pagestates  = false  trackers.register("job.pagestates", function(v) trace_pagestates = v end)
+
+local report_pagestate  = logs.reporter("pagestate")
+
+local allocate          = utilities.storage.allocate
+
+local texgetcount       = tex.getcount
+local texsetcount       = tex.setcount
+
+local new_latelua       = nodes.pool.latelua
+
+local implement         = interfaces.implement
+local getnamespace      = interfaces.getnamespace
+
+local c_realpageno      = tex.iscount("realpageno")
+local c_realpagestateno = tex.iscount("realpagestateno")
+
+local collected = allocate()
+local tobesaved = allocate()
+
+local pagestates = {
+    collected = collected,
+    tobesaved = tobesaved,
+}
+
+job.pagestates = pagestates
+
+local function initializer()
+    collected = pagestates.collected
+    tobesaved = pagestates.tobesaved
+end
+
+job.register("job.pagestates.collected", tobesaved, initializer, nil)
+
+table.setmetatableindex(tobesaved, "table")
+
+local function setstate(settings)
+    local name = settings.name
+    local tag  = settings.tag
+    local list = tobesaved[name]
+    if not tag then
+        tag = #list + 1
+    else
+        tag = tonumber(tag) or tag -- autonumber saves keys
+    end
+    local realpage = texgetcount(c_realpageno)
+    local data = realpage
+    list[tag] = data
+    if trace_pagestates then
+        report_pagestate("action %a, name %a, tag %a, preset %a","set",name,tag,realpage)
+    end
+    return name, tag, data
+end
+
+local function extend(name,tag)
+    local realpage = texgetcount(c_realpageno)
+    if trace_pagestates then
+        report_pagestate("action %a, name %a, tag %a, preset %a","synchronize",name,tag,realpage)
+    end
+    tobesaved[name][tag] = realpage
+end
+
+local function realpage(name,tag,default)
+    local t = collected[name]
+    if t then
+        t = t[tag] or t[tonumber(tag)]
+        if t then
+            return tonumber(t or default)
+        elseif trace_pagestates then
+            report_pagestate("error: unknown dataset, name %a, tag %a",name,tag)
+        end
+    elseif trace_pagestates then
+        report_pagestate("error: unknown dataset, name %a, tag %a",name) -- nil
+    end
+    return default
+end
+
+local function realpageorder(name,tag)
+    local t = collected[name]
+    if t then
+        local p = t[tag]
+        if p then
+            local n = 1
+            for i=tag-1,1,-1 do
+                if t[i] == p then
+                    n = n + 1
+                end
+            end
+            return n
+        end
+    end
+    return 0
+end
+
+pagestates.setstate      = setstate
+pagestates.extend        = extend
+pagestates.realpage      = realpage
+pagestates.realpageorder = realpageorder
+
+function pagestates.countervalue(name)
+    return name and texgetcount(getnamespace("pagestatecounter") .. name) or 0
+end
+
+local function setpagestate(settings)
+    local name, tag = setstate(settings)
+ -- context(new_latelua(function() extend(name,tag) end))
+    ctx_latelua(function() extend(name,tag) end)
+end
+
+local function setpagestaterealpageno(name,tag)
+    local t = collected[name]
+    t = t and (t[tag] or t[tonumber(tag)])
+    texsetcount("realpagestateno",t or texgetcount(c_realpageno))
+end
+
+implement {
+    name      = "setpagestate",
+    actions   = setpagestate,
+    arguments = {
+        {
+            { "name" },
+            { "tag" },
+            { "delay" },
+        }
+    }
+}
+
+implement {
+    name      = "pagestaterealpage",
+    actions   = { realpage, context },
+    arguments = "2 strings",
+}
+
+implement {
+    name      = "setpagestaterealpageno",
+    actions   = setpagestaterealpageno,
+    arguments = "2 strings",
+}
+
+implement {
+    name      = "pagestaterealpageorder",
+    actions   = { realpageorder, context },
+    arguments = { "string", "integer" }
+}
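
The realpageorder helper above counts how many lower numbered tags of the same state
ended up on the same real page. A stripped down version with a small test makes the
intent explicit; it assumes, as the autonumbering produces, that tags are the integer
keys 1..n.

    -- stripped down variant of the function above, for illustration only
    local function realpageorder(t,tag)
        local p = t[tag]
        if not p then
            return 0
        end
        local n = 1
        for i=tag-1,1,-1 do
            if t[i] == p then
                n = n + 1
            end
        end
        return n
    end

    local pages = { 5, 5, 6, 5 } -- real page per tag
    assert(realpageorder(pages,1) == 1) -- first  occurrence on page 5
    assert(realpageorder(pages,2) == 2) -- second occurrence on page 5
    assert(realpageorder(pages,3) == 1) -- first  occurrence on page 6
    assert(realpageorder(pages,4) == 3) -- third  occurrence on page 5
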
diff --git a/tex/context/base/mkxl/core-pag.mkxl b/tex/context/base/mkxl/core-pag.mkxl
new file mode 100644
index 000000000..43b398b16
--- /dev/null
+++ b/tex/context/base/mkxl/core-pag.mkxl
@@ -0,0 +1,68 @@
+%D \module
+%D   [       file=core-pag,
+%D        version=2023.03.23, % moved from core-dat
+%D          title=\CONTEXT\ Core Macros,
+%D       subtitle=Multipass Pagestate,
+%D         author=Hans Hagen,
+%D           date=\currentdate,
+%D      copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Core Macros / Multipass Pagestate}
+
+\unprotect
+
+\newinteger\realpagestateno
+
+\registerctxluafile{core-pag}{autosuffix}
+
+\installcorenamespace{pagestate}
+\installcorenamespace{pagestatecounter}
+
+\installcommandhandler \??pagestate {pagestate} \??pagestate
+
+\def\syst_pagestates_allocate
+  {\expandafter\newinteger\csname\??pagestatecounter\currentpagestate\endcsname}
+
+\appendtoks
+    \syst_pagestates_allocate
+\to \everydefinepagestate
+
+\setuppagestate
+  [\c!delay=\v!yes]
+
+\permanent\tolerant\protected\def\setpagestate[#1]#*[#2]%
+  {\begingroup
+   \edef\currentpagestate{#1}%
+   \ifcsname\??pagestatecounter\currentpagestate\endcsname
+     \scratchcounter\lastnamedcs
+     \advanceby\scratchcounter\plusone
+   \else
+     \scratchcounter\plusone
+     \syst_pagestates_allocate
+   \fi
+   \global\csname\??pagestatecounter\currentpagestate\endcsname\scratchcounter
+   \clf_setpagestate
+      name  {\currentpagestate}%
+      tag   {\ifparameter#2\or#2\else\number\scratchcounter\fi}%
+      delay {\pagestateparameter\c!delay}%
+   \relax
+   \endgroup}
+
+\permanent\protected\def\autosetpagestate#1%
+  {\setpagestate[#1]\relax}
+
+\permanent\def\autopagestatenumber#1{\begincsname\??pagestatecounter#1\endcsname}
+
+\permanent\def\pagestaterealpage     #1#2{\clf_pagestaterealpage     {#1}{#2}}
+\permanent\def\setpagestaterealpageno#1#2{\clf_setpagestaterealpageno{#1}{#2}}
+\permanent\def\pagestaterealpageorder#1#2{\clf_pagestaterealpageorder{#1}#2\relax}
+
+\permanent\def\autopagestaterealpage     #1{\clf_pagestaterealpage     {#1}{\number\autopagestatenumber{#1}}}
+\permanent\def\setautopagestaterealpageno#1{\clf_setpagestaterealpageno{#1}{\number\autopagestatenumber{#1}}}
+\permanent\def\autopagestaterealpageorder#1{\clf_pagestaterealpageorder{#1}\numexpr\autopagestatenumber{#1}\relax}
+
+\protect
diff --git a/tex/context/base/mkxl/core-two.lmt b/tex/context/base/mkxl/core-two.lmt
new file mode 100644
index 000000000..7ea42374e
--- /dev/null
+++ b/tex/context/base/mkxl/core-two.lmt
@@ -0,0 +1,210 @@
+if not modules then modules = { } end modules ['core-two'] = {
+    version   = 1.001,
+    comment   = "companion to core-two.mkiv",
+    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+    copyright = "PRAGMA ADE / ConTeXt Development Team",
+    license   = "see context related readme files"
+}
+
+-- This is actually one of the oldest MkIV files and basically a port of MkII but
+-- the old usage has long been phased out. Also, the public part is now handled by
+-- datasets which makes this a more private store.
+
+-- local next = next
+-- local remove, concat = table.remove, table.concat
+
+local allocate = utilities.storage.allocate
+
+local collected = allocate()
+local tobesaved = allocate()
+
+local jobpasses = {
+    collected = collected,
+    tobesaved = tobesaved,
+}
+
+job.passes = jobpasses
+
+local function initializer()
+    collected = jobpasses.collected
+    tobesaved = jobpasses.tobesaved
+end
+
+job.register('job.passes.collected', tobesaved, initializer, nil)
+
+function jobpasses.getcollected(id)
+    return collected[id] or { }
+end
+
+function jobpasses.gettobesaved(id)
+    local t = tobesaved[id]
+    if not t then
+        t = { }
+        tobesaved[id] = t
+    end
+    return t
+end
+
+-- local function define(id)
+--     local p = tobesaved[id]
+--     if not p then
+--         p = { }
+--         tobesaved[id] = p
+--     end
+--     return p
+-- end
+--
+-- local function save(id,str,index)
+--     local jti = define(id)
+--     if index then
+--         jti[index] = str
+--     else
+--         jti[#jti+1] = str
+--     end
+-- end
+--
+-- local function savetagged(id,tag,str)
+--     local jti = define(id)
+--     jti[tag] = str
+-- end
+--
+-- local function getdata(id,index,default)
+--     local jti = collected[id]
+--     local value = jti and jti[index]
+--     return value ~= "" and value or default or ""
+-- end
+--
+-- local function getfield(id,index,tag,default)
+--     local jti = collected[id]
+--     jti = jti and jti[index]
+--     local value = jti and jti[tag]
+--     return value ~= "" and value or default or ""
+-- end
+--
+-- local function getcollected(id)
+--     return collected[id] or { }
+-- end
+--
+-- local function gettobesaved(id)
+--     return define(id)
+-- end
+--
+-- local function get(id)
+--     local jti = collected[id]
+--     if jti and #jti > 0 then
+--         return remove(jti,1)
+--     end
+-- end
+--
+-- local function first(id)
+--     local jti = collected[id]
+--     return jti and jti[1]
+-- end
+--
+-- local function last(id)
+--     local jti = collected[id]
+--     return jti and jti[#jti]
+-- end
+--
+-- local function find(id,n)
+--     local jti = collected[id]
+--     return jti and jti[n] or nil
+-- end
+--
+-- local function count(id)
+--     local jti = collected[id]
+--     return jti and #jti or 0
+-- end
+--
+-- local function list(id)
+--     local jti = collected[id]
+--     if jti then
+--         return concat(jti,',')
+--     end
+-- end
+--
+-- local function inlist(id,str)
+--     local jti = collected[id]
+--     if jti then
+--         for _, v in next, jti do
+--             if v == str then
+--                 return true
+--             end
+--         end
+--     end
+--     return false
+-- end
+--
+-- local check = first
+--
+-- jobpasses.define       = define
+-- jobpasses.save         = save
+-- jobpasses.savetagged   = savetagged
+-- jobpasses.getdata      = getdata
+-- jobpasses.getfield     = getfield
+-- jobpasses.getcollected = getcollected
+-- jobpasses.gettobesaved = gettobesaved
+-- jobpasses.get          = get
+-- jobpasses.first        = first
+-- jobpasses.last         = last
+-- jobpasses.find         = find
+-- jobpasses.list         = list
+-- jobpasses.count        = count
+-- jobpasses.check        = check
+-- jobpasses.inlist       = inlist
+--
+-- -- interface
+--
+-- local implement = interfaces.implement
+--
+-- implement { name = "gettwopassdata",     actions = { get,   context }, arguments = "string" }
+-- implement { name = "getfirsttwopassdata",actions = { first, context }, arguments = "string" }
+-- implement { name = "getlasttwopassdata", actions = { last,  context }, arguments = "string" }
+-- implement { name = "findtwopassdata",    actions = { find,  context }, arguments = "2 strings" }
+-- implement { name = "gettwopassdatalist", actions = { list,  context }, arguments = "string" }
+-- implement { name = "counttwopassdata",   actions = { count, context }, arguments = "string" }
+-- implement { name = "checktwopassdata",   actions = { check, context }, arguments = "string" }
+--
+-- implement {
+--     name      = "definetwopasslist",
+--     actions   = define,
+--     arguments = "string"
+-- }
+--
+-- implement {
+--     name      = "savetwopassdata",
+--     actions   = save,
+--     arguments = "2 strings",
+-- }
+--
+-- implement {
+--     name      = "savetaggedtwopassdata",
+--     actions   = savetagged,
+--     arguments = "3 strings",
+-- }
+--
+-- implement {
+--     name      = "doifelseintwopassdata",
+--     actions   = { inlist, commands.doifelse },
+--     arguments = "2 strings",
+-- }
+--
+-- -- local ctx_latelua = context.latelua
+--
+-- -- implement {
+-- --     name      = "lazysavetwopassdata",
+-- --     arguments = "3 strings",
+-- --     public    = true,
+-- --     actions   = function(a,b,c)
+-- --         ctx_latelua(function() save(a,c) end)
+-- --     end,
+-- -- }
+--
+-- -- implement {
+-- --     name      = "lazysavetaggedtwopassdata",
+-- --     arguments = "3 strings",
+-- --     public    = true,
+-- --     actions   = function(a,b,c)
+-- --         ctx_latelua(function() savetagged(a,b,c) end)
+-- --     end,
+-- -- }
diff --git a/tex/context/base/mkxl/core-two.mkxl b/tex/context/base/mkxl/core-two.mkxl
index 38f03c7c4..10a7eec9e 100644
--- a/tex/context/base/mkxl/core-two.mkxl
+++ b/tex/context/base/mkxl/core-two.mkxl
@@ -1,6 +1,6 @@
 %D \module
 %D   [       file=core-two, % moved from core-uti
-%D        version=1997.03.31,
+%D        version=1997.03.31, % stripped down 2023-03-21
 %D          title=\CONTEXT\ Core Macros,
 %D       subtitle=Two Pass Data,
 %D         author=Hans Hagen,
@@ -11,102 +11,110 @@
 %C therefore copyrighted by \PRAGMA. See mreadme.pdf for
 %C details.
 
-\writestatus{loading}{ConTeXt Core Macros / Two Pass Data}
+%D The public interface is replaced by datasets and two pass data is now private
+%D to the engine. For the moment we keep some commands commented. The unused
+%D (second) argument is an inheritance from \MKII. If needed we can bring back
+%D a compatible interface.
 
-%D This is a rather old mechanism which has not changed much over time, apart from
-%D adding a few more selectors. This code used to be part of \type {core-uti}. The
-%D following examples demonstrate the interface.
-%D
-%D \startbuffer
-%D \definetwopasslist{test-1}
-%D
-%D \gettwopassdatalist{test-1} [\twopassdatalist=]
-%D \checktwopassdata  {test-1} [\twopassdata=]
-%D \checktwopassdata  {test-1} [\twopassdata=]
-%D \gettwopassdata    {test-1} [\twopassdata=]
-%D \gettwopassdata    {test-1} [\twopassdata=]
-%D
-%D \definetwopasslist{test-2}
-%D
-%D \lazysavetwopassdata{test-2}{1}{x}
-%D \lazysavetwopassdata{test-2}{2}{y}
-%D \lazysavetwopassdata{test-2}{3}{z}
-%D
-%D \gettwopassdatalist{test-2} [\twopassdatalist=x,y,z]
-%D \checktwopassdata  {test-2} [\twopassdata=x]
-%D \checktwopassdata  {test-2} [\twopassdata=x]
-%D \gettwopassdata    {test-2} [\twopassdata=x]
-%D \gettwopassdata    {test-2} [\twopassdata=y]
-%D \gettwopassdata    {test-2} [\twopassdata=z]
-%D \gettwopassdata    {test-2} [\twopassdata=]
-%D
-%D \definetwopasslist{test-3}
-%D
-%D \lazysavetaggedtwopassdata{test-3}{1}{x}{a}
-%D \lazysavetaggedtwopassdata{test-3}{2}{y}{b}
-%D \lazysavetaggedtwopassdata{test-3}{3}{z}{c}
-%D
-%D \findtwopassdata{test-3}{x} [\twopassdata=a]
-%D \findtwopassdata{test-3}{y} [\twopassdata=b]
-%D \findtwopassdata{test-3}{z} [\twopassdata=c]
-%D \findtwopassdata{test-3}{w} [\twopassdata=]
-%D
-%D \definetwopasslist{test-4}
-%D
-%D \lazysavetwopassdata{test-4}{1}{A}
-%D \lazysavetwopassdata{test-4}{2}{B}
-%D \lazysavetwopassdata{test-4}{3}{C}
-%D
-%D \getfirsttwopassdata{test-4}    [\twopassdata=A]
-%D \getlasttwopassdata {test-4}    [\twopassdata=C]
-%D \getfirsttwopassdata{test-4}    [\twopassdata=A]
-%D \getlasttwopassdata {test-4}    [\twopassdata=C]
-%D \getfromtwopassdata {test-4}{1} [\twopassdata=A]
-%D \getfromtwopassdata {test-4}{3} [\twopassdata=C]
-%D \getfromtwopassdata {test-4}{2} [\twopassdata=B]
-%D \stopbuffer
-%D
-%D \getbuffer \typebuffer
+\writestatus{loading}{ConTeXt Core Macros / Two Pass Data}
 
 \unprotect
 
-\registerctxluafile{core-two}{}
-
-\permanent\def\immediatesavetwopassdata   #1#2#3{\normalexpanded{\noexpand\clf_savetwopassdata{#1}{#3}}}
-\permanent\def     \lazysavetwopassdata   #1#2#3{\normalexpanded{\noexpand\ctxlatecommand{savetwopassdata("#1","#3")}}}
-\permanent\let         \savetwopassdata          \lazysavetwopassdata
-\permanent\def    \savetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\clf_savetaggedtwopassdata{#1}{#3}{#4}}}
-\permanent\def\lazysavetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\ctxlatecommand{savetaggedtwopassdata("#1",'#3',"#4")}}}
-
-% temp hack: needs a proper \starteverytimeluacode
-
-\setfalse\twopassdatafound
-
-\mutable\lettonothing\twopassdata
-\mutable\lettonothing\twopassdatalist
-
-\mutable\let\noftwopassitems\!!zeropoint
-
-\def\syst_twopass_check % can be delegated to lua once obsolete is gone
-  {\ifempty\twopassdata
-     \setfalse\twopassdatafound
-   \else
-     \settrue\twopassdatafound
-   \fi}
-
-\permanent\protected\def\definetwopasslist        #1{\clf_definetwopasslist{#1}}
-\permanent\protected\def\gettwopassdata           #1{\edef\twopassdata    {\clf_gettwopassdata      {#1}}\syst_twopass_check}
-\permanent\protected\def\checktwopassdata         #1{\edef\twopassdata    {\clf_checktwopassdata    {#1}}\syst_twopass_check}
-\permanent\protected\def\findtwopassdata        #1#2{\edef\twopassdata    {\clf_findtwopassdata {#1}{#2}}\syst_twopass_check}
-\permanent\protected\def\getfirsttwopassdata      #1{\edef\twopassdata    {\clf_getfirsttwopassdata {#1}}\syst_twopass_check}
-\permanent\protected\def\getlasttwopassdata       #1{\edef\twopassdata    {\clf_getlasttwopassdata  {#1}}%
-                                                     \edef\noftwopassitems{\clf_counttwopassdata    {#1}}\syst_twopass_check}
-\permanent\protected\def\getnamedtwopassdatalist#1#2{\edef              #1{\clf_gettwopassdatalist  {#2}}}
-\permanent\protected\def\gettwopassdatalist       #1{\edef\twopassdatalist{\clf_gettwopassdatalist  {#1}}}
-
-\permanent\protected\def\doifelseintwopassdata  #1#2{\clf_doifelseintwopassdata{#1}{#2}}
+\registerctxluafile{core-two}{autosuffix}
 
-\aliased\let\doifintwopassdataelse\doifelseintwopassdata
-\aliased\let\getfromtwopassdata   \findtwopassdata
+% %D This is a rather old mechanism which has not changed much over time, apart from
+% %D adding a few more selectors. This code used to be part of \type {core-uti}. The
+% %D following examples demonstrate the interface.
+% %D
+% %D \startbuffer
+% %D \definetwopasslist{test-1}
+% %D
+% %D \gettwopassdatalist{test-1} [\twopassdatalist=]
+% %D \checktwopassdata  {test-1} [\twopassdata=]
+% %D \checktwopassdata  {test-1} [\twopassdata=]
+% %D \gettwopassdata    {test-1} [\twopassdata=]
+% %D \gettwopassdata    {test-1} [\twopassdata=]
+% %D
+% %D \definetwopasslist{test-2}
+% %D
+% %D \lazysavetwopassdata{test-2}{1}{x}
+% %D \lazysavetwopassdata{test-2}{2}{y}
+% %D \lazysavetwopassdata{test-2}{3}{z}
+% %D
+% %D \gettwopassdatalist{test-2} [\twopassdatalist=x,y,z]
+% %D \checktwopassdata  {test-2} [\twopassdata=x]
+% %D \checktwopassdata  {test-2} [\twopassdata=x]
+% %D \gettwopassdata    {test-2} [\twopassdata=x]
+% %D \gettwopassdata    {test-2} [\twopassdata=y]
+% %D \gettwopassdata    {test-2} [\twopassdata=z]
+% %D \gettwopassdata    {test-2} [\twopassdata=]
+% %D
+% %D \definetwopasslist{test-3}
+% %D
+% %D \lazysavetaggedtwopassdata{test-3}{1}{x}{a}
+% %D \lazysavetaggedtwopassdata{test-3}{2}{y}{b}
+% %D \lazysavetaggedtwopassdata{test-3}{3}{z}{c}
+% %D
+% %D \findtwopassdata{test-3}{x} [\twopassdata=a]
+% %D \findtwopassdata{test-3}{y} [\twopassdata=b]
+% %D \findtwopassdata{test-3}{z} [\twopassdata=c]
+% %D \findtwopassdata{test-3}{w} [\twopassdata=]
+% %D
+% %D \definetwopasslist{test-4}
+% %D
+% %D \lazysavetwopassdata{test-4}{1}{A}
+% %D \lazysavetwopassdata{test-4}{2}{B}
+% %D \lazysavetwopassdata{test-4}{3}{C}
+% %D
+% %D \getfirsttwopassdata{test-4}    [\twopassdata=A]
+% %D \getlasttwopassdata {test-4}    [\twopassdata=C]
+% %D \getfirsttwopassdata{test-4}    [\twopassdata=A]
+% %D \getlasttwopassdata {test-4}    [\twopassdata=C]
+% %D \getfromtwopassdata {test-4}{1} [\twopassdata=A]
+% %D \getfromtwopassdata {test-4}{3} [\twopassdata=C]
+% %D \getfromtwopassdata {test-4}{2} [\twopassdata=B]
+% %D \stopbuffer
+% %D
+% %D \getbuffer \typebuffer
+%
+% %D The next code can be simplified (read: defined at the \LUA\ end) but we never use this
+% %D mechanism which has been replaced by datasets so it's not worth the effort.
+%
+% \permanent\def\immediatesavetwopassdata   #1#2#3{\normalexpanded{\noexpand\clf_savetwopassdata{#1}{#3}}}
+% \permanent\def     \lazysavetwopassdata   #1#2#3{\normalexpanded{\noexpand\ctxlatecommand{savetwopassdata("#1","#3")}}}
+% \permanent\let         \savetwopassdata          \lazysavetwopassdata
+% \permanent\def    \savetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\clf_savetaggedtwopassdata{#1}{#3}{#4}}}
+% \permanent\def\lazysavetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\ctxlatecommand{savetaggedtwopassdata("#1","#3","#4")}}}
+%
+% % temp hack: needs a proper \starteverytimeluacode
+%
+% \setfalse\twopassdatafound
+%
+% \mutable\lettonothing\twopassdata
+% \mutable\lettonothing\twopassdatalist
+%
+% \mutable\let\noftwopassitems\!!zeropoint
+%
+% \def\syst_twopass_check % can be delegated to lua once obsolete is gone
+%   {\ifempty\twopassdata
+%      \setfalse\twopassdatafound
+%    \else
+%      \settrue\twopassdatafound
+%    \fi}
+%
+% \permanent\protected\def\definetwopasslist        #1{\clf_definetwopasslist{#1}}
+% \permanent\protected\def\gettwopassdata           #1{\edef\twopassdata    {\clf_gettwopassdata      {#1}}\syst_twopass_check}
+% \permanent\protected\def\checktwopassdata         #1{\edef\twopassdata    {\clf_checktwopassdata    {#1}}\syst_twopass_check}
+% \permanent\protected\def\findtwopassdata        #1#2{\edef\twopassdata    {\clf_findtwopassdata {#1}{#2}}\syst_twopass_check}
+% \permanent\protected\def\getfirsttwopassdata      #1{\edef\twopassdata    {\clf_getfirsttwopassdata {#1}}\syst_twopass_check}
+% \permanent\protected\def\getlasttwopassdata       #1{\edef\twopassdata    {\clf_getlasttwopassdata  {#1}}%
+%                                                      \edef\noftwopassitems{\clf_counttwopassdata    {#1}}\syst_twopass_check}
+% \permanent\protected\def\getnamedtwopassdatalist#1#2{\edef              #1{\clf_gettwopassdatalist  {#2}}}
+% \permanent\protected\def\gettwopassdatalist       #1{\edef\twopassdatalist{\clf_gettwopassdatalist  {#1}}}
+%
+% \permanent\protected\def\doifelseintwopassdata  #1#2{\clf_doifelseintwopassdata{#1}{#2}}
+%
+% \aliased\let\doifintwopassdataelse\doifelseintwopassdata
+% \aliased\let\getfromtwopassdata   \findtwopassdata
 
 \protect \endinput
diff --git a/tex/context/base/mkxl/core-uti.lmt b/tex/context/base/mkxl/core-uti.lmt
index 966428b36..e4b6606e3 100644
--- a/tex/context/base/mkxl/core-uti.lmt
+++ b/tex/context/base/mkxl/core-uti.lmt
@@ -6,16 +6,13 @@ if not modules then modules = { } end modules ['core-uti'] = {
     license   = "see context related readme files"
 }
 
--- todo: keep track of changes here (hm, track access, and only true when
--- accessed and changed)
-
---[[ldx--
-<p>A utility file has always been part of <l n='context'/> and with
-the move to <l n='luatex'/> we also moved a lot of multi-pass info
-to a <l n='lua'/> table. Instead of loading a <l n='tex'/> based
-utility file under different setups, we now load a table once. This
-saves much runtime but at the cost of more memory usage.</p>
---ldx]]--
+-- A utility file has always been part of ConTeXt and with the move to LuaTeX we
+-- also moved a lot of multi-pass info to a Lua table. Instead of loading a TeX
+-- based utility file under different setups, we now load a table once. This saves
+-- much runtime but at the cost of more memory usage.
+--
+-- In the meantime the overhead is a bit more due to the amount of data being saved
+-- and more aggressive compacting.
 
 local math = math
 local next, type, tostring, tonumber, setmetatable, load = next, type, tostring, tonumber, setmetatable, load
@@ -46,14 +43,9 @@ local job            = job
 job.version          = 1.33
 job.packversion      = 1.02
 
--- some day we will implement loading of other jobs and then we need
--- job.jobs
-
---[[ldx--
-<p>Variables are saved using in the previously defined table and passed
-onto <l n='tex'/> using the following method. Of course one can also
-directly access the variable using a <l n='lua'/> call.</p>
---ldx]]--
+-- Variables are saved in the previously defined table and passed on to TeX
+-- using the following method. Of course one can also directly access the variable
+-- using a Lua call.
 
 local savelist, comment = { }, { }
 
@@ -382,6 +374,12 @@ function job.load(filename)
 end
 
 function job.loadother(filename)
+    local jobname = environment.jobname
+    if filename == jobname then
+        return
+    else
+        report_passes("integrating list %a into %a",filename,jobname)
+    end
     statistics.starttiming(loadedfiles)
     filename = file.addsuffix(filename,"tuc")
     local unpacked = othercache[filename]
diff --git a/tex/context/base/mkxl/file-mod.lmt b/tex/context/base/mkxl/file-mod.lmt
index d10abf533..567387a3a 100644
--- a/tex/context/base/mkxl/file-mod.lmt
+++ b/tex/context/base/mkxl/file-mod.lmt
@@ -6,17 +6,11 @@ if not modules then modules = { } end modules ['file-mod'] = {
     license   = "see context related readme files"
 }
 
--- This module will be redone! For instance, the prefixes will move to data-*
--- as they arr sort of generic along with home:// etc/.
-
--- context is not defined yet! todo! (we need to load tupp-fil after cld)
--- todo: move startreadingfile to lua and push regime there
-
---[[ldx--
-<p>It's more convenient to manipulate filenames (paths) in
-<l n='lua'/> than in <l n='tex'/>. These methods have counterparts
-at the <l n='tex'/> side.</p>
---ldx]]--
+-- This module will be redone! For instance, the prefixes will move to data-* as
+-- they are sort of generic, along with home:// etc.
+--
+-- It is more convenient to manipulate filenames (paths) in Lua than in TeX. The
+-- methods below have counterparts at the TeX end.
 
 local format, find, concat, tonumber = string.format, string.find, table.concat, tonumber
 local sortedhash = table.sortedhash
diff --git a/tex/context/base/mkxl/font-con.lmt b/tex/context/base/mkxl/font-con.lmt
index 073af7d2e..5a887d61d 100644
--- a/tex/context/base/mkxl/font-con.lmt
+++ b/tex/context/base/mkxl/font-con.lmt
@@ -22,11 +22,9 @@ local trace_scaling   = false  trackers.register("fonts.scaling",   function(v)
 
 local report_defining = logs.reporter("fonts","defining")
 
--- watch out: no negative depths and negative eights permitted in regular fonts
-
---[[ldx--
-<p>Here we only implement a few helper functions.</p>
---ldx]]--
+-- Watch out: no negative depths and negative heights are permitted in regular
+-- fonts. Also, the code in LMTX is a bit different. Here we only implement a
+-- few helper functions.
 
 local fonts                  = fonts
 local constructors           = fonts.constructors or { }
@@ -53,11 +51,9 @@ constructors.loadedfonts    = loadedfonts
 
 ----- scalecommands         = fonts.helpers.scalecommands
 
---[[ldx--
-<p>We need to normalize the scale factor (in scaled points). This has to
-do with the fact that <l n='tex'/> uses a negative multiple of 1000 as
-a signal for a font scaled based on the design size.</p>
---ldx]]--
+-- We need to normalize the scale factor (in scaled points). This has to do with the
+-- fact that TeX uses a negative multiple of 1000 as a signal for a font scaled
+-- based on the design size.
 
 local factors = {
     pt = 65536.0,
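
Not the actual constructors.calculatescale (that is implemented in font-ctx.lmt), just
a sketch of the convention described above, with a made up helper name and a 10pt
default design size: a non-negative value is already a size in scaled points, a
negative one means that many thousandths of the design size.

    -- hypothetical helper; the real normalization happens in font-ctx.lmt
    local function normalizescale(scaledpoints,designsize)
        if scaledpoints < 0 then
            -- -1000 means "at the design size", -1200 means "at 1.2 times it"
            scaledpoints = (designsize or 10 * 65536) * (-scaledpoints) / 1000
        end
        return scaledpoints
    end

    print(normalizescale(786432))        -- a 12pt request stays as given (786432sp)
    print(normalizescale(-1000,655360))  -- the design size itself (655360sp)
    print(normalizescale(-1200,655360))  -- 1.2 times the design size (786432sp)
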
@@ -112,33 +108,29 @@ function constructors.getmathparameter(tfmdata,name)
     end
 end
 
---[[ldx--
-<p>Beware, the boundingbox is passed as reference so we may not overwrite it
-in the process; numbers are of course copies. Here 65536 equals 1pt. (Due to
-excessive memory usage in CJK fonts, we no longer pass the boundingbox.)</p>
---ldx]]--
-
--- The scaler is only used for otf and afm and virtual fonts. If a virtual font has italic
--- correction make sure to set the hasitalics flag. Some more flags will be added in the
--- future.
-
---[[ldx--
-<p>The reason why the scaler was originally split, is that for a while we experimented
-with a helper function. However, in practice the <l n='api'/> calls are too slow to
-make this profitable and the <l n='lua'/> based variant was just faster. A days
-wasted day but an experience richer.</p>
---ldx]]--
+-- Beware, the boundingbox is passed as reference so we may not overwrite it in the
+-- process; numbers are of course copies. Here 65536 equals 1pt. (Due to excessive
+-- memory usage in CJK fonts, we no longer pass the boundingbox.)
+--
+-- The scaler is only used for OTF and AFM and virtual fonts. If a virtual font has
+-- italic correction make sure to set the hasitalics flag. Some more flags will be
+-- added in the future.
+--
+-- The reason why the scaler was originally split, is that for a while we
+-- experimented with a helper function. However, in practice the API calls are too
+-- slow to make this profitable and the Lua based variant was just faster. A day
+-- wasted but an experience richer.
 
 -- experimental, sharing kerns (unscaled and scaled) saves memory
 -- local sharedkerns, basekerns = constructors.check_base_kerns(tfmdata)
 -- loop over descriptions (afm and otf have descriptions, tfm not)
 -- there is no need (yet) to assign a value to chr.tonunicode
-
+--
 -- constructors.prepare_base_kerns(tfmdata) -- optimalization
-
--- we have target.name=metricfile and target.fullname=RealName and target.filename=diskfilename
--- when collapsing fonts, luatex looks as both target.name and target.fullname as ttc files
--- can have multiple subfonts
+--
+-- We have target.name = metricfile and target.fullname = RealName and
+-- target.filename = diskfilename when collapsing fonts. LuaTeX looks at both
+-- target.name and target.fullname because TTC files can have multiple subfonts.
 
 function constructors.calculatescale(tfmdata,scaledpoints)
     -- implemented in font-ctx.lmt
@@ -1008,9 +1000,7 @@ function constructors.finalize(tfmdata)
     return tfmdata
 end
 
---[[ldx--
-<p>A unique hash value is generated by:</p>
---ldx]]--
+-- A unique hash value is generated by:
 
 local hashmethods        = { }
 constructors.hashmethods = hashmethods
@@ -1069,13 +1059,11 @@ hashmethods.normal = function(list)
     end
 end
 
---[[ldx--
-<p>In principle we can share tfm tables when we are in need for a font, but then
-we need to define a font switch as an id/attr switch which is no fun, so in that
-case users can best use dynamic features ... so, we will not use that speedup. Okay,
-when we get rid of base mode we can optimize even further by sharing, but then we
-loose our testcases for <l n='luatex'/>.</p>
---ldx]]--
+-- In principle we can share tfm tables when we are in need of a font, but then we
+-- need to define a font switch as an id/attr switch which is no fun, so in that
+-- case users can best use dynamic features ... so, we will not use that speedup.
+-- Okay, when we get rid of base mode we can optimize even further by sharing, but
+-- then we lose our testcases for LuaTeX.
 
 function constructors.hashinstance(specification,force)
     -- implemented in font-ctx.lmt
@@ -1407,10 +1395,7 @@ do
 
 end
 
---[[ldx--
-<p>We need to check for default features. For this we provide
-a helper function.</p>
---ldx]]--
+-- We need to check for default features. For this we provide a helper function.
 
 function constructors.checkedfeatures(what,features)
     local defaults = handlers[what].features.defaults
diff --git a/tex/context/base/mkxl/font-ctx.lmt b/tex/context/base/mkxl/font-ctx.lmt
index 77953d64a..1d59ad728 100644
--- a/tex/context/base/mkxl/font-ctx.lmt
+++ b/tex/context/base/mkxl/font-ctx.lmt
@@ -529,19 +529,13 @@ do
 
 end
 
---[[ldx--
-<p>So far we haven't really dealt with features (or whatever we want
-to pass along with the font definition. We distinguish the following
-situations:</p>
-situations:</p>
-
-<code>
-name:xetex like specs
-name@virtual font spec
-name*context specification
-</code>
---ldx]]--
-
+-- So far we haven't really dealt with features (or whatever we want to pass along
+-- with the font definition). We distinguish the following situations:
+--
+--   name:xetex like specs
+--   name@virtual font spec
+--   name*context specification
+--
 -- Currently fonts are scaled while constructing the font, so we have to do scaling
 -- of commands in the vf at that point using e.g. "local scale = g.parameters.factor
 -- or 1" after all, we need to work with copies anyway and scaling needs to be done
@@ -2269,10 +2263,8 @@ dimenfactors.em   = nil
 dimenfactors["%"] = nil
 dimenfactors.pct  = nil
 
---[[ldx--
-<p>Before a font is passed to <l n='tex'/> we scale it. Here we also need
-to scale virtual characters.</p>
---ldx]]--
+-- Before a font is passed to TeX we scale it. Here we also need to scale virtual
+-- characters.
 
 do
 
diff --git a/tex/context/base/mkxl/font-def.lmt b/tex/context/base/mkxl/font-def.lmt
index 6afeeb474..ea6b2d0c0 100644
--- a/tex/context/base/mkxl/font-def.lmt
+++ b/tex/context/base/mkxl/font-def.lmt
@@ -24,10 +24,9 @@ trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading
 
 local report_defining = logs.reporter("fonts","defining")
 
---[[ldx--
-<p>Here we deal with defining fonts. We do so by intercepting the
-default loader that only handles <l n='tfm'/>.</p>
---ldx]]--
+-- Here we deal with defining fonts. We do so by intercepting the default loader
+-- that only handles TFM files. Although we started out that way, in the meantime
+-- we can hardly speak of TFM any more.
 
 local nextfont      = font.nextid
 
@@ -55,25 +54,18 @@ local designsizes   = constructors.designsizes
 
 local resolvefile   = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
 
---[[ldx--
-<p>We hardly gain anything when we cache the final (pre scaled)
-<l n='tfm'/> table. But it can be handy for debugging, so we no
-longer carry this code along. Also, we now have quite some reference
-to other tables so we would end up with lots of catches.</p>
---ldx]]--
-
---[[ldx--
-<p>We can prefix a font specification by <type>name:</type> or
-<type>file:</type>. The first case will result in a lookup in the
-synonym table.</p>
-
-<typing>
-[ name: | file: ] identifier [ separator [ specification ] ]
-</typing>
-
-<p>The following function split the font specification into components
-and prepares a table that will move along as we proceed.</p>
---ldx]]--
+-- We hardly gain anything when we cache the final (pre scaled) TFM table. But it
+-- can be handy for debugging, so we no longer carry this code along. Also, we now
+-- have quite some references to other tables so we would end up with lots of
+-- catches.
+--
+-- We can prefix a font specification by "name:" or "file:". The first case will
+-- result in a lookup in the synonym table.
+--
+--   [ name: | file: ] identifier [ separator [ specification ] ]
+--
+-- The following function splits the font specification into components and prepares
+-- a table that will move along as we proceed.
 
 -- beware, we discard additional specs
 --
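
A hedged sketch of that split; the real code uses lpeg and knows more separators, and
the fallback to a name: lookup when no prefix is given is an assumption here.

    -- hypothetical splitter, not the actual code in this file
    local function splitspecification(str)
        local lookup, rest = str:match("^(%a+):(.*)$")
        if not lookup then
            lookup, rest = "name", str -- assumed default when no prefix is given
        end
        local name, detail = rest:match("^(.-)%*(.*)$")
        return {
            lookup        = lookup,
            name          = name or rest,
            specification = detail, -- nil when there is none
        }
    end

    local t = splitspecification("file:lmroman10-regular*default")
    print(t.lookup,t.name,t.specification) -- file  lmroman10-regular  default
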
@@ -166,9 +158,7 @@ do
 
 end
 
---[[ldx--
-<p>We can resolve the filename using the next function:</p>
---ldx]]--
+-- We can resolve the filename using the next function:
 
 definers.resolvers = definers.resolvers or { }
 local resolvers    = definers.resolvers
@@ -261,23 +251,17 @@ function definers.resolve(specification)
     return specification
 end
 
---[[ldx--
-<p>The main read function either uses a forced reader (as determined by
-a lookup) or tries to resolve the name using the list of readers.</p>
-
-<p>We need to cache when possible. We do cache raw tfm data (from <l
-n='tfm'/>, <l n='afm'/> or <l n='otf'/>). After that we can cache based
-on specificstion (name) and size, that is, <l n='tex'/> only needs a number
-for an already loaded fonts. However, it may make sense to cache fonts
-before they're scaled as well (store <l n='tfm'/>'s with applied methods
-and features). However, there may be a relation between the size and
-features (esp in virtual fonts) so let's not do that now.</p>
-
-<p>Watch out, here we do load a font, but we don't prepare the
-specification yet.</p>
---ldx]]--
-
--- very experimental:
+-- The main read function either uses a forced reader (as determined by a lookup) or
+-- tries to resolve the name using the list of readers.
+--
+-- We need to cache when possible. We do cache raw tfm data (from TFM, AFM or OTF).
+-- After that we can cache based on specification (name) and size, that is, TeX only
+-- needs a number for an already loaded font. However, it may make sense to cache
+-- fonts before they're scaled as well (store TFM's with applied methods and
+-- features). However, there may be a relation between the size and features (esp in
+-- virtual fonts) so let's not do that now.
+--
+-- Watch out, here we do load a font, but we don't prepare the specification yet.
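+--
+-- A minimal sketch of that two level caching (made up names, not the functions
+-- defined in this file):
+--
+--   local rawcache    = { }                   -- raw tfm/afm/otf data per file
+--   local scaledcache = { }                   -- scaled instances per name and size
+--
+--   local function getscaled(name,size)
+--       local hash = name .. " @ " .. size
+--       local data = scaledcache[hash]
+--       if not data then
+--           local raw = rawcache[name]
+--           if not raw then
+--               raw = loadrawdata(name)       -- one of the registered readers
+--               rawcache[name] = raw
+--           end
+--           data = scaleraw(raw,size)         -- apply features and scale
+--           scaledcache[hash] = data
+--       end
+--       return data
+--   end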
 
 function definers.applypostprocessors(tfmdata)
     local postprocessors = tfmdata.postprocessors
@@ -431,17 +415,13 @@ function constructors.readanddefine(name,size) -- no id -- maybe a dummy first
     return fontdata[id], id
 end
 
---[[ldx--
-<p>So far the specifiers. Now comes the real definer. Here we cache
-based on id's. Here we also intercept the virtual font handler. Since
-it evolved stepwise I may rewrite this bit (combine code).</p>
-
-In the previously defined reader (the one resulting in a <l n='tfm'/>
-table) we cached the (scaled) instances. Here we cache them again, but
-this time based on id. We could combine this in one cache but this does
-not gain much. By the way, passing id's back to in the callback was
-introduced later in the development.</p>
---ldx]]--
+-- So far the specifiers. Now comes the real definer. Here we cache based on id's.
+-- Here we also intercept the virtual font handler.
+--
+-- In the previously defined reader (the one resulting in a TFM table) we cached the
+-- (scaled) instances. Here we cache them again, but this time based on id. We could
+-- combine this in one cache but this does not gain much. By the way, passing id's
+-- back in the callback was introduced later in the development.
 
 function definers.registered(hash)
     local id = internalized[hash]
diff --git a/tex/context/base/mkxl/font-fbk.lmt b/tex/context/base/mkxl/font-fbk.lmt
index bdc5265ae..09f20b42c 100644
--- a/tex/context/base/mkxl/font-fbk.lmt
+++ b/tex/context/base/mkxl/font-fbk.lmt
@@ -10,10 +10,6 @@ local cos, tan, rad, format = math.cos, math.tan, math.rad, string.format
 local utfbyte, utfchar = utf.byte, utf.char
 local next = next
 
---[[ldx--
-<p>This is very experimental code!</p>
---ldx]]--
-
 local trace_visualize    = false  trackers.register("fonts.composing.visualize", function(v) trace_visualize = v end)
 local trace_define       = false  trackers.register("fonts.composing.define",    function(v) trace_define    = v end)
 
diff --git a/tex/context/base/mkxl/font-fil.mklx b/tex/context/base/mkxl/font-fil.mklx
index 79535ea11..73348645d 100644
--- a/tex/context/base/mkxl/font-fil.mklx
+++ b/tex/context/base/mkxl/font-fil.mklx
@@ -294,7 +294,7 @@
 % pre-expansion.
 
 \def\font_helpers_update_font_class_parameters
-  {\edef\m_font_class_direction {\begincsname\??fontclass\fontclass\fontstyle\s!direction \endcsname}%
+  {%edef\m_font_class_direction {\begincsname\??fontclass\fontclass\fontstyle\s!direction \endcsname}%
    \edef\m_font_class_features  {\begincsname\??fontclass\fontclass\fontstyle\s!features  \endcsname}%
    \edef\m_font_class_fallbacks {\begincsname\??fontclass\fontclass\fontstyle\s!fallbacks \endcsname}%
    \edef\m_font_class_goodies   {\begincsname\??fontclass\fontclass\fontstyle\s!goodies   \endcsname}%
diff --git a/tex/context/base/mkxl/font-ini.lmt b/tex/context/base/mkxl/font-ini.lmt
index bc68fa83d..dcec8594e 100644
--- a/tex/context/base/mkxl/font-ini.lmt
+++ b/tex/context/base/mkxl/font-ini.lmt
@@ -6,10 +6,6 @@ if not modules then modules = { } end modules ['font-ini'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Not much is happening here.</p>
---ldx]]--
-
 local sortedhash, setmetatableindex = table.sortedhash, table.setmetatableindex
 local allocate = utilities.storage.allocate
 
diff --git a/tex/context/base/mkxl/font-ini.mklx b/tex/context/base/mkxl/font-ini.mklx
index 6efae2ae1..ea727bde4 100644
--- a/tex/context/base/mkxl/font-ini.mklx
+++ b/tex/context/base/mkxl/font-ini.mklx
@@ -755,6 +755,16 @@
 
 \immutable\dimensiondef\d_font_default_size 10pt
 
+%lettonothing\m_font_class_direction   % no longer used
+\lettonothing\m_font_class_features
+\lettonothing\m_font_class_fallbacks
+\lettonothing\m_font_class_goodies
+
+\lettonothing\m_font_direction
+\lettonothing\m_font_features
+\lettonothing\m_font_fallbacks
+\lettonothing\m_font_goodies
+
 \protected\def\font_helpers_low_level_define
   {\ifconditional\c_font_compact
      \expandafter\font_helpers_low_level_define_compact
diff --git a/tex/context/base/mkxl/font-mat.mklx b/tex/context/base/mkxl/font-mat.mklx
index 76f6f87b9..54473a347 100644
--- a/tex/context/base/mkxl/font-mat.mklx
+++ b/tex/context/base/mkxl/font-mat.mklx
@@ -337,15 +337,17 @@
 %D 0 while in rl mode 0 is a copy of 1. There is no real overhead involved in this.
 %D This also permits different font definitions for normal and mixed.
 
-\lettonothing\m_font_class_direction
-\lettonothing\m_font_class_features
-\lettonothing\m_font_class_fallbacks
-\lettonothing\m_font_class_goodies
-
-\lettonothing\m_font_direction
-\lettonothing\m_font_features
-\lettonothing\m_font_fallbacks
-\lettonothing\m_font_goodies
+% moved to ini
+%
+% \lettonothing\m_font_class_direction
+% \lettonothing\m_font_class_features
+% \lettonothing\m_font_class_fallbacks
+% \lettonothing\m_font_class_goodies
+%
+% \lettonothing\m_font_direction
+% \lettonothing\m_font_features
+% \lettonothing\m_font_fallbacks
+% \lettonothing\m_font_goodies
 
 \appendtoks
     \font_helpers_set_math_family\c_font_fam_mr\s!mr
diff --git a/tex/context/base/mkxl/font-one.lmt b/tex/context/base/mkxl/font-one.lmt
index 453f61192..71694dcca 100644
--- a/tex/context/base/mkxl/font-one.lmt
+++ b/tex/context/base/mkxl/font-one.lmt
@@ -7,18 +7,16 @@ if not modules then modules = { } end modules ['font-one'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Some code may look a bit obscure but this has to do with the fact that we also use
-this code for testing and much code evolved in the transition from <l n='tfm'/> to
-<l n='afm'/> to <l n='otf'/>.</p>
-
-<p>The following code still has traces of intermediate font support where we handles
-font encodings. Eventually font encoding went away but we kept some code around in
-other modules.</p>
-
-<p>This version implements a node mode approach so that users can also more easily
-add features.</p>
---ldx]]--
+-- Some code may look a bit obscure but this has to do with the fact that we also
+-- use this code for testing and much code evolved in the transition from TFM to AFM
+-- to OTF.
+--
+-- The following code still has traces of intermediate font support where we handled
+-- font encodings. Eventually font encoding went away but we kept some code around
+-- in other modules.
+--
+-- This version implements a node mode approach so that users can also more easily
+-- add features.
 
 local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers
 
@@ -71,15 +69,13 @@ local overloads           = fonts.mappings.overloads
 
 local applyruntimefixes   = fonts.treatments and fonts.treatments.applyfixes
 
---[[ldx--
-<p>We cache files. Caching is taken care of in the loader. We cheat a bit by adding
-ligatures and kern information to the afm derived data. That way we can set them faster
-when defining a font.</p>
-
-<p>We still keep the loading two phased: first we load the data in a traditional
-fashion and later we transform it to sequences. Then we apply some methods also
-used in opentype fonts (like <t>tlig</t>).</p>
---ldx]]--
+-- We cache files. Caching is taken care of in the loader. We cheat a bit by adding
+-- ligatures and kern information to the afm derived data. That way we can set them
+-- faster when defining a font.
+--
+-- We still keep the loading two phased: first we load the data in a traditional
+-- fashion and later we transform it to sequences. Then we apply some methods also
+-- used in opentype fonts (like tlig).
 
 function afm.load(filename)
     filename = resolvers.findfile(filename,'afm') or ""
@@ -312,10 +308,8 @@ local function enhance_fix_names(data)
     end
 end
 
---[[ldx--
-<p>These helpers extend the basic table with extra ligatures, texligatures
-and extra kerns. This saves quite some lookups later.</p>
---ldx]]--
+-- These helpers extend the basic table with extra ligatures, texligatures and extra
+-- kerns. This saves quite some lookups later.
 
 local addthem = function(rawdata,ligatures)
     if ligatures then
@@ -349,17 +343,14 @@ local function enhance_add_ligatures(rawdata)
     addthem(rawdata,afm.helpdata.ligatures)
 end
 
---[[ldx--
-<p>We keep the extra kerns in separate kerning tables so that we can use
-them selectively.</p>
---ldx]]--
-
--- This is rather old code (from the beginning when we had only tfm). If
--- we unify the afm data (now we have names all over the place) then
--- we can use shcodes but there will be many more looping then. But we
--- could get rid of the tables in char-cmp then. Als, in the generic version
--- we don't use the character database. (Ok, we can have a context specific
--- variant).
+-- We keep the extra kerns in separate kerning tables so that we can use them
+-- selectively.
+--
+-- This is rather old code (from the beginning when we had only tfm). If we unify
+-- the afm data (now we have names all over the place) then we can use shcodes but
+-- there will be much more looping then. But we could get rid of the tables in
+-- char-cmp then. Also, in the generic version we don't use the character database.
+-- (Ok, we can have a context specific variant).
 
 local function enhance_add_extra_kerns(rawdata) -- using shcodes is not robust here
     local descriptions = rawdata.descriptions
@@ -440,9 +431,7 @@ local function enhance_add_extra_kerns(rawdata) -- using shcodes is not robust h
     do_it_copy(afm.helpdata.rightkerned)
 end
 
---[[ldx--
-<p>The copying routine looks messy (and is indeed a bit messy).</p>
---ldx]]--
+-- The copying routine looks messy (and is indeed a bit messy).
 
 local function adddimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name
     if data then
@@ -619,11 +608,9 @@ end
     return nil
 end
 
---[[ldx--
-<p>Originally we had features kind of hard coded for <l n='afm'/> files but since I
-expect to support more font formats, I decided to treat this fontformat like any
-other and handle features in a more configurable way.</p>
---ldx]]--
+-- Originally we had features kind of hard coded for AFM files but since I expect to
+-- support more font formats, I decided to treat this font format like any other and
+-- handle features in a more configurable way.
 
 function afm.setfeatures(tfmdata,features)
     local okay = constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
@@ -715,13 +702,10 @@ local function afmtotfm(specification)
     end
 end
 
---[[ldx--
-<p>As soon as we could intercept the <l n='tfm'/> reader, I implemented an
-<l n='afm'/> reader. Since traditional <l n='pdftex'/> could use <l n='opentype'/>
-fonts with <l n='afm'/> companions, the following method also could handle
-those cases, but now that we can handle <l n='opentype'/> directly we no longer
-need this features.</p>
---ldx]]--
+-- As soon as we could intercept the TFM reader, I implemented an AFM reader. Since
+-- traditional pdfTeX could use OpenType fonts with AFM companions, the following
+-- method also could handle those cases, but now that we can handle OpenType
+-- directly we no longer need this feature.
 
 local function read_from_afm(specification)
     local tfmdata = afmtotfm(specification)
@@ -736,9 +720,7 @@ local function read_from_afm(specification)
     return tfmdata
 end
 
---[[ldx--
-<p>We have the usual two modes and related features initializers and processors.</p>
---ldx]]--
+-- We have the usual two modes and related features initializers and processors.
 
 registerafmfeature {
     name         = "mode",
diff --git a/tex/context/base/mkxl/font-onr.lmt b/tex/context/base/mkxl/font-onr.lmt
index d28c247df..04f9d3bb2 100644
--- a/tex/context/base/mkxl/font-onr.lmt
+++ b/tex/context/base/mkxl/font-onr.lmt
@@ -7,18 +7,16 @@ if not modules then modules = { } end modules ['font-onr'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Some code may look a bit obscure but this has to do with the fact that we also use
-this code for testing and much code evolved in the transition from <l n='tfm'/> to
-<l n='afm'/> to <l n='otf'/>.</p>
-
-<p>The following code still has traces of intermediate font support where we handles
-font encodings. Eventually font encoding went away but we kept some code around in
-other modules.</p>
-
-<p>This version implements a node mode approach so that users can also more easily
-add features.</p>
---ldx]]--
+-- Some code may look a bit obscure but this has to do with the fact that we also
+-- use this code for testing and much code evolved in the transition from TFM to AFM
+-- to OTF.
+--
+-- The following code still has traces of intermediate font support where we handled
+-- font encodings. Eventually font encoding went away but we kept some code around
+-- in other modules.
+--
+-- This version implements a node mode approach so that users can also more easily
+-- add features.
 
 local fonts, logs, trackers, resolvers = fonts, logs, trackers, resolvers
 
@@ -49,12 +47,9 @@ pfb.version              = 1.002
 local readers            = afm.readers or { }
 afm.readers              = readers
 
---[[ldx--
-<p>We start with the basic reader which we give a name similar to the built in <l n='tfm'/>
-and <l n='otf'/> reader.</p>
-<p>We use a new (unfinished) pfb loader but I see no differences between the old
-and new vectors (we actually had one bad vector with the old loader).</p>
---ldx]]--
+-- We start with the basic reader which we give a name similar to the built in TFM
+-- and OTF reader. We use a PFB loader but I see no differences between the old and
+-- new vectors (we actually had one bad vector with the old loader).
 
 local get_indexes, get_shapes
 
@@ -71,7 +66,7 @@ do
          -- local plain  = bxor(cipher,rshift(r,8))
             local plain  = (cipher ~ ((r >> 8) & 0xFFFFFFFF))
          -- r = ((cipher + r) * c1 + c2) % 65536
-            r = ((cipher + r) * c1 + c2) % 0x10000         
+            r = ((cipher + r) * c1 + c2) % 0x10000
             return char(plain)
         end
 
@@ -366,11 +361,10 @@ do
 
 end
 
---[[ldx--
-<p>We start with the basic reader which we give a name similar to the built in <l n='tfm'/>
-and <l n='otf'/> reader. We only need data that is relevant for our use. We don't support
-more complex arrangements like multiple master (obsolete), direction specific kerning, etc.</p>
---ldx]]--
+-- We start with the basic reader which we give a name similar to the built in TFM
+-- and OTF reader. We only need data that is relevant for our use. We don't support
+-- more complex arrangements like multiple master (obsolete), direction specific
+-- kerning, etc.
 
 local spacer     = patterns.spacer
 local whitespace = patterns.whitespace
diff --git a/tex/context/base/mkxl/font-ota.lmt b/tex/context/base/mkxl/font-ota.lmt
index 157270ef1..6e8130741 100644
--- a/tex/context/base/mkxl/font-ota.lmt
+++ b/tex/context/base/mkxl/font-ota.lmt
@@ -56,10 +56,8 @@ local chardata            = characters and characters.data
 local otffeatures         = fonts.constructors.features.otf
 local registerotffeature  = otffeatures.register
 
---[[ldx--
-<p>Analyzers run per script and/or language and are needed in order to
-process features right.</p>
---ldx]]--
+-- Analyzers run per script and/or language and are needed in order to process
+-- features right.
 
 local setstate = nuts.setstate
 local getstate = nuts.getstate
diff --git a/tex/context/base/mkxl/font-ots.lmt b/tex/context/base/mkxl/font-ots.lmt
index e7fcfc576..0e99de6d1 100644
--- a/tex/context/base/mkxl/font-ots.lmt
+++ b/tex/context/base/mkxl/font-ots.lmt
@@ -7,92 +7,90 @@ if not modules then modules = { } end modules ['font-ots'] = { -- sequences
     license   = "see context related readme files",
 }
 
---[[ldx--
-<p>I need to check the description at the microsoft site ... it has been improved
-so maybe there are some interesting details there. Most below is based on old and
-incomplete documentation and involved quite a bit of guesswork (checking with the
-abstract uniscribe of those days. But changing things is tricky!</p>
-
-<p>This module is a bit more split up that I'd like but since we also want to test
-with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
-and discussion about improvements and functionality mostly happens on the
-<l n='context'/> mailing list.</p>
-
-<p>The specification of OpenType is (or at least decades ago was) kind of vague.
-Apart from a lack of a proper free specifications there's also the problem that
-Microsoft and Adobe may have their own interpretation of how and in what order to
-apply features. In general the Microsoft website has more detailed specifications
-and is a better reference. There is also some information in the FontForge help
-files. In the end we rely most on the Microsoft specification.</p>
-
-<p>Because there is so much possible, fonts might contain bugs and/or be made to
-work with certain rederers. These may evolve over time which may have the side
-effect that suddenly fonts behave differently. We don't want to catch all font
-issues.</p>
-
-<p>After a lot of experiments (mostly by Taco, me and Idris) the first implementation
-was already quite useful. When it did most of what we wanted, a more optimized version
-evolved. Of course all errors are mine and of course the code can be improved. There
-are quite some optimizations going on here and processing speed is currently quite
-acceptable and has been improved over time. Many complex scripts are not yet supported
-yet, but I will look into them as soon as <l n='context'/> users ask for it.</p>
-
-<p>The specification leaves room for interpretation. In case of doubt the Microsoft
-implementation is the reference as it is the most complete one. As they deal with
-lots of scripts and fonts, Kai and Ivo did a lot of testing of the generic code and
-their suggestions help improve the code. I'm aware that not all border cases can be
-taken care of, unless we accept excessive runtime, and even then the interference
-with other mechanisms (like hyphenation) are not trivial.</p>
-
-<p>Especially discretionary handling has been improved much by Kai Eigner who uses complex
-(latin) fonts. The current implementation is a compromis between his patches and my code
-and in the meantime performance is quite ok. We cannot check all border cases without
-compromising speed but so far we're okay. Given good test cases we can probably improve
-it here and there. Especially chain lookups are non trivial with discretionaries but
-things got much better over time thanks to Kai.</p>
-
-<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
-relationship with unicode at all, apart from the fact that a font might cover certain
-ranges of characters. One character can have multiple shapes. However, at the
-<l n='tex'/> end we use unicode so and all extra glyphs are mapped into a private
-space. This is needed because we need to access them and <l n='tex'/> has to include
-then in the output eventually.</p>
-
-<p>The initial data table is rather close to the open type specification and also not
-that different from the one produced by <l n='fontforge'/> but we uses hashes instead.
-In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
-so that successive runs can use the optimized table (after loading the table is
-unpacked).</p>
-
-<p>This module is sparsely documented because it is has been a moving target. The
-table format of the reader changed a bit over time and we experiment a lot with
-different methods for supporting features. By now the structures are quite stable</p>
-
-<p>Incrementing the version number will force a re-cache. We jump the number by one
-when there's a fix in the reader or processing code that can result in different
-results.</p>
-
-<p>This code is also used outside context but in context it has to work with other
-mechanisms. Both put some constraints on the code here.</p>
-
---ldx]]--
-
--- Remark: We assume that cursives don't cross discretionaries which is okay because it
--- is only used in semitic scripts.
+-- I need to check the description at the Microsoft site ... it has been improved so
+-- maybe there are some interesting details there. Most below is based on old and
+-- incomplete documentation and involved quite a bit of guesswork (checking with the
+-- abstract uniscribe of those days). But changing things is tricky!
+--
+-- This module is a bit more split up than I'd like but since we also want to test
+-- with plain TeX it has to be so. This module is part of ConTeXt and discussion
+-- about improvements and functionality mostly happens on the ConTeXt mailing list.
+--
+-- The specification of OpenType is (or at least decades ago was) kind of vague.
+-- Apart from a lack of proper free specifications there's also the problem that
+-- Microsoft and Adobe may have their own interpretation of how and in what order to
+-- apply features. In general the Microsoft website has more detailed specifications
+-- and is a better reference. There is also some information in the FontForge help
+-- files. In the end we rely most on the Microsoft specification.
+--
+-- Because there is so much possible, fonts might contain bugs and/or be made to
+-- work with certain renderers. These may evolve over time which may have the side
+-- effect that suddenly fonts behave differently. We don't want to catch all font
+-- issues.
+--
+-- After a lot of experiments (mostly by Taco, me and Idris) the first
+-- implementation was already quite useful. When it did most of what we wanted, a
+-- more optimized version evolved. Of course all errors are mine and of course the
+-- code can be improved. There are quite some optimizations going on here and
+-- processing speed is currently quite acceptable and has been improved over time.
+-- Many complex scripts are not supported yet, but I will look into them as soon
+-- as ConTeXt users ask for it.
+--
+-- The specification leaves room for interpretation. In case of doubt the Microsoft
+-- implementation is the reference as it is the most complete one. As they deal with
+-- lots of scripts and fonts, Kai and Ivo did a lot of testing of the generic code
+-- and their suggestions help improve the code. I'm aware that not all border cases
+-- can be taken care of, unless we accept excessive runtime, and even then the
+-- interference with other mechanisms (like hyphenation) is not trivial.
+--
+-- Especially discretionary handling has been improved much by Kai Eigner who uses
+-- complex (latin) fonts. The current implementation is a compromise between his
+-- patches and my code and in the meantime performance is quite ok. We cannot check
+-- all border cases without compromising speed but so far we're okay. Given good
+-- test cases we can probably improve it here and there. Especially chain lookups
+-- are non trivial with discretionaries but things got much better over time thanks
+-- to Kai.
+--
+-- Glyphs are indexed not by unicode but in their own way. This is because there is
+-- no relationship with unicode at all, apart from the fact that a font might cover
+-- certain ranges of characters. One character can have multiple shapes. However, at
+-- the TeX end we use unicode so all extra glyphs are mapped into a private
+-- space. This is needed because we need to access them and TeX has to include them
+-- in the output eventually.
+--
+-- The initial data table is rather close to the OpenType specification and also
+-- not that different from the one produced by FontForge but we use hashes instead.
+-- In ConTeXt that table is packed (similar tables are shared) and cached on disk so
+-- that successive runs can use the optimized table (after loading the table is
+-- unpacked).
+--
+-- This module is sparsely documented because it has been a moving target. The
+-- table format of the reader changed a bit over time and we experiment a lot with
+-- different methods for supporting features. By now the structures are quite stable.
+--
+-- Incrementing the version number will force a re-cache. We jump the number by one
+-- when there's a fix in the reader or processing code that can result in different
+-- results.
+--
+-- This code is also used outside ConTeXt but in ConTeXt it has to work with other
+-- mechanisms. Both put some constraints on the code here.
+--
+-- Remark: We assume that cursives don't cross discretionaries which is okay because
+-- it is only used in semitic scripts.
 --
 -- Remark: We assume that marks precede base characters.
 --
--- Remark: When complex ligatures extend into discs nodes we can get side effects. Normally
--- this doesn't happen; ff\d{l}{l}{l} in lm works but ff\d{f}{f}{f}.
+-- Remark: When complex ligatures extend into disc nodes we can get side effects.
+-- Normally this doesn't happen; ff\d{l}{l}{l} in lm works but ff\d{f}{f}{f} doesn't.
 --
 -- Todo: check if we copy attributes to disc nodes if needed.
 --
--- Todo: it would be nice if we could get rid of components. In other places we can use
--- the unicode properties. We can just keep a lua table.
+-- Todo: it would be nice if we could get rid of components. In other places we can
+-- use the unicode properties. We can just keep a lua table.
 --
--- Remark: We do some disc juggling where we need to keep in mind that the pre, post and
--- replace fields can have prev pointers to a nesting node ... I wonder if that is still
--- needed.
+-- Remark: We do some disc juggling where we need to keep in mind that the pre, post
+-- and replace fields can have prev pointers to a nesting node ... I wonder if that
+-- is still needed.
 --
 -- Remark: This is not possible:
 --
@@ -1092,10 +1090,8 @@ function handlers.gpos_pair(head,start,dataset,sequence,kerns,rlmode,skiphash,st
     end
 end
 
---[[ldx--
-<p>We get hits on a mark, but we're not sure if the it has to be applied so
-we need to explicitly test for basechar, baselig and basemark entries.</p>
---ldx]]--
+-- We get hits on a mark, but we're not sure if it has to be applied so we need
+-- to explicitly test for basechar, baselig and basemark entries.
 
 function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode,skiphash)
     local markchar = getchar(start)
@@ -1292,10 +1288,8 @@ function handlers.gpos_cursive(head,start,dataset,sequence,exitanchors,rlmode,sk
     return head, start, false
 end
 
---[[ldx--
-<p>I will implement multiple chain replacements once I run into a font that uses
-it. It's not that complex to handle.</p>
---ldx]]--
+-- I will implement multiple chain replacements once I run into a font that uses it.
+-- It's not that complex to handle.
 
 local chainprocs = { }
 
@@ -1348,29 +1342,22 @@ end
 
 chainprocs.reversesub = reversesub
 
---[[ldx--
-<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
-applied: single, alternate, multiple or ligature where ligature can be an invalid
-one in the sense that it will replace multiple by one but not neccessary one that
-looks like the combination (i.e. it is the counterpart of multiple then). For
-example, the following is valid:</p>
-
-<typing>
-<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
-</typing>
-
-<p>Therefore we we don't really do the replacement here already unless we have the
-single lookup case. The efficiency of the replacements can be improved by deleting
-as less as needed but that would also make the code even more messy.</p>
---ldx]]--
-
---[[ldx--
-<p>Here we replace start by a single variant.</p>
---ldx]]--
-
--- To be done (example needed): what if > 1 steps
-
--- this is messy: do we need this disc checking also in alternates?
+-- This chain stuff is somewhat tricky since we can have a sequence of actions to be
+-- applied: single, alternate, multiple or ligature where ligature can be an invalid
+-- one in the sense that it will replace multiple by one but not necessarily one that
+-- looks like the combination (i.e. it is the counterpart of multiple then). For
+-- example, the following is valid:
+--
+--   xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx
+--
+-- Therefore we don't really do the replacement here already unless we have the
+-- single lookup case. The efficiency of the replacements can be improved by
+-- deleting as little as needed but that would also make the code even more messy.
+--
+-- Here we replace start by a single variant.
+--
+-- To be done   : what if > 1 steps (example needed)
+-- This is messy: do we need this disc checking also in alternates?
 
 local function reportzerosteps(dataset,sequence)
     logwarning("%s: no steps",cref(dataset,sequence))
@@ -1446,9 +1433,7 @@ function chainprocs.gsub_single(head,start,stop,dataset,sequence,currentlookup,r
     return head, start, false
 end
 
---[[ldx--
-<p>Here we replace start by new glyph. First we delete the rest of the match.</p>
---ldx]]--
+-- Here we replace start by a new glyph. First we delete the rest of the match.
 
 -- char_1 mark_1 -> char_x mark_1 (ignore marks)
 -- char_1 mark_1 -> char_x
@@ -1500,9 +1485,7 @@ function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlooku
     return head, start, false
 end
 
---[[ldx--
-<p>Here we replace start by a sequence of new glyphs.</p>
---ldx]]--
+-- Here we replace start by a sequence of new glyphs.
 
 function chainprocs.gsub_multiple(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
     local mapping = currentlookup.mapping
@@ -1526,11 +1509,9 @@ function chainprocs.gsub_multiple(head,start,stop,dataset,sequence,currentlookup
     return head, start, false
 end
 
---[[ldx--
-<p>When we replace ligatures we use a helper that handles the marks. I might change
-this function (move code inline and handle the marks by a separate function). We
-assume rather stupid ligatures (no complex disc nodes).</p>
---ldx]]--
+-- When we replace ligatures we use a helper that handles the marks. I might change
+-- this function (move code inline and handle the marks by a separate function). We
+-- assume rather stupid ligatures (no complex disc nodes).
 
 -- compare to handlers.gsub_ligature which is more complex ... why
 
diff --git a/tex/context/base/mkxl/font-tfm.lmt b/tex/context/base/mkxl/font-tfm.lmt
index 9fce8fc5f..d6857b39e 100644
--- a/tex/context/base/mkxl/font-tfm.lmt
+++ b/tex/context/base/mkxl/font-tfm.lmt
@@ -50,21 +50,18 @@ constructors.resolvevirtualtoo = false -- wil be set in font-ctx.lua
 fonts.formats.tfm              = "type1" -- we need to have at least a value here
 fonts.formats.ofm              = "type1" -- we need to have at least a value here
 
---[[ldx--
-<p>The next function encapsulates the standard <l n='tfm'/> loader as
-supplied by <l n='luatex'/>.</p>
---ldx]]--
-
--- this might change: not scaling and then apply features and do scaling in the
--- usual way with dummy descriptions but on the other hand .. we no longer use
--- tfm so why bother
-
--- ofm directive blocks local path search unless set; btw, in context we
--- don't support ofm files anyway as this format is obsolete
-
--- we need to deal with nested virtual fonts, but because we load in the
--- frontend we also need to make sure we don't nest too deep (esp when sizes
--- get large)
+-- The next function encapsulates the standard TFM loader as supplied by LuaTeX.
+--
+-- This might change: not scaling and then applying features and doing the scaling
+-- in the usual way with dummy descriptions. On the other hand, we no longer use
+-- TFM (except for the JMN math fonts) so why bother.
+--
+-- The ofm directive blocks a local path search unless set. Actually, in ConTeXt we
+-- never had to deal with OFM files anyway as this format is obsolete (there are
+-- hardly any fonts in that format that are of use).
+--
+-- We need to deal with nested virtual fonts, but because we load in the frontend we
+-- also need to make sure we don't nest too deep (esp when sizes get large).
 --
 -- (VTITLE Example of a recursion)
 -- (MAPFONT D 0 (FONTNAME recurse)(FONTAT D 2))
@@ -72,7 +69,8 @@ supplied by <l n='luatex'/>.</p>
 -- (CHARACTER C B (CHARWD D 2)(CHARHT D 2)(MAP (SETCHAR C A)))
 -- (CHARACTER C C (CHARWD D 4)(CHARHT D 4)(MAP (SETCHAR C B)))
 --
--- we added the same checks as below to the luatex engine
+-- The virtual fonts are handled in the backend and therefore LMTX provides more
+-- features than in the original specification. LuaTeX already had a few more.
 
 function tfm.setfeatures(tfmdata,features)
     local okay = constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm)
diff --git a/tex/context/base/mkxl/lang-url.lmt b/tex/context/base/mkxl/lang-url.lmt
index b918464d0..7607d7d84 100644
--- a/tex/context/base/mkxl/lang-url.lmt
+++ b/tex/context/base/mkxl/lang-url.lmt
@@ -23,12 +23,10 @@ local v_after   = variables.after
 
 local is_letter = characters.is_letter
 
---[[
-<p>Hyphenating <l n='url'/>'s is somewhat tricky and a matter of taste. I did
-consider using a dedicated hyphenation pattern or dealing with it by node
-parsing, but the following solution suits as well. After all, we're mostly
-dealing with <l n='ascii'/> characters.</p>
-]]--
+-- Hyphenating URL's is somewhat tricky and a matter of taste. I did consider using
+-- a dedicated hyphenation pattern or dealing with it by node parsing, but the
+-- following solution suits as well. After all, we're mostly dealing with ASCII
+-- characters.
 
 local urls     = { }
 languages.urls = urls
diff --git a/tex/context/base/mkxl/lpdf-ano.lmt b/tex/context/base/mkxl/lpdf-ano.lmt
index 55b145730..2e19ffd5e 100644
--- a/tex/context/base/mkxl/lpdf-ano.lmt
+++ b/tex/context/base/mkxl/lpdf-ano.lmt
@@ -725,6 +725,7 @@ lpdf.action = pdfaction
 
 function codeinjections.prerollreference(actions) -- share can become option
     if actions then
+-- inspect(actions)
         local main, n = pdfaction(actions)
         if main then
             local bs, bc = pdfborder()
diff --git a/tex/context/base/mkxl/lpdf-pde.lmt b/tex/context/base/mkxl/lpdf-pde.lmt
index 68712d58d..4e5d73e04 100644
--- a/tex/context/base/mkxl/lpdf-pde.lmt
+++ b/tex/context/base/mkxl/lpdf-pde.lmt
@@ -67,7 +67,6 @@ local lpdf              = lpdf
 local lpdf_epdf         = { }
       lpdf.epdf         = lpdf_epdf
 
-local pdfopen           = pdfe.open
 local pdfopenfile       = pdfe.openfile
 local pdfnew            = pdfe.new
 local pdfclose          = pdfe.close
@@ -540,10 +539,9 @@ function lpdf_epdf.load(filename,userpassword,ownerpassword,fromstring)
         local __file__
         if fromstring then
             __data__ = pdfnew(filename,#filename)
-        elseif pdfopenfile then
-            __data__ = pdfopenfile(ioopen(filename,"rb"))
         else
-            __data__ = pdfopen(filename)
+            local f = ioopen(filename,"rb")
+            __data__ = f and pdfopenfile(f)
         end
         if __data__ then
             if userpassword and getstatus(__data__) < 0 then
diff --git a/tex/context/base/mkxl/luat-cbk.lmt b/tex/context/base/mkxl/luat-cbk.lmt
index 744d12e27..2a3a58b04 100644
--- a/tex/context/base/mkxl/luat-cbk.lmt
+++ b/tex/context/base/mkxl/luat-cbk.lmt
@@ -12,20 +12,16 @@ local collectgarbage, type, next = collectgarbage, type, next
 local round = math.round
 local sortedhash, sortedkeys, tohash = table.sortedhash, table.sortedkeys, table.tohash
 
---[[ldx--
-<p>Callbacks are the real asset of <l n='luatex'/>. They permit you to hook
-your own code into the <l n='tex'/> engine. Here we implement a few handy
-auxiliary functions.</p>
---ldx]]--
+-- Callbacks are the real asset of LuaTeX. They permit you to hook your own code
+-- into the TeX engine. Here we implement a few handy auxiliary functions. Watch
+-- out, there are differences between LuaTeX and LuaMetaTeX.
 
 callbacks       = callbacks or { }
 local callbacks = callbacks
 
---[[ldx--
-<p>When you (temporarily) want to install a callback function, and after a
-while wants to revert to the original one, you can use the following two
-functions. This only works for non-frozen ones.</p>
---ldx]]--
+-- When you (temporarily) want to install a callback function, and after a while
+-- want to revert to the original one, you can use the following two functions.
+-- This only works for non-frozen ones.
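+--
+-- The pattern boils down to "save, overload, restore". A simple sketch (with made
+-- up names, not necessarily the functions defined below):
+--
+--   local saved = { }
+--
+--   local function pushcallback(name,f)
+--       saved[name] = callback.find(name)     -- remember the current function
+--       callback.register(name,f)
+--   end
+--
+--   local function popcallback(name)
+--       callback.register(name,saved[name])   -- put the original one back
+--       saved[name] = nil
+--   end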
 
 local trace_callbacks   = false  trackers.register("system.callbacks", function(v) trace_callbacks = v end)
 local trace_calls       = false  -- only used when analyzing performance and initializations
@@ -47,13 +43,12 @@ local list              = callbacks.list
 local permit_overloads  = false
 local block_overloads   = false
 
---[[ldx--
-<p>By now most callbacks are frozen and most provide a way to plug in your own code. For instance
-all node list handlers provide before/after namespaces and the file handling code can be extended
-by adding schemes and if needed I can add more hooks. So there is no real need to overload a core
-callback function. It might be ok for quick and dirty testing but anyway you're on your own if
-you permanently overload callback functions.</p>
---ldx]]--
+-- By now most callbacks are frozen and most provide a way to plug in your own code.
+-- For instance all node list handlers provide before/after namespaces and the file
+-- handling code can be extended by adding schemes and if needed I can add more
+-- hooks. So there is no real need to overload a core callback function. It might be
+-- ok for quick and dirty testing but anyway you're on your own if you permanently
+-- overload callback functions.
 
 -- This might become a configuration file only option when it gets abused too much.
 
diff --git a/tex/context/base/mkxl/luat-cod.mkxl b/tex/context/base/mkxl/luat-cod.mkxl
index ed4a13981..322076aa1 100644
--- a/tex/context/base/mkxl/luat-cod.mkxl
+++ b/tex/context/base/mkxl/luat-cod.mkxl
@@ -42,7 +42,7 @@
 \toksapp \everydump {%
    \permanent\let\ctxlatelua       \latelua
    \permanent\def\ctxlatecommand#1{\latelua{commands.#1}}%
-    \aliased\let\lateluacode       \ctxlatelua
+     \aliased\let\lateluacode       \ctxlatelua
 } % no \appendtoks yet
 
 \protect \endinput
diff --git a/tex/context/base/mkxl/luat-ini.lmt b/tex/context/base/mkxl/luat-ini.lmt
index 3202ea42b..56e3bd1c1 100644
--- a/tex/context/base/mkxl/luat-ini.lmt
+++ b/tex/context/base/mkxl/luat-ini.lmt
@@ -6,11 +6,9 @@ if not modules then modules = { } end modules ['luat-ini'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>We cannot load anything yet. However what we will do us reserve a few tables.
-These can be used for runtime user data or third party modules and will not be
-cluttered by macro package code.</p>
---ldx]]--
+-- We cannot load anything yet. However what we will do is reserve a few tables.
+-- These can be used for runtime user data or third party modules and will not be
+-- cluttered by macro package code.
 
 userdata      = userdata      or { } -- for users (e.g. functions etc)
 thirddata     = thirddata     or { } -- only for third party modules
diff --git a/tex/context/base/mkxl/math-act.lmt b/tex/context/base/mkxl/math-act.lmt
index 0c75147f6..4a46baff9 100644
--- a/tex/context/base/mkxl/math-act.lmt
+++ b/tex/context/base/mkxl/math-act.lmt
@@ -533,7 +533,7 @@ do
         k = mathgaps[k] or k
         local character = targetcharacters[k]
         if character then
---             if not character.tweaked then -- todo: add a force
+         -- if not character.tweaked then -- todo: add a force
                 local t = type(v)
                 if t == "number" then
                     v = list[v]
@@ -666,7 +666,7 @@ do
                 else
                     report_mathtweak("invalid dimension entry %U",k)
                 end
--- character.tweaked = true
+             -- character.tweaked = true
                 if v.all then
                     local nxt = character.next
                     if nxt then
@@ -680,7 +680,7 @@ do
                         end
                     end
                 end
---             end
+         -- end
         else
             report_tweak("no character %U",target,original,k)
         end
@@ -1938,63 +1938,178 @@ do
     -- vfmath.builders.extension(target)
 
     local rbe = newprivateslot("radical bar extender")
+    local fbe = newprivateslot("fraction bar extender")
+
+    local frp = {
+        newprivateslot("flat rule left piece"),
+        newprivateslot("flat rule middle piece"),
+        newprivateslot("flat rule right piece"),
+    }
+
+    local rrp = {
+        newprivateslot("radical rule middle piece"),
+        newprivateslot("radical rule right piece"),
+    }
+
+    local mrp = {
+        newprivateslot("minus rule left piece"),
+        newprivateslot("minus rule middle piece"),
+        newprivateslot("minus rule right piece"),
+    }
 
-    local function useminus(unicode,characters,parameters)
+    local function useminus(target,unicode,characters,parameters,skipfirst,what)
         local minus   = characters[0x2212]
-        local xoffset = parameters.xoffset or .075
-        local yoffset = parameters.yoffset or .9
-        local xscale  = parameters.xscale or 1
-        local yscale  = parameters.yscale or 1
-        local xwidth  = parameters.width   or (1 - 2*xoffset)
-        local xheight = parameters.height  or (1 - yoffset)
-        local mheight = minus.height
-        local mwidth  = minus.width
-        local height  = xheight*mheight
-        local xshift  = xoffset * mwidth
-        local yshift  = yoffset * mheight
-        local advance = xwidth  * mwidth
-        local step    = mwidth  / 2
-        characters[unicode] = {
-            height   = height,
-            depth    = height,
-            width    = advance,
-            commands = {
-            push,
-                leftcommand[xshift],
-                downcommand[yshift],
-             -- slotcommand[0][0x2212],
-                { "slot", 0, 0x2212, xscale, yscale },
-            pop,
-            },
-            unicode = unicode,
-         -- parts = {
-         --     { extender = 0, glyph = first,  ["end"] = fw/2, start = 0,    advance = fw },
-         --     { extender = 1, glyph = middle, ["end"] = mw/2, start = mw/2, advance = mw },
-         --     { extender = 0, glyph = last,   ["end"] = 0,    start = lw/2, advance = lw },
-         -- },
-            parts   = {
-                { extender = 0, glyph = unicode, ["end"] = step, start = 0,    advance = advance },
-                { extender = 1, glyph = unicode, ["end"] = step, start = step, advance = advance },
-            },
-            partsorientation = "horizontal",
-        }
+        local parts   = minus.parts
+        if parameters == true then
+            parameters = { }
+        end
+        if parts then
+            parts  = copytable(parts)
+            local xscale  = parameters.xscale  or 1
+            local yscale  = parameters.yscale  or 1
+            local mheight = minus.height
+            local height = (parameters.height  or 1) * mheight
+            local yshift = (parameters.yoffset or 0) * mheight
+            if skipfirst then
+                table.remove(parts,1)
+            end
+            height = height / 2
+            yshift = yshift + height
+            for i=1,#parts do
+                local part   = parts[i]
+                local glyph  = part.glyph
+                local gdata  = characters[glyph]
+                local width  = gdata.width
+                local xshift = 0
+                if i == 1 and parameters.leftoffset then
+                    xshift = (parameters.leftoffset) * width
+                    width  = width - xshift
+                elseif i == #parts and parameters.rightoffset then
+                    width  = (1 + parameters.rightoffset) * width
+                end
+                characters[what[i]] = {
+                    height   = height,
+                    depth    = height,
+                    width    = width,
+                    commands = {
+                        leftcommand[xshift],
+                        downcommand[yshift],
+--                         slotcommand[0][glyph],
+                    { "slot", 0, glyph, xscale, yscale },
+                    },
+                }
+                part.glyph   = what[i]
+                part.advance = width
+            end
+            characters[unicode] = {
+                height   = height,
+                depth    = height,
+                width    = advance,
+                commands = {
+                    downcommand[yshift],
+--                     slotcommand[0][0x2212],
+                    { "slot", 0, 0x2212, xscale, yscale },
+                },
+                unicode          = unicode,
+                parts            = parts,
+                partsorientation = "horizontal",
+            }
+        end
+    end
+
+    -- add minus parts if not there and create a clipped clone
+
+    local function checkminus(target,unicode,characters,parameters,skipfirst,what)
+        local minus = characters[unicode]
+        local parts = minus.parts
+        if parameters == true then
+            parameters = { }
+        end
+        local p_normal = 0
+        local p_flat   = 0
+        local mwidth   = minus.width
+        local height   = minus.height
+        local depth    = minus.depth
+        local loffset  = parameters.leftoffset  or 0
+        local roffset  = parameters.rightoffset or 0
+        local lshift   = mwidth * loffset
+        local rshift   = mwidth * roffset
+        local width    = mwidth - lshift - rshift
+        if parts then
+         -- print("minus has parts")
+            if lshift ~= 0 or width ~= mwidth then
+                parts = copytable(parts)
+                for i=1,#parts do
+                    local part    = parts[i]
+                    local glyph   = part.glyph
+                    local gdata   = characters[glyph]
+                    local width   = gdata.width
+                    local advance = part.advance
+                    local lshift = 0
+                    if i == 1 and loffset ~= 0 then
+                        lshift  = loffset * width
+                        width   = width - lshift
+                        advance = advance - lshift
+                    elseif i == #parts and roffset ~= 0 then
+                        width   = width - rshift
+                        advance = advance - rshift
+                    end
+                    characters[what[i]] = {
+                        height   = height,
+                        depth    = depth,
+                        width    = width,
+                        commands = {
+                            leftcommand[lshift],
+                            slotcommand[0][glyph],
+                        },
+                    }
+                    part.glyph   = what[i]
+                    part.advance = advance
+                end
+                minus.parts = parts
+                minus.partsorientation = "horizontal"
+
+            end
+        else
+            local f_normal = formatters["M-NORMAL-%H"](unicode)
+         -- local p_normal = hasprivate(main,f_normal)
+            p_normal = addprivate(target,f_normal,{
+                height   = height,
+                width    = width,
+                commands = {
+                    push,
+                    leftcommand[lshift],
+                    slotcommand[0][unicode],
+                    pop,
+                },
+            })
+            local step = width/2
+            minus.parts = {
+                { extender = 0, glyph = p_normal, ["end"] = step, start = 0,    advance = width },
+                { extender = 1, glyph = p_normal, ["end"] = step, start = step, advance = width },
+                { extender = 0, glyph = p_normal, ["end"] = 0,    start = step, advance = width },
+            }
+            minus.partsorientation = "horizontal"
+        end
     end
 
     function mathtweaks.replacerules(target,original,parameters)
         local characters = target.characters
+        local minus      = parameters.minus
         local fraction   = parameters.fraction
         local radical    = parameters.radical
+        local stacker    = parameters.stacker
+        if minus then
+            checkminus(target,0x2212,characters,minus,false,mrp)
+        end
         if fraction then
-            local template = fraction.template
-            if template == 0x2212 or template == "minus" then
-                useminus(0x203E,characters,fraction)
-            end
+            useminus(target,fbe,characters,fraction,false,frp)
         end
         if radical then
-            local template = radical.template
-            if template == 0x2212 or template == "minus" then
-                useminus(rbe,characters,radical)
-            end
+            useminus(target,rbe,characters,radical,true,rrp)
+        end
+        if stacker then
+            useminus(target,0x203E,characters,stacker,false,frp)
         end
     end
 
@@ -2110,6 +2225,7 @@ do
         return {
             --
             [0x002D] = { { left = slack, right = slack, glyph = 0x2212 }, single }, -- rel
+-- [0x2212] = { { left = slack, right = slack, glyph = 0x2212 }, single }, -- rel
             --
             [0x2190] = leftsingle, -- leftarrow
             [0x219E] = leftsingle, -- twoheadleftarrow
@@ -3091,59 +3207,6 @@ do
     local double <const> = 0x2016
     local triple <const> = 0x2980
 
- -- local nps = fonts.helpers.newprivateslot
- --
- -- local function variantlist(characters,unicode,chardata,what,total,used)
- --     local parenthesis = characters[0x28].next
- --     local width  = chardata.width
- --     local height = chardata.height
- --     local depth  = chardata.depth
- --     local total  = height + depth
- --     local count  = 1
- --     while parenthesis do
- --         local private   = nps(what .. " size " .. count)
- --         local pardata   = characters[parenthesis]
- --         local parheight = pardata.height
- --         local pardepth  = pardata.depth
- --         local scale     = (parheight+pardepth)/total
- --         local offset    = - pardepth + scale * depth
- --         chardata.next   = private
- --         chardata = {
- --             unicode  = unicode,
- --             width    = width,
- --             height   = parheight,
- --             depth    = pardepth,
- --             commands = {
- --                 { "offset", 0, offset, unicode, 1, scale }
- --             },
- --         }
- --         characters[private] = chardata
- --         parenthesis = pardata.next
- --         if paranthesis then
- --             pardata = characters[parenthesis]
- --         end
- --         count = count + 1
- --     end
- --     chardata.parts = {
- --         {
- --             advance  = total,
- --             ["end"]  = used,
- --             glyph    = unicode,
- --             start    = 0,
- --          -- start    = used/5,
- --         },
- --         {
- --             advance  = total,
- --          -- ["end"]  = 0,
- --             ["end"]  = used/5, -- prevents small gap with inward curved endpoints
- --             extender = 1,
- --             glyph    = unicode,
- --             start    = used,
- --         },
- --     }
- --     chardata.partsorientation = "vertical"
- -- end
-
     local function variantlist(unicode,chardata,total,used)
         chardata.varianttemplate = 0x0028
         chardata.parts = {
diff --git a/tex/context/base/mkxl/math-ali.mkxl b/tex/context/base/mkxl/math-ali.mkxl
index b37887332..b90bad174 100644
--- a/tex/context/base/mkxl/math-ali.mkxl
+++ b/tex/context/base/mkxl/math-ali.mkxl
@@ -1403,9 +1403,41 @@
    \c!toffset=.25\exheight,
    \c!boffset=\mathmatrixparameter\c!toffset]
 
-\noaligned\permanent\tolerant\protected\def\math_matrix_HL[#1]#*%
+% \noaligned\permanent\tolerant\protected\def\math_matrix_HL[#1]#*%
+%   {\noalign\bgroup
+%      \math_matrix_check_rule[#1]%
+%      \divideby\scratchdimen\plustwo
+%      \ifdim\scratchdimen>\zeropoint
+%      % \autorule\s!height\scratchdimen\s!depth\scratchdimen\relax
+%        \scratchdistance\mathmatrixparameter\c!toffset\relax
+%        \ifdim\scratchdistance>\zeropoint
+%          \nohrule
+%              \s!attr  \mathalignmentvruleattribute\plustwo
+%              \s!height\scratchdistance
+%              \s!depth \zeropoint
+%          \relax
+%        \fi
+%        \hrule
+%            \s!attr  \mathalignmentvruleattribute\plusthree
+%            \s!height\scratchdimen
+%            \s!depth \scratchdimen
+%        \relax
+%        \scratchdistance\mathmatrixparameter\c!boffset\relax
+%        \ifdim\scratchdistance>\zeropoint
+%          \nohrule
+%              \s!attr  \mathalignmentvruleattribute\plusfour
+%              \s!height\zeropoint
+%              \s!depth \scratchdistance
+%          \relax
+%        \fi
+%      \else
+%         % zero dimensions disable the rule
+%      \fi
+%    \egroup}
+
+\def\math_matrix_HL_indeed#1#2%
   {\noalign\bgroup
-     \math_matrix_check_rule[#1]%
+     \math_matrix_check_rule[#2]%
      \divideby\scratchdimen\plustwo
      \ifdim\scratchdimen>\zeropoint
      % \autorule\s!height\scratchdimen\s!depth\scratchdimen\relax
@@ -1422,6 +1454,17 @@
            \s!height\scratchdimen
            \s!depth \scratchdimen
        \relax
+       \ifnum#1>\plusone
+         \localcontrolledloop\plustwo#1\plusone
+            {\kern.125\d_math_eqalign_distance % hskip
+             \hrule
+                 \s!attr  \mathalignmentvruleattribute\plusthree
+                 \s!height\scratchdimen
+                 \s!depth \scratchdimen
+              \relax}%
+         \kern-2\scratchdimen
+         \kern-.125\d_math_eqalign_distance % hskip
+       \fi
        \scratchdistance\mathmatrixparameter\c!boffset\relax
        \ifdim\scratchdistance>\zeropoint
          \nohrule
@@ -1435,6 +1478,9 @@
      \fi
    \egroup}
 
+\permanent\tolerant\noaligned\protected\def\math_matrix_HL  [#1]#*{\math_matrix_HL_indeed\plusone{#1}}
+\permanent\tolerant\noaligned\protected\def\math_matrix_HLHL[#1]#*{\math_matrix_HL_indeed\plustwo{#1}}
+
 \protected\def\math_matrix_vertical_rule_indeed#1#2%
   {\math_matrix_check_rule[#2]%
    \enablematrixrules
@@ -1514,19 +1560,38 @@
    %boundary\c_math_matrix_sl_boundary
    \enforced\let\NR\math_matrix_NL_NR}
 
-\permanent\tolerant\protected\def\math_matrix_VL[#1]#*%
+% \permanent\tolerant\protected\def\math_matrix_VL[#1]#*%
+%   {\span\omit
+%    \ifconditional\c_math_matrix_first\else
+%      \kern.5\d_math_eqalign_distance % hskip
+%    \fi
+%    \math_matrix_vertical_rule_yes{#1}%
+%    \kern.5\d_math_eqalign_distance % hskip
+%    \global\setfalse\c_math_matrix_first
+%    \aligntab
+%    \boundary\c_math_matrix_vl_boundary
+%    \enforced\let\NR\math_matrix_NL_NR
+%    }
+
+\def\math_matrix_VL_indeed#1#2%%
   {\span\omit
    \ifconditional\c_math_matrix_first\else
      \kern.5\d_math_eqalign_distance % hskip
    \fi
-   \math_matrix_vertical_rule_yes{#1}%
-   \kern.5\d_math_eqalign_distance % hskip
+   \math_matrix_vertical_rule_yes{#2}%
+   \localcontrolledloop\plustwo#1\plusone
+     {\kern.125\d_math_eqalign_distance % hskip
+      \math_matrix_vertical_rule_yes{#2}}%
+   \kern.5\d_math_eqalign_distance
    \global\setfalse\c_math_matrix_first
    \aligntab
    \boundary\c_math_matrix_vl_boundary
    \enforced\let\NR\math_matrix_NL_NR
    }
 
+\permanent\tolerant\protected\def\math_matrix_VL  [#1]#*{\math_matrix_VL_indeed\plusone{#1}}
+\permanent\tolerant\protected\def\math_matrix_VLVL[#1]#*{\math_matrix_VL_indeed\plustwo{#1}}
+
 \permanent\tolerant\protected\def\math_matrix_NL[#1]#*%
   {\span\omit
    \ifconditional\c_math_matrix_first\else
@@ -1585,6 +1650,9 @@
    \enforced\let\VC\math_matrix_VC % bonus, extra column
    \enforced\let\VT\math_matrix_VT % bonus, idem but tight
    \enforced\let\TB\math_common_TB
+   % just because it's easy:
+   \enforced\let\VLVL\math_matrix_VLVL
+   \enforced\let\HLHL\math_matrix_HLHL
 \to \everymathmatrix
 
 \definesystemattribute[mathmatrixornament][public]
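The \VLVL and \HLHL aliases are only activated inside math matrices (via the
\everymathmatrix hookup above). A minimal usage sketch, untested, assuming the
double variants are used exactly like the existing \VL and \HL column and row
rules:

\startformula
    \startmathmatrix
        \NC a \VLVL b \NR
        \HLHL
        \NC c \VLVL d \NR
    \stopmathmatrix
\stopformula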
diff --git a/tex/context/base/mkxl/math-fnt.lmt b/tex/context/base/mkxl/math-fnt.lmt
index 911e0adb5..7e2c0c75c 100644
--- a/tex/context/base/mkxl/math-fnt.lmt
+++ b/tex/context/base/mkxl/math-fnt.lmt
@@ -63,9 +63,11 @@ local function register_extensible(font,char,style,box)
         return nil
     else
         local bx = tonut(box)
-        updaters.apply("tagging.state.disable") -- fast enough
-        nodes.handlers.finalizelist(bx)
-        updaters.apply("tagging.state.enable")
+        -- actually we don't want colors and such so if we do finalize we
+        -- should be more selective:
+--         updaters.apply("tagging.state.disable")
+--         nodes.handlers.finalizelist(bx)
+--         updaters.apply("tagging.state.enable")
         local id = getid(bx)
         local al = getattrlst(bx)
         local wd, ht, dp = getwhd(bx)
diff --git a/tex/context/base/mkxl/math-frc.mkxl b/tex/context/base/mkxl/math-frc.mkxl
index 47edc52c4..5c1eab8dd 100644
--- a/tex/context/base/mkxl/math-frc.mkxl
+++ b/tex/context/base/mkxl/math-frc.mkxl
@@ -104,6 +104,14 @@
    \c!vfactor=\plusthousand,
    \c!rule=\v!auto]
 
+%D We now default to nice bars:
+
+\integerdef\fractionbarextenderuc \privatecharactercode{fraction bar extender}
+
+\setupmathfractions
+  [\c!rule=\v!symbol,
+   \c!middle=\fractionbarextenderuc]
+
 \appendtoks
     \instance\frozen\protected\edefcsname\currentmathfraction\endcsname{\math_frac{\currentmathfraction}}%
 \to \everydefinemathfraction
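A note on the new default above: with rule=symbol the fraction rule comes from
the extensible fraction bar glyph instead of being drawn as a plain rule. As a
sketch, assuming the regular setup interface, a document that prefers the old
behaviour can presumably switch back to the previous value from the parameter
list above:

\setupmathfractions
  [rule=auto]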
diff --git a/tex/context/base/mkxl/math-ini.mkxl b/tex/context/base/mkxl/math-ini.mkxl
index 8c0615eb6..6f2dfc1c2 100644
--- a/tex/context/base/mkxl/math-ini.mkxl
+++ b/tex/context/base/mkxl/math-ini.mkxl
@@ -1399,6 +1399,10 @@
     % \im{1\unit{hour} 20 \unit{minute} 56 \unit{second}}
     %
     \inherited\setmathspacing \mathdimensioncode       \mathdigitcode           \allmathstyles    \thickmuskip
+    \inherited\setmathspacing \mathdimensioncode       \mathbinarycode          \allsplitstyles   \medmuskip
+    \inherited\setmathspacing \mathdimensioncode       \mathbinarycode          \allunsplitstyles \pettymuskip
+    \inherited\setmathspacing \mathdimensioncode       \mathrelationcode        \allsplitstyles   \thickmuskip
+    \inherited\setmathspacing \mathdimensioncode       \mathrelationcode        \allunsplitstyles \pettymuskip
     %
     \inherited\setmathspacing \mathfakecode            \mathallcode             \allmathstyles    \tinymuskip
     \inherited\setmathspacing \mathallcode             \mathfakecode            \allmathstyles    \tinymuskip
@@ -2814,50 +2818,83 @@
 
 \installcorenamespace {mathautopunctuation}
 
-\bgroup
-
-    % This can and will be replaced by classes:
-
-    \catcode\commaasciicode    \activecatcode
-    \catcode\periodasciicode   \activecatcode
-    \catcode\semicolonasciicode\activecatcode
-
-    \gdefcsname\??mathautopunctuation\v!no\endcsname
-      {\let,\math_punctuation_nop_comma
-       \let.\math_punctuation_nop_period
-       \let;\math_punctuation_nop_semicolon}
-
-    \gdefcsname\??mathautopunctuation\v!yes\endcsname
-      {\let,\math_punctuation_yes_comma
-       \let.\math_punctuation_yes_period
-       \let;\math_punctuation_nop_semicolon}
-
-    \gdefcsname\??mathautopunctuation\v!all\endcsname
-      {\let,\math_punctuation_all_comma
-       \let.\math_punctuation_all_period
-       \let;\math_punctuation_nop_semicolon}
-
-    \gdefcsname\??mathautopunctuation comma\endcsname
-      {\let,\math_punctuation_yes_comma
-       \let.\math_punctuation_yes_period
-       \let;\math_punctuation_nop_semicolon}
-
-    \gdefcsname\??mathautopunctuation\v!yes\string,semicolon\endcsname
-      {\let,\math_punctuation_yes_comma
-       \let.\math_punctuation_yes_period
-       \let;\math_punctuation_yes_semicolon}
-
-    \gdefcsname\??mathautopunctuation comma\string,semicolon\endcsname
-      {\let,\math_punctuation_yes_comma
-       \let.\math_punctuation_yes_period
-       \let;\math_punctuation_yes_semicolon}
-
-    \gdefcsname\??mathautopunctuation\v!all\string,semicolon\endcsname
-      {\let,\math_punctuation_all_comma
-       \let.\math_punctuation_all_period
-       \let;\math_punctuation_all_semicolon}
+% \bgroup
+%
+%     \catcode\commaasciicode    \activecatcode
+%     \catcode\periodasciicode   \activecatcode
+%     \catcode\semicolonasciicode\activecatcode
+%
+%     \gdefcsname\??mathautopunctuation\v!no\endcsname
+%       {\let,\math_punctuation_nop_comma
+%        \let.\math_punctuation_nop_period
+%        \let;\math_punctuation_nop_semicolon}
+%
+%     \gdefcsname\??mathautopunctuation\v!yes\endcsname
+%       {\let,\math_punctuation_yes_comma
+%        \let.\math_punctuation_yes_period
+%        \let;\math_punctuation_nop_semicolon}
+%
+%     \gdefcsname\??mathautopunctuation\v!all\endcsname
+%       {\let,\math_punctuation_all_comma
+%        \let.\math_punctuation_all_period
+%        \let;\math_punctuation_nop_semicolon}
+%
+%     \gdefcsname\??mathautopunctuation comma\endcsname
+%       {\let,\math_punctuation_yes_comma
+%        \let.\math_punctuation_yes_period
+%        \let;\math_punctuation_nop_semicolon}
+%
+%     \gdefcsname\??mathautopunctuation\v!yes\string,semicolon\endcsname
+%       {\let,\math_punctuation_yes_comma
+%        \let.\math_punctuation_yes_period
+%        \let;\math_punctuation_yes_semicolon}
+%
+%     \gdefcsname\??mathautopunctuation comma\string,semicolon\endcsname
+%       {\let,\math_punctuation_yes_comma
+%        \let.\math_punctuation_yes_period
+%        \let;\math_punctuation_yes_semicolon}
+%
+%     \gdefcsname\??mathautopunctuation\v!all\string,semicolon\endcsname
+%       {\let,\math_punctuation_all_comma
+%        \let.\math_punctuation_all_period
+%        \let;\math_punctuation_all_semicolon}
+%
+% \egroup
 
-\egroup
+\defcsname\??mathautopunctuation\v!no\endcsname
+  {\letcharcode\commaasciicode    \math_punctuation_nop_comma
+   \letcharcode\periodasciicode   \math_punctuation_nop_period
+   \letcharcode\semicolonasciicode\math_punctuation_nop_semicolon}
+
+\defcsname\??mathautopunctuation\v!yes\endcsname
+  {\letcharcode\commaasciicode    \math_punctuation_yes_comma
+   \letcharcode\periodasciicode   \math_punctuation_yes_period
+   \letcharcode\semicolonasciicode\math_punctuation_nop_semicolon}
+
+\defcsname\??mathautopunctuation\v!all\endcsname
+  {\letcharcode\commaasciicode    \math_punctuation_all_comma
+   \letcharcode\periodasciicode   \math_punctuation_all_period
+   \letcharcode\semicolonasciicode\math_punctuation_nop_semicolon}
+
+\defcsname\??mathautopunctuation comma\endcsname
+  {\letcharcode\commaasciicode    \math_punctuation_yes_comma
+   \letcharcode\periodasciicode   \math_punctuation_yes_period
+   \letcharcode\semicolonasciicode\math_punctuation_nop_semicolon}
+
+\defcsname\??mathautopunctuation\v!yes\string,semicolon\endcsname
+  {\letcharcode\commaasciicode    \math_punctuation_yes_comma
+   \letcharcode\periodasciicode   \math_punctuation_yes_period
+   \letcharcode\semicolonasciicode\math_punctuation_yes_semicolon}
+
+\defcsname\??mathautopunctuation comma\string,semicolon\endcsname
+  {\letcharcode\commaasciicode    \math_punctuation_yes_comma
+   \letcharcode\periodasciicode   \math_punctuation_yes_period
+   \letcharcode\semicolonasciicode\math_punctuation_yes_semicolon}
+
+\defcsname\??mathautopunctuation\v!all\string,semicolon\endcsname
+  {\letcharcode\commaasciicode    \math_punctuation_all_comma
+   \letcharcode\periodasciicode   \math_punctuation_all_period
+   \letcharcode\semicolonasciicode\math_punctuation_all_semicolon}
 
 % \appendtoks
 %     \global\mathcode\commaasciicode    \c_math_special
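The rework above only changes how the per-value initializers are defined (plain
\defcsname with \letcharcode instead of a group with active characters); the
values themselves stay the same. Assuming the usual mathematics setup interface,
a hedged usage sketch:

\setupmathematics
  [autopunctuation={yes,semicolon}]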
diff --git a/tex/context/base/mkxl/math-map.lmt b/tex/context/base/mkxl/math-map.lmt
index 98cc59c89..0bd75d748 100644
--- a/tex/context/base/mkxl/math-map.lmt
+++ b/tex/context/base/mkxl/math-map.lmt
@@ -7,31 +7,13 @@ if not modules then modules = { } end modules ['math-map'] = {
     license   = "see context related readme files"
 }
 
--- todo: make sparse .. if self
-
---[[ldx--
-<p>Remapping mathematics alphabets.</p>
---ldx]]--
-
--- oldstyle: not really mathematics but happened to be part of
--- the mathematics fonts in cmr
---
--- persian: we will also provide mappers for other
--- scripts
-
--- todo: alphabets namespace
--- maybe: script/scriptscript dynamic,
-
--- superscripped primes get unscripted !
-
--- to be looked into once the fonts are ready (will become font
--- goodie):
---
--- (U+2202,U+1D715) : upright
--- (U+2202,U+1D715) : italic
--- (U+2202,U+1D715) : upright
---
--- plus add them to the regular vectors below so that they honor \it etc
+-- persian: we will also provide mappers for other scripts
+-- todo   : alphabets namespace
+-- maybe  : script/scriptscript dynamic,
+-- check  : (U+2202,U+1D715) : upright
+--          (U+2202,U+1D715) : italic
+--          (U+2202,U+1D715) : upright
+--          add them to the regular vectors below so that they honor \it etc
 
 local type, next = type, next
 local merged, sortedhash = table.merged, table.sortedhash
diff --git a/tex/context/base/mkxl/math-noa.lmt b/tex/context/base/mkxl/math-noa.lmt
index 4a0cb5744..f64783ed9 100644
--- a/tex/context/base/mkxl/math-noa.lmt
+++ b/tex/context/base/mkxl/math-noa.lmt
@@ -890,39 +890,43 @@ do
                         local data       = fontdata[font]
                         local characters = data.characters
                         local olddata    = characters[oldchar]
---                         local oldheight  = olddata.height or 0
---                         local olddepth   = olddata.depth or 0
-                        local template   = olddata.varianttemplate
-                        local newchar    = mathematics.big(data,template or oldchar,size,method)
-                        local newdata    = characters[newchar]
-                        local newheight  = newdata.height or 0
-                        local newdepth   = newdata.depth or 0
-                        if template then
---                             local ratio = (newheight + newdepth) / (oldheight + olddepth)
---                             setheight(pointer,ratio * oldheight)
---                             setdepth(pointer,ratio * olddepth)
-                            setheight(pointer,newheight)
-                            setdepth(pointer,newdepth)
-                            if not olddata.extensible then
-                                -- check this on bonum and antykwa
-                                setoptions(pointer,0)
-                            end
-                            if trace_fences then
---                                 report_fences("replacing %C using method %a, size %a, template %C and ratio %.3f",newchar,method,size,template,ratio)
-                                report_fences("replacing %C using method %a, size %a and template %C",newchar,method,size,template)
-                            end
-                        else
-                            -- 1 scaled point is a signal, for now
-                            if ht == 1 then
+                        if olddata then
+--                          local oldheight  = olddata.height or 0
+--                          local olddepth   = olddata.depth or 0
+                            local template   = olddata.varianttemplate
+                            local newchar    = mathematics.big(data,template or oldchar,size,method)
+                            local newdata    = characters[newchar]
+                            local newheight  = newdata.height or 0
+                            local newdepth   = newdata.depth or 0
+                            if template then
+--                              local ratio = (newheight + newdepth) / (oldheight + olddepth)
+--                              setheight(pointer,ratio * oldheight)
+--                              setdepth(pointer,ratio * olddepth)
                                 setheight(pointer,newheight)
-                            end
-                            if dp == 1 then
                                 setdepth(pointer,newdepth)
+                                if not olddata.extensible then
+                                    -- check this on bonum and antykwa
+                                    setoptions(pointer,0)
+                                end
+                                if trace_fences then
+--                                  report_fences("replacing %C using method %a, size %a, template %C and ratio %.3f",newchar,method,size,template,ratio)
+                                    report_fences("replacing %C using method %a, size %a and template %C",newchar,method,size,template)
+                                end
+                            else
+                                -- 1 scaled point is a signal, for now
+                                if ht == 1 then
+                                    setheight(pointer,newheight)
+                                end
+                                if dp == 1 then
+                                    setdepth(pointer,newdepth)
+                                end
+                                setchar(delimiter,newchar)
+                                if trace_fences then
+                                    report_fences("replacing %C by %C using method %a and size %a",oldchar,char,method,size)
+                                end
                             end
-                            setchar(delimiter,newchar)
-                            if trace_fences then
-                                report_fences("replacing %C by %C using method %a and size %a",oldchar,char,method,size)
-                            end
+                        elseif trace_fences then
+                            report_fences("not replacing %C using method %a and size %a",oldchar,method,size)
                         end
                     end
                 end
diff --git a/tex/context/base/mkxl/math-rad.mklx b/tex/context/base/mkxl/math-rad.mklx
index 863bb2128..ee91243e0 100644
--- a/tex/context/base/mkxl/math-rad.mklx
+++ b/tex/context/base/mkxl/math-rad.mklx
@@ -378,6 +378,12 @@
 \integerdef\delimitedrightanutityuc \privatecharactercode{delimited right annuity}
 \integerdef\radicalbarextenderuc    \privatecharactercode{radical bar extender}
 
+%D We now default to nice bars:
+
+\setupmathradical
+  [\c!rule=\v!symbol,
+   \c!top=\radicalbarextenderuc]
+
 \definemathradical
   [rannuity]
   [\c!left=\zerocount,
diff --git a/tex/context/base/mkxl/math-spa.lmt b/tex/context/base/mkxl/math-spa.lmt
index d2927ff58..a575b1714 100644
--- a/tex/context/base/mkxl/math-spa.lmt
+++ b/tex/context/base/mkxl/math-spa.lmt
@@ -41,6 +41,7 @@ local getnormalizedline = node.direct.getnormalizedline
 local getbox            = nuts.getbox
 local setoffsets        = nuts.setoffsets
 local addxoffset        = nuts.addxoffset
+local setattrlist       = nuts.setattrlist
 
 local nextglue          = nuts.traversers.glue
 local nextlist          = nuts.traversers.list
@@ -48,7 +49,9 @@ local nextboundary      = nuts.traversers.boundary
 local nextnode          = nuts.traversers.node
 
 local insertafter       = nuts.insertafter
+local insertbefore      = nuts.insertbefore
 local newkern           = nuts.pool.kern
+local newstrutrule      = nuts.pool.strutrule
 
 local texsetdimen       = tex.setdimen
 local texgetdimen       = tex.getdimen
@@ -68,6 +71,10 @@ local d_strc_math_first_height = texisdimen("d_strc_math_first_height")
 local d_strc_math_last_depth   = texisdimen("d_strc_math_last_depth")
 local d_strc_math_indent       = texisdimen("d_strc_math_indent")
 
+local report = logs.reporter("mathalign")
+
+local trace  = false  trackers.register("mathalign",function(v) trace = v end )
+
 local function moveon(s)
     for n, id, subtype in nextnode, getnext(s) do
         s = n
@@ -138,15 +145,20 @@ stages[1] = function(specification,stage)
                 p = getprev(p)
             end
         end
-        -- we use a hangindent so we need to treat the first one
-        local f = found[1]
-        local delta = f[2] - max
-        if delta ~= 0 then
-            insertafter(head,moveon(head),newkern(-delta))
-        end
-        for i=2,#found do
+        for i=1,#found do
             local f = found[i]
-            insertafter(head,moveon(f[3]),newkern(-f[2])) -- check head
+            local w = f[2]
+            local d = i == 1 and (max-w) or -w
+            local k = newkern(d)
+            local r = newstrutrule(0,2*65536,2*65536)
+            local s = moveon(f[3])
+            if trace then
+                report("row %i, width %p, delta %p",i,w,d)
+            end
+            setattrlist(r,head)
+            setattrlist(k,head)
+            insertbefore(head,s,r)
+            insertafter(head,r,k)
         end
     end
     texsetdimen("global",d_strc_math_indent,max)
diff --git a/tex/context/base/mkxl/math-stc.mklx b/tex/context/base/mkxl/math-stc.mklx
index fdad71978..5a701426a 100644
--- a/tex/context/base/mkxl/math-stc.mklx
+++ b/tex/context/base/mkxl/math-stc.mklx
@@ -1043,7 +1043,7 @@
 \definemathstackers [\v!medium] [\v!mathematics] [\c!hoffset=1.5\mathemwidth]
 \definemathstackers [\v!big]    [\v!mathematics] [\c!hoffset=2\mathemwidth]
 
-\definemathextensible [\v!reverse] [xrel]                ["002D]
+\definemathextensible [\v!reverse] [xrel]                ["2212] % ["002D]
 \definemathextensible [\v!reverse] [xequal]              ["003D]
 \definemathextensible [\v!reverse] [xleftarrow]          ["2190] % ["27F5]
 \definemathextensible [\v!reverse] [xrightarrow]         ["2192] % ["27F6]
@@ -1066,7 +1066,7 @@
 \definemathextensible [\v!reverse] [xrightleftharpoons]  ["21CC]
 \definemathextensible [\v!reverse] [xtriplerel]          ["2261]
 
-\definemathextensible [\v!mathematics] [mrel]                ["002D]
+\definemathextensible [\v!mathematics] [mrel]                ["2212] % ["002D]
 \definemathextensible [\v!mathematics] [mequal]              ["003D]
 \definemathextensible [\v!mathematics] [mleftarrow]          ["2190] % ["27F5]
 \definemathextensible [\v!mathematics] [mrightarrow]         ["2192] % ["27F6]
@@ -1089,7 +1089,7 @@
 \definemathextensible [\v!mathematics] [mrightleftharpoons]  ["21CC]
 \definemathextensible [\v!mathematics] [mtriplerel]          ["2261]
 
-\definemathextensible [\v!text] [trel]                ["002D]
+\definemathextensible [\v!text] [trel]                ["2212] % ["002D]
 \definemathextensible [\v!text] [tequal]              ["003D]
 \definemathextensible [\v!text] [tmapsto]             ["21A6]
 \definemathextensible [\v!text] [tleftarrow]          ["2190] % ["27F5]
@@ -1168,9 +1168,9 @@
 %D in the backend (okay, we still need to deal with some cut and paste issues but at
 %D least we now know what we deal with).
 
-\definemathoverextensible   [\v!vfenced] [overbar]       ["203E]
-\definemathunderextensible  [\v!vfenced] [underbar]              ["203E] % ["0332]
-\definemathdoubleextensible [\v!vfenced] [doublebar]     ["203E] ["203E] % ["0332]
+\definemathoverextensible   [\v!vfenced] [overbar]       ["203E]         % todo: private
+\definemathunderextensible  [\v!vfenced] [underbar]              ["203E] % todo: private
+\definemathdoubleextensible [\v!vfenced] [doublebar]     ["203E] ["203E] % todo: private
 
 \definemathoverextensible   [\v!vfenced] [overbrace]     ["23DE]
 \definemathunderextensible  [\v!vfenced] [underbrace]            ["23DF]
@@ -1186,13 +1186,13 @@
 
 %D For mathml:
 
-\definemathdoubleextensible    [\v!both]     [overbarunderbar]         ["203E] ["203E]
+\definemathdoubleextensible    [\v!both]     [overbarunderbar]         ["203E] ["203E] % todo: private
 \definemathdoubleextensible    [\v!both]     [overbraceunderbrace]     ["23DE] ["23DF]
 \definemathdoubleextensible    [\v!both]     [overparentunderparent]   ["23DC] ["23DD]
 \definemathdoubleextensible    [\v!both]     [overbracketunderbracket] ["23B4] ["23B5]
 
-\definemathovertextextensible  [\v!bothtext] [overbartext]             ["203E]
-\definemathundertextextensible [\v!bothtext] [underbartext]                    ["203E]
+\definemathovertextextensible  [\v!bothtext] [overbartext]             ["203E]         % todo: private
+\definemathundertextextensible [\v!bothtext] [underbartext]                    ["203E] % todo: private
 \definemathovertextextensible  [\v!bothtext] [overbracetext]           ["23DE]
 \definemathundertextextensible [\v!bothtext] [underbracetext]                  ["23DF]
 \definemathovertextextensible  [\v!bothtext] [overparenttext]          ["23DC]
@@ -1285,8 +1285,8 @@
 \permanent\tolerant\protected\def\defineextensiblefiller[#1]#*[#2]%
   {\frozen\instance\edefcsname#1\endcsname{\mathfiller{\number#2}}}
 
-%defineextensiblefiller [barfill]                ["203E] % yet undefined
-\defineextensiblefiller [relfill]                ["002D]
+%defineextensiblefiller [barfill]                ["203E] % todo: private
+\defineextensiblefiller [relfill]                ["2212] % ["002D]
 \defineextensiblefiller [equalfill]              ["003D]
 \defineextensiblefiller [leftarrowfill]          ["2190]
 \defineextensiblefiller [rightarrowfill]         ["2192]
diff --git a/tex/context/base/mkxl/math-twk.mkxl b/tex/context/base/mkxl/math-twk.mkxl
index 6ffb36818..6e015d3de 100644
--- a/tex/context/base/mkxl/math-twk.mkxl
+++ b/tex/context/base/mkxl/math-twk.mkxl
@@ -95,5 +95,12 @@
 \permanent\protected\def\minute{\iffontchar\font\textminute\textminute\else\mathminute\fi}
 \permanent\protected\def\second{\iffontchar\font\textsecond\textsecond\else\mathsecond\fi}
 
+% \startsetups[math:rules]
+%     \letmathfractionparameter\c!rule\v!symbol
+%     \setmathfractionparameter\c!middle{"203E}%
+%     \letmathradicalparameter \c!rule\v!symbol
+%     \setmathradicalparameter \c!top{\radicalbarextenderuc}%
+%     \setmathfenceparameter   \c!alternative{1}%
+% \stopsetups
 
 \protect
diff --git a/tex/context/base/mkxl/math-vfu.lmt b/tex/context/base/mkxl/math-vfu.lmt
index 0a2b440a1..1639517b5 100644
--- a/tex/context/base/mkxl/math-vfu.lmt
+++ b/tex/context/base/mkxl/math-vfu.lmt
@@ -83,27 +83,37 @@ nps("flat double rule left piece")
 nps("flat double rule middle piece")
 nps("flat double rule right piece")
 
+nps("minus rule left piece")
+nps("minus rule middle piece")
+nps("minus rule right piece")
+
 do
 
-    local function horibar(main,unicode,rule,left,right,normal)
+    -- this overlaps with math-act
+
+    local function horibar(main,unicode,rule,left,right,normal,force,m,l,r)
         local characters = main.characters
-        if not characters[unicode] then
+        local data       = characters[unicode]
+        if force or not data then
             local height = main.mathparameters.defaultrulethickness or 4*65536/10
-            local f_rule = rule  and formatters["M-HORIBAR-RULE-%H"](rule)
-            local p_rule = rule  and hasprivate(main,f_rule)
+            local f_rule = rule and formatters["M-HORIBAR-M-%H"](rule)
+            local p_rule = rule and hasprivate(main,f_rule)
+            local ndata  = normal and characters[normal]
             if rule and left and right and normal then
-                local ldata  = characters[left]
-                local mdata  = characters[rule]
-                local rdata  = characters[right]
-                local ndata  = characters[normal]
+                local ldata  = characters[l or left]
+                local mdata  = characters[m or rule]
+                local rdata  = characters[r or right]
                 local lwidth = ldata.width or 0
                 local mwidth = mdata.width or 0
                 local rwidth = rdata.width or 0
                 local nwidth = ndata.width or 0
                 local down   = (mdata.height / 2) - height
-                --
-                local f_left  = right and formatters["M-HORIBAR-LEFT-%H"](right)
-                local f_right = right and formatters["M-HORIBAR-RIGHT-%H"](right)
+if unicode == normal then
+    height = ndata.height
+    down   = 0
+end                --
+                local f_left  = left  and formatters["M-HORIBAR-L-%H"](left)
+                local f_right = right and formatters["M-HORIBAR-R-%H"](right)
                 local p_left  = left  and hasprivate(main,f_left)
                 local p_right = right and hasprivate(main,f_right)
                 --
@@ -116,7 +126,7 @@ do
                             push,
                             leftcommand[.025*mwidth],
                             downcommand[down],
-                            slotcommand[0][rule],
+                            slotcommand[0][m or rule],
                             pop,
                         },
                     })
@@ -130,7 +140,7 @@ do
                             push,
                             leftcommand[.025*lwidth],
                             downcommand[down],
-                            slotcommand[0][left],
+                            slotcommand[0][l or left],
                             pop,
                         },
                     })
@@ -144,48 +154,72 @@ do
                             push,
                             leftcommand[.025*rwidth],
                             downcommand[down],
-                            slotcommand[0][right],
+                            slotcommand[0][r or right],
                             pop,
                         },
                     })
                 end
-                characters[unicode] = {
-                    keepvirtual      = true,
-                    partsorientation = "horizontal",
-                    height           = height,
-                    width            = nwidth,
--- keepvirtual      = true,
-                    commands         = {
+if unicode ~= normal then
+                data = {
+                    unicode  = unicode,
+                    height   = height,
+                    width    = nwidth,
+                    commands = {
                         downcommand[down],
                         slotcommand[0][normal]
                     },
-                    parts            = {
-                        { glyph = p_left, ["end"] = 0.4*lwidth },
-                        { glyph = p_rule, extender = 1, ["start"] = mwidth, ["end"] = mwidth },
-                        { glyph = p_right, ["start"] = 0.6*rwidth },
-                    }
+                }
+                characters[unicode] = data
+end
+                data.parts = {
+                    { glyph = p_left, ["end"] = 0.4*lwidth },
+                    { glyph = p_rule, extender = 1, ["start"] = mwidth, ["end"] = mwidth },
+                    { glyph = p_right, ["start"] = 0.6*rwidth },
                 }
             else
-                local width = main.parameters.quad/4 or 4*65536
+                local width = main.parameters.quad/2 or 4*65536 -- 3
                 if not characters[p_rule] then
-                    p_rule = addprivate(main,f_rule,{
-                        height   = height,
-                        width    = width,
--- keepvirtual      = true,
-                        commands = { push, { "rule", height, width }, pop },
-                    })
+                    if unicode == normal then
+                        p_rule = addprivate(main,f_rule,{
+                            height   = ndata.height,
+                            width    = width,
+                            commands = {
+                                push,
+                                upcommand[(ndata.height - height)/2],
+                                { "rule", height, width },
+                                pop
+                            },
+                        })
+                    else
+                        p_rule = addprivate(main,f_rule,{
+                            height   = height,
+                            width    = width,
+                            commands = {
+                                push,
+                                { "rule", height, width },
+                                pop
+                            },
+                        })
+                    end
                 end
-                characters[unicode] = {
-                    height           = height,
-                    width            = nwidth,
--- keepvirtual      = true,
-                    partsorientation = "horizontal",
-                    parts            = {
-                        { glyph = p_rule },
-                        { glyph = p_rule, extender = 1, ["start"] = width/2, ["end"] = width/2 },
+if unicode ~= normal then
+                data = {
+                    unicode  = unicode,
+                    height   = height,
+                    width    = width,
+                    commands = {
+                        slotcommand[0][p_rule]
                     }
                 }
+                characters[unicode] = data
+end
+                data.parts = {
+                    { glyph = p_rule, ["start"] = width/2, ["end"] = width/2 },
+                    { glyph = p_rule, extender = 1, ["start"] = width/2, ["end"] = width/2 },
+                }
             end
+            data.keepvirtual = true -- i need to figure this out
+            data.partsorientation = "horizontal"
         end
     end
 
@@ -205,8 +239,8 @@ do
                 local nwidth = ndata.width or 0
                 local down   = (mdata.height / 2) - height
                 --
-                local f_rule  = rule  and formatters["M-ROOTBAR-RULE-%H"](rule)
-                local f_right = right and formatters["M-ROOTBAR-RIGHT-%H"](right)
+                local f_rule  = rule  and formatters["M-ROOTBAR-M-%H"](rule)
+                local f_right = right and formatters["M-ROOTBAR-R-%H"](right)
                 local p_rule  = rule  and hasprivate(main,f_rule)
                 local p_right = right and hasprivate(main,f_right)
                 --
diff --git a/tex/context/base/mkxl/meta-imp-newmath.mkxl b/tex/context/base/mkxl/meta-imp-newmath.mkxl
new file mode 100644
index 000000000..af49f82ac
--- /dev/null
+++ b/tex/context/base/mkxl/meta-imp-newmath.mkxl
@@ -0,0 +1,76 @@
+%D \module
+%D   [       file=meta-imp-newmath,
+%D        version=2023.04.01,
+%D          title=\METAPOST\ Graphics,
+%D       subtitle=New Math Symbols,
+%D         author=Mikael Sundqvist & Hans Hagen,
+%D           date=\currentdate,
+%D      copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D In this file we will collect solutions for special math symbols. When such symbols
+%D are used in publications the CMS will contact the Unicode Consortium to suggest that
+%D they get a slot, because then we have proof of usage. We also consider old obsolete
+%D symbols because they can be treated like some ancient out|-|of|-|use script and fit
+%D into the \type {ancient math script}.
+
+\startMPextensions
+    vardef math_ornament_hat(expr w,h,d,o,l) =
+        image ( path p ; p :=
+            (w/2,h + 10l) --
+            (o + w,h + o) --
+            (w/2,h + 7l) --
+            (-o,h + o) --
+            cycle ;
+            fill p randomized o ;
+            setbounds currentpicture to (-o,0) -- (w+o,0) -- (w+o,h+2o) -- (-o,h+2o) -- cycle ;
+        )
+    enddef ;
+\stopMPextensions
+
+\startuniqueMPgraphic{math:ornament:hat}
+    draw
+        math_ornament_hat(
+            OverlayWidth,
+            OverlayHeight,
+            OverlayDepth,
+            OverlayOffset,
+            OverlayLineWidth
+        )
+    withpen
+        pencircle
+            xscaled (2OverlayLineWidth)
+            yscaled (3OverlayLineWidth/4)
+            rotated 30
+    withcolor
+        OverlayLineColor ;
+%         draw boundingbox currentpicture;
+\stopuniqueMPgraphic
+
+\definemathornament [widerandomhat] [mp=math:ornament:hat]
+
+\continueifinputfile{meta-imp-newmath.mkxl}
+
+\starttext
+
+This symbol was designed for one of Mikael's students working on a thesis on
+probability. This student needed to typeset the characteristic function of a
+random variable \im {X} with density function \im {f_{X}}, and it was insisted
+that a notation other than the (wide) hat be used, as that was already used for
+something else. For this reason \tex {widerandomhat} was introduced,
+
+\startformula
+    E[\ee^{\ii tX}] = \widerandomhat{f_{X}}(t)\mtp{,}
+    E[\ee^{\ii t(X_1+X_2)}] = \widerandomhat{f_{X_1} \ast f_{X_2}}(t)\mtp{.}
+\stopformula
+
+Naturally, it is automatically scaled, just like the ordinary wide hat:
+
+\startformula
+    \widehat{a+b+c+d+e+f} \neq \widerandomhat{a+b+c+d+e+f}
+\stopformula
+
+\stoptext
diff --git a/tex/context/base/mkxl/mlib-run.lmt b/tex/context/base/mkxl/mlib-run.lmt
index 0e955818e..de5ceb1db 100644
--- a/tex/context/base/mkxl/mlib-run.lmt
+++ b/tex/context/base/mkxl/mlib-run.lmt
@@ -6,28 +6,16 @@ if not modules then modules = { } end modules ['mlib-run'] = {
     license   = "see context related readme files",
 }
 
--- cmyk       -> done, native
--- spot       -> done, but needs reworking (simpler)
--- multitone  ->
--- shade      -> partly done, todo: cm
--- figure     -> done
--- hyperlink  -> low priority, easy
-
--- new * run
--- or
--- new * execute^1 * finish
-
--- a*[b,c] == b + a * (c-b)
-
---[[ldx--
-<p>The directional helpers and pen analysis are more or less translated from the
-<l n='c'/> code. It really helps that Taco know that source so well. Taco and I spent
-quite some time on speeding up the <l n='lua'/> and <l n='c'/> code. There is not
-much to gain, especially if one keeps in mind that when integrated in <l n='tex'/>
-only a part of the time is spent in <l n='metapost'/>. Of course an integrated
-approach is way faster than an external <l n='metapost'/> and processing time
-nears zero.</p>
---ldx]]--
+-- The directional helpers and pen analysis are more or less translated from the C
+-- code. In LuaTeX we spent quite some time on speeding up the Lua interface as well
+-- as the C code. There is not much to gain, especially if one keeps in mind that
+-- when integrated in TeX only a part of the time is spent in MetaPost. Of course an
+-- integrated approach is way faster than an external MetaPost and processing time
+-- nears zero.
+--
+-- In LuaMetaTeX the MetaPost core has been cleaned up a bit and as a result
+-- processing in double mode is now faster than in scaled mode. There are also extra
+-- features and interfaces, so the MkIV and MkXL (LMTX) implementations differ!
 
 local type, tostring, tonumber, next = type, tostring, tonumber, next
 local find, striplines = string.find, utilities.strings.striplines
diff --git a/tex/context/base/mkxl/node-ini.lmt b/tex/context/base/mkxl/node-ini.lmt
index f1b9bb452..38f55c160 100644
--- a/tex/context/base/mkxl/node-ini.lmt
+++ b/tex/context/base/mkxl/node-ini.lmt
@@ -6,19 +6,13 @@ if not modules then modules = { } end modules ['node-ini'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Most of the code that had accumulated here is now separated in modules.</p>
---ldx]]--
-
 local next, type, tostring = next, type, tostring
 local gsub = string.gsub
 local concat, remove = table.concat, table.remove
 local sortedhash, sortedkeys, swapped = table.sortedhash, table.sortedkeys, table.swapped
 
---[[ldx--
-<p>Access to nodes is what gives <l n='luatex'/> its power. Here we implement a
-few helper functions. These functions are rather optimized.</p>
---ldx]]--
+-- Access to nodes is what gives LuaTeX its power. Here we implement a few helper
+-- functions. These functions are rather optimized.
 
 nodes                = nodes or { }
 local nodes          = nodes
diff --git a/tex/context/base/mkxl/node-res.lmt b/tex/context/base/mkxl/node-res.lmt
index 6fed08b63..2d2c31965 100644
--- a/tex/context/base/mkxl/node-res.lmt
+++ b/tex/context/base/mkxl/node-res.lmt
@@ -10,11 +10,6 @@ local type, next, rawset = type, next, rawset
 local gmatch, format = string.gmatch, string.format
 local round = math.round
 
---[[ldx--
-<p>The next function is not that much needed but in <l n='context'/> we use
-for debugging <l n='luatex'/> node management.</p>
---ldx]]--
-
 local nodes, node = nodes, node
 
 local report_nodes   = logs.reporter("nodes","housekeeping")
diff --git a/tex/context/base/mkxl/node-tra.lmt b/tex/context/base/mkxl/node-tra.lmt
index 1ef1bb8ad..fe212f787 100644
--- a/tex/context/base/mkxl/node-tra.lmt
+++ b/tex/context/base/mkxl/node-tra.lmt
@@ -6,10 +6,8 @@ if not modules then modules = { } end modules ['node-tra'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>This is rather experimental. We need more control and some of this
-might become a runtime module instead. This module will be cleaned up!</p>
---ldx]]--
+-- Some of the code here might become a runtime module instead. This old module will
+-- be cleaned up anyway!
 
 local next = next
 local utfchar = utf.char
diff --git a/tex/context/base/mkxl/pack-obj.lmt b/tex/context/base/mkxl/pack-obj.lmt
index 1e22515b9..a18f5e7e7 100644
--- a/tex/context/base/mkxl/pack-obj.lmt
+++ b/tex/context/base/mkxl/pack-obj.lmt
@@ -6,10 +6,8 @@ if not modules then modules = { } end modules ['pack-obj'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>We save object references in the main utility table. jobobjects are
-reusable components.</p>
---ldx]]--
+-- We save object references in the main utility table; job objects are reusable
+-- components.
 
 local context         = context
 local codeinjections  = backends.codeinjections
diff --git a/tex/context/base/mkxl/pack-rul.lmt b/tex/context/base/mkxl/pack-rul.lmt
index 12d131c88..62a904901 100644
--- a/tex/context/base/mkxl/pack-rul.lmt
+++ b/tex/context/base/mkxl/pack-rul.lmt
@@ -7,10 +7,6 @@ if not modules then modules = { } end modules ['pack-rul'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>An explanation is given in the history document <t>mk</t>.</p>
---ldx]]--
-
 -- we need to be careful with display math as it uses shifts
 
 -- \framed[align={lohi,middle}]{$x$}
diff --git a/tex/context/base/mkxl/publ-ini.mkxl b/tex/context/base/mkxl/publ-ini.mkxl
index b75a933ad..802768a8c 100644
--- a/tex/context/base/mkxl/publ-ini.mkxl
+++ b/tex/context/base/mkxl/publ-ini.mkxl
@@ -342,7 +342,7 @@
 \newtoks\t_btx_cmd
 \newbox \b_btx_cmd
 
-\t_btx_cmd{\global\setbox\b_btx_cmd\hpack{\clf_btxcmdstring}}
+\t_btx_cmd{\global\setbox\b_btx_cmd\hbox{\clf_btxcmdstring}} % no \hpack, otherwise prerolling doesn't work
 
 \aliased\let\btxcmd\btxcommand
 
diff --git a/tex/context/base/mkxl/regi-ini.lmt b/tex/context/base/mkxl/regi-ini.lmt
index c0cd4f1c8..efacd5128 100644
--- a/tex/context/base/mkxl/regi-ini.lmt
+++ b/tex/context/base/mkxl/regi-ini.lmt
@@ -6,11 +6,8 @@ if not modules then modules = { } end modules ['regi-ini'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Regimes take care of converting the input characters into
-<l n='utf'/> sequences. The conversion tables are loaded at
-runtime.</p>
---ldx]]--
+-- Regimes take care of converting the input characters into UTF sequences. The
+-- conversion tables are loaded at runtime.
 
 local tostring = tostring
 local utfchar = utf.char
diff --git a/tex/context/base/mkxl/scrn-wid.lmt b/tex/context/base/mkxl/scrn-wid.lmt
index caa09adbd..f2112aa11 100644
--- a/tex/context/base/mkxl/scrn-wid.lmt
+++ b/tex/context/base/mkxl/scrn-wid.lmt
@@ -42,8 +42,6 @@ interactions.linkedlists = linkedlists
 
 local texsetbox          = tex.setbox
 
-local jobpasses          = job.passes
-
 local texgetcount        = tex.getcount
 
 local codeinjections     = backends.codeinjections
@@ -277,7 +275,24 @@ implement {
     }
 }
 
--- Linkedlists (only a context interface)
+-- Linkedlists (only a context interface) .. untested, just adapted from old code.
+
+local collected = allocate()
+local tobesaved = allocate()
+
+local linkedlists = {
+    collected = collected,
+    tobesaved = tobesaved,
+}
+
+job.linkedlists = linkedlists
+
+local function initializer()
+    collected = linkedlists.collected
+    tobesaved = linkedlists.tobesaved
+end
+
+job.register("job.linkedlists.collected", tobesaved, initializer, nil)
 
 implement {
     name      = "definelinkedlist",
@@ -291,10 +306,12 @@ implement {
     name      = "enhancelinkedlist",
     arguments = { "string", "integer" },
     actions   = function(tag,n)
-                    local ll = jobpasses.gettobesaved(tag)
-                    if ll then
-                        ll[n] = texgetcount("realpageno")
+                    local linkedlist = tobesaved[tag]
+                    if not linkedlist then
+                        linkedlist     = { }
+                        tobesaved[tag] = linkedlist
                     end
+                    linkedlist[n] = texgetcount("realpageno")
                 end
 }
 
@@ -302,15 +319,18 @@ implement {
     name      = "addlinklistelement",
     arguments = "string",
     actions   = function(tag)
-                    local tobesaved   = jobpasses.gettobesaved(tag)
-                    local collected   = jobpasses.getcollected(tag) or { }
+                    local tobesaved   = tobesaved[tag] or { }
+                    local collected   = collected[tag] or { }
                     local currentlink = #tobesaved + 1
                     local noflinks    = #collected
-                    tobesaved[currentlink] = 0
+                    --
+                    tobesaved[currentlink] = 0 -- needs checking
+                    --
                     local f = collected[1] or 0
                     local l = collected[noflinks] or 0
                     local p = collected[currentlink-1] or f
                     local n = collected[currentlink+1] or l
+                    --
                     context.setlinkedlistproperties(currentlink,noflinks,f,p,n,l)
                  -- context.ctxlatelua(function() commands.enhancelinkedlist(tag,currentlink) end)
                 end
diff --git a/tex/context/base/mkxl/spac-pag.mkxl b/tex/context/base/mkxl/spac-pag.mkxl
index d61ddcbe6..2e3e1bc00 100644
--- a/tex/context/base/mkxl/spac-pag.mkxl
+++ b/tex/context/base/mkxl/spac-pag.mkxl
@@ -16,7 +16,6 @@
 \unprotect
 
 \newif      \ifpagestatemismatch
-\newinteger \realpagestateno
 \newconstant\frozenpagestate
 
 \permanent\protected\def\dotrackpagestate#1#2%
diff --git a/tex/context/base/mkxl/strc-itm.lmt b/tex/context/base/mkxl/strc-itm.lmt
index f9153c98e..4ee084ca3 100644
--- a/tex/context/base/mkxl/strc-itm.lmt
+++ b/tex/context/base/mkxl/strc-itm.lmt
@@ -6,20 +6,28 @@ if not modules then modules = { } end modules ['strc-itm'] = {
     license   = "see context related readme files"
 }
 
-local structures  = structures
-local itemgroups  = structures.itemgroups
-local jobpasses   = job.passes
-
+local allocate    = utilities.storage.allocate
 local implement   = interfaces.implement
 
-local setvariable = jobpasses.save
-local getvariable = jobpasses.getfield
-
 local texsetcount = tex.setcount
 local texsetdimen = tex.setdimen
 
-local f_stamp     = string.formatters["itemgroup:%s:%s"]
-local counts      = table.setmetatableindex("number")
+local itemgroups = structures.itemgroups
+
+local collected = allocate()
+local tobesaved = allocate()
+
+itemgroups.collected = collected
+itemgroups.tobesaved = tobesaved
+
+local function initializer()
+    collected = itemgroups.collected
+    tobesaved = itemgroups.tobesaved
+end
+
+if job then
+    job.register("structures.itemgroups.collected", tobesaved, initializer)
+end
 
 local c_strc_itemgroups_max_items = tex.iscount("c_strc_itemgroups_max_items")
 local d_strc_itemgroups_max_width = tex.isdimen("d_strc_itemgroups_max_width")
@@ -28,6 +36,8 @@ local d_strc_itemgroups_max_width = tex.isdimen("d_strc_itemgroups_max_width")
 -- an itemgroup which in turn makes for less passes when one itemgroup
 -- entry is added or removed.
 
+local counts = table.setmetatableindex("number")
+
 local trialtypesetting = context.trialtypesetting
 
 local function analyzeitemgroup(name,level)
@@ -36,16 +46,37 @@ local function analyzeitemgroup(name,level)
         n = n + 1
         counts[name] = n
     end
-    local stamp = f_stamp(name,n)
-    texsetcount(c_strc_itemgroups_max_items,getvariable(stamp,level,1,0))
-    texsetdimen(d_strc_itemgroups_max_width,getvariable(stamp,level,2,0))
+    local items = 0
+    local width = 0
+    local itemgroup = collected[name]
+    if itemgroup then
+        local entry = itemgroup[n]
+        if entry then
+            local l = entry[level]
+            if l then
+                items = l[1] or 0
+                width = l[2] or 0
+            end
+        end
+    end
+    texsetcount(c_strc_itemgroups_max_items,items)
+    texsetdimen(d_strc_itemgroups_max_width,width)
 end
 
 local function registeritemgroup(name,level,nofitems,maxwidth)
     local n = counts[name]
     if not trialtypesetting() then
-        -- no trialtypsetting
-        setvariable(f_stamp(name,n), { nofitems, maxwidth }, level)
+        local itemgroup = tobesaved[name]
+        if not itemgroup then
+            itemgroup       = { }
+            tobesaved[name] = itemgroup
+        end
+        local entry = itemgroup[n]
+        if not entry then
+            entry        = { }
+            itemgroup[n] = entry
+        end
+        entry[level] = { nofitems, maxwidth }
     elseif level == 1 then
         counts[name] = n - 1
     end
diff --git a/tex/context/base/mkxl/strc-lst.lmt b/tex/context/base/mkxl/strc-lst.lmt
index b60b75208..d54129f29 100644
--- a/tex/context/base/mkxl/strc-lst.lmt
+++ b/tex/context/base/mkxl/strc-lst.lmt
@@ -1571,7 +1571,7 @@ end
 
 function lists.integrate(utilitydata)
     local filename = utilitydata.comment.file
-    if filename then
+    if filename and filename ~= environment.jobname then
         local structures = utilitydata.structures
         if structures then
             local lists = structures.lists.collected or { }
diff --git a/tex/context/base/mkxl/strc-ref.lmt b/tex/context/base/mkxl/strc-ref.lmt
index 26b189475..945364b18 100644
--- a/tex/context/base/mkxl/strc-ref.lmt
+++ b/tex/context/base/mkxl/strc-ref.lmt
@@ -561,7 +561,7 @@ end
 
 function references.integrate(utilitydata)
     local filename = utilitydata.comment.file
-    if filename then
+    if filename and filename ~= environment.jobname then
         -- lists are already internalized
         local structures = utilitydata.structures
         if structures then
diff --git a/tex/context/base/mkxl/strc-reg.lmt b/tex/context/base/mkxl/strc-reg.lmt
index b66b22921..27d7e2586 100644
--- a/tex/context/base/mkxl/strc-reg.lmt
+++ b/tex/context/base/mkxl/strc-reg.lmt
@@ -1045,6 +1045,7 @@ function registers.use(tag,filename,class,prefix)
         filename = filename,
         data     = job.loadother(filename),
         prefix   = prefix or class,
+        label    = prefix or class,
     }
 end
 
@@ -1054,13 +1055,43 @@ implement {
     actions   = registers.use,
 }
 
+-- function registers.use(tag,specification)
+--     local class    = specification.class
+--     local filename = specification.filename
+--     local prefix   = specification.prefix or class
+--     local label    = specification.label  or prefix
+--     if class and filename then
+--         used[tag] = {
+--             class      = class,
+--             filename   = filename,
+--             data       = job.loadother(filename),
+--             prefix     = prefix,
+--             label      = label,
+--         }
+--     end
+-- end
+
+-- implement {
+--     name      = "useregister",
+--     actions   = registers.use,
+--     arguments = {
+--         "string",
+--         {
+--             { "filename" },
+--             { "class" },
+--             { "prefix" },
+--             { "label" },
+--         },
+--     }
+-- }
+
 implement {
-    name      = "registerprefix",
+    name      = "registerlabel",
     arguments = "string",
     actions   = function(tag)
         local u = used[tag]
         if u then
-            context(u.prefix)
+            context(u.label)
         end
     end
 }
@@ -1075,7 +1106,13 @@ local function analyzeregister(class,options)
         local list       = utilities.parsers.settings_to_array(class)
         local entries    = { }
         local nofentries = 0
-        local metadata   = false
+        local multiple   = false
+        for i=1,#list do
+            if used[list[i]] then
+                multiple = true
+                break
+            end
+        end
         for i=1,#list do
             local l = list[i]
             local u = used[l]
@@ -1089,9 +1126,14 @@ local function analyzeregister(class,options)
             end
             if d then
                 local e = d.entries
-                local u = u and { u.prefix } or nil
+--                 local u = u and { u.prefix } or nil
+local u = multiple and { string.formatters["%03i"](i) } or nil -- maybe prefix but then how about main
                 for i=1,#e do
                     local ei = e[i]
+if multiple and ei.metadata.kind == "see" then
+    -- skip see, can become an option
+else
+
                     nofentries = nofentries + 1
                     entries[nofentries] = ei
                     if u then
@@ -1099,6 +1141,7 @@ local function analyzeregister(class,options)
                         eil[#eil+1] = u
                         ei.external = l -- this is the (current) abstract tag, used for prefix
                     end
+end
                 end
                 if not metadata then
                     metadata = d.metadata
@@ -1107,9 +1150,11 @@ local function analyzeregister(class,options)
         end
         data = {
             metadata = metadata or { },
+            multiple = multiple,
             entries  = entries,
         }
         collected[class] = data
+        options.multiple = multiple
     end
     if data and data.entries then
         options = options or { }
@@ -1322,7 +1367,9 @@ function registers.flush(data,options,prefixspec,pagespec)
                  -- report_registers("invalid see entry in register %a, reference %a",entry.metadata.name,list[1][1])
                 end
             end
-            if entry.external then
+-- move up ?
+--             if entry.external then
+            if options.multiple or entry.external then
                 local list = entry.list
                 list[#list] = nil
             end
@@ -1741,7 +1788,7 @@ interfaces.implement {
 
 function registers.integrate(utilitydata)
     local filename = utilitydata.comment.file
-    if filename then
+    if filename and filename ~= environment.jobname then
         local structures = utilitydata.structures
         if structures then
             local registers = structures.registers.collected or { }
diff --git a/tex/context/base/mkxl/strc-reg.mkxl b/tex/context/base/mkxl/strc-reg.mkxl
index afe3d27a0..464ac4eb1 100644
--- a/tex/context/base/mkxl/strc-reg.mkxl
+++ b/tex/context/base/mkxl/strc-reg.mkxl
@@ -792,6 +792,25 @@
   {\doifelsefiledefined{#1}{}{\usefile[#1][#2]}%
    \clf_useregister{#1}{#2}{#3}{#4}}
 
+% \permanent\protected\tolerant\def\useregister[#1]#*[#2]#*[#3]#*[#4]%  tag file class prefix
+%   {\begingroup
+%    \doifelsefiledefined{#1}{}{\usefile[#1][#2]}%
+%    \ifhastok={#4}%
+%      \getdummyparameters[\c!prefix=#1,\c!label=#1,#4]%
+%    \else
+%      \getdummyparameters[\c!prefix=#4,\c!label=#4]%
+%    \fi
+%    \clf_useregister
+%      {#1}
+%      {
+%         filename {#2}
+%         class    {#3}
+%         prefix   {\dummyparameter\c!prefix}
+%         label    {\dummyparameter\c!label}
+%      }
+%    \relax
+%    \endgroup}
+
 %D Character rendering (sections):
 
 \installcorenamespace{registerindicator}
@@ -1123,15 +1142,16 @@
 % todo: adapt \strc_references_goto_internal to take an extra argument, the ref
 
 \permanent\protected\def\withregisterpagecommand#1#2#3#4% #1:processor #2:internal #3:realpage #4:page
-  {\ifcase#3\relax
-     {\tt [entry\space not\space flushed]}%
+  {\begingroup
+   \ifcase#3\relax
+     \tt [entry\space not\space flushed]%
    \else
      \def\currentregisterpageindex{#2}%
      \def\currentregisterrealpage{#3}%
      \ifchknum\currentregisterpageindex\or
        \lettonothing\currentregisterpageprefix
      \else
-       \def\currentregisterpageprefix{\clf_registerprefix{\currentregisterpageindex}}%
+       \def\currentregisterpageprefix{\clf_registerlabel{\currentregisterpageindex}}%
      \fi
      \iflocation
        \ifempty\currentregisterpageprefix
@@ -1144,7 +1164,8 @@
        \setlocationattributes
      \fi
      \applyprocessor{#1}{\currentregisterpageprefix\registerparameter\c!pagecommand{#4}}%
-   \fi}
+   \fi
+   \endgroup}
 
 \lettonothing\m_current_register
 
@@ -1281,7 +1302,7 @@
    \ifchknum\currentregisterseeindex\or
      \lettonothing\currentregisterpageprefix
    \else
-     \def\currentregisterpageprefix{\clf_registerprefix{\currentregisterseeindex}}%
+     \def\currentregisterpageprefix{\clf_registerlabel{\currentregisterseeindex}}%
    \fi
    \iflocation
      \ifempty\currentregisterpageprefix
diff --git a/tex/context/base/mkxl/tabl-ntb.mkxl b/tex/context/base/mkxl/tabl-ntb.mkxl
index 6e95512cd..b82dcb585 100644
--- a/tex/context/base/mkxl/tabl-ntb.mkxl
+++ b/tex/context/base/mkxl/tabl-ntb.mkxl
@@ -1634,13 +1634,22 @@
 
 % enabled per 2018-02-22
 
-\def\tabl_ntb_table_get_max_width_step
-  {\advanceby\scratchdimen\tabl_ntb_get_wid\fastloopindex
-   \advanceby\scratchdimen\tabl_ntb_get_dis\fastloopindex}
+% \def\tabl_ntb_table_get_max_width_step
+%   {\advanceby\scratchdimen\tabl_ntb_get_wid\fastloopindex
+%    \advanceby\scratchdimen\tabl_ntb_get_dis\fastloopindex}
+%
+% \def\tabl_ntb_table_get_max_width
+%   {\scratchdimen\zeropoint
+%    \dofastloopcs\c_tabl_ntb_maximum_col\tabl_ntb_table_get_max_width_step
+%    \ifdim\scratchdimen<\wd\scratchbox\relax
+%      \scratchdimen\wd\scratchbox\relax
+%    \fi}
 
 \def\tabl_ntb_table_get_max_width
   {\scratchdimen\zeropoint
-   \dofastloopcs\c_tabl_ntb_maximum_col\tabl_ntb_table_get_max_width_step
+   \localcontrolledloop\zerocount\c_tabl_ntb_maximum_col\plusone
+     {\advanceby\scratchdimen\tabl_ntb_get_wid\currentloopiterator
+      \advanceby\scratchdimen\tabl_ntb_get_dis\currentloopiterator}%
    \ifdim\scratchdimen<\wd\scratchbox\relax
      \scratchdimen\wd\scratchbox\relax
    \fi}
diff --git a/tex/context/base/mkxl/tabl-tbl.mkxl b/tex/context/base/mkxl/tabl-tbl.mkxl
index d353074d5..6b5e38f3a 100644
--- a/tex/context/base/mkxl/tabl-tbl.mkxl
+++ b/tex/context/base/mkxl/tabl-tbl.mkxl
@@ -1608,13 +1608,19 @@
      \tabl_tabulate_vrule_reset_indeed
    \fi}
 
+% \def\tabl_tabulate_vrule_reset_indeed
+%   {\gletcsname\??tabulatevrule0\endcsname\undefined
+%    \dofastloopcs\c_tabl_tabulate_max_vrulecolumn\tabl_tabulate_vrule_reset_step
+%    \global\c_tabl_tabulate_max_vrulecolumn\zerocount}
+%
+% \def\tabl_tabulate_vrule_reset_step % undefined or relax
+%   {\gletcsname\??tabulatevrule\the\fastloopindex\endcsname\undefined}
+
 \def\tabl_tabulate_vrule_reset_indeed
-  {\dofastloopcs\c_tabl_tabulate_max_vrulecolumn\tabl_tabulate_vrule_reset_step
+  {\localcontrolledloop\zerocount\c_tabl_tabulate_max_vrulecolumn\plusone % start at 0
+     {\gletcsname\??tabulatevrule\the\currentloopiterator\endcsname\undefined}%
    \global\c_tabl_tabulate_max_vrulecolumn\zerocount}
 
-\def\tabl_tabulate_vrule_reset_step % undefined or relax
-  {\gletcsname\??tabulatevrule\the\fastloopindex\endcsname\undefined}
-
 \appendtoks
     \tabl_tabulate_vrule_reset
 \to \t_tabl_tabulate_every_after_row
@@ -1798,11 +1804,16 @@
      \tabl_tabulate_color_reset_indeed
    \fi}
 
-\def\tabl_tabulate_color_reset_indeed
-  {\dofastloopcs\c_tabl_tabulate_max_colorcolumn\tabl_tabulate_color_reset_step}
+% \def\tabl_tabulate_color_reset_indeed
+%   {\dofastloopcs\c_tabl_tabulate_max_colorcolumn\tabl_tabulate_color_reset_step}
+%
+% \def\tabl_tabulate_color_reset_step % undefined or empty?
+%   {\gletcsname\??tabulatecolor\number\fastloopindex\endcsname\undefined}
 
-\def\tabl_tabulate_color_reset_step % undefined or empty?
-  {\gletcsname\??tabulatecolor\number\fastloopindex\endcsname\undefined}
+\def\tabl_tabulate_color_reset_indeed
+  {\localcontrolledloop\zerocount\c_tabl_tabulate_max_colorcolumn\plusone % start at 1
+     {\gletcsname\??tabulatecolor\the\currentloopiterator\endcsname\undefined}%
+   \global\c_tabl_tabulate_max_colorcolumn\zerocount} % why not like vrule?
 
 \appendtoks
     \tabl_tabulate_color_reset
@@ -2201,34 +2212,38 @@
 %   {\glettonothing\tabl_tabulate_flush_collected_indeed
 %    \global\c_tabl_tabulate_column\zerocount
 %    \tabl_tabulate_pbreak_check
+%    \global\setfalse\c_tabl_tabulate_split_done % new 27/12/2022
 %    \dofastloopcs\c_tabl_tabulate_columns\tabl_tabulate_flush_second_step
+%    \ifconditional\c_tabl_tabulate_split_done\else
+%      \glet\tabl_tabulate_tm\s!reset % new 27/12/2022
+%    \fi
 %    \global\settrue\c_tabl_tabulate_firstflushed}
-%
+
 % \protected\def\tabl_tabulate_flush_second_step
-%   {\ifvoid\b_tabl_tabulate_current\fastloopindex\else
+%   {\ifvoid\b_tabl_tabulate_current\fastloopindex
+%    \else
 %      \gdef\tabl_tabulate_flush_collected_indeed{\the\t_tabl_tabulate_dummy}%
+%      \ifvoid\b_tabl_tabulate_current\fastloopindex \else
+%        \global\settrue\c_tabl_tabulate_split_done % new 27/12/2022
+%      \fi
 %    \fi}
-%
-% \def\tabl_tabulate_flush_second
-%   {\noalign{\tabl_tabulate_flush_second_indeed}%
-%    \tabl_tabulate_flush_collected_indeed}
 
 \protected\def\tabl_tabulate_flush_second_indeed
   {\glettonothing\tabl_tabulate_flush_collected_indeed
    \global\c_tabl_tabulate_column\zerocount
    \tabl_tabulate_pbreak_check
    \global\setfalse\c_tabl_tabulate_split_done % new 27/12/2022
-   \dofastloopcs\c_tabl_tabulate_columns\tabl_tabulate_flush_second_step
+   \localcontrolledloop\plusone\c_tabl_tabulate_columns\plusone{\tabl_tabulate_flush_second_step}%
    \ifconditional\c_tabl_tabulate_split_done\else
      \glet\tabl_tabulate_tm\s!reset % new 27/12/2022
    \fi
    \global\settrue\c_tabl_tabulate_firstflushed}
 
 \protected\def\tabl_tabulate_flush_second_step
-  {\ifvoid\b_tabl_tabulate_current\fastloopindex
+  {\ifvoid\b_tabl_tabulate_current\currentloopiterator
    \else
      \gdef\tabl_tabulate_flush_collected_indeed{\the\t_tabl_tabulate_dummy}%
-     \ifvoid\b_tabl_tabulate_current\fastloopindex \else
+     \ifvoid\b_tabl_tabulate_current\currentloopiterator \else
        \global\settrue\c_tabl_tabulate_split_done % new 27/12/2022
      \fi
    \fi}
@@ -3262,7 +3277,7 @@
 %\letcsname\??tabulatespana r\endcsname\relax
 
 \noaligned\tolerant\def\tabl_tabulate_NS[#1]#*[#2]%
-  {\NC\loopcs{#1}\tabl_tabulate_span
+  {\NC\loopcs{#1}\tabl_tabulate_span % use localloop and quit
    \gdef\tabl_tabulate_kooh
      {\begincsname\??tabulatespana#2\endcsname
       \glet\tabl_tabulate_kooh\relax}%
diff --git a/tex/context/base/mkxl/trac-vis.lmt b/tex/context/base/mkxl/trac-vis.lmt
index dddb4799d..c9b68b407 100644
--- a/tex/context/base/mkxl/trac-vis.lmt
+++ b/tex/context/base/mkxl/trac-vis.lmt
@@ -1946,7 +1946,7 @@ do
                     head, current = ruledkern(head,current,vertical)
                 end
             end
-            goto next;
+            goto next
             ::list::
             if id == hlist_code then
                 local content = getlist(current)
diff --git a/tex/context/base/mkxl/typo-cln.lmt b/tex/context/base/mkxl/typo-cln.lmt
new file mode 100644
index 000000000..469859162
--- /dev/null
+++ b/tex/context/base/mkxl/typo-cln.lmt
@@ -0,0 +1,109 @@
+if not modules then modules = { } end modules ['typo-cln'] = {
+    version   = 1.001,
+    comment   = "companion to typo-cln.mkiv",
+    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+    copyright = "PRAGMA ADE / ConTeXt Development Team",
+    license   = "see context related readme files"
+}
+
+-- This quick and dirty hack took less time than listening to a CD (In
+-- this case Dream Theater's Octavarium). Of course extensions will take
+-- more time.
+
+-- This feature is probably never used so we can get rid of it.
+
+local tonumber = tonumber
+local utfbyte = utf.byte
+
+local trace_cleaners = false  trackers.register("typesetters.cleaners",         function(v) trace_cleaners = v end)
+local trace_autocase = false  trackers.register("typesetters.cleaners.autocase",function(v) trace_autocase = v end)
+
+local report_cleaners = logs.reporter("nodes","cleaners")
+local report_autocase = logs.reporter("nodes","autocase")
+
+typesetters.cleaners  = typesetters.cleaners or { }
+local cleaners        = typesetters.cleaners
+
+local variables       = interfaces.variables
+
+local nodecodes       = nodes.nodecodes
+
+local enableaction    = nodes.tasks.enableaction
+
+local texsetattribute = tex.setattribute
+
+local nuts            = nodes.nuts
+
+local getattr         = nuts.getattr
+local setattr         = nuts.setattr
+
+local setchar         = nuts.setchar
+
+local nextglyph       = nuts.traversers.glyph
+
+local unsetvalue      = attributes.unsetvalue
+
+local glyph_code      = nodecodes.glyph
+local uccodes         = characters.uccodes
+
+local a_cleaner       = attributes.private("cleaner")
+
+local resetter = { -- this will become an entry in char-def
+    [utfbyte(".")] = true
+}
+
+-- Contrary to the casing code we need to keep track of a state.
+-- We could extend the casing code with a status tracker but on
+-- the other hand we might want to apply casing afterwards. So,
+-- cleaning comes first.
+
+function cleaners.handler(head)
+    local inline = false
+    for n, char, font in nextglyph, head do
+        if resetter[char] then
+            inline = false
+        elseif not inline then
+            local a = getattr(n,a_cleaner)
+            if a == 1 then -- currently only one cleaner so no need to be fancy
+                local upper = uccodes[char]
+                if type(upper) == "table" then
+                    -- some day, not much chance that \SS ends up here
+                else
+                    setchar(n,upper)
+                    if trace_autocase then
+                        report_autocase("")
+                    end
+                end
+            end
+            inline = true
+        end
+    end
+    return head
+end
+
+-- see typo-cap for a more advanced settings handler .. not needed now
+
+local enabled = false
+
+function cleaners.set(n)
+    if n == variables.reset or not tonumber(n) or n == 0 then
+        texsetattribute(a_cleaner,unsetvalue)
+    else
+        if not enabled then
+            enableaction("processors","typesetters.cleaners.handler")
+            if trace_cleaners then
+                report_cleaners("enabling cleaners")
+            end
+            enabled = true
+        end
+        texsetattribute(a_cleaner,tonumber(n))
+    end
+end
+
+-- interface
+
+interfaces.implement {
+    name      = "setcharactercleaning",
+    actions   = cleaners.set,
+    arguments = "string"
+}
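The handler above is a small state machine: a character listed in resetter (currently only the period) clears the inline flag, and the first glyph that follows with the cleaner attribute set to 1 is replaced by its uccodes uppercase. Below is a plain-string simulation of that logic, with nodes, fonts and attributes left out; it is an illustration only, not part of the module:

    local resetter = { ["."] = true }

    local function simulate(s)
        local out, inline = { }, false
        for c in s:gmatch(".") do
            if resetter[c] then
                inline = false              -- end of sentence: allow cleaning again
            elseif c:match("%a") then       -- stand-in for "is a glyph node"
                if not inline then
                    c = c:upper()           -- first glyph after a resetter
                end
                inline = true
            end
            out[#out+1] = c
        end
        return table.concat(out)
    end

    print(simulate("this is a test. another sentence."))
    -- This is a test. Another sentence.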
diff --git a/tex/context/base/mkxl/typo-cln.mkxl b/tex/context/base/mkxl/typo-cln.mkxl
index 84fc1d235..fba9d4ab8 100644
--- a/tex/context/base/mkxl/typo-cln.mkxl
+++ b/tex/context/base/mkxl/typo-cln.mkxl
@@ -15,7 +15,7 @@
 
 \unprotect
 
-\registerctxluafile{typo-cln}{}
+\registerctxluafile{typo-cln}{autosuffix}
 
 \definesystemattribute[cleaner][public]
 
diff --git a/tex/context/base/mkxl/typo-dha.lmt b/tex/context/base/mkxl/typo-dha.lmt
new file mode 100644
index 000000000..e1a6662c4
--- /dev/null
+++ b/tex/context/base/mkxl/typo-dha.lmt
@@ -0,0 +1,481 @@
+if not modules then modules = { } end modules ['typo-dha'] = {
+    version   = 1.001,
+    comment   = "companion to typo-dir.mkiv",
+    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+    copyright = "PRAGMA ADE / ConTeXt Development Team",
+    license   = "see context related readme files"
+}
+
+-- Some analysis by Idris:
+--
+-- 1. Assuming the reading- vs word-order distinction (bidi-char types) is governing;
+-- 2. Assuming that 'ARAB' represents an actual arabic string in raw input order, not word-order;
+-- 3. Assuming that 'BARA' represents the correct RL word order;
+--
+-- Then we have, with input: LATIN ARAB
+--
+-- \textdirection 0 LATIN ARAB => LATIN BARA
+-- \textdirection 1 LATIN ARAB => LATIN BARA
+-- \textdirection 0 LRO LATIN ARAB => LATIN ARAB
+-- \textdirection 1 LRO LATIN ARAB => LATIN ARAB
+-- \textdirection 0 RLO LATIN ARAB => NITAL ARAB
+-- \textdirection 1 RLO LATIN ARAB => NITAL ARAB
+
+-- elseif d == "es"  then -- European Number Separator
+-- elseif d == "et"  then -- European Number Terminator
+-- elseif d == "cs"  then -- Common Number Separator
+-- elseif d == "nsm" then -- Non-Spacing Mark
+-- elseif d == "bn"  then -- Boundary Neutral
+-- elseif d == "b"   then -- Paragraph Separator
+-- elseif d == "s"   then -- Segment Separator
+-- elseif d == "ws"  then -- Whitespace
+-- elseif d == "on"  then -- Other Neutrals
+
+-- todo  : use new dir functions
+-- todo  : make faster
+-- todo  : move dir info into nodes
+-- todo  : swappable tables and floats i.e. start-end overloads (probably loop in builders)
+
+-- I removed the original tracing code and now use the colorful one. If I ever want to change
+-- something I will just inject prints for tracing.
+
+local nodes, node = nodes, node
+
+local trace_directions   = false  trackers.register("typesetters.directions", function(v) trace_directions = v end)
+
+local report_directions  = logs.reporter("typesetting","text directions")
+
+local nuts               = nodes.nuts
+
+local getnext            = nuts.getnext
+local getprev            = nuts.getprev
+local getchar            = nuts.getchar
+local getid              = nuts.getid
+local getsubtype         = nuts.getsubtype
+local getlist            = nuts.getlist
+local getattr            = nuts.getattr
+local getprop            = nuts.getprop
+local getdirection       = nuts.getdirection
+local isglyph            = nuts.isglyph -- or ischar
+
+local setprop            = nuts.setprop
+local setstate           = nuts.setstate
+local setchar            = nuts.setchar
+
+local insertnodebefore   = nuts.insertbefore
+local insertnodeafter    = nuts.insertafter
+local remove_node        = nuts.remove
+local endofmath          = nuts.endofmath
+
+local startofpar         = nuts.startofpar
+
+local nodepool           = nuts.pool
+
+local nodecodes          = nodes.nodecodes
+local gluecodes          = nodes.gluecodes
+
+local glyph_code         = nodecodes.glyph
+local math_code          = nodecodes.math
+local kern_code          = nodecodes.kern
+local glue_code          = nodecodes.glue
+local dir_code           = nodecodes.dir
+local par_code           = nodecodes.par
+
+local dirvalues          = nodes.dirvalues
+local lefttoright_code   = dirvalues.lefttoright
+local righttoleft_code   = dirvalues.righttoleft
+
+local parfillskip_code   = gluecodes.parfillskip
+
+local new_direction      = nodepool.direction
+
+local insert             = table.insert
+
+local fonthashes         = fonts.hashes
+local fontchar           = fonthashes.characters
+
+local chardirections     = characters.directions
+local charmirrors        = characters.mirrors
+local charclasses        = characters.textclasses
+
+local directions         = typesetters.directions
+local setcolor           = directions.setcolor
+local getglobal          = directions.getglobal
+
+local a_directions       = attributes.private('directions')
+
+local strip              = false
+
+local s_isol             = fonts.analyzers.states.isol
+
+local function stopdir(finish) -- we could use finish directly
+    local n = new_direction(finish == righttoleft_code and righttoleft_code or lefttoright_code,true)
+    setprop(n,"direction",true)
+    return n
+end
+
+local function startdir(finish) -- we could use finish directly
+    local n = new_direction(finish == righttoleft_code and righttoleft_code or lefttoright_code)
+    setprop(n,"direction",true)
+    return n
+end
+
+local function nextisright(current)
+    current = getnext(current)
+    local character, id = isglyph(current)
+    if character then
+        local direction = chardirections[character]
+        return direction == "r" or direction == "al" or direction == "an"
+    end
+end
+
+local function previsright(current)
+    current = getprev(current)
+    local character, id = isglyph(current)
+    if character then
+        local direction = chardirections[character]
+        return direction == "r" or direction == "al" or direction == "an"
+    end
+end
+
+local function process(start)
+
+    local head     = start
+    local current  = head
+    local autodir  = 0
+    local embedded = 0
+    local override = 0
+    local pardir   = 0
+    local textdir  = 0
+    local done     = false
+    local stack    = { }
+    local top      = 0
+    local obsolete = { }
+    local rlo      = false
+    local lro      = false
+    local prevattr = false
+    local fences   = { }
+
+    while current do
+        -- no isglyph here as we test for skips first
+        local id   = getid(current)
+        local next = getnext(current)
+        if id == math_code then
+            current = getnext(endofmath(next))
+        elseif getprop(current,"direction") then
+            -- this handles unhbox etc
+            current = next
+        else
+            local attr = getattr(current,a_directions)
+            if attr and attr > 0 then
+                if attr ~= prevattr then
+                    if not getglobal(attr) then
+                        lro = false
+                        rlo = false
+                    end
+                    prevattr = attr
+                end
+            end
+            local prop = true
+            if id == glyph_code then
+                if attr and attr > 0 then
+                    local character, font = isglyph(current)
+                    if character == 0 then
+                        -- skip signals
+                     -- setprop(current,"direction",true)
+                    else
+                        local direction = chardirections[character]
+                        local reversed  = false
+                        if rlo or override > 0 then
+                            if direction == "l" then
+                                direction = "r"
+                                reversed  = true
+                            end
+                        elseif lro or override < 0 then
+                            if direction == "r" or direction == "al" then
+                                setstate(current,s_isol) -- hm
+                                direction = "l"
+                                reversed  = true
+                            end
+                        end
+                        if direction == "on" then
+                            local mirror = charmirrors[character]
+                            if mirror and fontchar[font][mirror] then
+                                local class = charclasses[character]
+                                if class == "open" then
+                                    if nextisright(current) then
+                                        setchar(current,mirror)
+                                     -- setprop(current,"direction","r")
+                                        prop = "r"
+                                    elseif autodir < 0 then
+                                        setchar(current,mirror)
+                                     -- setprop(current,"direction","r")
+                                        prop = "r"
+                                    else
+                                        mirror = false
+                                     -- setprop(current,"direction","l")
+                                        prop = "l"
+                                    end
+                                    local fencedir = autodir == 0 and textdir or autodir
+                                    fences[#fences+1] = fencedir
+                                elseif class == "close" and #fences > 0 then
+                                    local fencedir = fences[#fences]
+                                    fences[#fences] = nil
+                                    if fencedir < 0 then
+                                        setchar(current,mirror)
+                                     -- setprop(current,"direction","r")
+                                        prop = "r"
+                                    else
+                                     -- setprop(current,"direction","l")
+                                        prop = "l"
+                                        mirror = false
+                                    end
+                                elseif autodir < 0 then
+                                    setchar(current,mirror)
+                                 -- setprop(current,"direction","r")
+                                    prop = "r"
+                                else
+                                 -- setprop(current,"direction","l")
+                                    prop = "l"
+                                    mirror = false
+                                end
+                            else
+                             -- setprop(current,"direction",true)
+                            end
+                            if trace_directions then
+                                setcolor(current,direction,false,mirror)
+                            end
+                        elseif direction == "l" then
+                            if trace_directions then
+                                setcolor(current,"l",reversed)
+                            end
+                         -- setprop(current,"direction","l")
+                            prop = "l"
+                        elseif direction == "r" then
+                            if trace_directions then
+                                setcolor(current,"r",reversed)
+                            end
+                         -- setprop(current,"direction","r")
+                            prop = "r"
+                        elseif direction == "en" then -- european number
+                            if trace_directions then
+                                setcolor(current,"l")
+                            end
+                         -- setprop(current,"direction","l")
+                            prop = "l"
+                        elseif direction == "al" then -- arabic letter
+                            if trace_directions then
+                                setcolor(current,"r")
+                            end
+                         -- setprop(current,"direction","r")
+                            prop = "r"
+                        elseif direction == "an" then -- arabic number
+                            -- needs a better scanner as it can be a float
+                            if trace_directions then
+                                setcolor(current,"l") -- was r
+                            end
+                         -- setprop(current,"direction","n") -- was r
+                            prop = "n"
+                        elseif direction == "lro" then -- Left-to-Right Override -> right becomes left
+                            top        = top + 1
+                            stack[top] = { override, embedded }
+                            override   = -1
+                            obsolete[#obsolete+1] = current
+                            goto obsolete
+                        elseif direction == "rlo" then -- Right-to-Left Override -> left becomes right
+                            top        = top + 1
+                            stack[top] = { override, embedded }
+                            override   = 1
+                            obsolete[#obsolete+1] = current
+                            goto obsolete
+                        elseif direction == "lre" then -- Left-to-Right Embedding -> lefttoright_code
+                            top        = top + 1
+                            stack[top] = { override, embedded }
+                            embedded   = 1
+                            obsolete[#obsolete+1] = current
+                            goto obsolete
+                        elseif direction == "rle" then -- Right-to-Left Embedding -> righttoleft_code
+                            top        = top + 1
+                            stack[top] = { override, embedded }
+                            embedded   = -1
+                            obsolete[#obsolete+1] = current
+                            goto obsolete
+                        elseif direction == "pdf" then -- Pop Directional Format
+                            if top > 0 then
+                                local s  = stack[top]
+                                override = s[1]
+                                embedded = s[2]
+                                top      = top - 1
+                            else
+                                override = 0
+                                embedded = 0
+                            end
+                            obsolete[#obsolete+1] = current
+                            goto obsolete
+                        elseif trace_directions then
+                            setcolor(current)
+                         -- setprop(current,"direction",true)
+                        else
+                         -- setprop(current,"direction",true)
+                        end
+                    end
+                else
+                 -- setprop(current,"direction",true)
+                end
+            elseif id == glue_code then
+                if getsubtype(current) == parfillskip_code then
+                 -- setprop(current,"direction","!")
+                    prop = "!"
+                else
+                 -- setprop(current,"direction","g")
+                    prop = "g"
+                end
+            elseif id == kern_code then
+             -- setprop(current,"direction","k")
+                prop = "k"
+            elseif id == dir_code then
+                local direction, pop = getdirection(current)
+                if direction == righttoleft_code then
+                    if not pop then
+                        autodir = -1
+                    elseif embedded and embedded ~= 0 then
+                        autodir = embedded
+                    else
+                        autodir = 0
+                    end
+                elseif direction == lefttoright_code then
+                    if not pop then
+                        autodir = 1
+                    elseif embedded and embedded ~= 0 then
+                        autodir = embedded
+                    else
+                        autodir = 0
+                    end
+                end
+                textdir = autodir
+             -- setprop(current,"direction",true)
+            elseif id == par_code and startofpar(current) then
+                local direction = getdirection(current)
+                if direction == righttoleft_code then
+                    autodir = -1
+                elseif direction == lefttoright_code then
+                    autodir = 1
+                end
+                pardir  = autodir
+                textdir = pardir
+             -- setprop(current,"direction",true)
+            else
+             -- setprop(current,"direction",true)
+            end
+            setprop(current,"direction",prop)
+          ::obsolete::
+            current = next
+        end
+    end
+
+    -- todo: track if really needed
+    -- todo: maybe we need to set the property (as it can be a copied list)
+
+    if done and strip then
+        local n = #obsolete
+        if n > 0 then
+            for i=1,n do
+                remove_node(head,obsolete[i],true)
+            end
+            if trace_directions then
+                report_directions("%s character nodes removed",n)
+            end
+        end
+    end
+
+    local state    = false
+    local last     = false
+    local collapse = true
+    current        = head
+
+    -- todo: textdir
+    -- todo: inject before parfillskip
+
+    while current do
+        local id = getid(current)
+        if id == math_code then
+            -- todo: this might be tricky nesting
+            current = getnext(endofmath(getnext(current)))
+        else
+            local cp = getprop(current,"direction")
+            if cp == "n" then
+                local swap = state == "r"
+                if swap then
+                    head = insertnodebefore(head,current,startdir(lefttoright_code))
+                end
+                setprop(current,"direction",true)
+                while true do
+                    local n = getnext(current)
+                    if n and getprop(n,"direction") == "n" then
+                        current = n
+                        setprop(current,"direction",true)
+                    else
+                        break
+                    end
+                end
+                if swap then
+                    head, current = insertnodeafter(head,current,stopdir(lefttoright_code))
+                end
+            elseif cp == "l" then
+                if state ~= "l" then
+                    if state == "r" then
+                        head = insertnodebefore(head,last or current,stopdir(righttoleft_code))
+                    end
+                    head  = insertnodebefore(head,current,startdir(lefttoright_code))
+                    state = "l"
+                    done  = true
+                end
+                last  = false
+            elseif cp == "r" then
+                if state ~= "r" then
+                    if state == "l" then
+                        head = insertnodebefore(head,last or current,stopdir(lefttoright_code))
+                    end
+                    head  = insertnodebefore(head,current,startdir(righttoleft_code))
+                    state = "r"
+                    done  = true
+                end
+                last = false
+            elseif collapse then
+                if cp == "k" or cp == "g" then
+                    last = last or current
+                else
+                    last = false
+                end
+            else
+                if state == "r" then
+                    head = insertnodebefore(head,current,stopdir(righttoleft_code))
+                elseif state == "l" then
+                    head = insertnodebefore(head,current,stopdir(lefttoright_code))
+                end
+                state = false
+                last  = false
+            end
+            setprop(current,"direction",true)
+        end
+        local next = getnext(current)
+        if next then
+            current = next
+        else
+            local sd = (state == "r" and stopdir(righttoleft_code)) or (state == "l" and stopdir(lefttoright_code))
+            if sd then
+                if id == glue_code and getsubtype(current) == parfillskip_code then
+                    head = insertnodebefore(head,current,sd)
+                else
+                    head = insertnodeafter(head,current,sd)
+                end
+            end
+            break
+        end
+    end
+
+    return head
+
+end
+
+directions.installhandler(interfaces.variables.default,process)
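The first loop in process() only classifies (and handles mirrors, overrides and embeddings); the second loop then wraps maximal runs of right-to-left and numeric material in direction nodes. A much simplified toy version of that run-wrapping pass, operating on a list of precomputed "l"/"r" classes instead of glyph and dir nodes, and ignoring neutrals, glue/kern collapsing and the paragraph direction:

    local function wrapruns(classes)
        local out, state = { }, false
        for i = 1, #classes do
            local class = classes[i]
            if class ~= state then
                if state then
                    out[#out+1] = "[stop " .. state .. "]"
                end
                out[#out+1] = "[start " .. class .. "]"
                state = class
            end
            out[#out+1] = class
        end
        if state then
            out[#out+1] = "[stop " .. state .. "]"
        end
        return table.concat(out, " ")
    end

    print(wrapruns { "l", "l", "r", "r", "r", "l" })
    -- [start l] l l [stop l] [start r] r r r [stop r] [start l] l [stop l]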
diff --git a/tex/context/base/mkxl/typo-dir.mkxl b/tex/context/base/mkxl/typo-dir.mkxl
index a5a4bc568..d9937ce73 100644
--- a/tex/context/base/mkxl/typo-dir.mkxl
+++ b/tex/context/base/mkxl/typo-dir.mkxl
@@ -19,9 +19,7 @@
 \unprotect
 
 \registerctxluafile{typo-dir}{autosuffix}
-\registerctxluafile{typo-dha}{}
-%registerctxluafile{typo-dua}{}
-%registerctxluafile{typo-dub}{}
+\registerctxluafile{typo-dha}{autosuffix}
 \registerctxluafile{typo-duc}{autosuffix}
 
 \definesystemattribute[directions][public,pickup]
diff --git a/tex/context/base/mkxl/typo-prc.mklx b/tex/context/base/mkxl/typo-prc.mklx
index f2df32986..f9a8f8e5e 100644
--- a/tex/context/base/mkxl/typo-prc.mklx
+++ b/tex/context/base/mkxl/typo-prc.mklx
@@ -54,6 +54,8 @@
 
 \installcommandhandler \??processor {processor} \??processor
 
+\mutable\let\currentprocessor\empty % weird that this is needed
+
 \appendtoks
     \letcsname\??processorcheck\currentprocessor\endcsname\relax
     \clf_registerstructureprocessor{\currentprocessor}% global, but it permits using processor that are yet undefined
diff --git a/tex/context/fonts/mkiv/bonum-math.lfg b/tex/context/fonts/mkiv/bonum-math.lfg
index 56262eb32..aa35c7b5a 100644
--- a/tex/context/fonts/mkiv/bonum-math.lfg
+++ b/tex/context/fonts/mkiv/bonum-math.lfg
@@ -232,6 +232,13 @@ return {
                 {
                     tweak = "addrules",
                 },
+                {
+                    tweak    = "replacerules",
+                 -- minus    = true, -- we have an extensible
+                    fraction = { height = .244, yoffset = .757 },
+                    radical  = { height = .244, yoffset = .754 },
+                    stacker  = { height = .244, yoffset = .757 },
+                },
                 {
                     -- This tweak is only needed for the funny arrows and these now get properly
                     -- centered. (Could actually be done in the engine).
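The new entry follows the usual goodie pattern: tweaks is an ordered list of tables selected by their "tweak" name, here, judging by the parameters, setting the height and vertical offset of the rules used for fractions, radicals and stacked material (the commented minus is skipped because Bonum has an extensible one). A rough sketch of how such a list is consumed, with a made-up dispatcher standing in for the real math goodie handlers, only meant to show the shape of the data:

    local tweaks = {
        { tweak = "addrules" },
        {
            tweak    = "replacerules",
            fraction = { height = .244, yoffset = .757 },
            radical  = { height = .244, yoffset = .754 },
            stacker  = { height = .244, yoffset = .757 },
        },
    }

    local handlers = {                      -- hypothetical stand-ins
        addrules     = function() print("addrules") end,
        replacerules = function(spec)
            for _, key in ipairs { "minus", "fraction", "radical", "stacker" } do
                local parameters = spec[key]
                if parameters then
                    print(key, parameters.height, parameters.yoffset)
                end
            end
        end,
    }

    for _, spec in ipairs(tweaks) do
        local handler = handlers[spec.tweak]
        if handler then
            handler(spec)
        end
    end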
diff --git a/tex/context/fonts/mkiv/cambria-math.lfg b/tex/context/fonts/mkiv/cambria-math.lfg
index a855a2513..9ad2c3afc 100644
--- a/tex/context/fonts/mkiv/cambria-math.lfg
+++ b/tex/context/fonts/mkiv/cambria-math.lfg
@@ -130,6 +130,26 @@ return {
                 {
                     tweak = "addrules",
                 },
+                {
+                    tweak    = "replacerules",
+                    minus = {
+                        leftoffset  = .0925,
+                        rightoffset = .0925,
+                    },
+                    fraction = {
+                        height  = .204,
+                        yoffset = .796,
+                    },
+                    radical  = {
+                        height     = .204,
+                        yoffset    = .796,
+                     -- leftoffset = .075,
+                    },
+                    stacker  = {
+                        height  = .204,
+                        yoffset = .796,
+                    },
+                },
                 {
                     tweak = "wipecues",
                 },
diff --git a/tex/context/fonts/mkiv/common-math-jmn.lfg b/tex/context/fonts/mkiv/common-math-jmn.lfg
index bcbe77962..1ebb7db39 100644
--- a/tex/context/fonts/mkiv/common-math-jmn.lfg
+++ b/tex/context/fonts/mkiv/common-math-jmn.lfg
@@ -55,6 +55,10 @@ return {
                 local sfm = ps("flat rule middle piece")
                 local sfr = ps("flat rule right piece")
 
+                local mrl = ps("minus rule left piece")
+                local mrm = ps("minus rule middle piece")
+                local mrr = ps("minus rule right piece")
+
                 local dfl = ps("flat double rule left piece")
                 local dfm = ps("flat double rule middle piece")
                 local dfr = ps("flat double rule right piece")
@@ -66,6 +70,7 @@ return {
                 local dar = ps("double arrow right piece")
 
                 local rad = ps("radical bar extender")
+                local frc = ps("fraction bar extender")
 
                 local antykwa = characters[srm]
 
@@ -104,11 +109,13 @@ return {
                 builders.jointwo(main,0x27FA,dal,joinrelfactor,dar)
 
                 if antykwa then
-                    builders.horibar(main,0x203E,srm,srl,srr,0x02212) -- overbar underbar fraction (we take 90/91/92 too!)
-                 -- builders.horibar(main,0x203E,srm,srl,srr,0x0002D) -- overbar underbar fraction (we take 90/91/92 too!)
-                    builders.rootbar(main,rad,srm,srr,0x02212) -- radical
+                    builders.horibar(main,0x2212,mrm,mrl,mrr,0x2212,true,srm,srl,srr) -- minus
+                    builders.horibar(main,0x203E,srm,srl,srr,0x2212) -- overbar underbar fraction (we take 90/91/92 too!)
+                    builders.horibar(main,frc,srm,srl,srr,0x2212)    -- fraction
+                    builders.rootbar(main,rad,srm,srr,0x2212)        -- radical
                 else
-                    builders.horibar(main,0x203E,0xFE073) -- overbar underbar
+                    builders.horibar(main,0x2212,false,false,false,0x2212,true) -- minus
+                    builders.horibar(main,0x203E,false,false,false,0x02212)
                 end
 
                 local ffactor = antykwa and 1 or 1
diff --git a/tex/context/fonts/mkiv/concrete-math.lfg b/tex/context/fonts/mkiv/concrete-math.lfg
index b69ee0103..53972ab05 100644
--- a/tex/context/fonts/mkiv/concrete-math.lfg
+++ b/tex/context/fonts/mkiv/concrete-math.lfg
@@ -86,6 +86,13 @@ return {
                     tweak   = "addbars",
                     advance = 0.6,
                 },
+                {
+                    tweak    = "replacerules",
+                    minus    = true,
+                    fraction = { height = .2, yoffset = .8 },
+                    radical  = { height = .2, yoffset = .8, leftoffset = .2 },
+                    stacker  = { height = .2, yoffset = .8 },
+                },
                 {
                     tweak = "addactuarian",
                 },
diff --git a/tex/context/fonts/mkiv/dejavu-math.lfg b/tex/context/fonts/mkiv/dejavu-math.lfg
index 03f869288..f3e1011ad 100644
--- a/tex/context/fonts/mkiv/dejavu-math.lfg
+++ b/tex/context/fonts/mkiv/dejavu-math.lfg
@@ -144,6 +144,13 @@ return {
                 {
                     tweak = "addrules",
                 },
+                {
+                    tweak    = "replacerules",
+                 -- minus    = true, -- we have an extensible
+                    fraction = { height = .254, yoffset = .746 },
+                    radical  = { height = .254, yoffset = .746, yscale = .912 },
+                    stacker  = { height = .254, yoffset = .746 },
+                },
                 {
                     tweak = "wipecues",
                 },
diff --git a/tex/context/fonts/mkiv/ebgaramond-math.lfg b/tex/context/fonts/mkiv/ebgaramond-math.lfg
index f6f552dfe..bf9a2e027 100644
--- a/tex/context/fonts/mkiv/ebgaramond-math.lfg
+++ b/tex/context/fonts/mkiv/ebgaramond-math.lfg
@@ -174,20 +174,23 @@ return {
                 },
                 {
                     tweak    = "replacerules",
+                    minus = {
+                        leftoffset  = .075,
+                        rightoffset = .075,
+                    },
                     fraction = {
-                        template = "minus", -- 0x2212,
-                        xoffset  = 0.075,
-                        yoffset  = 0.9,
-                     -- width    = 0.85,
-                     -- height   = 0.1,
+                        height  = .2,
+                        yoffset = .8,
                     },
                     radical = {
-                        template = "minus", -- 0x2212,
-                        xoffset  = 0.075,
-                        yoffset  = 0.9,
-                        yscale   = 0.975,
-                     -- width    = 0.85,
-                     -- height   = 0.1,
+                        height     = .2,
+                        yoffset    = .8,
+                        leftoffset = .075,
+                        yscale     = .9775,
+                    },
+                    stacker = {
+                        height  = .2,
+                        yoffset = .8,
                     },
                 },
                 {
diff --git a/tex/context/fonts/mkiv/erewhon-math.lfg b/tex/context/fonts/mkiv/erewhon-math.lfg
index 54cc687fa..68a088d3f 100644
--- a/tex/context/fonts/mkiv/erewhon-math.lfg
+++ b/tex/context/fonts/mkiv/erewhon-math.lfg
@@ -90,6 +90,28 @@ return {
                 {
                     tweak = "addrules",
                 },
+{
+    tweak    = "replacerules",
+    -- minus = {
+    --     height      = 0.188,
+    --     yoffset     = 0.812,
+    --     leftoffset  = 0.2,
+    --     rightoffset = 0.2,
+    -- },
+    fraction = {
+        height  = .188,
+        yoffset = .812,
+    },
+    radical = {
+        height     = .188,
+        yoffset    = .812,
+        leftoffset = 0.075,
+    },
+    stacker = {
+        height  = .188,
+        yoffset = .812,
+    },
+},
                 {
                     tweak = "addactuarian",
                 },
diff --git a/tex/context/fonts/mkiv/kpfonts-math.lfg b/tex/context/fonts/mkiv/kpfonts-math.lfg
index 67ad3841f..5896323b4 100644
--- a/tex/context/fonts/mkiv/kpfonts-math.lfg
+++ b/tex/context/fonts/mkiv/kpfonts-math.lfg
@@ -112,6 +112,28 @@ return {
                 {
                     tweak = "addrules",
                 },
+{
+    tweak    = "replacerules",
+    -- minus = {
+    --     height      = 0.1818,
+    --     yoffset     = 0.818,
+    --     leftoffset  = 0.2,
+    --     rightoffset = 0.2,
+    -- },
+    fraction = {
+        height  = .1818,
+        yoffset = .818,
+    },
+    radical = {
+        height     = .1818,
+        yoffset    = .818,
+        leftoffset = 0.075,
+    },
+    stacker = {
+        height  = .1818,
+        yoffset = .818,
+    },
+},
                 {
                     -- This will be fixed. Check if new version comes out!
                     tweak   = "addbars",
diff --git a/tex/context/fonts/mkiv/libertinus-math.lfg b/tex/context/fonts/mkiv/libertinus-math.lfg
index 6f707ed2b..ac15d6674 100644
--- a/tex/context/fonts/mkiv/libertinus-math.lfg
+++ b/tex/context/fonts/mkiv/libertinus-math.lfg
@@ -138,6 +138,29 @@ return {
                     tweak   = "addbars",
                     advance = 0.5,
                 },
+                {
+                    tweak    = "replacerules",
+                    minus    = {
+                        height      = .176,
+                        yoffset     = .825,
+                        leftoffset  = .065,
+                        rightoffset = .065,
+                    },
+                    fraction = {
+                        height  = .176,
+                        yoffset = .825,
+                    },
+                    radical  = {
+                        height     = .140,
+                        yoffset    = .800,
+                        leftoffset = .075,
+                        yscale     = .950,
+                    },
+                    stacker  = {
+                        height  = .176,
+                        yoffset = .825,
+                    },
+                },
                 {
                     tweak = "addactuarian",
                 },
diff --git a/tex/context/fonts/mkiv/lucida-math.lfg b/tex/context/fonts/mkiv/lucida-math.lfg
index 28510ac2d..50dce6907 100644
--- a/tex/context/fonts/mkiv/lucida-math.lfg
+++ b/tex/context/fonts/mkiv/lucida-math.lfg
@@ -64,14 +64,14 @@ return {
                     keep     = true,
                     list     = {
                         { source = "latinsupplement" },
-                --         { source = "latinextendeda" },
-                --         { source = "latinextendedadditional" },
-                --         { source = "latinextendedb" },
-                --         { source = "latinextendedc" },
-                --         { source = "latinextendedd" },
-                --         { source = "latinextendede" },
-                --         { source = "latinextendedf" },
-                --         { source = "latinextendedg" },
+                     -- { source = "latinextendeda" },
+                     -- { source = "latinextendedadditional" },
+                     -- { source = "latinextendedb" },
+                     -- { source = "latinextendedc" },
+                     -- { source = "latinextendedd" },
+                     -- { source = "latinextendede" },
+                     -- { source = "latinextendedf" },
+                     -- { source = "latinextendedg" },
                     },
                 },
                 {
@@ -188,6 +188,27 @@ return {
                 {
                     tweak = "addrules",
                 },
+                {
+                    tweak    = "replacerules",
+                    minus    = {
+                        leftoffset  = .2,
+                        rightoffset = .2,
+                    },
+                    fraction = {
+                        height  = .2,
+                        yoffset = .825,
+                    },
+                    radical  = {
+                        height     = .2,
+                        yoffset    = .825,
+                        leftoffset = .075,
+                        yscale     = .940,
+                    },
+                    stacker  = {
+                        height  = .2,
+                        yoffset = .825,
+                    },
+                },
                 {
                     tweak = "addactuarian",
                 },
diff --git a/tex/context/fonts/mkiv/modern-math.lfg b/tex/context/fonts/mkiv/modern-math.lfg
index 4af740789..e2560f0f1 100644
--- a/tex/context/fonts/mkiv/modern-math.lfg
+++ b/tex/context/fonts/mkiv/modern-math.lfg
@@ -323,6 +323,13 @@ return {
                 {
                     tweak = "addrules",
                 },
+                {
+                    tweak    = "replacerules",
+                 -- minus    = true, -- we have an extensible
+                    fraction = { height = .15, yoffset = .85 },
+                    radical  = { height = .15, yoffset = .85 },
+                    stacker  = { height = .15, yoffset = .85 },
+                },
                 {
                     tweak   = "addbars",
                     advance = 0.52,
@@ -346,6 +353,8 @@ return {
                     feature = "emulatelmtx",
                     comment = "this is for mkiv",
                 },
+-- { tweak = "inspect", slot = 0x2212 },
+-- { tweak = "inspect", slot = 0x003D },
             },
         },
         bigslots = {
diff --git a/tex/context/fonts/mkiv/newcomputermodern-math.lfg b/tex/context/fonts/mkiv/newcomputermodern-math.lfg
index cae69aecc..bb881eda0 100644
--- a/tex/context/fonts/mkiv/newcomputermodern-math.lfg
+++ b/tex/context/fonts/mkiv/newcomputermodern-math.lfg
@@ -113,7 +113,7 @@ return {
                         ["0x27EB.variants.*"]   = { topright = -0.3,  bottomright = -0.3  },
 
                     },
-                },                
+                },
                 {
                     tweak = "checkspacing",
                 },
@@ -129,6 +129,13 @@ return {
                 {
                     tweak = "addrules",
                 },
+                {
+                    tweak    = "replacerules",
+                 -- minus    = true, -- we have an extensible
+                    fraction = { height = .15, yoffset = .85 },
+                    radical  = { height = .15, yoffset = .85 },
+                    stacker  = { height = .15, yoffset = .85 },
+                },
                 {
                     tweak = "addfourier",
                     variant = 1,
diff --git a/tex/context/fonts/mkiv/pagella-math.lfg b/tex/context/fonts/mkiv/pagella-math.lfg
index c1d0c7dd5..230f81c9c 100644
--- a/tex/context/fonts/mkiv/pagella-math.lfg
+++ b/tex/context/fonts/mkiv/pagella-math.lfg
@@ -154,7 +154,7 @@ return {
                         [0x27EB]                = { topright = -0.2,  bottomright = -0.2  },
                         ["0x27EB.variants.*"]   = { topright = -0.3,  bottomright = -0.3  },
                         --
-                        [0x00393] = { bottomright = -0.20, }, -- upright Gamma
+                        [0x0393]                = { bottomright = -0.20, }, -- upright Gamma
                         --
                         ["0x222B.parts.bottom"] = { bottomright = -0.20 }, -- int
                         ["0x222C.parts.bottom"] = { bottomright = -0.15 }, -- iint
@@ -184,6 +184,13 @@ return {
                     tweak   = "addbars",
                     advance = 0.2,
                 },
+                {
+                    tweak    = "replacerules",
+                    minus    = { rightoffset = .045 },
+                    fraction = { height = .2, yoffset = .8, rightoffset = .04 },
+                    radical  = { height = .2, yoffset = .8, rightoffset = .04 },
+                    stacker  = { height = .2, yoffset = .8, rightoffset = .04 },
+                },
                 {
                     tweak = "addactuarian",
                 },
diff --git a/tex/context/fonts/mkiv/schola-math.lfg b/tex/context/fonts/mkiv/schola-math.lfg
index e4a3ad397..2e36f0825 100644
--- a/tex/context/fonts/mkiv/schola-math.lfg
+++ b/tex/context/fonts/mkiv/schola-math.lfg
@@ -116,6 +116,13 @@ return {
                 {
                     tweak = "addrules",
                 },
+                {
+                    tweak    = "replacerules",
+                 -- minus    = true, -- we have an extensible
+                    fraction = { height = .25, yoffset = .75 },
+                    radical  = { height = .25, yoffset = .75 },
+                    stacker  = { height = .25, yoffset = .75 },
+                },
                 {
                     tweak   = "addbars",
                     advance = 0.27,
diff --git a/tex/context/fonts/mkiv/stixtwo-math.lfg b/tex/context/fonts/mkiv/stixtwo-math.lfg
index b6b1757b6..c0c97862f 100644
--- a/tex/context/fonts/mkiv/stixtwo-math.lfg
+++ b/tex/context/fonts/mkiv/stixtwo-math.lfg
@@ -167,6 +167,29 @@ return {
                     tweak   = "addbars",
                     advance = 0.4,
                 },
+                {
+                    tweak    = "replacerules",
+                    minus    = {
+                        height      = .233,
+                        yoffset     = .768,
+                     -- leftoffset  = .2,
+                     -- rightoffset = .2,
+                    },
+                    fraction = {
+                        height  = .233,
+                        yoffset = .768,
+                    },
+                    radical  = {
+                        height      = .233,
+                        yoffset     = .768,
+                        leftoffset  = .05,
+                        rightoffset = .05,
+                    },
+                    stacker  = {
+                        height  = .233,
+                        yoffset = .768,
+                    },
+                },
                 {
                     tweak = "addactuarian",
                 },
diff --git a/tex/context/fonts/mkiv/termes-math.lfg b/tex/context/fonts/mkiv/termes-math.lfg
index d9c53ee28..364ea8369 100644
--- a/tex/context/fonts/mkiv/termes-math.lfg
+++ b/tex/context/fonts/mkiv/termes-math.lfg
@@ -122,6 +122,13 @@ return {
                 {
                     tweak = "addrules",
                 },
+                {
+                    tweak    = "replacerules",
+                 -- minus    = true, -- we have an extensible
+                    fraction = { height = .2, yoffset = .8 },
+                    radical  = { height = .2, yoffset = .8 },
+                    stacker  = { height = .2, yoffset = .8 },
+                },
                 {
                     tweak   = "addbars",
                     advance = 0.3,
diff --git a/tex/context/fonts/mkiv/type-imp-antykwa.mkiv b/tex/context/fonts/mkiv/type-imp-antykwa.mkiv
index f5e3158f4..f2fd100d0 100644
--- a/tex/context/fonts/mkiv/type-imp-antykwa.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-antykwa.mkiv
@@ -14,14 +14,7 @@
 \starttypescriptcollection[antykwa-torunska]
 
     \startsetups[antykwa]
-      % \setupmathfraction[\c!rule=\v!symbol,\c!middle="0203E]%
-      % \setupmathradical [\c!rule=\v!symbol,\c!top   ="FE010]%
-      % \setupmathfence   [\c!alternative=1]%
-        \letmathfractionparameter\c!rule\v!symbol
-        \setmathfractionparameter\c!middle{"203E}%
-        \letmathradicalparameter \c!rule\v!symbol
-        \setmathradicalparameter \c!top{\radicalbarextenderuc}%
-        \setmathfenceparameter   \c!alternative{1}%
+        \setmathfenceparameter\c!alternative{1}%
     \stopsetups
 
     % cond => -cont as in iwona
diff --git a/tex/context/fonts/mkiv/type-imp-concrete.mkiv b/tex/context/fonts/mkiv/type-imp-concrete.mkiv
index abf9b2cb1..c383a27fe 100644
--- a/tex/context/fonts/mkiv/type-imp-concrete.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-concrete.mkiv
@@ -19,7 +19,7 @@
     %\definefontfeature[none-slanted-concrete]   [none]   [slant=.2]
 
     \doifunknownfontfeature {concrete-math-bold} {\definefontfeature[concrete-math-bold][boldened]}
-    \doifunknownfontfeature {concrete-text-bold} {\definefontfeature[concrete-text-bold][boldened-15]}
+    \doifunknownfontfeature {concrete-text-bold} {\definefontfeature[concrete-text-bold][boldened-10]}
 
     \starttypescript [\s!serif] [concrete]
         \definefontsynonym [\s!Serif]            [LMTypewriterVarWd-Regular]     [\s!features={\s!default,concrete-text-bold}]
diff --git a/tex/context/fonts/mkiv/type-imp-ebgaramond.mkiv b/tex/context/fonts/mkiv/type-imp-ebgaramond.mkiv
index 966e50ba8..42575a61c 100644
--- a/tex/context/fonts/mkiv/type-imp-ebgaramond.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-ebgaramond.mkiv
@@ -65,13 +65,9 @@
 
 \starttypescriptcollection[ebgaramond]
 
-    \startsetups[ebgaramond]
-        \letmathfractionparameter\c!rule\v!symbol
-        \setmathfractionparameter\c!middle{"203E}%
-        \letmathradicalparameter \c!rule\v!symbol
-        \setmathradicalparameter \c!top{\radicalbarextenderuc}%
-      % \setmathfenceparameter   \c!alternative{1}%
-    \stopsetups
+%   \startsetups[ebgaramond]
+%     % \setmathfenceparameter   \c!alternative{1}%
+%   \stopsetups
 
     \doifunknownfontfeature {ebgaramond-math-bold} {\definefontfeature[ebgaramond-math-bold][boldened]}
 
diff --git a/tex/context/fonts/mkiv/type-imp-iwona.mkiv b/tex/context/fonts/mkiv/type-imp-iwona.mkiv
index 528cb3208..01d859071 100644
--- a/tex/context/fonts/mkiv/type-imp-iwona.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-iwona.mkiv
@@ -14,13 +14,7 @@
 \starttypescriptcollection[iwona]
 
     \startsetups[iwona]
-      % \setupmathfence   [\c!alternative=1]%
-        \setmathfenceparameter   \c!alternative{1}%
-%         \letmathfractionparameter\c!rule\v!symbol
-%         \setmathfractionparameter\c!middle{"203E}%
-%         \letmathradicalparameter \c!rule\v!symbol
-%         \setmathradicalparameter \c!top{\radicalbarextenderuc}%
-%         \setmathfenceparameter   \c!alternative{1}%
+        \setmathfenceparameter\c!alternative{1}%
     \stopsetups
 
     \startsetups[iwona-light]      \directsetup{antykwa}\stopsetups
diff --git a/tex/context/fonts/mkiv/type-imp-kurier.mkiv b/tex/context/fonts/mkiv/type-imp-kurier.mkiv
index af1e2a28d..0ff7852fc 100644
--- a/tex/context/fonts/mkiv/type-imp-kurier.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-kurier.mkiv
@@ -14,13 +14,7 @@
 \starttypescriptcollection [kurier]
 
     \startsetups[kurier]
-      % \setupmathfence   [\c!alternative=1]%
-        \setmathfenceparameter   \c!alternative{1}%
-%         \letmathfractionparameter\c!rule\v!symbol
-%         \setmathfractionparameter\c!middle{"203E}%
-%         \letmathradicalparameter \c!rule\v!symbol
-%         \setmathradicalparameter \c!top{\radicalbarextenderuc}%
-%         \setmathfenceparameter   \c!alternative{1}%
+        \setmathfenceparameter\c!alternative{1}%
     \stopsetups
 
     \startsetups[kurier-light]      \directsetup{antykwa}\stopsetups
diff --git a/tex/context/fonts/mkiv/xcharter-math.lfg b/tex/context/fonts/mkiv/xcharter-math.lfg
index 193c0fd1b..3c349ee88 100644
--- a/tex/context/fonts/mkiv/xcharter-math.lfg
+++ b/tex/context/fonts/mkiv/xcharter-math.lfg
@@ -77,6 +77,28 @@ return {
                 {
                     tweak = "addrules",
                 },
+{
+    tweak    = "replacerules",
+    -- minus = {
+    --     height      = 0.188,
+    --     yoffset     = 0.812,
+    --     leftoffset  = 0.2,
+    --     rightoffset = 0.2,
+    -- },
+    fraction = {
+        height  = .188,
+        yoffset = .812,
+    },
+    radical = {
+        height     = .188,
+        yoffset    = .812,
+        leftoffset = 0.2, -- no effect?
+    },
+    stacker = {
+        height  = .188,
+        yoffset = .812,
+    },
+},
                 {
                     tweak = "addactuarian",
                 },
diff --git a/tex/context/modules/mkiv/m-tikz.mkiv b/tex/context/modules/mkiv/m-tikz.mkiv
index 221c074ad..ef1b6b7e3 100644
--- a/tex/context/modules/mkiv/m-tikz.mkiv
+++ b/tex/context/modules/mkiv/m-tikz.mkiv
@@ -42,6 +42,7 @@
    \catcode`\@=11
    \catcode`\|=12
    \catcode`\!=12
+   \catcode`\~=12
    \relax}
 
 \permanent\protected\def\stoptikzinput
diff --git a/tex/context/modules/mkiv/s-abbreviations-logos.tex b/tex/context/modules/mkiv/s-abbreviations-logos.tex
index ab2b98a56..d04706ca6 100644
--- a/tex/context/modules/mkiv/s-abbreviations-logos.tex
+++ b/tex/context/modules/mkiv/s-abbreviations-logos.tex
@@ -177,10 +177,16 @@
 \logo [LMX]           {lmx}
 \logo [LPEG]          {lpeg}
 \logo [LUA]           {Lua}
-\logo [LUAJIT]        {Lua\wordboundary JIT}
-\logo [LUAJITTEX]     {Lua\wordboundary jit\TeXsuffix}
-\logo [LUAMETATEX]    {\Lua\wordboundary Meta\wordboundary\TeXsuffix}
-\logo [LUATEX]        {Lua\wordboundary\TeXsuffix}
+% \logo [LUAJIT]        {Lua\wordboundary JIT}
+% \logo [LUAJITTEX]     {Lua\wordboundary jit\TeXsuffix}
+% \logo [LUAMETATEX]    {\Lua\wordboundary Meta\wordboundary\TeXsuffix}
+% \logo [LUATEX]        {Lua\wordboundary\TeXsuffix}
+% \logo [LUAMETAFUN]    {\Lua\wordboundary\MetaFun}
+\logo [LUAJIT]        {Lua\-JIT}
+\logo [LUAJITTEX]     {Lua\-jit\-\TeXsuffix}
+\logo [LUAMETATEX]    {\Lua\-Meta\-\TeXsuffix}
+\logo [LUATEX]        {Lua\-\TeXsuffix}
+\logo [LUAMETAFUN]    {\Lua\-\MetaFun}
 \logo [LUATOOLS]      {luatools}
 \logo [MACOSX]        {MacOSX}
 %logo [MACROTEX]      {Macro\TeXsuffix}
@@ -189,7 +195,6 @@
 \logo [MAPS]          {Maps}
 \logo [MATHML]        {MathML}
 \logo [METAFONT]      {\MetaFont}
-\logo [LUAMETAFUN]    {\Lua\wordboundary\MetaFun}
 \logo [METAFUN]       {\MetaFun}
 \logo [METAPOST]      {\MetaPost}
 \logo [METATEX]       {Meta\TeXsuffix}
diff --git a/tex/context/modules/mkiv/x-asciimath.lua b/tex/context/modules/mkiv/x-asciimath.lua
index fdcab141c..f158065aa 100644
--- a/tex/context/modules/mkiv/x-asciimath.lua
+++ b/tex/context/modules/mkiv/x-asciimath.lua
@@ -6,15 +6,14 @@ if not modules then modules = { } end modules ['x-asciimath'] = {
     license   = "see context related readme files"
 }
 
---[[ldx--
-<p>Some backgrounds are discussed in <t>x-asciimath.mkiv</t>. This is a third version. I first
-tried a to make a proper expression parser but it's not that easy. First we have to avoid left
-recursion, which is not that trivial (maybe a future version of lpeg will provide that), and
-second there is not really a syntax but a mix of expressions and sequences with some fuzzy logic
-applied. Most problematic are fractions and we also need to handle incomplete expressions. So,
-instead we (sort of) tokenize the string and then do some passes over the result. Yes, it's real
-ugly and unsatisfying code mess down here. Don't take this as an example.</p>
---ldx]]--
+-- Some backgrounds are discussed in 'x-asciimath.mkiv'. This is a third version. I
+-- first tried to make a proper expression parser but it's not that easy. First we
+-- have to avoid left recursion, which is not that trivial (maybe a future version
+-- of lpeg will provide that), and second there is not really a syntax but a mix of
+-- expressions and sequences with some fuzzy logic applied. Most problematic are
+-- fractions and we also need to handle incomplete expressions. So, instead we (sort
+-- of) tokenize the string and then do some passes over the result. Yes, it is a
+-- real ugly and unsatisfying code mess down here. Don't take this as an example.
 
 -- todo: spaces around all elements in cleanup?
 -- todo: filter from files listed in tuc file
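The converted comment still describes the approach: no grammar, just a tokenizer followed by rewrite passes over the token list. A toy illustration of that strategy, unrelated to the real asciimath tables:

    local function tokenize(s)
        local t = { }
        for tok in s:gmatch("%S+") do   -- crude: split on spaces
            t[#t+1] = tok
        end
        return t
    end

    local function fractionpass(t)      -- one pass: rewrite "a / b" triplets
        local r, i = { }, 1
        while i <= #t do
            if t[i+1] == "/" and t[i+2] then
                r[#r+1] = string.format("\\frac{%s}{%s}", t[i], t[i+2])
                i = i + 3
            else
                r[#r+1] = t[i]
                i = i + 1
            end
        end
        return r
    end

    print(table.concat(fractionpass(tokenize("1 + x / y")), " "))
    -- 1 + \frac{x}{y}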
diff --git a/tex/context/modules/mkxl/m-tikz.mkxl b/tex/context/modules/mkxl/m-tikz.mkxl
index 21544d14e..6b173227c 100644
--- a/tex/context/modules/mkxl/m-tikz.mkxl
+++ b/tex/context/modules/mkxl/m-tikz.mkxl
@@ -45,6 +45,7 @@
    \catcode`\@=11
    \catcode`\|=12
    \catcode`\!=12
+   \catcode`\~=13
    \autoparagraphmode\zerocount}
 
 \permanent\protected\def\stoptikzinput
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index e885a56ed..7d9befa51 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
 -- merged file : c:/data/develop/context/sources/luatex-fonts-merged.lua
 -- parent file : c:/data/develop/context/sources/luatex-fonts.lua
--- merge date  : 2023-03-20 15:42
+-- merge date  : 2023-04-01 09:28
 
 do -- begin closure to overcome local limits and interference
 
@@ -37807,7 +37807,7 @@ local fonts=fonts
 local otf=fonts.handlers.otf
 local registerotffeature=otf.features.register
 local addotffeature=otf.addfeature
-local specification={
+local tlig={
  type="ligature",
  order={ "tlig" },
  prepend=true,
@@ -37816,12 +37816,17 @@ local specification={
   [0x2014]={ 0x002D,0x002D,0x002D },
  },
 }
-addotffeature("tlig",specification)
-registerotffeature {
- name="tlig",
- description="tex ligatures",
+local tquo={
+ type="ligature",
+ order={ "tquo" },
+ prepend=true,
+ data={
+  [0x201C]={ 0x0060,0x0060 },
+  [0x201D]={ 0x0027,0x0027 },
+  [0x201E]={ 0x002C,0x002C },
+ },
 }
-local specification={
+local trep={
  type="substitution",
  order={ "trep" },
  prepend=true,
@@ -37829,11 +37834,12 @@ local specification={
   [0x0027]=0x2019,
  },
 }
-addotffeature("trep",specification)
-registerotffeature {
- name="trep",
- description="tex replacements",
-}
+addotffeature("trep",trep)
+addotffeature("tlig",tlig)
+addotffeature("tquo",tquo)
+registerotffeature { name="tlig",description="tex ligatures" }
+registerotffeature { name="tquo",description="tex quotes" }
+registerotffeature { name="trep",description="tex replacements" }
 local anum_arabic={
  [0x0030]=0x0660,
  [0x0031]=0x0661,
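Aside: the three features above all follow the same pattern: build a specification table, hand it to addotffeature, then register a name and description. A minimal sketch of adding one more feature in that style, not part of this patch; the feature name "tdot" and its data are hypothetical, and the locals addotffeature and registerotffeature are the ones defined earlier in this file.

    local tdot={
     type="ligature",
     order={ "tdot" },
     prepend=true,
     data={
      [0x2026]={ 0x002E,0x002E,0x002E }, -- three periods become an ellipsis
     },
    }
    addotffeature("tdot",tdot)
    registerotffeature { name="tdot",description="tex dots (hypothetical)" }

Once registered this way, such a feature can be enabled in a font feature set just like tlig, tquo or trep.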
diff --git a/tex/generic/context/luatex/luatex-mplib.lua b/tex/generic/context/luatex/luatex-mplib.lua
index 1839c44ee..99a23b03c 100644
--- a/tex/generic/context/luatex/luatex-mplib.lua
+++ b/tex/generic/context/luatex/luatex-mplib.lua
@@ -6,19 +6,14 @@ if not modules then modules = { } end modules ['luatex-mplib'] = {
     license   = "public domain",
 }
 
---[[ldx--
-<p>This module is a stripped down version of libraries that are used
-by <l n='context'/>. It can be used in other macro packages and/or
-serve as an example. Embedding in a macro package is upto others and
-normally boils down to inputting <t>supp-mpl.tex</t>.</p>
---ldx]]--
+-- This module is a stripped-down version of libraries that are used by ConTeXt. It
+-- can be used in other macro packages and/or serve as an example. Embedding in a
+-- macro package is up to others and normally boils down to inputting 'supp-mpl.tex'.
 
 if metapost and metapost.version then
 
-    --[[ldx--
-    <p>Let's silently quit and make sure that no one loads it
-    manually in <l n='context'/>.</p>
-    --ldx]]--
+    -- Let's silently quit and make sure that no one loads it manually in
+    -- ConTeXt.
 
 else
 
@@ -29,27 +24,25 @@ else
     local mplib = require ('mplib')
     local kpse  = require ('kpse')
 
-    --[[ldx--
-    <p>We create a namespace and some variables to it. If a namespace is
-    already defined it wil not be initialized. This permits hooking
-    in code beforehand.</p>
+    -- We create a namespace and add some variables to it. If a namespace is already
+    -- defined it will not be initialized. This permits hooking in code beforehand.
 
-    <p>We don't make a format automatically. After all, distributions
-    might have their own preferences and normally a format (mem) file will
-    have some special place in the <l n='tex'/> tree. Also, there can already
-    be format files, different memort settings and other nasty pitfalls that
-    we don't want to interfere with. If you want, you can define a function
-    <t>metapost.make(name,mem_name) that does the job.</t></p>
-    --ldx]]--
+    -- We don't make a format automatically. After all, distributions might have
+    -- their own preferences and normally a format (mem) file will have some
+    -- special place in the TeX tree. Also, there can already be format files,
+    -- different memory settings and other nasty pitfalls that we don't want to
+    -- interfere with. If you want, you can define a function
+    --
+    --   metapost.make (name,mem_name)
+    --
+    -- that does the job.
 
     metapost          = metapost or { }
     metapost.version  = 1.00
     metapost.showlog  = metapost.showlog or false
     metapost.lastlog  = ""
 
-    --[[ldx--
-    <p>A few helpers, taken from <t>l-file.lua</t>.</p>
-    --ldx]]--
+    -- A few helpers, taken from 'l-file.lua'.
 
     local file = file or { }
 
@@ -61,10 +54,7 @@ else
         return (string.gsub(filename,"%.[%a%d]+$",""))
     end
 
-    --[[ldx--
-    <p>We use the <l n='kpse'/> library unless a finder is already
-    defined.</p>
-    --ldx]]--
+    -- We use the KPSE library unless a finder is already defined.
 
     local mpkpse = kpse.new("luatex","mpost")
 
@@ -76,10 +66,9 @@ else
         end
     end
 
-    --[[ldx--
-    <p>You can use your own reported if needed, as long as it handles multiple
-    arguments and formatted strings.</p>
-    --ldx]]--
+    -- You can use your own reporter if needed, as long as it handles multiple
+    -- arguments and formatted strings.
+
 
     metapost.report = metapost.report or function(...)
         if logs.report then
@@ -89,11 +78,9 @@ else
         end
     end
 
-    --[[ldx--
-    <p>The rest of this module is not documented. More info can be found in the
-    <l n='luatex'/> manual, articles in user group journals and the files that
-    ship with <l n='context'/>.</p>
-    --ldx]]--
+    -- The rest of this module is not documented. More info can be found in the
+    -- LuaTeX manual, articles in user group journals and the files that ship
+    -- with ConTeXt.
 
     function metapost.resetlastlog()
         metapost.lastlog = ""
@@ -329,9 +316,8 @@ else
         return true -- done
     end
 
-    --[[ldx--
-    <p>We removed some message and tracing code. We might even remove the flusher</p>
-    --ldx]]--
+    -- We removed some message and tracing code. We might even remove the
+    -- flusher.
 
     local function pdf_startfigure(n,llx,lly,urx,ury)
         tex.sprint(format("\\startMPLIBtoPDF{%s}{%s}{%s}{%s}",llx,lly,urx,ury))
@@ -443,9 +429,7 @@ else
         return t
     end
 
-    --[[ldx--
-    <p>Support for specials has been removed.</p>
-    --ldx]]--
+    -- Support for specials has been removed.
 
     function metapost.flush(result,flusher)
         if result then
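Aside: the reporter mentioned in the comments above is just a function that formats and writes a message. A sketch, not part of this patch, of plugging in your own before this file is loaded; the "mplib:" prefix is arbitrary.

    metapost = metapost or { }

    function metapost.report(...)
        -- any function that accepts a format string plus arguments will do
        texio.write_nl("mplib: " .. string.format(...))
    end

Because the namespace is only created when it does not yet exist, this definition survives and the built-in fallback reporter is never installed.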
diff --git a/tex/generic/context/luatex/luatex-preprocessor.lua b/tex/generic/context/luatex/luatex-preprocessor.lua
index 8faa0b47e..b1debcd5c 100644
--- a/tex/generic/context/luatex/luatex-preprocessor.lua
+++ b/tex/generic/context/luatex/luatex-preprocessor.lua
@@ -6,11 +6,9 @@ if not modules then modules = { } end modules ['luatex-preprocessor'] = {
     license   = "see context related readme files"
 }
 
---[[ldx
-<p>This is a stripped down version of the preprocessor. In
-<l n='context'/> we have a bit more, use a different logger, and
-use a few optimizations. A few examples are shown at the end.</p>
---ldx]]
+-- This is a stripped-down version of the preprocessor. In ConTeXt we have a bit
+-- more, use a different logger, and apply a few optimizations. A few examples are
+-- shown at the end.
 
 local rep, sub, gmatch = string.rep, string.sub, string.gmatch
 local insert, remove = table.insert, table.remove
@@ -99,10 +97,6 @@ local parser = lpeg.Cs { "converter",
     converter   = (lpeg.V("definition") + anything)^1,
 }
 
---[[ldx
-<p>We provide a few commands.</p>
---ldx]]
-
 -- local texkpse
 
 local function find_file(...)
diff --git a/tex/latex/context/ppchtex/m-ch-de.sty b/tex/latex/context/ppchtex/m-ch-de.sty
deleted file mode 100644
index d35f8cf2d..000000000
--- a/tex/latex/context/ppchtex/m-ch-de.sty
+++ /dev/null
@@ -1,19 +0,0 @@
-\ProvidesPackage{m-ch-de}[2004/07/30 package wrapper for m-ch-de.tex]
-
-\newif\ifPPCH@PSTRICKS
-
-\DeclareOption{pstricks}{\PPCH@PSTRICKStrue}
-\DeclareOption{pictex}{\PPCH@PSTRICKSfalse}
-
-\ExecuteOptions{pictex}
-\ProcessOptions\relax
-
-\ifPPCH@PSTRICKS
-  \RequirePackage{pstricks,pst-plot}
-\else
-  \RequirePackage{m-pictex}
-\fi
-
-\input{m-ch-de.tex}
-
-\endinput
\ No newline at end of file
diff --git a/tex/latex/context/ppchtex/m-ch-en.sty b/tex/latex/context/ppchtex/m-ch-en.sty
deleted file mode 100644
index e93a49867..000000000
--- a/tex/latex/context/ppchtex/m-ch-en.sty
+++ /dev/null
@@ -1,19 +0,0 @@
-\ProvidesPackage{m-ch-en}[2004/07/30 package wrapper for m-ch-en.tex]
-
-\newif\ifPPCH@PSTRICKS
-
-\DeclareOption{pstricks}{\PPCH@PSTRICKStrue}
-\DeclareOption{pictex}{\PPCH@PSTRICKSfalse}
-
-\ExecuteOptions{pictex}
-\ProcessOptions\relax
-
-\ifPPCH@PSTRICKS
-  \RequirePackage{pstricks,pst-plot}
-\else
-  \RequirePackage{m-pictex}
-\fi
-
-\input{m-ch-en.tex}
-
-\endinput
\ No newline at end of file
diff --git a/tex/latex/context/ppchtex/m-ch-nl.sty b/tex/latex/context/ppchtex/m-ch-nl.sty
deleted file mode 100644
index 6e2b8d43d..000000000
--- a/tex/latex/context/ppchtex/m-ch-nl.sty
+++ /dev/null
@@ -1,19 +0,0 @@
-\ProvidesPackage{m-ch-nl}[2004/07/30 package wrapper for m-ch-nl.tex]
-
-\newif\ifPPCH@PSTRICKS
-
-\DeclareOption{pstricks}{\PPCH@PSTRICKStrue}
-\DeclareOption{pictex}{\PPCH@PSTRICKSfalse}
-
-\ExecuteOptions{pictex}
-\ProcessOptions\relax
-
-\ifPPCH@PSTRICKS
-  \RequirePackage{pstricks,pst-plot}
-\else
-  \RequirePackage{m-pictex}
-\fi
-
-\input{m-ch-nl.tex}
-
-\endinput
\ No newline at end of file
diff --git a/tex/latex/context/ppchtex/m-pictex.sty b/tex/latex/context/ppchtex/m-pictex.sty
deleted file mode 100644
index a967b362d..000000000
--- a/tex/latex/context/ppchtex/m-pictex.sty
+++ /dev/null
@@ -1,5 +0,0 @@
-\ProvidesPackage{m-pictex}[2004/07/30 package wrapper for m-pictex.tex]
-
-\input{m-pictex.mkii}
-
-\endinput
-- 
cgit v1.2.3