From 624cbb5da392e9403984dd1cf368c0d408b1c2a8 Mon Sep 17 00:00:00 2001
From: Context Git Mirror Bot
Date: Sat, 3 May 2014 13:55:34 +0200
Subject: 2014-01-03 00:42:00
---
.../lexers/data/scite-context-data-context.lua | 2 +-
.../lexers/data/scite-context-data-metafun.lua | 2 +-
.../data/scite/lexers/scite-context-lexer-mps.lua | 2 +-
.../scite/scite-context-data-context.properties | 89 +-
.../scite/scite-context-data-metafun.properties | 86 +-
context/data/scite/scite-context-readme.pdf | Bin 210958 -> 210827 bytes
context/data/scite/scite-context-readme.tex | 6 -
context/data/scite/scite-context.properties | 49 +-
context/data/scite/scite-ctx.properties | 19 +-
doc/context/manuals/allkind/mkiv-publications.bib | 34 -
doc/context/manuals/allkind/mkiv-publications.tex | 1325 --
doc/context/manuals/allkind/publications-en.xml | 369 -
doc/context/scripts/mkiv/mtx-bibtex.html | 53 -
doc/context/scripts/mkiv/mtx-bibtex.man | 30 -
doc/context/scripts/mkiv/mtx-bibtex.xml | 26 -
metapost/context/base/mp-base.mpii | 19 +-
metapost/context/base/mp-base.mpiv | 71 +-
metapost/context/base/mp-grap.mpiv | 268 +-
metapost/context/base/mp-tool.mpii | 2683 ++-
metapost/context/base/mp-tool.mpiv | 23 +-
scripts/context/lua/mtx-bibtex.lua | 106 -
scripts/context/lua/mtx-context.lua | 37 +-
scripts/context/lua/mtx-fonts.lua | 14 +-
scripts/context/lua/mtx-plain.lua | 2 +-
scripts/context/lua/mtxrun.lua | 355 +-
scripts/context/stubs/install/first-setup.bat | 87 -
scripts/context/stubs/install/first-setup.sh | 120 -
scripts/context/stubs/mswin/context.exe | Bin 4608 -> 4608 bytes
scripts/context/stubs/mswin/ctxtools.exe | Bin 4608 -> 4608 bytes
scripts/context/stubs/mswin/first-setup.bat | 87 +
scripts/context/stubs/mswin/luatools.exe | Bin 4608 -> 4608 bytes
scripts/context/stubs/mswin/metatex.exe | Bin 4608 -> 4608 bytes
scripts/context/stubs/mswin/mptopdf.exe | Bin 4608 -> 4608 bytes
scripts/context/stubs/mswin/mtxrun.dll | Bin 7680 -> 7680 bytes
scripts/context/stubs/mswin/mtxrun.exe | Bin 4608 -> 4608 bytes
scripts/context/stubs/mswin/mtxrun.lua | 355 +-
scripts/context/stubs/mswin/mtxrunjit.exe | Bin 4608 -> 0 bytes
scripts/context/stubs/mswin/mtxworks.exe | Bin 4608 -> 4608 bytes
scripts/context/stubs/mswin/pstopdf.exe | Bin 4608 -> 4608 bytes
scripts/context/stubs/mswin/setuptex.bat | 70 +
scripts/context/stubs/mswin/texexec.exe | Bin 4608 -> 4608 bytes
scripts/context/stubs/mswin/texmfstart.exe | Bin 4608 -> 4608 bytes
scripts/context/stubs/setup/setuptex | 167 -
scripts/context/stubs/setup/setuptex.bat | 70 -
scripts/context/stubs/setup/setuptex.csh | 164 -
scripts/context/stubs/source/mtxrun_dll.c | 142 +-
scripts/context/stubs/source/readme.txt | 42 +-
scripts/context/stubs/unix/contextjit | 5 -
scripts/context/stubs/unix/ctxtools | 2 +
scripts/context/stubs/unix/mptopdf | 2 +
scripts/context/stubs/unix/mtxrun | 355 +-
scripts/context/stubs/unix/mtxrunjit | 5 -
scripts/context/stubs/unix/pstopdf | 2 +
scripts/context/stubs/win64/context.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/contextjit.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/ctxtools.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/luatools.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/metatex.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/mptopdf.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/mtxrun.dll | Bin 18432 -> 0 bytes
scripts/context/stubs/win64/mtxrun.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/mtxrun.lua | 18175 -------------------
scripts/context/stubs/win64/mtxrunjit.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/mtxworks.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/pstopdf.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/texexec.exe | Bin 15360 -> 0 bytes
scripts/context/stubs/win64/texmfstart.exe | Bin 15360 -> 0 bytes
tex/context/base/anch-bar.mkiv | 8 +-
tex/context/base/anch-pos.lua | 72 +-
tex/context/base/attr-ini.mkiv | 2 +-
tex/context/base/back-exp.lua | 162 +-
tex/context/base/bibl-tra.lua | 2 +-
tex/context/base/buff-ver.mkiv | 4 +-
tex/context/base/char-def.lua | 2 -
tex/context/base/char-utf.lua | 87 +-
tex/context/base/cont-new.mkiv | 12 +-
tex/context/base/context-version.pdf | Bin 4096 -> 4115 bytes
tex/context/base/context-version.png | Bin 38170 -> 40350 bytes
tex/context/base/context.mkiv | 22 +-
tex/context/base/core-env.lua | 14 -
tex/context/base/core-sys.mkiv | 2 -
tex/context/base/core-two.lua | 2 +-
tex/context/base/core-uti.lua | 6 +-
tex/context/base/data-aux.lua | 3 +-
tex/context/base/data-use.lua | 4 +-
tex/context/base/enco-ini.mkiv | 15 +-
tex/context/base/export-example.css | 15 -
tex/context/base/file-job.lua | 44 +-
tex/context/base/file-job.mkvi | 2 +-
tex/context/base/file-res.lua | 16 +-
tex/context/base/font-chk.lua | 28 +-
tex/context/base/font-col.lua | 21 +-
tex/context/base/font-ctx.lua | 42 +-
tex/context/base/font-gds.lua | 33 +-
tex/context/base/font-map.lua | 30 +-
tex/context/base/font-mis.lua | 2 +-
tex/context/base/font-nod.lua | 121 +-
tex/context/base/font-odv.lua | 1160 +-
tex/context/base/font-otf.lua | 207 +-
tex/context/base/font-otn.lua | 648 +-
tex/context/base/font-ott.lua | 3 -
tex/context/base/font-otx.lua | 130 +-
tex/context/base/font-pat.lua | 2 +-
tex/context/base/font-pre.mkiv | 18 -
tex/context/base/font-sol.lua | 174 +-
tex/context/base/font-syn.lua | 68 +-
tex/context/base/l-dir.lua | 61 +-
tex/context/base/l-lpeg.lua | 30 +-
tex/context/base/l-lua.lua | 6 -
tex/context/base/l-string.lua | 5 -
tex/context/base/l-table.lua | 4 +-
tex/context/base/l-url.lua | 10 +-
tex/context/base/lang-lab.mkiv | 6 +-
tex/context/base/lang-rep.lua | 157 +-
tex/context/base/lang-rep.mkiv | 75 -
tex/context/base/lang-wrd.lua | 37 +-
tex/context/base/lpdf-mis.lua | 74 +-
tex/context/base/lpdf-nod.lua | 81 +-
tex/context/base/lpdf-tag.lua | 82 +-
tex/context/base/luat-cnf.lua | 2 +-
tex/context/base/luat-sto.lua | 1 -
tex/context/base/lxml-tab.lua | 41 +-
tex/context/base/m-oldbibtex.mkiv | 16 -
tex/context/base/math-dir.lua | 42 +-
tex/context/base/math-fbk.lua | 27 +-
tex/context/base/math-fen.mkiv | 2 +-
tex/context/base/math-ini.lua | 11 +-
tex/context/base/math-noa.lua | 411 +-
tex/context/base/math-tag.lua | 129 +-
tex/context/base/mult-de.mkii | 2 -
tex/context/base/mult-def.lua | 6 -
tex/context/base/mult-def.mkiv | 4 -
tex/context/base/mult-en.mkii | 2 -
tex/context/base/mult-fr.mkii | 2 -
tex/context/base/mult-fun.lua | 2 +-
tex/context/base/mult-it.mkii | 2 -
tex/context/base/mult-low.lua | 6 +-
tex/context/base/mult-nl.mkii | 2 -
tex/context/base/mult-pe.mkii | 2 -
tex/context/base/mult-ro.mkii | 2 -
tex/context/base/node-acc.lua | 120 +-
tex/context/base/node-aux.lua | 374 +-
tex/context/base/node-bck.lua | 111 +-
tex/context/base/node-fin.lua | 302 +-
tex/context/base/node-fnt.lua | 41 +-
tex/context/base/node-inj.lua | 200 +-
tex/context/base/node-ltp.lua | 1614 +-
tex/context/base/node-met.lua | 53 -
tex/context/base/node-mig.lua | 97 +-
tex/context/base/node-nut.lua | 650 -
tex/context/base/node-pro.lua | 113 +-
tex/context/base/node-ref.lua | 220 +-
tex/context/base/node-res.lua | 530 +-
tex/context/base/node-rul.lua | 125 +-
tex/context/base/node-tra.lua | 355 +-
tex/context/base/node-tst.lua | 69 +-
tex/context/base/node-typ.lua | 71 +-
tex/context/base/pack-rul.lua | 71 +-
tex/context/base/pack-rul.mkiv | 25 +-
tex/context/base/page-brk.mkiv | 245 +-
tex/context/base/page-lay.mkiv | 6 +-
tex/context/base/page-lin.lua | 112 +-
tex/context/base/page-mak.mkvi | 63 +-
tex/context/base/page-mix.lua | 229 +-
tex/context/base/page-mix.mkiv | 3 +-
tex/context/base/page-mul.mkiv | 8 +-
tex/context/base/page-str.lua | 6 +-
tex/context/base/page-str.mkiv | 2 +
tex/context/base/publ-aut.lua | 550 -
tex/context/base/publ-dat.lua | 529 -
tex/context/base/publ-imp-apa.mkiv | 547 -
tex/context/base/publ-imp-cite.mkiv | 74 -
tex/context/base/publ-imp-commands.mkiv | 15 -
tex/context/base/publ-imp-definitions.mkiv | 68 -
tex/context/base/publ-ini.lua | 1425 --
tex/context/base/publ-ini.mkiv | 963 -
tex/context/base/publ-old.mkiv | 22 -
tex/context/base/publ-oth.lua | 146 -
tex/context/base/publ-tra.lua | 296 -
tex/context/base/publ-tra.mkiv | 35 -
tex/context/base/publ-usr.lua | 91 -
tex/context/base/publ-usr.mkiv | 2 -
tex/context/base/publ-xml.mkiv | 114 -
tex/context/base/s-abr-01.tex | 2 -
tex/context/base/s-inf-03.mkiv | 5 +-
tex/context/base/s-languages-hyphenation.lua | 2 +-
tex/context/base/s-math-coverage.lua | 4 +-
tex/context/base/scrp-cjk.lua | 131 +-
tex/context/base/scrp-eth.lua | 22 +-
tex/context/base/scrp-ini.lua | 85 +-
tex/context/base/sort-ini.lua | 54 +-
tex/context/base/sort-lan.lua | 2 +-
tex/context/base/spac-ali.lua | 46 +-
tex/context/base/spac-ali.mkiv | 25 +-
tex/context/base/spac-chr.lua | 95 +-
tex/context/base/spac-ver.lua | 430 +-
tex/context/base/status-files.pdf | Bin 24795 -> 24556 bytes
tex/context/base/status-lua.pdf | Bin 226993 -> 228200 bytes
tex/context/base/status-mkiv.lua | 98 +-
tex/context/base/strc-lst.mkvi | 1 -
tex/context/base/strc-mar.lua | 29 +-
tex/context/base/strc-mat.mkiv | 8 +-
tex/context/base/strc-pag.lua | 14 +-
tex/context/base/strc-pag.mkiv | 5 -
tex/context/base/supp-box.lua | 147 +-
tex/context/base/supp-mat.mkiv | 34 +-
tex/context/base/syst-ini.mkiv | 10 +-
tex/context/base/tabl-ntb.mkiv | 4 +-
tex/context/base/tabl-tbl.mkiv | 5 +-
tex/context/base/tabl-xtb.lua | 131 +-
tex/context/base/task-ini.lua | 8 +-
tex/context/base/trac-inf.lua | 5 +-
tex/context/base/trac-jus.lua | 59 +-
tex/context/base/trac-par.lua | 39 +-
tex/context/base/trac-pro.lua | 6 +-
tex/context/base/trac-tim.lua | 2 +-
tex/context/base/trac-vis.lua | 294 +-
tex/context/base/type-imp-buy.mkiv | 136 +-
tex/context/base/type-ini.lua | 4 +-
tex/context/base/typo-bld.lua | 28 +-
tex/context/base/typo-brk.lua | 122 +-
tex/context/base/typo-cap.lua | 103 +-
tex/context/base/typo-cln.lua | 17 +-
tex/context/base/typo-dha.lua | 75 +-
tex/context/base/typo-dig.lua | 58 +-
tex/context/base/typo-dir.lua | 32 +-
tex/context/base/typo-drp.lua | 375 +-
tex/context/base/typo-drp.mkiv | 56 +-
tex/context/base/typo-dua.lua | 78 +-
tex/context/base/typo-dub.lua | 79 +-
tex/context/base/typo-fln.lua | 91 +-
tex/context/base/typo-itc.lua | 63 +-
tex/context/base/typo-krn.lua | 192 +-
tex/context/base/typo-mar.lua | 148 +-
tex/context/base/typo-pag.lua | 76 +-
tex/context/base/typo-par.mkiv | 29 -
tex/context/base/typo-rep.lua | 50 +-
tex/context/base/typo-spa.lua | 57 +-
tex/context/base/typo-tal.lua | 80 +-
tex/context/base/util-deb.lua | 46 +-
tex/context/base/util-str.lua | 173 +-
tex/context/base/util-tab.lua | 3 +-
tex/context/base/x-mathml.lua | 32 +-
tex/context/base/x-mathml.mkiv | 15 +-
tex/context/base/x-set-11.mkiv | 28 +-
tex/context/interface/keys-cs.xml | 2 -
tex/context/interface/keys-de.xml | 2 -
tex/context/interface/keys-en.xml | 2 -
tex/context/interface/keys-fr.xml | 2 -
tex/context/interface/keys-it.xml | 2 -
tex/context/interface/keys-nl.xml | 2 -
tex/context/interface/keys-pe.xml | 2 -
tex/context/interface/keys-ro.xml | 2 -
tex/generic/context/luatex/luatex-fonts-inj.lua | 526 -
tex/generic/context/luatex/luatex-fonts-merged.lua | 1069 +-
tex/generic/context/luatex/luatex-fonts-otn.lua | 2848 ---
tex/generic/context/luatex/luatex-fonts.lua | 4 +-
257 files changed, 7842 insertions(+), 41406 deletions(-)
delete mode 100644 doc/context/manuals/allkind/mkiv-publications.bib
delete mode 100644 doc/context/manuals/allkind/mkiv-publications.tex
delete mode 100644 doc/context/manuals/allkind/publications-en.xml
delete mode 100644 doc/context/scripts/mkiv/mtx-bibtex.html
delete mode 100644 doc/context/scripts/mkiv/mtx-bibtex.man
delete mode 100644 doc/context/scripts/mkiv/mtx-bibtex.xml
delete mode 100644 scripts/context/lua/mtx-bibtex.lua
delete mode 100644 scripts/context/stubs/install/first-setup.bat
delete mode 100644 scripts/context/stubs/install/first-setup.sh
create mode 100644 scripts/context/stubs/mswin/first-setup.bat
delete mode 100644 scripts/context/stubs/mswin/mtxrunjit.exe
create mode 100644 scripts/context/stubs/mswin/setuptex.bat
delete mode 100644 scripts/context/stubs/setup/setuptex
delete mode 100644 scripts/context/stubs/setup/setuptex.bat
delete mode 100644 scripts/context/stubs/setup/setuptex.csh
delete mode 100644 scripts/context/stubs/unix/contextjit
create mode 100644 scripts/context/stubs/unix/ctxtools
create mode 100644 scripts/context/stubs/unix/mptopdf
delete mode 100644 scripts/context/stubs/unix/mtxrunjit
create mode 100644 scripts/context/stubs/unix/pstopdf
delete mode 100644 scripts/context/stubs/win64/context.exe
delete mode 100644 scripts/context/stubs/win64/contextjit.exe
delete mode 100644 scripts/context/stubs/win64/ctxtools.exe
delete mode 100644 scripts/context/stubs/win64/luatools.exe
delete mode 100644 scripts/context/stubs/win64/metatex.exe
delete mode 100644 scripts/context/stubs/win64/mptopdf.exe
delete mode 100644 scripts/context/stubs/win64/mtxrun.dll
delete mode 100644 scripts/context/stubs/win64/mtxrun.exe
delete mode 100644 scripts/context/stubs/win64/mtxrun.lua
delete mode 100644 scripts/context/stubs/win64/mtxrunjit.exe
delete mode 100644 scripts/context/stubs/win64/mtxworks.exe
delete mode 100644 scripts/context/stubs/win64/pstopdf.exe
delete mode 100644 scripts/context/stubs/win64/texexec.exe
delete mode 100644 scripts/context/stubs/win64/texmfstart.exe
delete mode 100644 tex/context/base/lang-rep.mkiv
delete mode 100644 tex/context/base/m-oldbibtex.mkiv
delete mode 100644 tex/context/base/node-nut.lua
delete mode 100644 tex/context/base/publ-aut.lua
delete mode 100644 tex/context/base/publ-dat.lua
delete mode 100644 tex/context/base/publ-imp-apa.mkiv
delete mode 100644 tex/context/base/publ-imp-cite.mkiv
delete mode 100644 tex/context/base/publ-imp-commands.mkiv
delete mode 100644 tex/context/base/publ-imp-definitions.mkiv
delete mode 100644 tex/context/base/publ-ini.lua
delete mode 100644 tex/context/base/publ-ini.mkiv
delete mode 100644 tex/context/base/publ-old.mkiv
delete mode 100644 tex/context/base/publ-oth.lua
delete mode 100644 tex/context/base/publ-tra.lua
delete mode 100644 tex/context/base/publ-tra.mkiv
delete mode 100644 tex/context/base/publ-usr.lua
delete mode 100644 tex/context/base/publ-usr.mkiv
delete mode 100644 tex/context/base/publ-xml.mkiv
delete mode 100644 tex/context/base/typo-par.mkiv
delete mode 100644 tex/generic/context/luatex/luatex-fonts-inj.lua
delete mode 100644 tex/generic/context/luatex/luatex-fonts-otn.lua
diff --git a/context/data/scite/lexers/data/scite-context-data-context.lua b/context/data/scite/lexers/data/scite-context-data-context.lua
index 0d577c8da..f167c82c1 100644
--- a/context/data/scite/lexers/data/scite-context-data-context.lua
+++ b/context/data/scite/lexers/data/scite-context-data-context.lua
@@ -1,4 +1,4 @@
return {
["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "etexversion", "pdftexversion", "xetexversion", "xetexrevision", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", 
"mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifmodeelse", "doifnotmode", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "setupdocument", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "setupmodule", "currentmoduleparameter", "moduleparameter", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es", "lefttorightmark", "righttoleftmark", "breakablethinspace", "nobreakspace", "narrownobreakspace", "zerowidthnobreakspace", "ideographicspace", "ideographichalffillspace", "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", "figurespace", "punctuationspace", "hairspace", "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj" },
- ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "doifelsecommandhandler", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", 
"doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "setconstant", "setconstantvalue", "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", 
"doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", 
"morehyphens", "nohyphens", "dohyphens", "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath" },
+ ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "doifelsecommandhandler", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", 
"doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", 
"dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", "morehyphens", "nohyphens", "dohyphens" },
}
\ No newline at end of file
diff --git a/context/data/scite/lexers/data/scite-context-data-metafun.lua b/context/data/scite/lexers/data/scite-context-data-metafun.lua
index 50b9ecec4..1ca02de97 100644
--- a/context/data/scite/lexers/data/scite-context-data-metafun.lua
+++ b/context/data/scite/lexers/data/scite-context-data-metafun.lua
@@ -1,4 +1,4 @@
return {
- ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "property", "properties", "withproperties", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", "undashed", "decorated", "redecorated", "undecorated", "passvariable", "passarrayvariable", "tostring", "format", "formatted", "startpassingvariable", "stoppassingvariable" },
+ ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "property", "properties", "withproperties", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", "undashed", "decorated", "redecorated", "undecorated", "passvariable", "passarrayvariable", "tostring", "format", "formatted", "startpassingvariable", "stoppassingvariable" },
["internals"]={ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", "shadefactor", "textextoffset", "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent", "metapostversion", "maxdimensions" },
}
\ No newline at end of file
diff --git a/context/data/scite/lexers/scite-context-lexer-mps.lua b/context/data/scite/lexers/scite-context-lexer-mps.lua
index f0d88eb3b..96c5e9c3c 100644
--- a/context/data/scite/lexers/scite-context-lexer-mps.lua
+++ b/context/data/scite/lexers/scite-context-lexer-mps.lua
@@ -98,7 +98,7 @@ local number = token('number', number)
local grouping = token('grouping', S("()[]{}")) -- can be an option
local special = token('special', S("#()[]{}<>=:\"")) -- or else := <> etc split
local texlike = token('warning', P("\\") * cstokentex^1)
-local extra = token('extra', P("+-+") + P("++") + S("`~%^&_-+*/\'|\\"))
+local extra = token('extra', S("`~%^&_-+*/\'|\\"))
local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
local texlike = token('embedded', P("\\") * (P("MP") + P("mp")) * mptoken^1)
diff --git a/context/data/scite/scite-context-data-context.properties b/context/data/scite/scite-context-data-context.properties
index fbd958f8a..140b0d96b 100644
--- a/context/data/scite/scite-context-data-context.properties
+++ b/context/data/scite/scite-context-data-context.properties
@@ -143,49 +143,48 @@ gobblethreearguments gobblefourarguments gobblefivearguments gobblesixarguments
gobbleeightarguments gobbleninearguments gobbletenarguments gobbleoneoptional gobbletwooptionals \
gobblethreeoptionals gobblefouroptionals gobblefiveoptionals dorecurse doloop \
exitloop dostepwiserecurse recurselevel recursedepth dofastloopcs \
-dowith newconstant setnewconstant setconstant setconstantvalue \
-newconditional settrue setfalse settruevalue setfalsevalue \
-newmacro setnewmacro newfraction newsignal dosingleempty \
-dodoubleempty dotripleempty doquadrupleempty doquintupleempty dosixtupleempty \
-doseventupleempty dosingleargument dodoubleargument dotripleargument doquadrupleargument \
-doquintupleargument dosixtupleargument doseventupleargument dosinglegroupempty dodoublegroupempty \
-dotriplegroupempty doquadruplegroupempty doquintuplegroupempty permitspacesbetweengroups dontpermitspacesbetweengroups \
-nopdfcompression maximumpdfcompression normalpdfcompression modulonumber dividenumber \
-getfirstcharacter doiffirstcharelse startnointerference stopnointerference twodigits \
-threedigits leftorright strut setstrut strutbox \
-strutht strutdp strutwd struthtdp begstrut \
-endstrut lineheight ordordspacing ordopspacing ordbinspacing \
-ordrelspacing ordopenspacing ordclosespacing ordpunctspacing ordinnerspacing \
-opordspacing opopspacing opbinspacing oprelspacing opopenspacing \
-opclosespacing oppunctspacing opinnerspacing binordspacing binopspacing \
-binbinspacing binrelspacing binopenspacing binclosespacing binpunctspacing \
-bininnerspacing relordspacing relopspacing relbinspacing relrelspacing \
-relopenspacing relclosespacing relpunctspacing relinnerspacing openordspacing \
-openopspacing openbinspacing openrelspacing openopenspacing openclosespacing \
-openpunctspacing openinnerspacing closeordspacing closeopspacing closebinspacing \
-closerelspacing closeopenspacing closeclosespacing closepunctspacing closeinnerspacing \
-punctordspacing punctopspacing punctbinspacing punctrelspacing punctopenspacing \
-punctclosespacing punctpunctspacing punctinnerspacing innerordspacing inneropspacing \
-innerbinspacing innerrelspacing inneropenspacing innerclosespacing innerpunctspacing \
-innerinnerspacing normalreqno startimath stopimath normalstartimath \
-normalstopimath startdmath stopdmath normalstartdmath normalstopdmath \
-uncramped cramped triggermathstyle mathstylefont mathsmallstylefont \
-mathstyleface mathsmallstyleface mathstylecommand mathpalette mathstylehbox \
-mathstylevbox mathstylevcenter mathstylevcenteredhbox mathstylevcenteredvbox mathtext \
-setmathsmalltextbox setmathtextbox triggerdisplaystyle triggertextstyle triggerscriptstyle \
-triggerscriptscriptstyle triggeruncrampedstyle triggercrampedstyle triggersmallstyle triggeruncrampedsmallstyle \
-triggercrampedsmallstyle triggerbigstyle triggeruncrampedbigstyle triggercrampedbigstyle luaexpr \
-expdoifelse expdoif expdoifnot expdoifcommonelse expdoifinsetelse \
-ctxdirectlua ctxlatelua ctxsprint ctxwrite ctxcommand \
-ctxdirectcommand ctxlatecommand ctxreport ctxlua luacode \
-lateluacode directluacode registerctxluafile ctxloadluafile luaversion \
-luamajorversion luaminorversion ctxluacode luaconditional luaexpanded \
-startluaparameterset stopluaparameterset luaparameterset definenamedlua obeylualines \
-obeyluatokens startluacode stopluacode startlua stoplua \
-carryoverpar assumelongusagecs Umathbotaccent righttolefthbox lefttorighthbox \
-righttoleftvbox lefttorightvbox righttoleftvtop lefttorightvtop rtlhbox \
-ltrhbox rtlvbox ltrvbox rtlvtop ltrvtop \
-autodirhbox autodirvbox autodirvtop lefttoright righttoleft \
-synchronizelayoutdirection synchronizedisplaydirection synchronizeinlinedirection lesshyphens morehyphens \
-nohyphens dohyphens Ucheckedstartdisplaymath Ucheckedstopdisplaymath
+dowith newconstant setnewconstant newconditional settrue \
+setfalse setconstant newmacro setnewmacro newfraction \
+newsignal dosingleempty dodoubleempty dotripleempty doquadrupleempty \
+doquintupleempty dosixtupleempty doseventupleempty dosingleargument dodoubleargument \
+dotripleargument doquadrupleargument doquintupleargument dosixtupleargument doseventupleargument \
+dosinglegroupempty dodoublegroupempty dotriplegroupempty doquadruplegroupempty doquintuplegroupempty \
+permitspacesbetweengroups dontpermitspacesbetweengroups nopdfcompression maximumpdfcompression normalpdfcompression \
+modulonumber dividenumber getfirstcharacter doiffirstcharelse startnointerference \
+stopnointerference twodigits threedigits leftorright strut \
+setstrut strutbox strutht strutdp strutwd \
+struthtdp begstrut endstrut lineheight ordordspacing \
+ordopspacing ordbinspacing ordrelspacing ordopenspacing ordclosespacing \
+ordpunctspacing ordinnerspacing opordspacing opopspacing opbinspacing \
+oprelspacing opopenspacing opclosespacing oppunctspacing opinnerspacing \
+binordspacing binopspacing binbinspacing binrelspacing binopenspacing \
+binclosespacing binpunctspacing bininnerspacing relordspacing relopspacing \
+relbinspacing relrelspacing relopenspacing relclosespacing relpunctspacing \
+relinnerspacing openordspacing openopspacing openbinspacing openrelspacing \
+openopenspacing openclosespacing openpunctspacing openinnerspacing closeordspacing \
+closeopspacing closebinspacing closerelspacing closeopenspacing closeclosespacing \
+closepunctspacing closeinnerspacing punctordspacing punctopspacing punctbinspacing \
+punctrelspacing punctopenspacing punctclosespacing punctpunctspacing punctinnerspacing \
+innerordspacing inneropspacing innerbinspacing innerrelspacing inneropenspacing \
+innerclosespacing innerpunctspacing innerinnerspacing normalreqno startimath \
+stopimath normalstartimath normalstopimath startdmath stopdmath \
+normalstartdmath normalstopdmath uncramped cramped triggermathstyle \
+mathstylefont mathsmallstylefont mathstyleface mathsmallstyleface mathstylecommand \
+mathpalette mathstylehbox mathstylevbox mathstylevcenter mathstylevcenteredhbox \
+mathstylevcenteredvbox mathtext setmathsmalltextbox setmathtextbox triggerdisplaystyle \
+triggertextstyle triggerscriptstyle triggerscriptscriptstyle triggeruncrampedstyle triggercrampedstyle \
+triggersmallstyle triggeruncrampedsmallstyle triggercrampedsmallstyle triggerbigstyle triggeruncrampedbigstyle \
+triggercrampedbigstyle luaexpr expdoifelse expdoif expdoifnot \
+expdoifcommonelse expdoifinsetelse ctxdirectlua ctxlatelua ctxsprint \
+ctxwrite ctxcommand ctxdirectcommand ctxlatecommand ctxreport \
+ctxlua luacode lateluacode directluacode registerctxluafile \
+ctxloadluafile luaversion luamajorversion luaminorversion ctxluacode \
+luaconditional luaexpanded startluaparameterset stopluaparameterset luaparameterset \
+definenamedlua obeylualines obeyluatokens startluacode stopluacode \
+startlua stoplua carryoverpar assumelongusagecs Umathbotaccent \
+righttolefthbox lefttorighthbox righttoleftvbox lefttorightvbox righttoleftvtop \
+lefttorightvtop rtlhbox ltrhbox rtlvbox ltrvbox \
+rtlvtop ltrvtop autodirhbox autodirvbox autodirvtop \
+lefttoright righttoleft synchronizelayoutdirection synchronizedisplaydirection synchronizeinlinedirection \
+lesshyphens morehyphens nohyphens dohyphens
diff --git a/context/data/scite/scite-context-data-metafun.properties b/context/data/scite/scite-context-data-metafun.properties
index 9381b4f8d..c0b080982 100644
--- a/context/data/scite/scite-context-data-metafun.properties
+++ b/context/data/scite/scite-context-data-metafun.properties
@@ -3,49 +3,49 @@ sqr log ln exp \
inv pow pi radian tand \
cotd sin cos tan cot \
atan asin acos invsin invcos \
-invtan acosh asinh sinh cosh \
-paired tripled unitcircle fulldiamond unitdiamond \
-fullsquare llcircle lrcircle urcircle ulcircle \
-tcircle bcircle lcircle rcircle lltriangle \
-lrtriangle urtriangle ultriangle smoothed cornered \
-superellipsed randomized squeezed enlonged shortened \
-punked curved unspiked simplified blownup \
-stretched enlarged leftenlarged topenlarged rightenlarged \
-bottomenlarged crossed laddered randomshifted interpolated \
-paralleled cutends peepholed llenlarged lrenlarged \
-urenlarged ulenlarged llmoved lrmoved urmoved \
-ulmoved rightarrow leftarrow centerarrow boundingbox \
-innerboundingbox outerboundingbox pushboundingbox popboundingbox bottomboundary \
-leftboundary topboundary rightboundary xsized ysized \
-xysized sized xyscaled intersection_point intersection_found \
-penpoint bbwidth bbheight withshade withlinearshading \
-withcircularshading withfromshadecolor withtoshadecolor withshading shadedinto \
-withcircularshade withlinearshade cmyk spotcolor multitonecolor \
-namedcolor drawfill undrawfill inverted uncolored \
-softened grayed greyed onlayer along \
-graphictext loadfigure externalfigure withmask figure \
-register bitmapimage colordecimals ddecimal dddecimal \
-ddddecimal textext thetextext rawtextext textextoffset \
-verbatim thelabel label autoalign transparent \
-withtransparency property properties withproperties asgroup \
-infont set_linear_vector linear_shade define_linear_shade define_circular_linear_shade \
-define_sampled_linear_shade set_circular_vector circular_shade define_circular_shade define_circular_linear_shade \
-define_sampled_circular_shade space CRLF grayscale greyscale \
-withgray withgrey colorpart readfile clearxy \
-unitvector center epsed anchored originpath \
-infinite break xstretched ystretched snapped \
-pathconnectors function constructedpath constructedpairs punkedfunction \
-curvedfunction tightfunction punkedpath curvedpath tightpath \
-punkedpairs curvedpairs tightpairs evenly oddly \
-condition pushcurrentpicture popcurrentpicture arrowpath tensecircle \
-roundedsquare colortype whitecolor blackcolor normalfill \
-normaldraw visualizepaths naturalizepaths drawboundary drawwholepath \
-visualizeddraw visualizedfill draworigin drawboundingbox drawpath \
-drawpoint drawpoints drawcontrolpoints drawcontrollines drawpointlabels \
-drawlineoptions drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions \
-drawboundoptions drawpathoptions resetdrawoptions undashed decorated \
-redecorated undecorated passvariable passarrayvariable tostring \
-format formatted startpassingvariable stoppassingvariable
+acosh asinh sinh cosh paired \
+tripled unitcircle fulldiamond unitdiamond fullsquare \
+llcircle lrcircle urcircle ulcircle tcircle \
+bcircle lcircle rcircle lltriangle lrtriangle \
+urtriangle ultriangle smoothed cornered superellipsed \
+randomized squeezed enlonged shortened punked \
+curved unspiked simplified blownup stretched \
+enlarged leftenlarged topenlarged rightenlarged bottomenlarged \
+crossed laddered randomshifted interpolated paralleled \
+cutends peepholed llenlarged lrenlarged urenlarged \
+ulenlarged llmoved lrmoved urmoved ulmoved \
+rightarrow leftarrow centerarrow boundingbox innerboundingbox \
+outerboundingbox pushboundingbox popboundingbox bottomboundary leftboundary \
+topboundary rightboundary xsized ysized xysized \
+sized xyscaled intersection_point intersection_found penpoint \
+bbwidth bbheight withshade withlinearshading withcircularshading \
+withfromshadecolor withtoshadecolor withshading shadedinto withcircularshade \
+withlinearshade cmyk spotcolor multitonecolor namedcolor \
+drawfill undrawfill inverted uncolored softened \
+grayed greyed onlayer along graphictext \
+loadfigure externalfigure withmask figure register \
+bitmapimage colordecimals ddecimal dddecimal ddddecimal \
+textext thetextext rawtextext textextoffset verbatim \
+thelabel label autoalign transparent withtransparency \
+property properties withproperties asgroup infont \
+set_linear_vector linear_shade define_linear_shade define_circular_linear_shade define_sampled_linear_shade \
+set_circular_vector circular_shade define_circular_shade define_circular_linear_shade define_sampled_circular_shade \
+space CRLF grayscale greyscale withgray \
+withgrey colorpart readfile clearxy unitvector \
+center epsed anchored originpath infinite \
+break xstretched ystretched snapped pathconnectors \
+function constructedpath constructedpairs punkedfunction curvedfunction \
+tightfunction punkedpath curvedpath tightpath punkedpairs \
+curvedpairs tightpairs evenly oddly condition \
+pushcurrentpicture popcurrentpicture arrowpath tensecircle roundedsquare \
+colortype whitecolor blackcolor normalfill normaldraw \
+visualizepaths naturalizepaths drawboundary drawwholepath visualizeddraw \
+visualizedfill draworigin drawboundingbox drawpath drawpoint \
+drawpoints drawcontrolpoints drawcontrollines drawpointlabels drawlineoptions \
+drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions drawboundoptions \
+drawpathoptions resetdrawoptions undashed decorated redecorated \
+undecorated passvariable passarrayvariable tostring format \
+formatted startpassingvariable stoppassingvariable
keywordclass.metafun.internals=\
nocolormodel greycolormodel graycolormodel rgbcolormodel \
diff --git a/context/data/scite/scite-context-readme.pdf b/context/data/scite/scite-context-readme.pdf
index 99f05a2a5..b6a751a36 100644
Binary files a/context/data/scite/scite-context-readme.pdf and b/context/data/scite/scite-context-readme.pdf differ
diff --git a/context/data/scite/scite-context-readme.tex b/context/data/scite/scite-context-readme.tex
index 42f5e0a98..ef1475fa2 100644
--- a/context/data/scite/scite-context-readme.tex
+++ b/context/data/scite/scite-context-readme.tex
@@ -821,18 +821,12 @@ from the on|-|line help pages.
\NC \type{Ctrl+Right} \NC next word; \type{Shift} extends selection \NC \NR
\NC \type{Ctrl+/} \NC previous word part; \type{Shift} extends selection \NC \NR
\NC \type{Ctrl+\ } \NC next word part; \type{Shift} extends selection \NC \NR
-\ML
-\NC \type{F12 / Ctrl+F7} \NC check (or process) \NC \NR
-\NC \type{Ctrl+F12 / Ctrl+F7} \NC process (run) \NC \NR
-\NC \type{Alt+F12 / Ctrl+F7} \NC process (run) using the luajit vm (if applicable) \NC \NR
\LL
\stoptabulate
\stopbuffer
\getbuffer[keybindings]
-\page
-
\subject{Affiliation}
\starttabulate[|l|l|]
diff --git a/context/data/scite/scite-context.properties b/context/data/scite/scite-context.properties
index bc1af717c..caf230de7 100644
--- a/context/data/scite/scite-context.properties
+++ b/context/data/scite/scite-context.properties
@@ -115,11 +115,9 @@ name.metafun.console=$(name.context.console)
name.example.console=$(name.context.console)
name.context.mtxrun=mtxrun --autogenerate
-name.context.mtxrunjit=mtxrunjit --autogenerate
name.context.check=$(name.context.mtxrun) --script check
name.context.run=$(name.context.mtxrun) --script context $(name.flag.pdfopen)
-name.context.runjit=$(name.context.mtxrunjit) --script context $(name.flag.pdfopen)
name.context.texshow=$(name.context.mtxrun) texshow
name.context.purge=$(name.context.mtxrun) --context --purge --all
name.context.showcase=$(name.context.mtxrun) --launch showcase.pdf
@@ -164,20 +162,20 @@ import scite-ctx
# hard coded compile / build / go
-command.build.$(file.patterns.context)=$(name.context.check) $(FileNameExt)
-command.build.$(file.patterns.metafun)=
-command.build.$(file.patterns.example)=$(name.example.xmlcheck) $(FileNameExt)
-command.build.*.fo=$(name.example.xmlcheck) $(FileNameExt)
+command.compile.$(file.patterns.context)=$(name.context.check) $(FileNameExt)
+command.compile.$(file.patterns.metafun)=
+command.compile.$(file.patterns.example)=$(name.example.xmlcheck) $(FileNameExt)
+command.compile.*.fo=$(name.example.xmlcheck) $(FileNameExt)
-command.compile.$(file.patterns.context)=$(name.context.run) $(FileNameExt)
-command.compile.$(file.patterns.metafun)=$(name.context.run) $(name.flag.pdfopen) $(FileNameExt)
-command.compile.$(file.patterns.example)=$(name.context.run) --forcexml $(FileNameExt)
-command.compile.*.fo=$(name.context.run) $(name.flag.pdfopen) --forcexml --use=foxet $(FileNameExt)
+command.build.$(file.patterns.context)=$(name.context.run) $(FileNameExt)
+command.build.$(file.patterns.metafun)=$(name.context.mtxrun) --script context $(name.flag.pdfopen) $(FileNameExt)
+command.build.$(file.patterns.example)=$(name.context.run) --forcexml $(FileNameExt)
+command.build.*.fo=$(name.context.run) $(name.flag.pdfopen) --forcexml --use=foxet $(FileNameExt)
-command.compile.subsystem.$(file.patterns.context)=1
-command.compile.subsystem.$(file.patterns.metafun)=1
-command.compile.subsystem.$(file.patterns.example)=1
-command.compile.subsystem.*.fo=1
+command.build.subsystem.$(file.patterns.context)=1
+command.build.subsystem.$(file.patterns.metafun)=1
+command.build.subsystem.$(file.patterns.example)=1
+command.build.subsystem.*.fo=1
if PLAT_WIN
command.go.$(file.patterns.context)=$(FileName).pdf
@@ -220,24 +218,6 @@ command.1.subsystem.$(file.patterns.context)=1
command.1.subsystem.$(file.patterns.metafun)=1
command.1.subsystem.$(file.patterns.example)=1
-command.name.29.*=Run with jit
-command.subsystem.29.*=1
-command.29.$(file.patterns.context)=$(name.context.runjit) $(FileNameExt)
-command.29.$(file.patterns.metafun)=$(name.context.runjit) $(FileNameExt) --metapost
-command.29.$(file.patterns.exmaple)=$(name.context.runjit) $(FileNameExt) --xml
-command.groupundo.29.*=yes
-command.save.before.29.*=2
-command.shortcut.29.*=Alt+F12
-
-command.name.30.*=Run with jit
-command.subsystem.30.*=1
-command.30.$(file.patterns.context)=$(name.context.runjit) $(FileNameExt)
-command.30.$(file.patterns.metafun)=$(name.context.runjit) $(FileNameExt) --metapost
-command.30.$(file.patterns.exmaple)=$(name.context.runjit) $(FileNameExt) --xml
-command.groupundo.30.*=yes
-command.save.before.30.*=2
-command.shortcut.30.*=Alt+F7
-
# 2 : pdf viewing
command.name.2.$(file.patterns.context)=View PDF File with GhostScript
@@ -357,9 +337,10 @@ highlight.indentation.guides=1
# Editor: keys
user.shortcuts=\
-F12|IDM_BUILD|\
-Ctrl+F12|IDM_COMPILE|\
+F12|IDM_COMPILE|\
+Ctrl+F12|IDM_BUILD|\
Shift+F12|IDM_GO|\
+Alt+F12|IDM_STOPEXECUTE|\
os.x.home.end.keys=0
diff --git a/context/data/scite/scite-ctx.properties b/context/data/scite/scite-ctx.properties
index acbb33c0b..d56ae653d 100644
--- a/context/data/scite/scite-ctx.properties
+++ b/context/data/scite/scite-ctx.properties
@@ -69,19 +69,14 @@ ctx.spellcheck.wordsize.uk=4
ctx.spellcheck.wordsize.nl=4
ctx.helpinfo=\
- Shift + F11 pop up menu with ctx options|\
+ Shift + F11 pop up menu with ctx options|\
|\
- Ctrl + B check spelling|\
- Ctrl + M wrap text (auto indent)|\
- Ctrl + R reset spelling results|\
- Ctrl + I insert template|\
- Ctrl + E open log file|\
- Ctrl + + toggle strip|\
- |\
- F7 / F12 check (or process)|\
- Ctrl + F7 / F12 process|\
- Alt + F7 / F12 process with jit|\
- shift + F7 / F12 launch
+ Ctrl + B check spelling|\
+ Ctrl + M wrap text (auto indent)|\
+ Ctrl + R reset spelling results|\
+ Ctrl + I insert template|\
+ Ctrl + E open log file|\
+ Ctrl + + toggle strip
command.name.21.$(file.patterns.context)=CTX Action List
command.subsystem.21.$(file.patterns.context)=3
diff --git a/doc/context/manuals/allkind/mkiv-publications.bib b/doc/context/manuals/allkind/mkiv-publications.bib
deleted file mode 100644
index e94f43202..000000000
--- a/doc/context/manuals/allkind/mkiv-publications.bib
+++ /dev/null
@@ -1,34 +0,0 @@
-@book{demo-001,
- author = "Hans Hagen",
- title = "\BIBTEX, the \CONTEXT\ way",
- year = "2013",
-}
-
-@book{demo-002,
- crossref = "demo-001"
- year = "2014",
-}
-
-@book{demo-003,
- author = "Hans Hagen and Ton Otten",
- title = "Typesetting education documents",
- year = "1996",
- comment = "a non-existing document",
-}
-
-@book{demo-004,
- author = "Luigi Scarso",
- title = "Designing high speed trains",
- year = "2021",
- comment = "still to be published",
-}
-
-@book{demo-005,
- author = "author",
- title = "title",
- year = "year",
- serial = "serial",
- doi = "doi",
- url = "url",
- pages = "pages"
-}
diff --git a/doc/context/manuals/allkind/mkiv-publications.tex b/doc/context/manuals/allkind/mkiv-publications.tex
deleted file mode 100644
index 3300a0f53..000000000
--- a/doc/context/manuals/allkind/mkiv-publications.tex
+++ /dev/null
@@ -1,1325 +0,0 @@
-% language=uk
-
-% \setupbtxrendering[continue=yes]
-% \btxfield{manipulator_a->manipulator_b->fieldname}
-
-% engine=luajittex
-
-% criterium: all + sorttype=cite => citex before rest
-% criterium: all + sorttype=database => database order
-% criterium: used
-%
-% numbering: label, short, indexinlist, indexused
-%
-% maybeyear
-%
-% \cite[data][whatever]
-
-% \showframe
-
-\usemodule[abr-02]
-\usemodule[set-11]
-
-\loadsetups[publications-en.xml] \enablemode[interface:setup:defaults]
-
-\setupbackend
- [export=yes,
- xhtml=yes,
- css=export-example.css]
-
-\setupexport
- [hyphen=yes,
- width=60em]
-
-% \input publ-tmp.mkiv
-
-\setupbodyfont
- [dejavu,10pt]
-
-\setuphead
- [chapter]
- [header=high,
- style=\bfc,
- color=darkmagenta]
-
-\setuplayout
- [topspace=2cm,
- bottomspace=1cm,
- header=0cm,
- width=middle,
- height=middle]
-
-\setupwhitespace
- [big]
-
-\setuptyping
- [color=darkmagenta]
-
-\setuptyping
- [keeptogether=yes]
-
-\setuptype
- [color=darkcyan]
-
-\setupfootertexts
- [pagenumber]
-
-\setupMPgraphics
- [mpy=\jobname.mpy]
-
-\setupinteraction
- [state=start,
- color=darkcyan,
- contrastcolor=darkyellow]
-
-\starttext
-
-\startMPpage
-
- StartPage ;
-
- % input "mkiv-publications.mpy" ;
-
- picture pic ; pic := image (
- path pth ; pth := ((0,0) for i=1 step 2 until 20 : -- (i,1) -- (i+1,0) endfor) ;
- for i=0 upto 9 : draw pth shifted (0,2*i) ; endfor ;
- ) ;
-
- % picture btx ; btx := textext("\ssbf BIBTEX") ;
- % picture ctx ; ctx := textext("\ssbf THE CONTEXT WAY") ;
- picture btx ; btx := image(graphictext("\ssbf BIBTEX") withfillcolor white) ;
- picture ctx ; ctx := image(graphictext("\ssbf THE CONTEXT WAY") withfillcolor white) ;
-
- pic := pic shifted - llcorner pic ;
- btx := btx shifted - llcorner btx ;
- ctx := ctx shifted - llcorner ctx ;
-
- pic := pic xysized (PaperWidth,PaperHeight) ;
- btx := btx xsized (2PaperWidth/3) shifted (.25PaperWidth,.15PaperHeight) ;
- ctx := ctx xsized (2PaperWidth/3) shifted (.25PaperWidth,.075PaperHeight) ;
-
- fill Page withcolor \MPcolor{darkcyan} ;
-
- draw pic withcolor \MPcolor{darkmagenta} ;
- draw btx withcolor \MPcolor{lightgray} ;
- draw ctx withcolor \MPcolor{lightgray} ;
-
- % draw boundingbox btx ;
- % draw boundingbox ctx ;
-
- StopPage ;
-
-\stopMPpage
-
-
-\startfrontmatter
-
-\starttitle[title=Contents]
- \placelist[chapter,section][color=black]
-\stoptitle
-
-\startchapter[title=Introduction]
-
-This manual describes how \MKIV\ handles bibliographies. Support in \CONTEXT\
-started in \MKII for \BIBTEX, using a module written by Taco Hoekwater. Later his
-code was adapted to \MKIV, but because users demanded more, I decided that
-reimplementing made more sense than patching. In particular, through the use of
-\LUA, the \BIBTEX\ data files can be easily directly parsed, thus liberating
-\CONTEXT\ from the dependency on an external \BIBTEX\ executable. The CritEd
-project (by Thomas Schmitz, Alan Braslau, Luigi Scarso and myself) was a good
-reason to undertake this rewrite. As part of that project users were invited to come
-up with ideas about extensions. Not all of them are (yet) honored, but the
-rewrite makes more functionality possible.
-
-This manual is dedicated to Taco Hoekwater who in a previous century implemented
-the first \BIBTEX\ module and saw it morph into a \TEX||\LUA\ hybrid in this
-century. The fact that there was support for bibliographies made it possible for
-users to use \CONTEXT\ in an academic environment, dominated by bibliographic
-databases encoded in the \BIBTEX\ format.
-
-\startlines
-Hans Hagen
-PRAGMA ADE
-Hasselt NL
-\stoplines
-
-\stopchapter
-
-\stopfrontmatter
-
-\startbodymatter
-
-\startchapter[title=The database]
-
-The \BIBTEX\ format is rather popular in the \TEX\ community and even with its
-shortcomings it will stay around for a while. Many publication websites can
-export and many tools are available to work with this database format. It is
-rather simple and looks a bit like \LUA\ tables. Unfortunately the content can be
-polluted with non|-|standardized \TEX\ commands which complicates pre- or
-postprocessing outside \TEX. In that sense a \BIBTEX\ database is often not coded
-neutrally. Some limitations, like the use of commands to encode accented
-characters, are rooted in the \ASCII\ world and can be bypassed by using \UTF\ instead
-(as handled somewhat in \LATEX\ through extensions such as \type {bibtex8}).
-
-The normal way to deal with a bibliography is to refer to entries using a unique
-tag or key. When a list of entries is typeset, this reference can be used for
-linking purposes. The typeset list can be processed and sorted using the \type
-{bibtex} program that converts the database into something more \TEX\ friendly (a
-\type {.bbl} file). I never used the program myself (nor bibliographies) so I
-will not go into too much detail here, if only because all I say can be wrong.
-
-In \CONTEXT\ we no longer use the \type {bibtex} program: we just use
-database files and deal with the necessary manipulations directly in \CONTEXT.
-One or more such databases can be used and combined with additional entries
-defined within the document. We can have several such datasets active at the same
-time.
-
-A \BIBTEX\ file looks like this:
-
-\starttyping
-@Article{sometag,
- author = "An Author and Another One",
- title = "A hopefully meaningful title",
- journal = maps,
- volume = "25",
- number = "2",
- pages = "5--9",
- month = mar,
- year = "2013",
- ISSN = "1234-5678",
-}
-\stoptyping
-
-Normally a value is given between quotes (or curly brackets) but single words are
-also OK (there is no real benefit in not using quotes, so we advise to always use
-them). There can be many more fields and instead of strings one can use
-predefined shortcuts. The title for example quite often contains \TEX\ macros.
-Some fields, like \type {pages} have funny characters such as the endash
-(typically as \type {--}) so we have a mixture of data and typesetting
-directives. If you are covering non||english references, you often need
-characters that are not in the \ASCII\ subset but \CONTEXT\ is quite happy with
-\UTF. If your database file uses old|-|fashioned \TEX\ accent commands then these
-will be internally converted automatically to \UTF. Commands (macros) are
-converted to an indirect call, which is quite robust.
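-
-As a hypothetical illustration (the name is made up, the accent syntax is standard
-\TEX):
-
-\starttyping
-author = "Kurt G{\"o}del", % old-fashioned accent commands end up as UTF internally
-author = "Kurt Gödel",     % the same name entered directly in UTF
-\stoptyping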
-
-The \BIBTEX\ files are loaded in memory as \LUA\ table but can be converted to
-\XML\ so that we can access them in a more flexible way, but that is a subject
-for specialists.
-
-In the old \MKII\ setup we have two kinds of entries: the ones that come from the
-\BIBTEX\ run and user supplied ones. We no longer rely on \BIBTEX\ output but we
-do still support the user supplied definitions. These were in fact prepared in a
-way that suits the processing of \BIBTEX\ generated entries. The next variant
-reflects the \CONTEXT\ recoding of the old \BIBTEX\ output.
-
-\starttyping
-\startpublication[k=Hagen:Second,t=article,a={Hans Hagen},y=2013,s=HH01]
- \artauthor[]{Hans}[H.]{}{Hagen}
- \arttitle{Who knows more?}
- \journal{MyJournal}
- \pubyear{2013}
- \month{8}
- \volume{1}
- \issue{3}
- \issn{1234-5678}
- \pages{123--126}
-\stoppublication
-\stoptyping
-
-The split \type {\artauthor} fields are collapsed into a single \type {author}
-field as we deal with the splitting later when it gets parsed in \LUA. The \type
-{\artauthor} syntax is only kept around for backward compatibility with the
-previous use of \BIBTEX.
-
-In the new setup we support these variants as well:
-
-\starttyping
-\startpublication[k=Hagen:Third,t=article]
- \author{Hans Hagen}
- \title{Who knows who?}
- ...
-\stoppublication
-\stoptyping
-
-and
-
-\starttyping
-\startpublication[tag=Hagen:Third,category=article]
- \author{Hans Hagen}
- \title{Who knows who?}
- ...
-\stoppublication
-\stoptyping
-
-and
-
-\starttyping
-\startpublication
- \tag{Hagen:Third}
- \category{article}
- \author{Hans Hagen}
- \title{Who knows who?}
- ...
-\stoppublication
-\stoptyping
-
-Because internally the entries are \LUA\ tables, we also support loading of \LUA\
-based definitions:
-
-\starttyping
-return {
- ["Hagen:First"] = {
- author = "Hans Hagen",
- category = "article",
- issn = "1234-5678",
- issue = "3",
- journal = "MyJournal",
- month = "8",
- pages = "123--126",
- tag = "Hagen:First",
- title = "Who knows nothing?",
- volume = "1",
- year = "2013",
- },
-}
-\stoptyping
-
-Files set up like this can be loaded too. The following \XML\ input is rather
-close to this, and is also accepted as input.
-
-\starttyping
-<?xml version="1.0" standalone="yes" ?>
-<bibtex>
- <entry tag="Hagen:First" category="article">
-  <field name="author">Hans Hagen</field>
-  <field name="category">article</field>
-  <field name="issn">1234-5678</field>
-  <field name="issue">3</field>
-  <field name="journal">MyJournal</field>
-  <field name="month">8</field>
-  <field name="pages">123--126</field>
-  <field name="tag">Hagen:First</field>
-  <field name="title">Who knows nothing?</field>
-  <field name="volume">1</field>
-  <field name="year">2013</field>
- </entry>
-</bibtex>
-\stoptyping
-
-{\em Todo: Add some remarks about loading EndNote and RIS formats, but first we
-need to complete the tag mapping (on Alan's plate).}
-
-So the user has a rather wide choice of formatting style for bibliography
-database files.
-
-\stopchapter
-
-You can load more data than you actually need. Only entries that are referred to
-explicitly through the \type {\cite} and \type {\nocite} commands will be shown
-in lists. We will cover these details later.
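-
-As a minimal sketch, using demo tags that show up later in this manual:
-
-\starttyping
-\cite[example::demo-003]   % cited in the text, so it ends up in the list
-\nocite[example::demo-004] % not cited in the text, but still forced into the list
-\stoptyping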
-
-\startchapter[title=Commands in entries]
-
-One unfortunate aspect commonly found in \BIBTEX\ files is that they often
-contain \TEX\ commands. Even worse is that there is no standard on what these
-commands can be and what they mean, at least not formally, as \BIBTEX\ is a
-program intended to be used with many variants of \TEX\ style: plain, \LATEX, and
-others. This means that we need to define our use of these typesetting commands.
-However, in most cases, they are just abbreviations or font switches and these
-are often known. Therefore, \CONTEXT\ will try to resolve them before reporting
-an issue. In the log file there is a list of commands that have been seen in the
-loaded databases. For instance, loading \type {tugboat.bib} gives a long list of
-commands of which we show a small set here:
-
-\starttyping
-publications > start used btx commands
-
-publications > standard CONTEXT 1 known
-publications > standard ConTeXt 4 known
-publications > standard TeXLive 3 KNOWN
-publications > standard eTeX 1 known
-publications > standard hbox 6 known
-publications > standard sltt 1 unknown
-
-publications > stop used btxcommands
-\stoptyping
-
-You can define unknown commands, or overload existing definitions in the
-following way:
-
-\starttyping
-\definebtxcommand\TUB {TUGboat}
-\definebtxcommand\sltt{\tt}
-\definebtxcommand\<#1>{\type{#1}}
-\stoptyping
-
-Unknown commands do not stall processing, but their names are then typeset in a
-mono|-|spaced font so they probably stand out for proofreading. You can
-access the commands with \type {\btxcommand {...}}, as in:
-
-\startbuffer
-commands like \btxcommand{MySpecialCommand} are handled in an indirect way
-\stopbuffer
-
-\typebuffer
-
-As this is an undefined command we get: \quotation {\inlinebuffer}.
-
-??
-
-\stopchapter
-
-\startchapter[title=Datasets]
-
-Normally in a document you will use only one bibliographic database, whether or
-not distributed over multiple files. Nevertheless we support multiple databases as well
-which is why we talk of datasets instead. A dataset is loaded with the \type
-{\usebtxdataset} command. Although currently it is not necessary to define a
-(default) dataset you can best do this because in the future we might provide more
-options. Here are some examples:
-
-\starttyping
-\definebtxdataset[standard]
-
-\usebtxdataset[standard][tugboat.bib]
-\usebtxdataset[standard][mtx-bibtex-output.xml]
-\usebtxdataset[standard][test-001-btx-standard.lua]
-\stoptyping
-
-These three suffixes are understood by the loader. Here the dataset has the name
-\type {standard} and the three database files are merged, where later entries having the
-same tag overload previous ones. Definitions in the document source (coded in \TEX\
-speak) are also added, and they are saved for successive runs. This means that if
-you load and define entries, they will already be known at the next run, so that
-references to them are independent of when loading and definitions take place.
-
-\showsetup{setupbtxdataset}
-
-\showsetup{definebtxdataset}
-
-\showsetup{usebtxdataset}
-
-In this document we use some example databases, so let's load one of them now:
-
-\startbuffer
-\definebtxdataset[example]
-
-\usebtxdataset[example][mkiv-publications.bib]
-\stopbuffer
-
-\typebuffer \getbuffer
-
-You can ask for an overview of entries in a dataset with:
-
-\startbuffer
-\showbtxdatasetfields[example]
-\stopbuffer
-
-\typebuffer
-
-this gives:
-
-\getbuffer
-
-You can set the current active dataset with
-
-\starttyping
-\setbtxdataset[standard]
-\stoptyping
-
-but most publication|-|related commands accept optional arguments that denote the
-dataset, and references to entries can be prefixed with a dataset identifier. More
-about that later.
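-
-A small sketch of such prefixed references (assuming that an unprefixed tag is
-resolved in the current dataset):
-
-\starttyping
-\setbtxdataset[example]
-\cite[demo-003]          % looked up in the current dataset
-\cite[example::demo-003] % the same entry, with an explicit dataset prefix
-\stoptyping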
-
-Sometimes you want to check a database. One way of doing that is the following:
-
-\startbuffer
-\definebtxdataset[check]
-
-\usebtxdataset[check][mkiv-publications-check.bib]
-
-\showbtxdatasetcompleteness[check]
-\stopbuffer
-
-\typebuffer
-
-The database looks like this:
-
-\typefile{mkiv-publications-check.bib}
-
-The completeness check shows (with green field names) the required fields and
-when one is missing this is indicated in red. In blue we show what gets
-inherited.
-
-\getbuffer
-
-\stopchapter
-
-\startchapter[title=Renderings]
-
-A list of publications can be rendered at any place in the document. A database
-can be much larger than needed for a document. The same is true for the fields
-that make up an entry. Here is the list of fields that are currently handled, but
-of course there can be additional ones:
-
-
-\startalignment[flushleft,verytolerant,nothyphenated]
-\startluacode
-local fields = publications.tracers.fields
-
-for i=1,#fields do
- if i > 1 then
- context(", ")
- end
- context.type(fields[i])
-end
-\stopluacode
-\stopalignment
-
-If you want to see what publications are in the database, the easiest way is to
-ask for a complete list:
-
-\startbuffer
-\definebtxrendering
- [example]
- [dataset=example,
- method=local,
- alternative=apa]
-\placelistofpublications % \placebtxrendering
- [example]
- [criterium=all]
-\stopbuffer
-
-\typebuffer
-
-This gives:
-
-\getbuffer
-
-The rendering itself is somewhat complex to set up because we have not only many
-different standards but also many fields that can be configured. This means that
-there are several commands involved. Often there is a prescribed style to render
-bibliographic descriptions, for example \type {apa}. A rendering is set up and
-defined with:
-
-\showsetup[setupbtxrendering]
-%showrootvalues[btxrendering]
-\showsetup[definebtxrendering]
-
-And a list of such descriptions is generated with:
-
-\showsetup[placebtxrendering]
-
-A dataset can have all kind of entries:
-
-\startalignment[flushleft,verytolerant,nothyphenated]
-\startluacode
- local categories = publications.tracers.categories
-
- for i=1,#categories do
- if i > 1 then
- context(", ")
- end
- context.type(categories[i])
- end
-\stopluacode
-\stopalignment
-
-Each has its own rendering variant. To keep things simple we have their settings
-separated. However, these settings are shared for all rendering alternatives. In
-practice this is seldom a problem in a publication as only one rendering
-alternative will be active. If this is not sufficient, you can always group local
-settings in a setup and hook that into the specific rendering.
-
-\showsetup[setupbtxlistvariant]
-%showrootvalues[btxlistvariant]
-\showsetup[definebtxlistvariant]
-
-Examples of list variants are:
-
-\startluacode
- local variants = publications.tracers.listvariants
-
- for i=1,#variants do
- context.showinstancevalues( { "btxlistvariant" }, { variants[i] })
- end
-\stopluacode
-
-The exact rendering of list entries is determined by the \type {alternative} key
-and defaults to \type {apa} which uses definitions from \type
-{publ-imp-apa.mkiv}. If you look at that file you will see that each category has
-its own setup. You may also notice that additional tests are needed to make sure
-that empty fields don't trigger separators and such.
-
-% \showsetup{setuplists}
-
-There are a couple of accessors and helpers to get the job done. When you want to
-fetch a field from the current entry you use \type {\btxfield}. In most cases
-you want to make sure this field has a value, for instance because you don't want
-fences or punctuation that belongs to a field.
-
-\starttyping
-\btxdoif {title} {
- \bold{\btxfield{title}},
-}
-\stoptyping
-
-There are three test macros:
-
-\starttyping
-\btxdoifelse {fieldname} {action when found} {action when not found}
-\btxdoif     {fieldname} {action when found}
-\btxdoifnot  {fieldname} {action when not found}
-\stoptyping
-
-An extra conditional is available for testing interactivity:
-
-\starttyping
-\btxdoifelseinteraction{action when true}{action when false}
-\stoptyping
-
-In addition there is also a conditional \type {\btxinteractive} which is
-more efficient, although in practice efficiency is not so important here.
-
-There are three commands to flush data:
-
-\starttabulate[|||] % Funny usage here! Could not tabulate work without
- % even specifying the number of columns?
-\NC \type {\btxfield} \NC fetch an explicit field (e.g. \type {year}) \NC \NR
-\NC \type {\btxdetail} \NC fetch a derived field (e.g. \type {short}) \NC \NR
-\NC \type {\btxflush} \NC fetch a derived or explicit field \NC \NR
-\stoptabulate
-
-Normally you can use \type {\btxfield} for explicit fields and \type {\btxflush}
-when a derived field may be available; analyzed author fields, for example, are
-flushed in a special way.
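-
-A small sketch of the difference; here \type {year} is an explicit field, \type
-{short} a derived one, and \type {doi} just one of the explicit fields from the
-demo entries:
-
-\starttyping
-\btxfield{year}   % the year exactly as given in the database
-\btxdetail{short} % the derived short tag
-\btxflush{doi}    % a derived or explicit field, whichever is available
-\stoptyping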
-
-You can improve readability by using setups, for instance:
-
-\starttyping
-\btxdoifelse {author} {
- \btxsetup{btx:apa:author:yes}
-} {
- \btxsetup{btx:apa:author:nop}
-}
-\stoptyping
-
-Keep in mind that normally you don't need to mess with definitions like this
-because standard rendering styles are provided. These styles use a few helpers
-that inject symbols but also take care of leading and trailing spaces:
-
-\starttabulate[|||]
-\NC \type {\btxspace } \NC before \btxspace after \NC \NR
-\NC \type {\btxperiod } \NC before \btxperiod after \NC \NR
-\NC \type {\btxcomma } \NC before \btxcomma after \NC \NR
-\NC \type {\btxlparent } \NC before \btxlparent after \NC \NR
-\NC \type {\btxrparent } \NC before \btxrparent after \NC \NR
-\NC \type {\btxlbracket} \NC before \btxlbracket after \NC \NR
-\NC \type {\btxrbracket} \NC before \btxrbracket after \NC \NR
-\stoptabulate
-
-So, the previous example setup can be rewritten as:
-
-\starttyping
-\btxdoif {title} {
- \bold{\btxfield{title}}
- \btxcomma
-}
-\stoptyping
-
-There is a special command for rendering a (combination) of authors:
-
-\starttyping
-\btxflushauthor{author}
-\btxflushauthor{editor}
-\btxflushauthor[inverted]{editor}
-\stoptyping
-
-Instead of the last one you can also use:
-
-\starttyping
-\btxflushauthorinverted{editor}
-\stoptyping
-
-You can use a (configurable) default or pass directives. Valid directives are:
-
-\starttabulate
-\NC \bf conversion \NC \bf rendering \NC \NR
-\HL
-\NC \type{inverted} \NC the Frog jr, Kermit \NC \NR
-\NC \type{invertedshort} \NC the Frog jr, K \NC \NR
-\NC \type{normal} \NC Kermit, the Frog, jr \NC \NR
-\NC \type{normalshort} \NC K, the Frog, jr \NC \NR
-\stoptabulate
-
-The list itself is not a list in the sense of a regular \CONTEXT\ structure related
-list. We do use the list mechanism to keep track of used entries but that is mostly
-because we can then reuse filtering mechanisms. The actual rendering of a reference
-and entry runs on top of so called constructions (other examples of constructions are
-descriptions, enumerations and notes).
-
-\showsetup[setupbtxlist]
-
-You need to be aware of what command is used to achieve the desired result. For instance,
-in order to put parentheses around a number reference you say:
-
-\starttyping
-\setupbtxlistvariant
- [num]
- [left=(,
- right=)]
-\stoptyping
-
-If you want automated width calculations, the following does the trick:
-
-\starttyping
-\setupbtxrendering
- [standard]
- [width=auto]
-\stoptyping
-
-but if you want to control it yourself you say something like:
-
-\starttyping
-\setupbtxrendering
- [width=none]
-
-\setupbtxlist
- [standard]
- [width=3cm,
- distance=\emwidth,
- color=red,
- headcolor=blue,
- headalign=flushright]
-\stoptyping
-
-In most cases the defaults will work out fine.
-
-\stopchapter
-
-\startchapter[title=Citations]
-
-Citations are references to bibliographic entries that normally show up in lists
-someplace in the document: at the end of a chapter, in an appendix, at the end of
-an article, etc. We discussed the rendering of these lists in the previous chapter.
-A citation is normally pretty short as its main purpose is to refer uniquely to a more
-detailed description. But, there are several ways to refer, which is why the citation
-subsystem is configurable and extensible. Just look at the following commands:
-
-\startbuffer
-\cite[author][example::demo-003]
-\cite[authoryear][example::demo-003]
-\cite[authoryears][example::demo-003]
-\cite[author][example::demo-003,demo-004]
-\cite[authoryear][example::demo-003,demo-004]
-\cite[authoryears][example::demo-003,demo-004]
-\cite[author][example::demo-004,demo-003]
-\cite[authoryear][example::demo-004,demo-003]
-\cite[authoryears][example::demo-004,demo-003]
-\stopbuffer
-
-\typebuffer
-
-\startlines \getbuffer \stoplines
-
-The first argument is optional.
-% What is the default? How can one set this up?
-
-\showsetup[cite]
-
-You can tune the way a citation shows up:
-
-\startbuffer
-\setupbtxcitevariant[author] [sorttype=author,color=darkyellow]
-\setupbtxcitevariant[authoryear] [sorttype=author,color=darkyellow]
-\setupbtxcitevariant[authoryears][sorttype=author,color=darkyellow]
-
-\cite[author][example::demo-004,demo-003]
-\cite[authoryear][example::demo-004,demo-003]
-\cite[authoryears][example::demo-004,demo-003]
-\stopbuffer
-
-\typebuffer
-
-Here we sort the authors and color the citation:
-
-\startlines \getbuffer \stoplines
-
-For reasons of backward compatibility the \type {\cite} command is a bit picky
-about spaces between the two arguments, of which the first is optional. This is
-a consequence of allowing its use with the key specified between curly brackets
-as is the traditional practice. (We do encourage users to adopt the more
-coherent \CONTEXT\ syntax by using square brackets for keywords and reserving
-curly brackets to regroup text to be typeset.)
-% Just how is it picky?
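-
-As an illustration, both of the following refer to the same entry; the second form
-is the traditional one with the key between curly brackets:
-
-\starttyping
-\cite[authoryear][example::demo-003]
-\cite{demo-003}
-\stoptyping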
-
-The \type {\citation} command is synonymous but is more flexible with respect to
-spacing of its arguments:
-
-\starttyping
-\citation[author] [example::demo-004,demo-003]
-\citation[authoryear] [example::demo-004,demo-003]
-\citation[authoryears][example::demo-004,demo-003]
-\stoptyping
-
-% The first argument of cite and citation is optional. What is the default and how does one set it?
-
-There is a whole bunch of cite options and more can be easily defined.
-
-\startluacode
-local variants = publications.tracers.citevariants
-
-context.starttabulate { "|l|p|" }
- context.NC() context.bold("key")
- context.NC() context.bold("rendering")
- context.NC() context.NR() context.FL()
- for i=1,#variants do
- local variant = variants[i]
- context.NC() context.type(variant)
- context.NC() context.citation( { variant }, { "example::demo-005" })
- context.NC() context.NR()
- end
-context.stoptabulate()
-\stopluacode
-
-Because we are dealing with database input and because we generally need to
-manipulate entries, much of the work is delegated to \LUA. This makes it easier
-to maintain and extend the code. Of course \TEX\ still does the rendering. The
-typographic details are controlled by parameters but not all are used in all
-variants. As with most \CONTEXT\ commands, it starts out with a general setup
-command:
-
-\showsetup[setupbtxcitevariant]
-
-On top of that we can define instances that inherit either from a given parent or
-from the topmost setup.
-
-\showsetup[definebtxcitevariant]
-
-% The default values are:
-
-% \showrootvalues[btxcitevariant]
-
-But, specific variants can have them overloaded:
-
-% \showinstancevalues[setupbtxcitevariant][author]
-% \showinstancevalues[setupbtxcitevariant][authornum]
-
-\startluacode
- local variants = publications.tracers.citevariants
-
- for i=1,#variants do
- context.showinstancevalues( { "btxcitevariant" }, { variants[i] })
- end
-\stopluacode
-
-A citation variant is defined in several steps and if you really want to know
-the dirty details, you should look into the \type {publ-imp-*.mkiv} files. Here
-we stick to the concept.
-
-\starttyping
-\startsetups btx:cite:author
- \btxcitevariant{author}
-\stopsetups
-\stoptyping
-
-You can overload such setups if needed, but that only makes sense when you cannot
-configure the rendering with parameters. The \type {\btxcitevariant} command is
-one of the built|-|in accessors and it calls out to \LUA\ where more complex
-manipulation takes place if needed. If no manipulation is known, the field with
-the same name (if found) will be flushed. A command like \type {\btxcitevariant}
-assumes that a dataset and a specific tag have been set. This is normally done in
-the wrapper macros, like \type {\cite}. For special purposes you can use these
-commands:
-
-\starttyping
-\setbtxdataset[example]
-\setbtxentry[hh2013]
-\stoptyping
-
-But don't expect too much support for such low level rendering control.
-
-Unless you use \type {criterium=all} only publications that are cited will end up
-in the lists. You can force a citation into a list using \type {\usecitation}, for
-example:
-
-\starttyping
-\usecitation[example::demo-004,demo-003]
-\stoptyping
-
-This command has two synonyms: \type {\nocite} and \type {\nocitation} so you can
-choose whatever fits you best.
-
-\showsetup[nocite]
-
-\stopchapter
-
-\startchapter[title=The \LUA\ view]
-
-Because we manage data at the \LUA\ end it is tempting to access it there for
-other purposes. This is fine as long as you keep in mind that aspects of the
-implementation may change over time, although this is unlikely once the modules
-become stable.
-
-The entries are collected in datasets and each set has a unique name. In this
-document we have the set named \type {example}. A dataset table has several
-fields, and probably the one of most interest is the \type {luadata} field. Each
-entry in this table describes a publication:
-
-\startluacode
- context.tocontext(publications.datasets.example.luadata["demo-001"])
-\stopluacode
-
-This is \type {publications.datasets.example.luadata["demo-001"]}. There can be
-a companion entry in the parallel \type {details} table.
-
-\startluacode
- context.tocontext(publications.datasets.example.details["demo-001"])
-\stopluacode
-
-These details are accessed as \type
-{publications.datasets.example.details["demo-001"]} and by using a separate table
-we can overload fields in the original entry without losing the original.
-
-You can loop over the entries using regular \LUA\ code combined with \MKIV\
-helpers:
-
-\startbuffer
-local dataset = publications.datasets.example
-
-context.starttabulate { "|l|l|l|" }
-for tag, entry in table.sortedhash(dataset.luadata) do
- local detail = dataset.details[tag] or { }
- context.NC() context.type(tag)
- context.NC() context(detail.short)
- context.NC() context(entry.title)
- context.NC() context.NR()
-end
-context.stoptabulate()
-\stopbuffer
-
-\typebuffer
-
-This results in:
-
-\ctxluabuffer
-
-You can manipulate a dataset after loading. Of course this assumes that you know
-what kind of content you have and what you need for rendering. As example we
-load a small dataset.
-
-\startbuffer
-\definebtxdataset[drumming]
-\usebtxdataset[drumming][mkiv-publications.lua]
-\stopbuffer
-
-\typebuffer \getbuffer
-
-Because we're going to do some \LUA, we could also have loaded the dataset
-with:
-
-\starttyping
-publications.load("drumming","mkiv-publications.lua","lua")
-\stoptyping
-
-The dataset has three entries:
-
-\typefile{mkiv-publications.lua}
-
-As you can see, we can have a subtitle. We will combine the title and subtitle
-into one:
-
-\startbuffer
-\startluacode
-for tag, entry in next, publications.datasets.drumming.luadata do
- if entry.subtitle then
- if entry.title then
- entry.title = entry.title .. ", " .. entry.subtitle
- else
- entry.title = entry.subtitle
- end
- entry.subtitle = nil
- logs.report("btx","combining title and subtitle of entry tagged %a",tag)
- end
-end
-\stopluacode
-\stopbuffer
-
-\typebuffer \getbuffer
-
-We can now typeset the entries with:
-
-\startbuffer
-\definebtxrendering[drumming][dataset=drumming,method=dataset]
-\placebtxrendering[drumming]
-\stopbuffer
-
-\typebuffer
-
-Because we just want to show the entries, and have no citations that force them
-to be shown, we have to set the \type {method} to \type {dataset}. \footnote {Gavin
-Harrison is in my opinion one of the most creative, diverse and interesting
-drummers of our time. It's also fascinating to watch him play and a welcome
-distraction from writing code and manuals.}
-
-\blank \getbuffer \blank
-
-\stopchapter
-
-\startchapter[title=The \XML\ view]
-
-The \type {luadata} table can be converted into an \XML\ representation. This is
-a follow up on earlier experiments with an \XML|-|only approach. I decided in the end
-to stick to a \LUA\ approach and provide some simple \XML\ support in addition.
-
-Once a dataset is accessible as an \XML\ tree, you can use the regular \type {\xml...}
-commands. We start with loading a dataset, in this case from just one file.
-
-\startbuffer
-\usebtxdataset[tugboat][tugboat.bib]
-\stopbuffer
-
-\typebuffer \getbuffer
-
-The dataset has to be converted to \XML:
-
-\startbuffer
-\convertbtxdatasettoxml[tugboat]
-\stopbuffer
-
-\typebuffer \getbuffer
-
-The tree is now accessible by its root reference \type {btx:tugboat}. If we want simple
-field access we can use a few setups:
-
-\startbuffer
-\startxmlsetups btx:initialize
- \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
- \xmlmain{#1}
-\stopxmlsetups
-
-\startxmlsetups btx:field
- \xmlflushcontext{#1}
-\stopxmlsetups
-
-\xmlsetup{btx:tugboat}{btx:initialize}
-\stopbuffer
-
-\typebuffer \getbuffer
-
-The two setups are predefined in the core already, but you might want to change them. They are
-applied, for instance, in:
-
-\startbuffer
-\starttabulate[|||]
- \NC \type {tag} \NC \xmlfirst {btx:tugboat}
- {/bibtex/entry[string.find(@tag,'Hagen')]/attribute('tag')}
- \NC \NR
- \NC \type {title} \NC \xmlfirst {btx:tugboat}
- {/bibtex/entry[string.find(@tag,'Hagen')]/field[@name='title']}
- \NC \NR
-\stoptabulate
-\stopbuffer
-
-\typebuffer \getbuffer
-
-\startbuffer
-\startxmlsetups btx:demo
- \xmlcommand
- {#1}
- {/bibtex/entry[string.find(@tag,'Hagen')][1]}{btx:table}
-\stopxmlsetups
-
-\startxmlsetups btx:table
-\starttabulate[|||]
- \NC \type {tag} \NC \xmlatt{#1}{tag} \NC \NR
- \NC \type {title} \NC \xmlfirst{#1}{/field[@name='title']} \NC \NR
-\stoptabulate
-\stopxmlsetups
-
-\xmlsetup{btx:tugboat}{btx:demo}
-\stopbuffer
-
-\typebuffer \getbuffer
-
-Here is another example:
-
-\startbuffer
-\startxmlsetups btx:row
- \NC \xmlatt{#1}{tag}
- \NC \xmlfirst{#1}{/field[@name='title']}
- \NC \NR
-\stopxmlsetups
-
-\startxmlsetups btx:demo
- \xmlfilter {#1} {
- /bibtex
- /entry[@category='article']
- /field[@name='author' and (find(text(),'Knuth') or find(text(),'DEK'))]
- /../command(btx:row)
- }
-\stopxmlsetups
-
-\starttabulate[|||]
- \xmlsetup{btx:tugboat}{btx:demo}
-\stoptabulate
-\stopbuffer
-
-\typebuffer \getbuffer
-
-A more extensive example is the following. Of course this assumes that you
-know what \XML\ support mechanisms and macros are available.
-
-\startbuffer
-\startxmlsetups btx:getkeys
- \xmladdsortentry{btx}{#1}{\xmlfilter{#1}{/field[@name='author']/text()}}
- \xmladdsortentry{btx}{#1}{\xmlfilter{#1}{/field[@name='year' ]/text()}}
- \xmladdsortentry{btx}{#1}{\xmlatt{#1}{tag}}
-\stopxmlsetups
-
-\startxmlsetups btx:sorter
- \xmlresetsorter{btx}
- % \xmlfilter{#1}{entry/command(btx:getkeys)}
- \xmlfilter{#1}{
- /bibtex
- /entry[@category='article']
- /field[@name='author' and find(text(),'Knuth')]
- /../command(btx:getkeys)}
- \xmlsortentries{btx}
- \starttabulate[||||]
- \xmlflushsorter{btx}{btx:entry:flush}
- \stoptabulate
-\stopxmlsetups
-
-\startxmlsetups btx:entry:flush
- \NC \xmlfilter{#1}{/field[@name='year' ]/context()}
- \NC \xmlatt{#1}{tag}
- \NC \xmlfilter{#1}{/field[@name='author']/context()}
- \NC \NR
-\stopxmlsetups
-
-\xmlsetup{btx:tugboat}{btx:sorter}
-\stopbuffer
-
-\typebuffer \getbuffer
-
-The original data is stored in a \LUA\ table, hashed by tag. Starting with \LUA\ 5.2
-each run of \LUA\ gets a different ordering of such a hash. In older versions, when you
-looped over a hash, the order was undefined, but the same as long as you used the same
-binary. This had the advantage that successive runs, something we often have in document
-processing, gave consistent results. In today's \LUA\ we need to do much more sorting of
-hashes before we loop, especially when we save multi||pass data. It is for this reason
-that the \XML\ tree is sorted by hash key by default. That way lookups (especially
-the first of a set) give consistent outcomes.
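-
-A minimal sketch of the difference, with \type {dataset} as in the earlier \LUA\
-example:
-
-\starttyping
-for tag, entry in next, dataset.luadata do
-    -- raw hash order: can differ from run to run
-end
-
-for tag, entry in table.sortedhash(dataset.luadata) do
-    -- sorted by tag: stable across runs
-end
-\stoptyping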
-
-\stopchapter
-
-\startchapter[title=Standards]
-
-The rendering of bibliographic entries is often standardized and prescribed by
-the publisher. If you submit an article to a journal, normally it will be
-reformatted (or even re|-|keyed) and the rendering will happen at the publishers
-end. In that case it may not matter how entries were rendered when writing the
-publication, because the publisher will do it his or her way.
-This means that most users probably will stick to the standard \APA\ rules and for
-them we provide some configuration. Because we use setups it is easy to overload
-specifics. If you really want to tweak, best look in the files that deal with it.
-
-Many standards exist and support for other renderings may be added to the core.
-Interested users are invited to develop and to test alternate standard renderings
-according to their needs.
-
-Todo: maybe a list of categories and fields.
-
-\stopchapter
-
-\startchapter[title=Cleaning up]
-
-Although the \BIBTEX\ format is reasonably well defined, in practice there are
-many ways to organize the data. For instance, one can use predefined string
-constants that get used (whether or not combined with other strings) later on. A string
-can be enclosed in curly braces or double quotes. The strings can contain \TEX\ commands
-but these are not standardized. The databases often have somewhat complex
-ways to deal with special characters and the use of braces in their definition is also
-not normalized.
-
-The most complex to deal with are the fields that contain names of people. At some point it
-might be needed to split a combination of names into individual ones that then get split into
-title, first name, optional inbetweens, surname(s) and additional: \type {Prof. Dr. Alfred
-B. C. von Kwik Kwak Jr. II and P. Q. Olet} is just one example of this. The convention seems
-to be not to use commas but \type {and} to separate names (often each name will be specified
-as lastname, firstname).
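-
-In a database this convention typically looks like:
-
-\starttyping
-author = "Hagen, Hans and Otten, Ton",
-editor = "Prof. Dr. Alfred B. C. von Kwik Kwak Jr. II and P. Q. Olet",
-\stoptyping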
-
-We don't see it as a challenge nor as a duty to support all kinds of messy definitions. Of
-course we try to be somewhat tolerant, but you will be sure to get better results if you
-use nicely set up, consistent databases.
-
-Todo: maybe some examples of bad.
-
-\stopchapter
-
-\startchapter[title=Transition]
-
-In the original bibliography support module usage was as follows (example taken
-from the contextgarden wiki):
-
-\starttyping
-% engine=pdftex
-
-\usemodule[bib]
-\usemodule[bibltx]
-
-\setupbibtex
- [database=xampl]
-
-\setuppublications
- [numbering=yes]
-
-\starttext
- As \cite [article-full] already indicated, bibtex is a \LATEX||centric
- program.
-
- \completepublications
-\stoptext
-\stoptyping
-
-For \MKIV\ the modules were partly rewritten and ended up in the core so the two
-\type {\usemodule} commands were no longer needed. The overhead associated with the
-automatic loading of the bibliography macros can be neglected these days, so
-standardized modules such as \type {bib} are all being moved to the core and do
-not need to be explicitly loaded.
-
-The first \type {\setupbibtex} command in this example is needed to bootstrap
-the process: it tells what database has to be processed by \BIBTEX\ between
-runs. The second \type {\setuppublications} command is optional. Each citation
-(tagged with \type {\cite}) ends up in the list of publications.
-
-In the new approach we no longer use \BIBTEX\ so we don't need to set up \BIBTEX.
-Instead we define dataset(s). We also no longer set up publications with one
-command, but have split that up into rendering|-|, list|-|, and cite|-|variants. The
-basic \type {\cite} command remains. The above example becomes:
-
-\starttyping
-\definebtxdataset
- [document]
-
-\usebtxdataset
- [document]
- [mybibfile.bib]
-
-\definebtxrendering
- [document]
-
-\setupbtxrendering
- [document]
- [numbering=yes]
-
-\starttext
- As \cite [article-full] already indicated, bibtex is a \LATEX||centric
- program.
-
- \completebtxrendering[document]
-\stoptext
-\stoptyping
-
-So, we have a few more commands to set up things. If you intend to use just a
-single dataset and rendering, the above preamble can be simplified to:
-
-\starttyping
-\usebtxdataset
- [mybibfile.bib]
-
-\setupbtxrendering
- [numbering=yes]
-\stoptyping
-
-But keep in mind that compared to the old \MKII\ derived method we have moved
-some of the options to the rendering, list and cite setup variants.
-
-Another difference is now the use of lists. When you define a rendering, you
-also define a list. However, all entries are collected in a common list tagged
-\type {btx}. Although you will normally configure a rendering you can still set
-some properties of lists, but in that case you need to prefix the list
-identifier. In the case of the above example this is \type {btx:document}.
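-
-So, as a sketch, a list property can be set like this (the rendering parameters
-remain the preferred route):
-
-\starttyping
-\setuplist
-  [btx:document]
-  [pagenumber=no]
-\stoptyping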
-
-\stopchapter
-
-\startchapter[title=\MLBIBTEX]
-
-Todo: how to plug in \MLBIBTEX\ for sorting and other advanced operations.
-
-\stopchapter
-
-\startchapter[title=Extensions]
-
-As \TEX\ and \LUA\ are both open and accessible in \CONTEXT\ it is possible to
-extend the functionality of the bibliography related code. For instance, you can add
-extra loaders.
-
-\starttyping
-function publications.loaders.myformat(dataset,filename)
-    local t = { }
-    -- Load data from 'filename' and convert it to a Lua table 't', with
-    -- the tag as hash key and the fields conforming to the luadata table
-    -- format, then hand it over to the regular lua loader:
-    publications.loaders.lua(dataset,t)
-end
-\stoptyping
-
-This then permits loading a database (into a dataset) with the command:
-
-\starttyping
-\usebtxdataset[standard][myfile.myformat]
-\stoptyping
-
-The \type {myformat} suffix is recognized automatically. If you want to use another
-suffix, you can do this:
-
-\starttyping
-\usebtxdataset[standard][myformat::myfile.txt]
-\stoptyping
-
-\stopchapter
-
-\startchapter[title=Notes]
-
-The move from external \BIBTEX\ processing to internal processing has the
-advantage that we stay within the same run. In the traditional approach we had
-roughly the following steps:
-
-\startitemize[packed]
-\startitem the first run information is collected and written to file \stopitem
-\startitem after that run the \BIBTEX\ program converts that file to another one \stopitem
-\startitem successive runs use that data for references and producing lists \stopitem
-\stopitemize
-
-In the \MKIV\ approach the bibliographic database is loaded in memory each run
-and processing also happens each run. On paper this looks less efficient but as
-\LUA\ is quite fast, in practice performance is much better.
-
-Probably most demanding is the treatment of authors as we have to analyze names,
-split multiple authors and reassemble firstnames, vons, surnames and juniors.
-When we sort by author sorting vectors have to be made which also has a penalty.
-However, in practice the user will not notice a performance degradation. We did
-some tests with a list of 500.000 authors, sorted them and typeset them as a list
-(producing some 5400 dense pages in a small font and with small margins). This is
-typically one of those cases where using \LUAJITTEX\ saves quite some time. On my
-machine it took just over 100 seconds to get this done. Unfortunately not all
-operating systems performed equally well: 32 bit versions worked fine, but 64 bit
-\LINUX\ either crashed (stalled) the machine or ran out of memory rather fast,
-while \MACOSX\ and \WINDOWS\ performed fine. In practice you will never run into
-this, unless you produce massive amounts of bibliographic entries. \LUAJIT\ has
-some benefits but also some drawbacks.
-
-\stopchapter
-
-\stopbodymatter
-
-\stoptext
-
-Todo:
-
-\setuplabeltext[en][reprint=reprint]
-\setuplabeltext[de][reprint=Nachdruck]
-
-note = {\labeltext{reprint} 2004}
-
diff --git a/doc/context/manuals/allkind/publications-en.xml b/doc/context/manuals/allkind/publications-en.xml
deleted file mode 100644
index ea577ccf4..000000000
--- a/doc/context/manuals/allkind/publications-en.xml
+++ /dev/null
@@ -1,369 +0,0 @@
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.html b/doc/context/scripts/mkiv/mtx-bibtex.html
deleted file mode 100644
index ba1591b4b..000000000
--- a/doc/context/scripts/mkiv/mtx-bibtex.html
+++ /dev/null
@@ -1,53 +0,0 @@
-bibtex helpers
-
-wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl
-
-Command line options
-
- flag      | value | description
- --toxml   |       | convert bibtex database(s) to xml
- --tolua   |       | convert bibtex database(s) to lua
-
-Example
-
-mtxrun --script bibtex --tolua bibl-001.bib
-mtxrun --script bibtex --tolua --simple bibl-001.bib
-mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.man b/doc/context/scripts/mkiv/mtx-bibtex.man
deleted file mode 100644
index cedf41b8b..000000000
--- a/doc/context/scripts/mkiv/mtx-bibtex.man
+++ /dev/null
@@ -1,30 +0,0 @@
-.TH "mtx-bibtex" "1" "01-01-2014" "version 1.00" "bibtex helpers"
-.SH NAME
-.B mtx-bibtex
-.SH SYNOPSIS
-.B mtxrun --script bibtex [
-.I OPTIONS ...
-.B ] [
-.I FILENAMES
-.B ]
-.SH DESCRIPTION
-.B bibtex helpers
-.SH OPTIONS
-.TP
-.B --toxml
-convert bibtex database(s) to xml
-.TP
-.B --tolua
-convert bibtex database(s) to lua
-.SH AUTHOR
-More information about ConTeXt and the tools that come with it can be found at:
-
-
-.B "maillist:"
-ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-
-.B "webpage:"
-http://www.pragma-ade.nl / http://tex.aanhet.net
-
-.B "wiki:"
-http://contextgarden.net
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.xml b/doc/context/scripts/mkiv/mtx-bibtex.xml
deleted file mode 100644
index b33e1809c..000000000
--- a/doc/context/scripts/mkiv/mtx-bibtex.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
- mtx-bibtex
- bibtex helpers
- 1.00
-
-
-
-
- convert bibtex database(s) to xml
- convert bibtex database(s) to lua
-
-
-
-
-
- Example
-
- mtxrun --script bibtex --tolua bibl-001.bib
- mtxrun --script bibtex --tolua --simple bibl-001.bib
- mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml
-
-
-
-
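The removed mtx-bibtex documentation above describes converting a BibTeX database
to a Lua table (--tolua) or to XML (--toxml). As a rough illustration of what the
Lua side of such a conversion amounts to, a naive parser for simple
@article{key, field = {value}, ...} entries could look as follows; a real converter
has to deal with nested braces, string macros and concatenation, and the actual
mtx-bibtex output format is not shown here.

-- naive sketch of turning simple bibtex entries into a lua table
local function parsebib(data)
    local entries = { }
    for category, tag, body in data:gmatch("@(%w+)%s*{%s*([^,%s]+)%s*,(.-)\n}") do
        local entry = { category = category:lower(), tag = tag }
        for key, value in body:gmatch("(%w+)%s*=%s*{(.-)}%s*,?") do
            entry[key:lower()] = value
        end
        entries[#entries+1] = entry
    end
    return entries
end

local sample = [[
@article{knuth1984,
  author  = {Donald E. Knuth},
  title   = {Literate Programming},
  journal = {The Computer Journal},
  year    = {1984},
}
]]

for _, entry in ipairs(parsebib(sample)) do
    print(entry.tag, entry.author, entry.title, entry.year)
end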
diff --git a/metapost/context/base/mp-base.mpii b/metapost/context/base/mp-base.mpii
index 7af4bc436..0f8104447 100644
--- a/metapost/context/base/mp-base.mpii
+++ b/metapost/context/base/mp-base.mpii
@@ -110,15 +110,12 @@ transform identity;
for z=origin,right,up: z transformed identity = z; endfor
% color constants
-color black, white, red, green, blue, cyan, magenta, yellow, background;
+color black, white, red, green, blue, background;
black = (0,0,0);
white = (1,1,1);
red = (1,0,0);
green = (0,1,0);
blue = (0,0,1);
-cyan = (0,1,1);
-magenta = (1,0,1);
-yellow = (1,1,0);
background = white; % The user can reset this
% color part selection for within
@@ -363,17 +360,9 @@ enddef;
def filldraw expr c =
addto currentpicture contour c withpen currentpen
_op_ enddef;
-% def drawdot expr z =
-% addto currentpicture contour makepath currentpen shifted z
-% _op_ enddef;
-
-def drawdot expr p =
- if pair p :
- addto currentpicture doublepath p withpen currentpen _op_
- else :
- errmessage("drawdot only accepts a pair expression")
- fi
-enddef ;
+def drawdot expr z =
+ addto currentpicture contour makepath currentpen shifted z
+ _op_ enddef;
def unfill expr c = fill c withcolor background enddef;
def undraw expr p = draw p withcolor background enddef;
diff --git a/metapost/context/base/mp-base.mpiv b/metapost/context/base/mp-base.mpiv
index 0b655ef47..2887cc462 100644
--- a/metapost/context/base/mp-base.mpiv
+++ b/metapost/context/base/mp-base.mpiv
@@ -323,7 +323,7 @@ primarydef w dotprod z =
enddef ;
primarydef x**y =
- if y = 2 :
+ if y=2 :
x*x
else :
takepower y of x
@@ -348,46 +348,11 @@ def takepower expr y of x =
endfor
fi
else :
- hide(errmessage "Undefined power: " & decimal x & "**" & decimal y)
+ hide(errmessage "Undefined power: " & decimal x&"**"&decimal y)
fi
fi
enddef ;
-% for big number systems:
-%
-% primarydef x**y =
-% if y = 1 :
-% x
-% elseif y = 2 :
-% x*x
-% elseif y = 3 :
-% x*x*x
-% else :
-% takepower y of x
-% fi
-% enddef ;
-%
-% vardef takepower expr y of x =
-% if (x=0) and (y>0) :
-% 0
-% else :
-% 1
-% if y = floor y :
-% if y >= 0 :
-% for n=1 upto y :
-% *x
-% endfor
-% else :
-% for n=-1 downto y :
-% /x
-% endfor
-% fi
-% else :
-% hide(errmessage "Undefined power: " & decimal x & "**" & decimal y)
-% fi
-% fi
-% enddef ;
-
vardef direction expr t of p =
postcontrol t of p - precontrol t of p
enddef ;
@@ -629,36 +594,8 @@ def filldraw expr c =
addto currentpicture contour c withpen currentpen _op_
enddef ;
-% def drawdot expr z =
-% addto currentpicture contour makepath currentpen shifted z _op_
-% enddef ;
-%
-% testcase DEK:
-%
-% for j=1 upto 9 :
-% pickup pencircle xscaled .4 yscaled .2 ;
-% drawdot (10j,0) withpen pencircle xscaled .5j yscaled .25j rotated 45 ;
-% pickup pencircle xscaled .5j yscaled .25j rotated 45 ;
-% drawdot (10j,10);
-% endfor ;
-%
-% or:
-%
-%\startMPpage
-%
-% def drawdot expr z =
-% addto currentpicture contour (makepath currentpen shifted z) _op_
-% enddef;
-%
-% drawdot origin shifted (0,-3cm) withpen pencircle scaled 2cm ;
-% pickup pencircle scaled 2cm ; drawdot origin withcolor red ;
-
-def drawdot expr p =
- if pair p :
- addto currentpicture doublepath p withpen currentpen _op_
- else :
- errmessage("drawdot only accepts a pair expression")
- fi
+def drawdot expr z =
+ addto currentpicture contour makepath currentpen shifted z _op_
enddef ;
def unfill expr c = fill c withcolor background enddef ;
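The commented-out takepower variant removed above builds integer powers by repeated
multiplication or division inside a loop-in-expression. For readers less used to
that MetaPost idiom, the same dispatch reads as follows in Lua; this is a plain
transliteration for illustration, not part of any shipped file.

-- transliteration of the integer-exponent branches of the removed takepower variant
local function intpower(x, y) -- y is assumed to be an integer here
    local r = 1
    if y >= 0 then
        for _ = 1, y do r = r * x end      -- for n=1 upto y : *x endfor
    else
        for _ = -1, y, -1 do r = r / x end -- for n=-1 downto y : /x endfor
    end
    return r
end

-- intpower(2,10) returns 1024, intpower(2,-2) returns 0.25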
diff --git a/metapost/context/base/mp-grap.mpiv b/metapost/context/base/mp-grap.mpiv
index 6d69c0b1e..417bfbe69 100644
--- a/metapost/context/base/mp-grap.mpiv
+++ b/metapost/context/base/mp-grap.mpiv
@@ -52,11 +52,11 @@ fi
% endgraph end of graph--the result is a picture
% option `plot
--ldx]]--
function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
- local base = getprev(start) -- [glyph] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
+ local base = start.prev -- [glyph] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
if marks[basechar] then
while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
if not marks[basechar] then
break
end
@@ -738,16 +717,16 @@ end
function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
-- check chainpos variant
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
- local base = getprev(start) -- [glyph] [optional marks] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
+ local base = start.prev -- [glyph] [optional marks] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
if marks[basechar] then
while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
if not marks[basechar] then
break
end
@@ -759,7 +738,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
end
end
- local index = getattr(start,a_ligacomp)
+ local index = start[a_ligacomp]
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -806,22 +785,22 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
- local base = getprev(start) -- [glyph] [basemark] [start=mark]
- local slc = getattr(start,a_ligacomp)
+ local base = start.prev -- [glyph] [basemark] [start=mark]
+ local slc = start[a_ligacomp]
if slc then -- a rather messy loop ... needs checking with husayni
while base do
- local blc = getattr(base,a_ligacomp)
+ local blc = base[a_ligacomp]
if blc and blc ~= slc then
- base = getprev(base)
+ base = base.prev
else
break
end
end
end
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
- local basechar = getchar(base)
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
+ local basechar = base.char
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -861,21 +840,21 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
end
function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and getattr(start,a_cursbase)
+ local alreadydone = cursonce and start[a_cursbase]
if not alreadydone then
local done = false
- local startchar = getchar(start)
+ local startchar = start.char
if marks[startchar] then
if trace_cursive then
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt = getnext(start)
- while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
- local nextchar = getchar(nxt)
+ local nxt = start.next
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
+ local nextchar = nxt.char
if marks[nextchar] then
-- should not happen (maybe warning)
- nxt = getnext(nxt)
+ nxt = nxt.next
else
local entryanchors = descriptions[nextchar]
if entryanchors then
@@ -910,14 +889,14 @@ function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
return head, start, done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
end
return head, start, false
end
end
function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
- local startchar = getchar(start)
+ local startchar = start.char
local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
@@ -928,19 +907,19 @@ end
function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
-- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
-- todo: kerns in components of ligatures
- local snext = getnext(start)
+ local snext = start.next
if not snext then
return head, start, false
else
local prev, done = start, false
local factor = tfmdata.parameters.factor
local lookuptype = lookuptypes[lookupname]
- while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
- local nextchar = getchar(snext)
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
+ local nextchar = snext.char
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
prev = snext
- snext = getnext(snext)
+ snext = snext.next
else
if not krn then
-- skip
@@ -948,14 +927,14 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
if lookuptype == "pair" then -- probably not needed
local a, b = krn[2], krn[3]
if a and #a > 0 then
- local startchar = getchar(start)
+ local startchar = start.char
local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
- local startchar = getchar(start)
+ local startchar = start.char
local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
@@ -967,7 +946,7 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
-- if a and a ~= 0 then
-- local k = setkern(snext,factor,rlmode,a)
-- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
-- end
-- end
-- if b and b ~= 0 then
@@ -978,7 +957,7 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
elseif krn ~= 0 then
local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
end
done = true
end
@@ -1033,13 +1012,13 @@ end
-- itself. It is meant mostly for dealing with Urdu.
function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = getchar(start)
+ local char = start.char
local replacement = replacements[char]
if replacement then
if trace_singles then
logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
end
- setfield(start,"char",replacement)
+ start.char = replacement
return head, start, true
else
return head, start, false
@@ -1068,9 +1047,9 @@ as less as needed but that would also make the code even more messy.
-- -- done
-- elseif ignoremarks then
-- repeat -- start x x m x x stop => start m
--- local next = getnext(start)
--- if not marks[getchar(next)] then
--- local components = getfield(next,"components")
+-- local next = start.next
+-- if not marks[next.char] then
+-- local components = next.components
-- if components then -- probably not needed
-- flush_node_list(components)
-- end
@@ -1080,8 +1059,8 @@ as less as needed but that would also make the code even more messy.
-- until next == stop
-- else -- start x x x stop => start
-- repeat
--- local next = getnext(start)
--- local components = getfield(next,"components")
+-- local next = start.next
+-- local components = next.components
-- if components then -- probably not needed
-- flush_node_list(components)
-- end
@@ -1105,8 +1084,8 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
end
while current do
- if getid(current) == glyph_code then
- local currentchar = getchar(current)
+ if current.id == glyph_code then
+ local currentchar = current.char
local lookupname = subtables[1] -- only 1
local replacement = lookuphash[lookupname]
if not replacement then
@@ -1123,14 +1102,14 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
if trace_singles then
logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
end
- setfield(current,"char",replacement)
+ current.char = replacement
end
end
return head, start, true
elseif current == stop then
break
else
- current = getnext(current)
+ current = current.next
end
end
return head, start, false
@@ -1145,7 +1124,7 @@ the match.
function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
-- local head, n = delete_till_stop(head,start,stop)
- local startchar = getchar(start)
+ local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local replacements = lookuphash[lookupname]
@@ -1188,8 +1167,8 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
local subtables = currentlookup.subtables
local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
while current do
- if getid(current) == glyph_code then -- is this check needed?
- local currentchar = getchar(current)
+ if current.id == glyph_code then -- is this check needed?
+ local currentchar = current.char
local lookupname = subtables[1]
local alternatives = lookuphash[lookupname]
if not alternatives then
@@ -1204,7 +1183,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
end
- setfield(start,"char",choice)
+ start.char = choice
else
if trace_alternatives then
logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
@@ -1218,7 +1197,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
elseif current == stop then
break
else
- current = getnext(current)
+ current = current.next
end
end
return head, start, false
@@ -1233,7 +1212,7 @@ assume rather stupid ligatures (no complex disc nodes).
--ldx]]--
function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = getchar(start)
+ local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local ligatures = lookuphash[lookupname]
@@ -1248,20 +1227,20 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
end
else
- local s = getnext(start)
+ local s = start.next
local discfound = false
local last = stop
local nofreplacements = 0
local skipmark = currentlookup.flags[1]
while s do
- local id = getid(s)
+ local id = s.id
if id == disc_code then
- s = getnext(s)
+ s = s.next
discfound = true
else
- local schar = getchar(s)
+ local schar = s.char
if skipmark and marks[schar] then -- marks
- s = getnext(s)
+ s = s.next
else
local lg = ligatures[schar]
if lg then
@@ -1269,7 +1248,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if s == stop then
break
else
- s = getnext(s)
+ s = s.next
end
else
break
@@ -1286,7 +1265,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start == stop then
logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
end
end
head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
@@ -1295,7 +1274,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start == stop then
logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
end
end
end
@@ -1306,7 +1285,7 @@ end
chainmores.gsub_ligature = chainprocs.gsub_ligature
function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1315,14 +1294,14 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
markanchors = markanchors[markchar]
end
if markanchors then
- local base = getprev(start) -- [glyph] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
+ local base = start.prev -- [glyph] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
if marks[basechar] then
while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
if not marks[basechar] then
break
end
@@ -1370,7 +1349,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
end
function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1379,14 +1358,14 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
markanchors = markanchors[markchar]
end
if markanchors then
- local base = getprev(start) -- [glyph] [optional marks] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
+ local base = start.prev -- [glyph] [optional marks] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
if marks[basechar] then
while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
if not marks[basechar] then
break
end
@@ -1399,7 +1378,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
end
-- todo: like marks a ligatures hash
- local index = getattr(start,a_ligacomp)
+ local index = start[a_ligacomp]
local baseanchors = descriptions[basechar].anchors
if baseanchors then
local baseanchors = baseanchors['baselig']
@@ -1439,9 +1418,9 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
- -- local alreadydone = markonce and getattr(start,a_markmark)
+ -- local alreadydone = markonce and start[a_markmark]
-- if not alreadydone then
-- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
local subtables = currentlookup.subtables
@@ -1451,20 +1430,20 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
markanchors = markanchors[markchar]
end
if markanchors then
- local base = getprev(start) -- [glyph] [basemark] [start=mark]
- local slc = getattr(start,a_ligacomp)
+ local base = start.prev -- [glyph] [basemark] [start=mark]
+ local slc = start[a_ligacomp]
if slc then -- a rather messy loop ... needs checking with husayni
while base do
- local blc = getattr(base,a_ligacomp)
+ local blc = base[a_ligacomp]
if blc and blc ~= slc then
- base = getprev(base)
+ base = base.prev
else
break
end
end
end
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
- local basechar = getchar(base)
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
+ local basechar = base.char
local baseanchors = descriptions[basechar].anchors
if baseanchors then
baseanchors = baseanchors['basemark']
@@ -1504,9 +1483,9 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
end
function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and getattr(start,a_cursbase)
+ local alreadydone = cursonce and start[a_cursbase]
if not alreadydone then
- local startchar = getchar(start)
+ local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local exitanchors = lookuphash[lookupname]
@@ -1520,12 +1499,12 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt = getnext(start)
- while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
- local nextchar = getchar(nxt)
+ local nxt = start.next
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
+ local nextchar = nxt.char
if marks[nextchar] then
-- should not happen (maybe warning)
- nxt = getnext(nxt)
+ nxt = nxt.next
else
local entryanchors = descriptions[nextchar]
if entryanchors then
@@ -1560,7 +1539,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
return head, start, done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
end
return head, start, false
end
@@ -1570,7 +1549,7 @@ end
function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
-- untested .. needs checking for the new model
- local startchar = getchar(start)
+ local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local kerns = lookuphash[lookupname]
@@ -1591,9 +1570,9 @@ chainmores.gpos_single = chainprocs.gpos_single -- okay?
-- when machines become faster i will make a shared function
function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local snext = getnext(start)
+ local snext = start.next
if snext then
- local startchar = getchar(start)
+ local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local kerns = lookuphash[lookupname]
@@ -1603,12 +1582,12 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
local lookuptype = lookuptypes[lookupname]
local prev, done = start, false
local factor = tfmdata.parameters.factor
- while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
- local nextchar = getchar(snext)
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
+ local nextchar = snext.char
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
prev = snext
- snext = getnext(snext)
+ snext = snext.next
else
if not krn then
-- skip
@@ -1616,14 +1595,14 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
if lookuptype == "pair" then
local a, b = krn[2], krn[3]
if a and #a > 0 then
- local startchar = getchar(start)
+ local startchar = start.char
local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
- local startchar = getchar(start)
+ local startchar = start.char
local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
@@ -1635,7 +1614,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
if a and a ~= 0 then
local k = setkern(snext,factor,rlmode,a)
if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
end
end
if b and b ~= 0 then
@@ -1646,7 +1625,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
elseif krn ~= 0 then
local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
end
done = true
end
@@ -1678,12 +1657,6 @@ local function show_skip(kind,chainname,char,ck,class)
end
end
-local quit_on_no_replacement = true
-
-directives.register("otf.chain.quitonnoreplacement",function(value) -- maybe per font
- quit_on_no_replacement = value
-end)
-
local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
-- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
local flags = sequence.flags
@@ -1704,7 +1677,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- f..l = mid string
if s == 1 then
-- never happens
- match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
+ match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
else
-- maybe we need a better space check (maybe check for glue or category or combination)
-- we cannot optimize for n=2 because there can be disc nodes
@@ -1719,13 +1692,13 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- match = true
else
local n = f + 1
- last = getnext(last)
+ last = last.next
while n <= l do
if last then
- local id = getid(last)
+ local id = last.id
if id == glyph_code then
- if getfont(last) == currentfont and getsubtype(last)<256 then
- local char = getchar(last)
+ if last.font == currentfont and last.subtype<256 then
+ local char = last.char
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1734,10 +1707,10 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if trace_skips then
show_skip(kind,chainname,char,ck,class)
end
- last = getnext(last)
+ last = last.next
elseif seq[n][char] then
if n < l then
- last = getnext(last)
+ last = last.next
end
n = n + 1
else
@@ -1753,7 +1726,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
elseif id == disc_code then
- last = getnext(last)
+ last = last.next
else
match = false
break
@@ -1767,15 +1740,15 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
-- before
if match and f > 1 then
- local prev = getprev(start)
+ local prev = start.prev
if prev then
local n = f-1
while n >= 1 do
if prev then
- local id = getid(prev)
+ local id = prev.id
if id == glyph_code then
- if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char
- local char = getchar(prev)
+ if prev.font == currentfont and prev.subtype<256 then -- normal char
+ local char = prev.char
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1806,7 +1779,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
match = false
break
end
- prev = getprev(prev)
+ prev = prev.prev
elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
n = n -1
else
@@ -1827,16 +1800,16 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
-- after
if match and s > l then
- local current = last and getnext(last)
+ local current = last and last.next
if current then
-- removed optimization for s-l == 1, we have to deal with marks anyway
local n = l + 1
while n <= s do
if current then
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char
- local char = getchar(current)
+ if current.font == currentfont and current.subtype<256 then -- normal char
+ local char = current.char
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1867,7 +1840,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
match = false
break
end
- current = getnext(current)
+ current = current.next
elseif seq[n][32] then
n = n + 1
else
@@ -1891,7 +1864,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- ck == currentcontext
if trace_contexts then
local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = getchar(start)
+ local char = start.char
if ck[9] then
logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
@@ -1926,12 +1899,12 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
repeat
if skipped then
while true do
- local char = getchar(start)
+ local char = start.char
local ccd = descriptions[char]
if ccd then
local class = ccd.class
if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = getnext(start)
+ start = start.next
else
break
end
@@ -1965,7 +1938,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
if start then
- start = getnext(start)
+ start = start.next
else
-- weird
end
@@ -1976,7 +1949,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if replacements then
head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
else
- done = quit_on_no_replacement -- can be meant to be skipped / quite inconsistent in fonts
+ done = true -- can be meant to be skipped
if trace_contexts then
logprocess("%s: skipping match",cref(kind,chainname))
end
@@ -2126,12 +2099,12 @@ end
-- if ok then
-- done = true
-- end
--- if start then start = getnext(start) end
+-- if start then start = start.next end
-- else
--- start = getnext(start)
+-- start = start.next
-- end
-- else
--- start = getnext(start)
+-- start = start.next
-- end
-- there will be a new direction parser (pre-parsed etc)
@@ -2153,8 +2126,6 @@ local function featuresprocessor(head,font,attr)
return head, false
end
- head = tonut(head)
-
if trace_steps then
checkstep(head)
end
@@ -2186,8 +2157,6 @@ local function featuresprocessor(head,font,attr)
-- Keeping track of the headnode is needed for devanagari (I generalized it a bit
-- so that multiple cases are also covered.)
- -- todo: retain prev
-
for s=1,#datasets do
local dataset = datasets[s]
featurevalue = dataset[1] -- todo: pass to function instead of using a global
@@ -2206,10 +2175,10 @@ local function featuresprocessor(head,font,attr)
-- we need to get rid of this slide! probably no longer needed in latest luatex
local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
a = a == attr
else
@@ -2220,7 +2189,7 @@ local function featuresprocessor(head,font,attr)
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start.char]
if lookupmatch then
head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
if success then
@@ -2231,15 +2200,15 @@ local function featuresprocessor(head,font,attr)
report_missing_cache(typ,lookupname)
end
end
- if start then start = getprev(start) end
+ if start then start = start.prev end
else
- start = getprev(start)
+ start = start.prev
end
else
- start = getprev(start)
+ start = start.prev
end
else
- start = getprev(start)
+ start = start.prev
end
end
else
@@ -2259,16 +2228,16 @@ local function featuresprocessor(head,font,attr)
local head = start
local done = false
while start do
- local id = getid(start)
- if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
+ local id = start.id
+ if id == glyph_code and start.font == font and start.subtype <256 then
+ local a = start[0]
if a then
- a = (a == attr) and (not attribute or getattr(start,a_state) == attribute)
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
else
- a = not attribute or getattr(start,a_state) == attribute
+ a = not attribute or start[a_state] == attribute
end
if a then
- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start.char]
if lookupmatch then
-- sequence can go
local ok
@@ -2277,12 +2246,12 @@ local function featuresprocessor(head,font,attr)
done = true
end
end
- if start then start = getnext(start) end
+ if start then start = start.next end
else
- start = getnext(start)
+ start = start.next
end
else
- start = getnext(start)
+ start = start.next
end
end
if done then
@@ -2292,19 +2261,19 @@ local function featuresprocessor(head,font,attr)
end
local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = getprev(disc)
- local next = getnext(disc)
+ local prev = disc.prev
+ local next = disc.next
if prev and next then
- setfield(prev,"next",next)
- -- setfield(next,"prev",prev)
- local a = getattr(prev,0)
+ prev.next = next
+ -- next.prev = prev
+ local a = prev[0]
if a then
- a = (a == attr) and (not attribute or getattr(prev,a_state) == attribute)
+ a = (a == attr) and (not attribute or prev[a_state] == attribute)
else
- a = not attribute or getattr(prev,a_state) == attribute
+ a = not attribute or prev[a_state] == attribute
end
if a then
- local lookupmatch = lookupcache[getchar(prev)]
+ local lookupmatch = lookupcache[prev.char]
if lookupmatch then
-- sequence can go
local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
@@ -2314,24 +2283,24 @@ local function featuresprocessor(head,font,attr)
end
end
end
- setfield(prev,"next",disc)
- -- setfield(next,"prev",disc)
+ prev.next = disc
+ -- next.prev = disc
end
return next
end
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
- a = (a == attr) and (not attribute or getattr(start,a_state) == attribute)
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
else
- a = not attribute or getattr(start,a_state) == attribute
+ a = not attribute or start[a_state] == attribute
end
if a then
- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start.char]
if lookupmatch then
-- sequence can go
local ok
@@ -2340,39 +2309,39 @@ local function featuresprocessor(head,font,attr)
success = true
end
end
- if start then start = getnext(start) end
+ if start then start = start.next end
else
- start = getnext(start)
+ start = start.next
end
else
- start = getnext(start)
+ start = start.next
end
elseif id == disc_code then
-- mostly for gsub
- if getsubtype(start) == discretionary_code then
- local pre = getfield(start,"pre")
+ if start.subtype == discretionary_code then
+ local pre = start.pre
if pre then
local new = subrun(pre)
- if new then setfield(start,"pre",new) end
+ if new then start.pre = new end
end
- local post = getfield(start,"post")
+ local post = start.post
if post then
local new = subrun(post)
- if new then setfield(start,"post",new) end
+ if new then start.post = new end
end
- local replace = getfield(start,"replace")
+ local replace = start.replace
if replace then
local new = subrun(replace)
- if new then setfield(start,"replace",new) end
+ if new then start.replace = new end
end
elseif typ == "gpos_single" or typ == "gpos_pair" then
kerndisc(start)
end
- start = getnext(start)
+ start = start.next
elseif id == whatsit_code then -- will be function
- local subtype = getsubtype(start)
+ local subtype = start.subtype
if subtype == dir_code then
- local dir = getfield(start,"dir")
+ local dir = start.dir
if dir == "+TRT" or dir == "+TLT" then
topstack = topstack + 1
dirstack[topstack] = dir
@@ -2391,7 +2360,7 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype == localpar_code then
- local dir = getfield(start,"dir")
+ local dir = start.dir
if dir == "TRT" then
rlparmode = -1
elseif dir == "TLT" then
@@ -2405,11 +2374,11 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
- start = getnext(start)
+ start = start.next
elseif id == math_code then
- start = getnext(end_of_math(start))
+ start = end_of_math(start).next
else
- start = getnext(start)
+ start = start.next
end
end
end
@@ -2420,20 +2389,20 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
local head = start
local done = false
while start do
- local id = getid(start)
- if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
+ local id = start.id
+ if id == glyph_code and start.font == font and start.subtype <256 then
+ local a = start[0]
if a then
- a = (a == attr) and (not attribute or getattr(start,a_state) == attribute)
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
else
- a = not attribute or getattr(start,a_state) == attribute
+ a = not attribute or start[a_state] == attribute
end
if a then
for i=1,ns do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start.char]
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local ok
@@ -2450,12 +2419,12 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_missing_cache(typ,lookupname)
end
end
- if start then start = getnext(start) end
+ if start then start = start.next end
else
- start = getnext(start)
+ start = start.next
end
else
- start = getnext(start)
+ start = start.next
end
end
if done then
@@ -2465,23 +2434,23 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
end
local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = getprev(disc)
- local next = getnext(disc)
+ local prev = disc.prev
+ local next = disc.next
if prev and next then
- setfield(prev,"next",next)
- -- setfield(next,"prev",prev)
- local a = getattr(prev,0)
+ prev.next = next
+ -- next.prev = prev
+ local a = prev[0]
if a then
- a = (a == attr) and (not attribute or getattr(prev,a_state) == attribute)
+ a = (a == attr) and (not attribute or prev[a_state] == attribute)
else
- a = not attribute or getattr(prev,a_state) == attribute
+ a = not attribute or prev[a_state] == attribute
end
if a then
for i=1,ns do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[getchar(prev)]
+ local lookupmatch = lookupcache[prev.char]
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
@@ -2495,28 +2464,28 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
end
end
end
- setfield(prev,"next",disc)
- -- setfield(next,"prev",disc)
+ prev.next = disc
+ -- next.prev = disc
end
return next
end
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
- a = (a == attr) and (not attribute or getattr(start,a_state) == attribute)
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
else
- a = not attribute or getattr(start,a_state) == attribute
+ a = not attribute or start[a_state] == attribute
end
if a then
for i=1,ns do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start.char]
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local ok
@@ -2533,39 +2502,39 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_missing_cache(typ,lookupname)
end
end
- if start then start = getnext(start) end
+ if start then start = start.next end
else
- start = getnext(start)
+ start = start.next
end
else
- start = getnext(start)
+ start = start.next
end
elseif id == disc_code then
-- mostly for gsub
- if getsubtype(start) == discretionary_code then
- local pre = getfield(start,"pre")
+ if start.subtype == discretionary_code then
+ local pre = start.pre
if pre then
local new = subrun(pre)
- if new then setfield(start,"pre",new) end
+ if new then start.pre = new end
end
- local post = getfield(start,"post")
+ local post = start.post
if post then
local new = subrun(post)
- if new then setfield(start,"post",new) end
+ if new then start.post = new end
end
- local replace = getfield(start,"replace")
+ local replace = start.replace
if replace then
local new = subrun(replace)
- if new then setfield(start,"replace",new) end
+ if new then start.replace = new end
end
elseif typ == "gpos_single" or typ == "gpos_pair" then
kerndisc(start)
end
- start = getnext(start)
+ start = start.next
elseif id == whatsit_code then
- local subtype = getsubtype(start)
+ local subtype = start.subtype
if subtype == dir_code then
- local dir = getfield(start,"dir")
+ local dir = start.dir
if dir == "+TRT" or dir == "+TLT" then
topstack = topstack + 1
dirstack[topstack] = dir
@@ -2584,7 +2553,7 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype == localpar_code then
- local dir = getfield(start,"dir")
+ local dir = start.dir
if dir == "TRT" then
rlparmode = -1
elseif dir == "TLT" then
@@ -2597,11 +2566,11 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
- start = getnext(start)
+ start = start.next
elseif id == math_code then
- start = getnext(end_of_math(start))
+ start = end_of_math(start).next
else
- start = getnext(start)
+ start = start.next
end
end
end
@@ -2613,9 +2582,6 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
registerstep(head)
end
end
-
- head = tonode(head)
-
return head, done
end
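Most of the changes above replace the nodes.nuts accessor layer (getchar, getnext,
getprev, getattr, setfield, with tonut/tonode conversions at the boundaries) by
plain field access on userdata nodes (start.char, start.next, start[attribute]).
The following minimal sketch contrasts the two styles on a trivial glyph-counting
traversal; it assumes a ConTeXt MkIV runtime where nodes and nodes.nuts are
available, and it is not a fragment of the code patched above.

-- direct field access, the style restored above: nodes are plain userdata
local glyph_code = nodes.nodecodes.glyph

local function count_glyphs_fields(head, font)
    local n, current = 0, head
    while current do
        if current.id == glyph_code and current.font == font then
            n = n + 1
        end
        current = current.next
    end
    return n
end

-- the accessor style being removed: convert to 'nuts' once, then use getters
local function count_glyphs_nuts(head, font)
    local nuts    = nodes.nuts
    local tonut   = nuts.tonut
    local getid   = nuts.getid
    local getfont = nuts.getfont
    local getnext = nuts.getnext
    local n, current = 0, tonut(head)
    while current do
        if getid(current) == glyph_code and getfont(current) == font then
            n = n + 1
        end
        current = getnext(current)
    end
    return n
end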
diff --git a/tex/context/base/font-ott.lua b/tex/context/base/font-ott.lua
index 1e2309056..e3aacd0d1 100644
--- a/tex/context/base/font-ott.lua
+++ b/tex/context/base/font-ott.lua
@@ -42,7 +42,6 @@ local scripts = allocate {
['cprt'] = 'cypriot syllabary',
['cyrl'] = 'cyrillic',
['deva'] = 'devanagari',
- ['dev2'] = 'devanagari variant 2',
['dsrt'] = 'deseret',
['ethi'] = 'ethiopic',
['geor'] = 'georgian',
@@ -68,7 +67,6 @@ local scripts = allocate {
['linb'] = 'linear b',
['math'] = 'mathematical alphanumeric symbols',
['mlym'] = 'malayalam',
- ['mlm2'] = 'malayalam variant 2',
['mong'] = 'mongolian',
['musc'] = 'musical symbols',
['mymr'] = 'myanmar',
@@ -633,7 +631,6 @@ local features = allocate {
['js..'] = 'justification ..',
["dv.."] = "devanagari ..",
- ["ml.."] = "malayalam ..",
}
local baselines = allocate {
diff --git a/tex/context/base/font-otx.lua b/tex/context/base/font-otx.lua
index b7d2ae0bc..f39045223 100644
--- a/tex/context/base/font-otx.lua
+++ b/tex/context/base/font-otx.lua
@@ -30,29 +30,15 @@ analyzers.methods = methods
local a_state = attributes.private('state')
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local setattr = nuts.setattr
-
-local traverse_id = nuts.traverse_id
-local traverse_node_list = nuts.traverse
-local end_of_math = nuts.end_of_math
-
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local math_code = nodecodes.math
+local traverse_id = node.traverse_id
+local traverse_node_list = node.traverse
+local end_of_math = node.end_of_math
+
local fontdata = fonts.hashes.identifiers
local categories = characters and characters.categories or { } -- sorry, only in context
local chardata = characters and characters.data
@@ -109,61 +95,60 @@ analyzers.useunicodemarks = false
-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
-function analyzers.setstate(head,font) -- we can skip math
+function analyzers.setstate(head,font)
local useunicodemarks = analyzers.useunicodemarks
local tfmdata = fontdata[font]
local descriptions = tfmdata.descriptions
local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
- current = tonut(current)
while current do
- local id = getid(current)
- if id == glyph_code and getfont(current) == font then
+ local id = current.id
+ if id == glyph_code and current.font == font then
done = true
- local char = getchar(current)
+ local char = current.char
local d = descriptions[char]
if d then
if d.class == "mark" then
done = true
- setattr(current,a_state,s_mark)
+ current[a_state] = s_mark
elseif useunicodemarks and categories[char] == "mn" then
done = true
- setattr(current,a_state,s_mark)
+ current[a_state] = s_mark
elseif n == 0 then
first, last, n = current, current, 1
- setattr(current,a_state,s_init)
+ current[a_state] = s_init
else
last, n = current, n+1
- setattr(current,a_state,s_medi)
+ current[a_state] = s_medi
end
else -- finish
if first and first == last then
- setattr(last,a_state,s_isol)
+ last[a_state] = s_isol
elseif last then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
end
first, last, n = nil, nil, 0
end
elseif id == disc_code then
-- always in the middle
- setattr(current,a_state,s_medi)
+ current[a_state] = s_medi
last = current
else -- finish
if first and first == last then
- setattr(last,a_state,s_isol)
+ last[a_state] = s_isol
elseif last then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
end
first, last, n = nil, nil, 0
if id == math_code then
current = end_of_math(current)
end
end
- current = getnext(current)
+ current = current.next
end
if first and first == last then
- setattr(last,a_state,s_isol)
+ last[a_state] = s_isol
elseif last then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
end
return head, done
end
@@ -224,7 +209,7 @@ methods.latn = analyzers.setstate
local arab_warned = { }
local function warning(current,what)
- local char = getchar(current)
+ local char = current.char
if not arab_warned[char] then
log.report("analyze","arab: character %C has no %a class",char,what)
arab_warned[char] = true
@@ -276,95 +261,94 @@ function methods.arab(head,font,attr)
local first, last = nil, nil
local c_first, c_last = nil, nil
local current, done = head, false
- current = tonut(current)
while current do
- local id = getid(current)
- if id == glyph_code and getfont(current) == font and getsubtype(current)<256 and not getattr(current,a_state) then
+ local id = current.id
+ if id == glyph_code and current.font == font and current.subtype<256 and not current[a_state] then
done = true
- local char = getchar(current)
+ local char = current.char
local classifier = classifiers[char]
if not classifier then
if last then
if c_last == s_medi or c_last == s_fina then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
else
warning(last,"fina")
- setattr(last,a_state,s_error)
+ last[a_state] = s_error
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- setattr(first,a_state,s_isol)
+ first[a_state] = s_isol
else
warning(first,"isol")
- setattr(first,a_state,s_error)
+ first[a_state] = s_error
end
first = nil
end
elseif classifier == s_mark then
- setattr(current,a_state,s_mark)
+ current[a_state] = s_mark
elseif classifier == s_isol then
if last then
if c_last == s_medi or c_last == s_fina then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
else
warning(last,"fina")
- setattr(last,a_state,s_error)
+ last[a_state] = s_error
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- setattr(first,a_state,s_isol)
+ first[a_state] = s_isol
else
warning(first,"isol")
- setattr(first,a_state,s_error)
+ first[a_state] = s_error
end
first = nil
end
- setattr(current,a_state,s_isol)
+ current[a_state] = s_isol
elseif classifier == s_medi then
if first then
last = current
c_last = classifier
- setattr(current,a_state,s_medi)
+ current[a_state] = s_medi
else
- setattr(current,a_state,s_init)
+ current[a_state] = s_init
first = current
c_first = classifier
end
elseif classifier == s_fina then
if last then
- if getattr(last,a_state) ~= s_init then
- setattr(last,a_state,s_medi)
+ if last[a_state] ~= s_init then
+ last[a_state] = s_medi
end
- setattr(current,a_state,s_fina)
+ current[a_state] = s_fina
first, last = nil, nil
elseif first then
- -- if getattr(first,a_state) ~= s_init then
+ -- if first[a_state] ~= s_init then
-- -- needs checking
- -- setattr(first,a_state,s_medi)
+ -- first[a_state] = s_medi
-- end
- setattr(current,a_state,s_fina)
+ current[a_state] = s_fina
first = nil
else
- setattr(current,a_state,s_isol)
+ current[a_state] = s_isol
end
else -- classifier == s_rest
- setattr(current,a_state,s_rest)
+ current[a_state] = s_rest
if last then
if c_last == s_medi or c_last == s_fina then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
else
warning(last,"fina")
- setattr(last,a_state,s_error)
+ last[a_state] = s_error
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- setattr(first,a_state,s_isol)
+ first[a_state] = s_isol
else
warning(first,"isol")
- setattr(first,a_state,s_error)
+ first[a_state] = s_error
end
first = nil
end
@@ -372,18 +356,18 @@ function methods.arab(head,font,attr)
else
if last then
if c_last == s_medi or c_last == s_fina then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
else
warning(last,"fina")
- setattr(last,a_state,s_error)
+ last[a_state] = s_error
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- setattr(first,a_state,s_isol)
+ first[a_state] = s_isol
else
warning(first,"isol")
- setattr(first,a_state,s_error)
+ first[a_state] = s_error
end
first = nil
end
@@ -391,21 +375,21 @@ function methods.arab(head,font,attr)
current = end_of_math(current)
end
end
- current = getnext(current)
+ current = current.next
end
if last then
if c_last == s_medi or c_last == s_fina then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
else
warning(last,"fina")
- setattr(last,a_state,s_error)
+ last[a_state] = s_error
end
elseif first then
if c_first == s_medi or c_first == s_fina then
- setattr(first,a_state,s_isol)
+ first[a_state] = s_isol
else
warning(first,"isol")
- setattr(first,a_state,s_error)
+ first[a_state] = s_error
end
end
return head, done
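The analyzers reverted above tag glyphs with init/medi/fina/isol states: the first
glyph of a joinable run becomes init, inner glyphs medi, the last one fina, and a
run of a single glyph isol. Stripped of node handling and of the mark and error
classes dealt with above, that run-tagging idea can be sketched on a plain Lua
array of classifiers; this simplification is for illustration only.

-- simplified joining-state assignment over an array of classifiers,
-- where "dual" means the character joins on both sides and "none" means it does not
local function joinstates(classifiers)
    local states, first = { }, nil
    for i, c in ipairs(classifiers) do
        if c == "dual" then
            if first then
                states[i] = "medi"         -- provisionally medial, may become fina below
            else
                states[i] = "init"
                first = i
            end
        else
            states[i] = "none"
            if first then
                if first == i - 1 then
                    states[first] = "isol" -- a run of one glyph is isolated
                else
                    states[i-1] = "fina"   -- close the run on the previous glyph
                end
                first = nil
            end
        end
    end
    if first then
        if first == #classifiers then
            states[first] = "isol"
        else
            states[#classifiers] = "fina"
        end
    end
    return states
end

-- joinstates { "dual", "dual", "dual", "none", "dual" }
--   returns  { "init", "medi", "fina", "none", "isol" }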
diff --git a/tex/context/base/font-pat.lua b/tex/context/base/font-pat.lua
index 049853796..9733c9ada 100644
--- a/tex/context/base/font-pat.lua
+++ b/tex/context/base/font-pat.lua
@@ -38,7 +38,7 @@ local report = patches.report
-- library) the palatino arabic fonts don't have the mkmk features properly
-- set up.
-register("after","rehash features","^palatino.*arabic", function (data,filename)
+register("after","rehash features","^palatino.*arabic", function patch(data,filename)
local gpos = data.gpos
if gpos then
for k=1,#gpos do
diff --git a/tex/context/base/font-pre.mkiv b/tex/context/base/font-pre.mkiv
index fc6eb289e..c404771fd 100644
--- a/tex/context/base/font-pre.mkiv
+++ b/tex/context/base/font-pre.mkiv
@@ -169,24 +169,6 @@
calt=yes,
kern=yes]
-\definefontfeature
- [malayalam-one]
- [mode=node,
- language=dflt,
- script=mlym,
- akhn=yes,
- blwf=yes,
- half=yes,
- pres=yes,
- blws=yes,
- psts=yes,
- haln=no]
-
-\definefontfeature
- [malayalam-two]
- [malayalam-one]
- [script=mlm2]
-
\definefontfeature
[jamoforms]
[ljmo=yes,
diff --git a/tex/context/base/font-sol.lua b/tex/context/base/font-sol.lua
index a41e4a679..9ccfd0588 100644
--- a/tex/context/base/font-sol.lua
+++ b/tex/context/base/font-sol.lua
@@ -48,41 +48,19 @@ local v_split = variables.split
local settings_to_array = utilities.parsers.settings_to_array
local settings_to_hash = utilities.parsers.settings_to_hash
-local tasks = nodes.tasks
-
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getlist = nuts.getlist
-
-local find_node_tail = nuts.tail
-local free_node = nuts.free
-local free_nodelist = nuts.flush_list
-local copy_nodelist = nuts.copy_list
-local traverse_nodes = nuts.traverse
-local traverse_ids = nuts.traverse_id
-local hpack_nodes = nuts.hpack
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local protect_glyphs = nuts.protect_glyphs
-
-local repack_hlist = nuts.repackhlist
-
+local find_node_tail = node.tail or node.slide
+local free_node = node.free
+local free_nodelist = node.flush_list
+local copy_nodelist = node.copy_list
+local traverse_nodes = node.traverse
+local traverse_ids = node.traverse_id
+local protect_glyphs = nodes.handlers.protectglyphs or node.protect_glyphs
+local hpack_nodes = node.hpack
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local repack_hlist = nodes.repackhlist
local nodes_to_utf = nodes.listtoutf
------ protect_glyphs = nodes.handlers.protectglyphs
-
local setnodecolor = nodes.tracers.colors.set
local nodecodes = nodes.nodecodes
@@ -101,7 +79,8 @@ local localpar_code = whatsitcodes.localpar
local dir_code = whatsitcodes.dir
local userdefined_code = whatsitcodes.userdefined
-local nodepool = nuts.pool
+local nodepool = nodes.pool
+local tasks = nodes.tasks
local usernodeids = nodepool.userids
local new_textdir = nodepool.textdir
@@ -111,7 +90,7 @@ local new_leftskip = nodepool.leftskip
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
------ process_characters = nodes.handlers.characters
+local process_characters = nodes.handlers.characters
local inject_kerns = nodes.injections.handler
local fonthashes = fonts.hashes
@@ -338,12 +317,11 @@ end)
function splitters.split(head)
-- quite fast
- head = tonut(head)
local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0
cache, max_less, max_more = { }, 0, 0
local function flush() -- we can move this
- local font = getfont(start)
- local last = getnext(stop)
+ local font = start.font
+ local last = stop.next
local list = last and copy_nodelist(start,last) or copy_nodelist(start)
local n = #cache + 1
if encapsulate then
@@ -354,18 +332,18 @@ function splitters.split(head)
else
local current = start
while true do
- setattr(current,a_word,n)
+ current[a_word] = n
if current == stop then
break
else
- current = getnext(current)
+ current = current.next
end
end
end
if rlmode == "TRT" or rlmode == "+TRT" then
local dirnode = new_textdir("+TRT")
- setfield(list,"prev",dirnode)
- setfield(dirnode,"next",list)
+ list.prev = dirnode
+ dirnode.next = list
list = dirnode
end
local c = {
@@ -386,11 +364,11 @@ function splitters.split(head)
start, stop, done = nil, nil, true
end
while current do -- also nextid
- local next = getnext(current)
- local id = getid(current)
+ local next = current.next
+ local id = current.id
if id == glyph_code then
- if getsubtype(current) < 256 then
- local a = getattr(current,a_split)
+ if current.subtype < 256 then
+ local a = current[a_split]
if not a then
start, stop = nil, nil
elseif not start then
@@ -406,7 +384,7 @@ function splitters.split(head)
if start then
flush()
end
- elseif start and next and getid(next) == glyph_code and getsubtype(next) < 256 then
+ elseif start and next and next.id == glyph_code and next.subtype < 256 then
-- beware: we can cross future lines
stop = next
else
@@ -416,9 +394,9 @@ function splitters.split(head)
if start then
flush()
end
- local subtype = getsubtype(current)
+ local subtype = current.subtype
if subtype == dir_code or subtype == localpar_code then
- rlmode = getfield(current,"dir")
+ rlmode = current.dir
end
else
if start then
@@ -432,17 +410,17 @@ function splitters.split(head)
end
nofparagraphs = nofparagraphs + 1
nofwords = nofwords + #cache
- return tonode(head), done
+ return head, done
end
local function collect_words(list) -- can be made faster for attributes
local words, w, word = { }, 0, nil
if encapsulate then
for current in traverse_ids(whatsit_code,list) do
- if getsubtype(current) == userdefined_code then -- hm
- local user_id = getfield(current,"user_id")
+ if current.subtype == userdefined_code then -- hm
+ local user_id = current.user_id
if user_id == splitter_one then
- word = { getfield(current,"value"), current, current }
+ word = { current.value, current, current }
w = w + 1
words[w] = word
elseif user_id == splitter_two then
@@ -458,9 +436,9 @@ local function collect_words(list) -- can be made faster for attributes
local current, first, last, index = list, nil, nil, nil
while current do
-- todo: disc and kern
- local id = getid(current)
+ local id = current.id
if id == glyph_code or id == disc_code then
- local a = getattr(current,a_word)
+ local a = current[a_word]
if a then
if a == index then
-- same word
@@ -493,7 +471,7 @@ local function collect_words(list) -- can be made faster for attributes
report_splitters("skipped: %C",current.char)
end
end
- elseif id == kern_code and (getsubtype(current) == fontkern_code or getattr(current,a_fontkern)) then
+ elseif id == kern_code and (current.subtype == fontkern_code or current[a_fontkern]) then
if first then
last = current
else
@@ -511,7 +489,7 @@ local function collect_words(list) -- can be made faster for attributes
end
end
end
- current = getnext(current)
+ current = current.next
end
if index then
w = w + 1
@@ -542,8 +520,8 @@ local function doit(word,list,best,width,badness,line,set,listdir)
if found then
local h, t
if encapsulate then
- h = getnext(word[2]) -- head of current word
- t = getprev(word[3]) -- tail of current word
+ h = word[2].next -- head of current word
+ t = word[3].prev -- tail of current word
else
h = word[2]
t = word[3]
@@ -558,7 +536,7 @@ local function doit(word,list,best,width,badness,line,set,listdir)
ok = true
break
else
- c = getnext(c)
+ c = c.next
end
end
if not ok then
@@ -577,20 +555,19 @@ local function doit(word,list,best,width,badness,line,set,listdir)
local first = copy_nodelist(original)
if not trace_colors then
for n in traverse_nodes(first) do -- maybe fast force so no attr needed
- setattr(n,0,featurenumber) -- this forces dynamics
+ n[0] = featurenumber -- this forces dynamics
end
elseif set == "less" then
for n in traverse_nodes(first) do
setnodecolor(n,"font:isol") -- yellow
- setattr(n,0,featurenumber)
+ n[0] = featurenumber
end
else
for n in traverse_nodes(first) do
setnodecolor(n,"font:medi") -- green
- setattr(n,0,featurenumber)
+ n[0] = featurenumber
end
end
-first = tonode(first)
local font = found.font
local setdynamics = setfontdynamics[font]
if setdynamics then
@@ -602,21 +579,20 @@ first = tonode(first)
report_solutions("fatal error, no dynamics for font %a",font)
end
first = inject_kerns(first)
-first = tonut(first)
- if getid(first) == whatsit_code then
+ if first.id == whatsit_code then
local temp = first
- first = getnext(first)
+ first = first.next
free_node(temp)
end
local last = find_node_tail(first)
-- replace [u]h->t by [u]first->last
- local prev = getprev(h)
- local next = getnext(t)
- setfield(prev,"next",first)
- setfield(first,"prev",prev)
+ local prev = h.prev
+ local next = t.next
+ prev.next = first
+ first.prev = prev
if next then
- setfield(last,"next",next)
- setfield(next,"prev",last)
+ last.next = next
+ next.prev = last
end
-- check new pack
local temp, b = repack_hlist(list,width,'exactly',listdir)
@@ -625,22 +601,22 @@ first = tonut(first)
report_optimizers("line %a, badness before %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"quit")
end
-- remove last insert
- setfield(prev,"next",h)
- setfield(h,"prev",prev)
+ prev.next = h
+ h.prev = prev
if next then
- setfield(t,"next",next)
- setfield(next,"prev",t)
+ t.next = next
+ next.prev = t
else
- setfield(t,"next",nil)
+ t.next = nil
end
- setfield(last,"next",nil)
+ last.next = nil
free_nodelist(first)
else
if trace_optimize then
report_optimizers("line %a, badness before: %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"continue")
end
-- free old h->t
- setfield(t,"next",nil)
+ t.next = nil
free_nodelist(h) -- somehow fails
if not encapsulate then
word[2] = first
@@ -721,9 +697,9 @@ variants[v_random] = function(words,list,best,width,badness,line,set,listdir)
end
local function show_quality(current,what,line)
- local set = getfield(current,"glue_set")
- local sign = getfield(current,"glue_sign")
- local order = getfield(current,"glue_order")
+ local set = current.glue_set
+ local sign = current.glue_sign
+ local order = current.glue_order
local amount = set * ((sign == 2 and -1) or 1)
report_optimizers("line %a, category %a, amount %a, set %a, sign %a, how %a, order %a",line,what,amount,set,sign,how,order)
end
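-- A minimal helper illustrating the sign convention assumed above (in luatex a
-- glue_sign of 1 means the line was stretched, 2 that it was shrunk):
local function signedamount(set,sign)
    return set * (sign == 2 and -1 or 1) -- shrinking shows up as a negative amount
end
-- signedamount(0.25,2) -- -0.25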
@@ -743,25 +719,20 @@ function splitters.optimize(head)
math.setrandomseedi(randomseed)
randomseed = nil
end
- local line = 0
- local tex_hbadness = tex.hbadness
- local tex_hfuzz = tex.hfuzz
- tex.hbadness = 10000
- tex.hfuzz = number.maxdimen
+ local line = 0
+ local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz
+ tex.hbadness, tex.hfuzz = 10000, number.maxdimen
if trace_optimize then
report_optimizers("preroll %a, variant %a, criterium %a, cache size %a",preroll,variant,criterium,nc)
end
- for current in traverse_ids(hlist_code,tonut(head)) do
+ for current in traverse_ids(hlist_code,head) do
+ -- report_splitters("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil))
line = line + 1
- local sign = getfield(current,"glue_sign")
- local dir = getfield(current,"dir")
- local width = getfield(current,"width")
- local list = getlist(current)
- if not encapsulate and getid(list) == glyph_code then
+ local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width
+ if not encapsulate and list.id == glyph_code then
-- nasty .. we always assume a prev being there .. future luatex will always have a leftskip set
- -- is this assignment ok ? .. needs checking
- list = insert_node_before(list,list,new_leftskip(0)) -- new_glue(0)
- setfield(current,"list",list)
+ -- current.list, list = insert_node_before(list,list,new_glue(0))
+ current.list, list = insert_node_before(list,list,new_leftskip(0))
end
local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node
if badness > 0 then
@@ -821,7 +792,7 @@ function splitters.optimize(head)
local words = collect_words(list)
for best=lastbest or 1,max do
local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir)
- setfield(current,"list",temp)
+ current.list = temp
if trace_optimize then
report_optimizers("line %a, alternative %a, changes %a, badness %a",line,best,changes,b)
end
@@ -839,16 +810,15 @@ function splitters.optimize(head)
end
end
-- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus
- local list = hpack_nodes(getlist(current),width,'exactly',listdir)
- setfield(current,"list",list)
+ current.list = hpack_nodes(current.list,width,'exactly',listdir)
+ -- report_splitters("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil))
end
for i=1,nc do
local ci = cache[i]
free_nodelist(ci.original)
end
cache = { }
- tex.hbadness = tex_hbadness
- tex.hfuzz = tex_hfuzz
+ tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz
stoptiming(splitters)
end
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index 6296f088e..5b50ac75f 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -81,33 +81,7 @@ directives.register("fonts.usesystemfonts", function(v) usesystemfonts = toboole
local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs
--- -- what to do with these -- --
---
--- thin -> thin
---
--- regu -> regular -> normal
--- norm -> normal -> normal
--- stan -> standard -> normal
--- medi -> medium
--- ultr -> ultra
--- ligh -> light
--- heav -> heavy
--- blac -> black
--- thin
--- book
--- verylight
---
--- buch -> book
--- buchschrift -> book
--- halb -> demi
--- halbfett -> demi
--- mitt -> medium
--- mittel -> medium
--- fett -> bold
--- mage -> light
--- mager -> light
--- nord -> normal
--- gras -> normal
+-- what to do with 'thin'
local weights = Cs ( -- not extra
P("demibold")
@@ -116,7 +90,6 @@ local weights = Cs ( -- not extra
+ P("ultrabold")
+ P("extrabold")
+ P("ultralight")
- + P("extralight")
+ P("bold")
+ P("demi")
+ P("semi")
@@ -130,17 +103,6 @@ local weights = Cs ( -- not extra
+ P("regular") / "normal"
)
--- numeric_weights = {
--- 200 = "extralight",
--- 300 = "light",
--- 400 = "book",
--- 500 = "medium",
--- 600 = "demi",
--- 700 = "bold",
--- 800 = "heavy",
--- 900 = "black",
--- }
-
local normalized_weights = sparse {
regular = "normal",
}
@@ -154,7 +116,6 @@ local styles = Cs (
+ P("roman") / "normal"
+ P("ital") / "italic" -- might be tricky
+ P("ita") / "italic" -- might be tricky
---+ P("obli") / "oblique"
)
local normalized_styles = sparse {
@@ -168,7 +129,6 @@ local widths = Cs(
+ P("thin")
+ P("expanded")
+ P("cond") / "condensed"
---+ P("expa") / "expanded"
+ P("normal")
+ P("book") / "normal"
)
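-- A hedged illustration of how these substitution patterns behave when wrapped in
-- a full-string replacement (the wrapper and sample names are just for show):
local normalize = Cs((weights + widths + P(1))^0)
-- lpeg.match(normalize,"testregular") -- "testnormal"    ("regular" -> "normal")
-- lpeg.match(normalize,"testcond")    -- "testcondensed" ("cond"    -> "condensed")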
@@ -308,9 +268,6 @@ filters.dfont = fontloader.info
-- glyphs so here we first load and then discard which is a waste. In the past it did
-- free memory because a full load was done. One of these things that goes unnoticed.
--
--- missing: names, units_per_em, design_range_bottom, design_range_top, design_size,
--- pfminfo, top_side_bearing
-
-- function fontloader.fullinfo(...) -- check with taco what we get / could get
-- local ff = fontloader.open(...)
-- if ff then
@@ -326,7 +283,7 @@ filters.dfont = fontloader.info
-- Phillip suggested this faster variant but it's still a hack as fontloader.info should
-- return these keys/values (and maybe some more) but at least we close the loader which
-- might save some memory in the end.
-
+--
-- function fontloader.fullinfo(name)
-- local ff = fontloader.open(name)
-- if ff then
@@ -344,9 +301,8 @@ filters.dfont = fontloader.info
-- design_size = fields.design_size and ff.design_size,
-- italicangle = fields.italicangle and ff.italicangle,
-- pfminfo = fields.pfminfo and ff.pfminfo,
--- top_side_bearing = fields.top_side_bearing and ff.top_side_bearing,
-- }
--- setmetatableindex(d,function(t,k)
+-- table.setmetatableindex(d,function(t,k)
-- report_names("warning, trying to access field %a in font table of %a",k,name)
-- end)
-- fontloader.close(ff)
@@ -357,26 +313,19 @@ filters.dfont = fontloader.info
-- end
-- As we have lazy loading anyway, this one still is full and with less code than
--- the previous one. But this depends on the garbage collector to kick in.
+-- the previous one.
function fontloader.fullinfo(...)
local ff = fontloader.open(...)
if ff then
local d = { } -- ff is userdata so [1] or # fails on it
- setmetatableindex(d,ff)
+ table.setmetatableindex(d,ff)
return d
else
return nil, "error in loading font"
end
end
--- We don't get the design_* values here as for that the fontloader has to load feature
--- info and therefore we're not much better off than using 'open'.
---
--- if tonumber(status.luatex_version) > 78 or (tonumber(status.luatex_version) == 78 and tonumber(status.luatex_revision) > 0) then
--- fontloader.fullinfo = fontloader.info
--- end
-
filters.otf = fontloader.fullinfo
filters.ttf = fontloader.fullinfo
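-- A hedged usage sketch (the filename is made up): because of the metatable index
-- set above, fields are only fetched from the still open fontloader object when
-- they are actually asked for.
--
-- local info = fontloader.fullinfo("somefont.otf")
-- if info then
--     print(info.fontname,info.units_per_em)
-- end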
@@ -598,7 +547,7 @@ local function check_name(data,result,filename,modification,suffix,subfont)
fullname = fullname or fontname
familyname = familyname or fontname
-- we do these sparse
- local units = result.units_per_em or 1000 -- can be zero too
+ local units = result.units_per_em or 1000
local minsize = result.design_range_bottom or 0
local maxsize = result.design_range_top or 0
local designsize = result.design_size or 0
@@ -622,7 +571,7 @@ local function check_name(data,result,filename,modification,suffix,subfont)
style = style,
width = width,
variant = variant,
- units = units ~= 1000 and units or nil,
+ units = units ~= 1000 and units or nil,
pfmwidth = pfmwidth ~= 0 and pfmwidth or nil,
pfmweight = pfmweight ~= 0 and pfmweight or nil,
angle = angle ~= 0 and angle or nil,
@@ -631,9 +580,6 @@ local function check_name(data,result,filename,modification,suffix,subfont)
designsize = designsize ~= 0 and designsize or nil,
modification = modification ~= 0 and modification or nil,
}
--- inspect(filename)
--- inspect(result)
--- inspect(specifications[#specifications])
end
local function cleanupkeywords()
diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua
index b658b7c75..40081cc3b 100644
--- a/tex/context/base/l-dir.lua
+++ b/tex/context/base/l-dir.lua
@@ -25,7 +25,6 @@ local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
local chdir = lfs.chdir
-local mkdir = lfs.mkdir
local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
@@ -285,28 +284,17 @@ local make_indeed = true -- false
if onwindows then
function dir.mkdirs(...)
- local n = select("#",...)
- local str
- if n == 1 then
- str = select(1,...)
- if isdir(str) then
- return str, true
- end
- else
- str = ""
- for i=1,n do
- local s = select(i,...)
- local s = select(i,...)
- if s == "" then
- -- skip
- elseif str == "" then
- str = s
- else
- str = str .. "/" .. s
- end
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s == "" then
+ -- skip
+ elseif str == "" then
+ str = s
+ else
+ str = str .. "/" .. s
end
end
- local pth = ""
local drive = false
local first, middle, last = match(str,"^(//)(//*)(.*)$")
if first then
@@ -342,7 +330,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -363,23 +351,14 @@ if onwindows then
else
function dir.mkdirs(...)
- local n = select("#",...)
- local str, pth
- if n == 1 then
- str = select(1,...)
- if isdir(str) then
- return str, true
- end
- else
- str = ""
- for i=1,n do
- local s = select(i,...)
- if s and s ~= "" then -- we catch nil and false
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s and s ~= "" then -- we catch nil and false
+ if str ~= "" then
+ str = str .. "/" .. s
+ else
+ str = s
end
end
end
@@ -394,7 +373,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -402,7 +381,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
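-- A hedged usage sketch of the rewritten helper (path components made up): the
-- arguments are joined with "/" and missing directories are created on the way.
--
-- local pth, ok = dir.mkdirs("tmp","context","cache")
-- -- pth is the combined path; the second value reports whether it is a directory now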
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
index 6feb7089c..399b3ad65 100644
--- a/tex/context/base/l-lpeg.lua
+++ b/tex/context/base/l-lpeg.lua
@@ -6,10 +6,6 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
--- lpeg 12 vs lpeg 10: slower compilation, similar parsing speed (i need to check
--- if i can use new features like capture / 2 and .B (at first sight the xml
--- parser is some 5% slower)
-
-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-- move utf -> l-unicode
@@ -19,15 +15,14 @@ lpeg = require("lpeg")
-- The latest lpeg doesn't have print any more, and even the new ones are not
-- available by default (only when debug mode is enabled), which is a pity as
--- it helps nailing down bottlenecks. Performance seems comparable: some 10%
--- slower pattern compilation, same parsing speed, although,
+-- it helps nailing down bottlenecks. Performance seems comparable, although
--
-- local p = lpeg.C(lpeg.P(1)^0 * lpeg.P(-1))
--- local a = string.rep("123",100)
+-- local a = string.rep("123",10)
-- lpeg.match(p,a)
--
--- seems slower and is also still suboptimal (i.e. a match that runs from begin
--- to end, one of the cases where string matchers win).
+-- is nearly 20% slower and also still suboptimal (i.e. a match that runs from
+-- begin to end, one of the cases where string matchers win).
if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
@@ -79,9 +74,7 @@ local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print
-- let's start with an inspector:
-if setinspector then
- setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-end
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-- Beware, we predefine a bunch of patterns here and one reason for doing so
-- is that we get consistent behaviour in some of the visualizers.
@@ -176,14 +169,12 @@ patterns.whitespace = whitespace
patterns.nonspacer = nonspacer
patterns.nonwhitespace = nonwhitespace
-local stripper = spacer ^0 * C((spacer ^0 * nonspacer ^1)^0) -- from example by roberto
-local fullstripper = whitespace^0 * C((whitespace^0 * nonwhitespace^1)^0)
+local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto
----- collapser = Cs(spacer^0/"" * ((spacer^1 * endofstring / "") + (spacer^1/" ") + P(1))^0)
local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0))
patterns.stripper = stripper
-patterns.fullstripper = fullstripper
patterns.collapser = collapser
patterns.lowercase = lowercase
@@ -478,7 +469,7 @@ end
-- local pattern1 = P(1-P(pattern))^0 * P(pattern) : test for not nil
-- local pattern2 = (P(pattern) * Cc(true) + P(1))^0 : test for true (could be faster, but not much)
-function lpeg.finder(lst,makefunction,isutf) -- beware: slower than find with 'patternless finds'
+function lpeg.finder(lst,makefunction) -- beware: slower than find with 'patternless finds'
local pattern
if type(lst) == "table" then
pattern = P(false)
@@ -494,12 +485,7 @@ function lpeg.finder(lst,makefunction,isutf) -- beware: slower than find with 'p
else
pattern = P(lst)
end
- if isutf then
--- pattern = ((utf8char or 1)-pattern)^0 * pattern
- pattern = ((utf8char or 1)-pattern)^0 * pattern
- else
- pattern = (1-pattern)^0 * pattern
- end
+ pattern = (1-pattern)^0 * pattern
if makefunction then
return function(str)
return lpegmatch(pattern,str)
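-- A hedged usage sketch (sample strings made up): with makefunction set the result
-- is a matcher that returns the position just past the first hit, or nil.
--
-- local findkey = lpeg.finder({ "foo", "bar" }, true)
-- findkey("xx bar xx") -- a position (number) when found, nil otherwise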
diff --git a/tex/context/base/l-lua.lua b/tex/context/base/l-lua.lua
index 4a96b0b1d..fc05afa67 100644
--- a/tex/context/base/l-lua.lua
+++ b/tex/context/base/l-lua.lua
@@ -148,9 +148,3 @@ function optionalrequire(...)
return result
end
end
-
--- nice for non ascii scripts (this might move):
-
-if lua then
- lua.mask = load([[τεχ = 1]]) and "utf" or "ascii"
-end
diff --git a/tex/context/base/l-string.lua b/tex/context/base/l-string.lua
index 3b1a0003f..9b079b00a 100644
--- a/tex/context/base/l-string.lua
+++ b/tex/context/base/l-string.lua
@@ -70,7 +70,6 @@ function string.limit(str,n,sentinel) -- not utf proof
end
local stripper = patterns.stripper
-local fullstripper = patterns.fullstripper
local collapser = patterns.collapser
local longtostring = patterns.longtostring
@@ -78,10 +77,6 @@ function string.strip(str)
return lpegmatch(stripper,str) or ""
end
-function string.fullstrip(str)
- return lpegmatch(fullstripper,str) or ""
-end
-
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua
index c318c57bb..f361f3d20 100644
--- a/tex/context/base/l-table.lua
+++ b/tex/context/base/l-table.lua
@@ -1006,9 +1006,7 @@ function table.print(t,...)
end
end
-if setinspector then
- setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
-end
+setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
-- -- -- obsolete but we keep them for a while and might comment them later -- -- --
diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua
index 7bb731254..7b7910fa7 100644
--- a/tex/context/base/l-url.lua
+++ b/tex/context/base/l-url.lua
@@ -26,8 +26,6 @@ local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replac
-- | ___________|____________ |
-- / \ / \ |
-- urn:example:animal:ferret:nose interpretable as extension
---
--- also nice: http://url.spec.whatwg.org/ (maybe some day ...)
url = url or { }
local url = url
@@ -45,7 +43,7 @@ local hexdigit = R("09","AF","af")
local plus = P("+")
local nothing = Cc("")
local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
-local escaped = (plus / " ") + escapedchar -- so no loc://foo++.tex
+local escaped = (plus / " ") + escapedchar
local noslash = P("/") / ""
@@ -191,11 +189,7 @@ local function hashed(str) -- not yet ok (/test?test)
return s
end
--- inspect(hashed("template:///test"))
--- inspect(hashed("template:///test++.whatever"))
--- inspect(hashed("template:///test%2B%2B.whatever"))
--- inspect(hashed("template:///test%x.whatever"))
--- inspect(hashed("tem%2Bplate:///test%x.whatever"))
+-- inspect(hashed("template://test"))
-- Here we assume:
--
diff --git a/tex/context/base/lang-lab.mkiv b/tex/context/base/lang-lab.mkiv
index 14d9d8594..1ddb44cbb 100644
--- a/tex/context/base/lang-lab.mkiv
+++ b/tex/context/base/lang-lab.mkiv
@@ -94,10 +94,8 @@
\csname\??label\currentlabelcategory#1:##1:##2\endcsname
\else\ifcsname\??label#1:##1:##2\endcsname
\csname\??label#1:##1:##2\endcsname
-% \else\ifcsname\??language#4\s!default\endcsname
-% \expandafter#5\csname\??language#4\s!default\endcsname{##2}%
- \else\ifcsname\??language##1\s!default\endcsname
- \expandafter#5\csname\??language##1\s!default\endcsname{##2}%
+ \else\ifcsname\??language#4\s!default\endcsname
+ \expandafter#5\csname\??language#4\s!default\endcsname{##2}%
\else\ifcsname\??label\currentlabelcategory#1:##2\endcsname
\csname\??label\currentlabelcategory#1:##2\endcsname
\else\ifcsname\??label#1:##2\endcsname
diff --git a/tex/context/base/lang-rep.lua b/tex/context/base/lang-rep.lua
index be74d597a..31ae36e6d 100644
--- a/tex/context/base/lang-rep.lua
+++ b/tex/context/base/lang-rep.lua
@@ -7,21 +7,9 @@ if not modules then modules = { } end modules ['lang-rep'] = {
}
-- A BachoTeX 2013 experiment, probably not that useful. Eventually I used a simpler
--- more generic example. I'm sure no one ever notices or even needs this code.
---
--- As a follow up on a question by Alan about special treatment of dropped caps I wonder
--- if I can make this one more clever (probably in a few more dev steps). For instance
--- injecting nodes or replacing nodes. It's a prelude to a kind of lpeg for nodes,
--- although (given experiences so far) we don't really need that. After all, each problem
--- is somewhat unique.
+-- more generic example.
-local type = type
local utfbyte, utfsplit = utf.byte, utf.split
-local P, C, U, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.patterns.utf8character, lpeg.Cc, lpeg.Ct, lpeg.match
-local find = string.find
-
-local grouped = P("{") * ( Ct((U/utfbyte-P("}"))^1) + Cc(false) ) * P("}")-- grouped
-local splitter = Ct((Ct(Cc("discretionary") * grouped * grouped * grouped) + U/utfbyte)^1)
local trace_replacements = false trackers.register("languages.replacements", function(v) trace_replacements = v end)
local trace_detail = false trackers.register("languages.replacements.detail", function(v) trace_detail = v end)
@@ -30,26 +18,9 @@ local report_replacement = logs.reporter("languages","replacements")
local glyph_code = nodes.nodecodes.glyph
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getattr = nuts.getattr
-local getid = nuts.getid
-local getchar = nuts.getchar
-
-local insert_node_before = nuts.insert_before
-local remove_node = nuts.remove
-local copy_node = nuts.copy
-local flush_list = nuts.flush_list
-local insert_after = nuts.insert_after
-
-local nodepool = nuts.pool
-local new_glyph = nodepool.glyph
-local new_disc = nodepool.disc
+local insert_node_before = nodes.insert_before
+local remove_node = nodes.remove
+local copy_node = nodes.copy
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
@@ -75,30 +46,23 @@ table.setmetatableindex(lists,function(lists,name)
return data
end)
--- todo: glue kern
-
local function add(root,word,replacement)
local list = utfsplit(word,true)
- local size = #list
- for i=1,size do
+ for i=1,#list do
local l = utfbyte(list[i])
if not root[l] then
root[l] = { }
end
- if i == size then
- -- local newlist = utfsplit(replacement,true)
- -- for i=1,#newlist do
- -- newlist[i] = utfbyte(newlist[i])
- -- end
- local special = find(replacement,"{")
- local newlist = lpegmatch(splitter,replacement)
- --
+ if i == #list then
+ local newlist = utfsplit(replacement,true)
+ for i=1,#newlist do
+ newlist[i] = utfbyte(newlist[i])
+ end
root[l].final = {
word = word,
replacement = replacement,
- oldlength = size,
+ oldlength = #list,
newcodes = newlist,
- special = special,
}
end
root = root[l]
@@ -119,13 +83,13 @@ end
local function hit(a,head)
local tree = trees[a]
if tree then
- local root = tree[getchar(head)]
+ local root = tree[head.char]
if root then
- local current = getnext(head)
+ local current = head.next
local lastrun = false
local lastfinal = false
- while current and getid(current) == glyph_code do
- local newroot = root[getchar(current)]
+ while current and current.id == glyph_code do
+ local newroot = root[current.char]
if not newroot then
return lastrun, lastfinal
else
@@ -140,7 +104,7 @@ local function hit(a,head)
root = newroot
end
end
- current = getnext(current)
+ current = current.next
end
if lastrun then
return lastrun, lastfinal
@@ -149,27 +113,11 @@ local function hit(a,head)
end
end
-local function tonodes(list,template)
- local head, current
- for i=1,#list do
- local new = copy_node(template)
- setfield(new,"char",list[i])
- if head then
- head, current = insert_after(head,current,new)
- else
- head, current = new, new
- end
- end
- return head
-end
-
-
function replacements.handler(head)
- head = tonut(head)
local current = head
local done = false
while current do
- if getid(current) == glyph_code then
+ if current.id == glyph_code then
local a = getattr(current,a_replacements)
if a then
local last, final = hit(a,current)
@@ -177,85 +125,41 @@ function replacements.handler(head)
local oldlength = final.oldlength
local newcodes = final.newcodes
local newlength = #newcodes
- if trace_replacement then
+ if trace_replacements then
report_replacement("replacing word %a by %a",final.word,final.replacement)
end
- if final.special then
- -- easier is to delete and insert (a simple callout to tex would be more efficient)
- -- maybe just walk over a replacement string instead
- local prev = getprev(current)
- local next = getnext(last)
- local list = current
- setfield(last,"next",nil)
- setfield(prev,"next",next)
- if next then
- setfield(next,"prev",prev)
- end
- current = prev
- if not current then
- head = nil
- end
- for i=1,newlength do
- local codes = newcodes[i]
- local new = nil
- if type(codes) == "table" then
- local method = codes[1]
- if method == "discretionary" then
- local pre, post, replace = codes[2], codes[3], codes[4]
- new = new_disc()
- if pre then
- setfield(new,"pre",tonodes(pre,last))
- end
- if post then
- setfield(new,"post",tonodes(post,last))
- end
- if replace then
- setfield(new,"replace",tonodes(replace,last))
- end
- else
- -- todo
- end
- else
- new = copy_node(last)
- setfield(new,"char",codes)
- end
- if new then
- head, current = insert_after(head,current,new)
- end
- end
- flush_list(list)
- elseif oldlength == newlength then -- #old == #new
+ if oldlength == newlength then -- #old == #new
for i=1,newlength do
- setfield(current,"char",newcodes[i])
- current = getnext(current)
+ current.char = newcodes[i]
+ current = current.next
end
elseif oldlength < newlength then -- #old < #new
for i=1,newlength-oldlength do
local n = copy_node(current)
- setfield(n,"char",newcodes[i])
+ n.char = newcodes[i]
head, current = insert_node_before(head,current,n)
- current = getnext(current)
+ current = current.next
end
for i=newlength-oldlength+1,newlength do
- setfield(current,"char",newcodes[i])
- current = getnext(current)
+ current.char = newcodes[i]
+ current = current.next
end
else -- #old > #new
for i=1,oldlength-newlength do
head, current = remove_node(head,current,true)
end
for i=1,newlength do
- setfield(current,"char",newcodes[i])
- current = getnext(current)
+ current.char = newcodes[i]
+ current = current.next
end
end
done = true
end
end
end
- current = getnext(current)
+ current = current.next
end
- return tonode(head), done
+ return head, done
end
local enabled = false
@@ -280,3 +184,6 @@ end
commands.setreplacements = replacements.set
commands.addreplacements = replacements.add
+
+nodes.tasks.prependaction("processors","words","languages.replacements.handler")
+nodes.tasks.disableaction("processors","languages.replacements.handler")
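-- A hedged usage sketch of the replacement trie built by the add function above,
-- mirroring the example list used elsewhere in this patch:
--
-- languages.replacements.add("basics", {
--     ["aap"]  = "monkey",
--     ["noot"] = "nut",
-- })
-- -- \setreplacements[basics] then activates the list for the current group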
diff --git a/tex/context/base/lang-rep.mkiv b/tex/context/base/lang-rep.mkiv
deleted file mode 100644
index b3f21f22a..000000000
--- a/tex/context/base/lang-rep.mkiv
+++ /dev/null
@@ -1,75 +0,0 @@
-%D \module
-%D [ file=lang-rep,
-%D version=2013.04.28,
-%D title=\CONTEXT\ Language Macros,
-%D subtitle=Substitution,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D As I needed an example of messing with nodes for the bacho\TEX\ tutorial
-%D I cooked up this. In the end I decided to stick to a simpler example and
-%D just finished this off in case someone really needs it.
-
-\writestatus{loading}{ConTeXt Language Macros / Replacements}
-
-\unprotect
-
-\registerctxluafile{lang-rep}{1.001}
-
-\definesystemattribute[replacements][public]
-
-%D \startluacode
-%D
-%D -- todo: other nodes (prelude to more experiments with auto dropped caps)
-%D
-%D languages.replacements.add("basics", {
-%D ["aap"] = "monkey",
-%D ["noot"] = "nut",
-%D ["never"] = "forever",
-%D ["newer"] = "cooler",
-%D ["new"] = "cool",
-%D -- ["special"] = "veryspe{>>>}{<<<}{=}cial",
-%D })
-%D
-%D \stopluacode
-%D
-%D \replaceword[more][this][that]
-%D \replaceword[more][crap][support]
-%D \replaceword[more][---][—]
-%D \replaceword[basics][special][veryspe{>>>}{<<<}{=}cial]
-%D
-%D \starttyping
-%D \start \setreplacements[basics] What the heck, it's now or never, isn't it new? \par \stop
-%D \start \setreplacements[more] Do we --- {\it really} --- need this kind of crap? \par \stop
-%D \start \setreplacements[basics] All kinds of special thingies! \par \stop
-%D \start \setreplacements[basics] \hsize1mm special \par \stop
-%D \stoptyping
-
-\unexpanded\def\setreplacements[#1]%
- {\ctxcommand{setreplacements("#1")}}
-
-\unexpanded\def\resetreplacements
- {\attribute\replacementsattribute\attributeunsetvalue}
-
-\unexpanded\def\replaceword
- {\dotripleargument\languages_replacements_replace}
-
-\unexpanded\def\languages_replacements_replace[#1][#2][#3]%
- {\ifthirdargument
- \ctxcommand{addreplacements("#1",\!!bs#2\!!es,\!!bs#3\!!es)}%
- \fi}
-
-\appendtoks
- \resetreplacements
-\to \everyresettypesetting
-
-\appendtoks
- \resetreplacements
-\to \everyinitializeverbatim
-
-\protect \endinput
diff --git a/tex/context/base/lang-wrd.lua b/tex/context/base/lang-wrd.lua
index 5fc23757e..bf066fc09 100644
--- a/tex/context/base/lang-wrd.lua
+++ b/tex/context/base/lang-wrd.lua
@@ -26,18 +26,7 @@ words.threshold = 4
local numbers = languages.numbers
local registered = languages.registered
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local setattr = nuts.setattr
-
-local traverse_nodes = nuts.traverse
-
+local traverse_nodes = node.traverse
local wordsdata = words.data
local chardata = characters.data
local tasks = nodes.tasks
@@ -107,7 +96,7 @@ end
-- there is an n=1 problem somewhere in nested boxes
local function mark_words(head,whenfound) -- can be optimized and shared
- local current, language, done = tonut(head), nil, nil, 0, false
+ local current, language, done = head, nil, nil, 0, false
local str, s, nds, n = { }, 0, { }, 0 -- n could also be a table, saves calls
local function action()
if s > 0 then
@@ -123,9 +112,9 @@ local function mark_words(head,whenfound) -- can be optimized and shared
n, s = 0, 0
end
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local a = getfield(current,"lang")
+ local a = current.lang
if a then
if a ~= language then
if s > 0 then
@@ -137,16 +126,16 @@ local function mark_words(head,whenfound) -- can be optimized and shared
action()
language = a
end
- local components = getfield(current,"components")
+ local components = current.components
if components then
n = n + 1
nds[n] = current
for g in traverse_nodes(components) do
s = s + 1
- str[s] = utfchar(getchar(g))
+ str[s] = utfchar(g.char)
end
else
- local code = getchar(current)
+ local code = current.char
local data = chardata[code]
if is_letter[data.category] then
n = n + 1
@@ -162,12 +151,12 @@ local function mark_words(head,whenfound) -- can be optimized and shared
n = n + 1
nds[n] = current
end
- elseif id == kern_code and getsubtype(current) == kerning_code and s > 0 then
+ elseif id == kern_code and current.subtype == kerning_code and s > 0 then
-- ok
elseif s > 0 then
action()
end
- current = getnext(current)
+ current = current.next
end
if s > 0 then
action()
@@ -187,8 +176,6 @@ local enabled = false
function words.check(head)
if enabled then
return methods[wordmethod](head)
- elseif not head then
- return head, false
else
return head, false
end
@@ -220,7 +207,7 @@ table.setmetatableindex(cache, function(t,k) -- k == language, numbers[k] == tag
else
c = colist["word:" .. (numbers[k] or "unset")] or colist["word:unknown"]
end
- local v = c and function(n) setattr(n,a_color,c) end or false
+ local v = c and function(n) n[a_color] = c end or false
t[k] = v
return v
end)
@@ -239,7 +226,7 @@ end
methods[1] = function(head)
for n in traverse_nodes(head) do
- setattr(n,a_color,unsetvalue) -- hm, not that selective (reset color)
+ n[a_color] = unsetvalue -- hm, not that selective (reset color)
end
return mark_words(head,sweep)
end
@@ -340,7 +327,7 @@ end
methods[3] = function(head)
for n in traverse_nodes(head) do
- setattr(n,a_color,unsetvalue)
+ n[a_color] = unsetvalue
end
return mark_words(head,sweep)
end
diff --git a/tex/context/base/lpdf-mis.lua b/tex/context/base/lpdf-mis.lua
index 43f6cb7e1..174d17427 100644
--- a/tex/context/base/lpdf-mis.lua
+++ b/tex/context/base/lpdf-mis.lua
@@ -43,7 +43,6 @@ local pdfflushobject = lpdf.flushobject
local pdfflushstreamobject = lpdf.flushstreamobject
local variables = interfaces.variables
-local v_stop = variables.stop
local positive = register(pdfliteral("/GSpositive gs"))
local negative = register(pdfliteral("/GSnegative gs"))
@@ -338,82 +337,31 @@ local map = {
characters = "a",
}
--- local function featurecreep()
--- local pages, lastconversion, list = structures.pages.tobesaved, nil, pdfarray()
--- local getstructureset = structures.sets.get
--- for i=1,#pages do
--- local p = pages[i]
--- if not p then
--- return -- fatal error
--- else
--- local numberdata = p.numberdata
--- if numberdata then
--- local conversionset = numberdata.conversionset
--- if conversionset then
--- local conversion = getstructureset("structure:conversions",p.block,conversionset,1,"numbers")
--- if conversion ~= lastconversion then
--- lastconversion = conversion
--- list[#list+1] = i - 1 -- pdf starts numbering at 0
--- list[#list+1] = pdfdictionary { S = pdfconstant(map[conversion] or map.numbers) }
--- end
--- end
--- end
--- if not lastconversion then
--- lastconversion = "numbers"
--- list[#list+1] = i - 1 -- pdf starts numbering at 0
--- list[#list+1] = pdfdictionary { S = pdfconstant(map.numbers) }
--- end
--- end
--- end
--- lpdf.addtocatalog("PageLabels", pdfdictionary { Nums = list })
--- end
-
local function featurecreep()
- local pages = structures.pages.tobesaved
- local list = pdfarray()
- local getset = structures.sets.get
- local stopped = false
- local oldlabel = nil
- local olconversion = nil
+ local pages, lastconversion, list = structures.pages.tobesaved, nil, pdfarray()
+ local getstructureset = structures.sets.get
for i=1,#pages do
local p = pages[i]
if not p then
return -- fatal error
- end
- local label = p.viewerprefix or ""
- if p.status == v_stop then
- if not stopped then
- list[#list+1] = i - 1 -- pdf starts numbering at 0
- list[#list+1] = pdfdictionary {
- P = pdfunicode(label),
- }
- stopped = true
- end
- oldlabel = nil
- oldconversion = nil
- stopped = false
else
local numberdata = p.numberdata
- local conversion = nil
- local number = p.number
if numberdata then
local conversionset = numberdata.conversionset
if conversionset then
- conversion = getset("structure:conversions",p.block,conversionset,1,"numbers")
+ local conversion = getstructureset("structure:conversions",p.block,conversionset,1,"numbers")
+ if conversion ~= lastconversion then
+ lastconversion = conversion
+ list[#list+1] = i - 1 -- pdf starts numbering at 0
+ list[#list+1] = pdfdictionary { S = pdfconstant(map[conversion] or map.numbers) }
+ end
end
end
- conversion = conversion and map[conversion] or map.numbers
- if number == 1 or oldlabel ~= label or oldconversion ~= conversion then
+ if not lastconversion then
+ lastconversion = "numbers"
list[#list+1] = i - 1 -- pdf starts numbering at 0
- list[#list+1] = pdfdictionary {
- S = pdfconstant(conversion),
- St = number,
- P = label ~= "" and pdfunicode(label) or nil,
- }
+ list[#list+1] = pdfdictionary { S = pdfconstant(map.numbers) }
end
- oldlabel = label
- oldconversion = conversion
- stopped = false
end
end
lpdf.addtocatalog("PageLabels", pdfdictionary { Nums = list })
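-- A hedged sketch of what this ends up writing (values invented): a /PageLabels
-- number tree mapping zero-based page indices to numbering styles, for instance
--
--   /PageLabels << /Nums [ 0 << /S /r >> 4 << /S /D /St 1 >> ] >>
--
-- i.e. lowercase roman labels from the first page on and decimal numbering
-- restarting at 1 from the fifth page.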
diff --git a/tex/context/base/lpdf-nod.lua b/tex/context/base/lpdf-nod.lua
index 68d7fca90..6b104d2fa 100644
--- a/tex/context/base/lpdf-nod.lua
+++ b/tex/context/base/lpdf-nod.lua
@@ -6,29 +6,21 @@ if not modules then modules = { } end modules ['lpdf-nod'] = {
license = "see context related readme files"
}
-local type = type
-
local formatters = string.formatters
-local whatsitcodes = nodes.whatsitcodes
-local nodeinjections = backends.nodeinjections
-
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local setfield = nuts.setfield
-
-local copy_node = nuts.copy
-local new_node = nuts.new
+local copy_node = node.copy
+local new_node = node.new
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local register = nodepool.register
+local whatsitcodes = nodes.whatsitcodes
+local nodeinjections = backends.nodeinjections
-local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfliteral,"mode",1)
+local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) pdfliteral.mode = 1
local pdfsave = register(new_node("whatsit", whatsitcodes.pdfsave))
local pdfrestore = register(new_node("whatsit", whatsitcodes.pdfrestore))
local pdfsetmatrix = register(new_node("whatsit", whatsitcodes.pdfsetmatrix))
-local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) setfield(pdfdest,"named_id",1) -- xyz_zoom untouched
+local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) pdfdest.named_id = 1 -- xyz_zoom untouched
local pdfannot = register(new_node("whatsit", whatsitcodes.pdfannot))
local variables = interfaces.variables
@@ -46,14 +38,14 @@ local views = { -- beware, we do support the pdf keys but this is *not* official
function nodepool.pdfliteral(str)
local t = copy_node(pdfliteral)
- setfield(t,"data",str)
+ t.data = str
return t
end
function nodepool.pdfdirect(str)
local t = copy_node(pdfliteral)
- setfield(t,"data",str)
- setfield(t,"mode",1)
+ t.data = str
+ t.mode = 1
return t
end
@@ -65,10 +57,16 @@ function nodepool.pdfrestore()
return copy_node(pdfrestore)
end
-function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) -- todo: tx ty
+function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
+ local t = copy_node(pdfsetmatrix)
+ t.data = formatters["%s %s %s %s"](rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty
+ return t
+end
+
+function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
local t = copy_node(pdfsetmatrix)
if type(rx) == "string" then
- setfield(t,"data",rx)
+ t.data = rx
else
if not rx then
rx = 1
@@ -88,12 +86,12 @@ function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) -- todo: tx ty
end
if sx == 0 and sy == 0 then
if rx == 1 and ry == 1 then
- setfield(t,"data","1 0 0 1")
+ t.data = "1 0 0 1"
else
- setfield(t,"data",formatters["%0.6f 0 0 %0.6f"](rx,ry))
+ t.data = formatters["%0.6f 0 0 %0.6f"](rx,ry)
end
else
- setfield(t,"data",formatters["%0.6f %0.6f %0.6f %0.6f"](rx,sx,sy,ry))
+ t.data = formatters["%0.6f %0.6f %0.6f %0.6f"](rx,sx,sy,ry)
end
end
return t
@@ -106,19 +104,19 @@ nodeinjections.transform = nodepool.pdfsetmatrix
function nodepool.pdfannotation(w,h,d,data,n)
local t = copy_node(pdfannot)
if w and w ~= 0 then
- setfield(t,"width",w)
+ t.width = w
end
if h and h ~= 0 then
- setfield(t,"height",h)
+ t.height = h
end
if d and d ~= 0 then
- setfield(t,"depth",d)
+ t.depth = d
end
if n then
- setfield(t,"objnum",n)
+ t.objnum = n
end
if data and data ~= "" then
- setfield(t,"data",data)
+ t.data = data
end
return t
end
@@ -140,36 +138,35 @@ function nodepool.pdfdestination(w,h,d,name,view,n)
local t = copy_node(pdfdest)
local hasdimensions = false
if w and w ~= 0 then
- setfield(t,"width",w)
+ t.width = w
hasdimensions = true
end
if h and h ~= 0 then
- setfield(t,"height",h)
+ t.height = h
hasdimensions = true
end
if d and d ~= 0 then
- setfield(t,"depth",d)
+ t.depth = d
hasdimensions = true
end
if n then
- setfield(t,"objnum",n)
+ t.objnum = n
end
view = views[view] or view or 1 -- fit is default
- setfield(t,"dest_id",name)
- setfield(t,"dest_type",view)
+ t.dest_id = name
+ t.dest_type = view
if hasdimensions and view == 0 then -- xyz
-- see (!) s -> m -> t -> r
- -- linked
local s = copy_node(pdfsave)
local m = copy_node(pdfsetmatrix)
local r = copy_node(pdfrestore)
- setfield(m,"data","1 0 0 1")
- setfield(s,"next",m)
- setfield(m,"next",t)
- setfield(t,"next",r)
- setfield(m,"prev",s)
- setfield(t,"prev",m)
- setfield(r,"prev",t)
+ m.data = "1 0 0 1"
+ s.next = m
+ m.next = t
+ t.next = r
+ m.prev = s
+ t.prev = m
+ r.prev = t
return s -- a list
else
return t
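-- A hedged usage sketch (numbers made up) of the matrix helper defined above:
--
-- nodepool.pdfsetmatrix(0.5,0,0,0.5) -- data becomes "0.500000 0 0 0.500000"
-- nodepool.pdfsetmatrix("2 0 0 2")   -- a string argument is passed through as-is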
diff --git a/tex/context/base/lpdf-tag.lua b/tex/context/base/lpdf-tag.lua
index afddec345..29ffcd207 100644
--- a/tex/context/base/lpdf-tag.lua
+++ b/tex/context/base/lpdf-tag.lua
@@ -6,7 +6,6 @@ if not modules then modules = { } end modules ['lpdf-tag'] = {
license = "see context related readme files"
}
-local next = next
local format, match, concat = string.format, string.match, table.concat
local lpegmatch = lpeg.match
local utfchar = utf.char
@@ -15,9 +14,7 @@ local trace_tags = false trackers.register("structures.tags", function(v) trace
local report_tags = logs.reporter("backend","tags")
-local backends = backends
-local lpdf = lpdf
-local nodes = nodes
+local backends, lpdf, nodes = backends, lpdf, nodes
local nodeinjections = backends.pdf.nodeinjections
local codeinjections = backends.pdf.codeinjections
@@ -37,6 +34,10 @@ local pdfpagereference = lpdf.pagereference
local texgetcount = tex.getcount
+local nodepool = nodes.pool
+
+local pdfliteral = nodepool.pdfliteral
+
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
@@ -46,26 +47,11 @@ local glyph_code = nodecodes.glyph
local a_tagged = attributes.private('tagged')
local a_image = attributes.private('image')
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local nodepool = nuts.pool
-local pdfliteral = nodepool.pdfliteral
-
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getprev = nuts.getprev
-local getnext = nuts.getnext
-local getlist = nuts.getlist
-local setfield = nuts.setfield
-
-local traverse_nodes = nuts.traverse
-local tosequence = nuts.tosequence
-local copy_node = nuts.copy
-local slide_nodelist = nuts.slide
-local insert_before = nuts.insert_before
-local insert_after = nuts.insert_after
+local traverse_nodes = node.traverse
+local traverse_id = node.traverse_id
+local tosequence = nodes.tosequence
+local copy_node = node.copy
+local slide_nodelist = node.slide
local structure_stack = { }
local structure_kids = pdfarray()
@@ -189,8 +175,7 @@ local function makeelement(fulltag,parent)
end
local function makecontent(parent,start,stop,slist,id)
- local tag = parent.tag
- local kids = parent.kids
+ local tag, kids = parent.tag, parent.kids
local last = index
if id == "image" then
local d = pdfdictionary {
@@ -213,29 +198,24 @@ local function makecontent(parent,start,stop,slist,id)
end
--
local bliteral = pdfliteral(format("/%s <</MCID %s>>BDC",tag,last))
- local eliteral = pdfliteral("EMC")
- -- use insert instead:
- local prev = getprev(start)
+ local prev = start.prev
if prev then
- setfield(prev,"next",bliteral)
- setfield(bliteral,"prev",prev)
+ prev.next, bliteral.prev = bliteral, prev
end
- setfield(start,"prev",bliteral)
- setfield(bliteral,"next",start)
- -- use insert instead:
- local next = getnext(stop)
- if next then
- setfield(next,"prev",eliteral)
- setfield(eliteral,"next",next)
+ start.prev, bliteral.next = bliteral, start
+ if slist and slist.list == start then
+ slist.list = bliteral
+ elseif not prev then
+ report_tags("this can't happen: injection in front of nothing")
end
- setfield(stop,"next",eliteral)
- setfield(eliteral,"prev",stop)
--
- if slist and getlist(slist) == start then
- setfield(slist,"list",bliteral)
- elseif not getprev(start) then
- report_tags("this can't happen: injection in front of nothing")
+ local eliteral = pdfliteral("EMC")
+ local next = stop.next
+ if next then
+ next.prev, eliteral.next = eliteral, next
end
+ stop.next, eliteral.prev = eliteral, stop
+ --
index = index + 1
list[index] = parent.pref
return bliteral, eliteral
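-- A hedged sketch of the injected pair (tag and MCID invented): the two literals
-- wrap the glyphs of the range as marked content that the structure tree can point
-- back to, roughly
--
--   /Span <</MCID 12>>BDC ... EMC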
@@ -247,9 +227,9 @@ local level, last, ranges, range = 0, nil, { }, nil
local function collectranges(head,list)
for n in traverse_nodes(head) do
- local id = getid(n) -- 14: image, 8: literal (mp)
+ local id = n.id -- 14: image, 8: literal (mp)
if id == glyph_code then
- local at = getattr(n,a_tagged)
+ local at = n[a_tagged]
if not at then
range = nil
elseif last ~= at then
@@ -260,9 +240,9 @@ local function collectranges(head,list)
range[4] = n -- stop
end
elseif id == hlist_code or id == vlist_code then
- local at = getattr(n,a_image)
+ local at = n[a_image]
if at then
- local at = getattr(n,a_tagged)
+ local at = n[a_tagged]
if not at then
range = nil
else
@@ -270,7 +250,7 @@ local function collectranges(head,list)
end
last = nil
else
- local nl = getlist(n)
+ local nl = n.list
slide_nodelist(nl) -- temporary hack till math gets slided (tracker item)
collectranges(nl,n)
end
@@ -282,7 +262,6 @@ function nodeinjections.addtags(head)
-- no need to adapt head, as we always operate on lists
level, last, ranges, range = 0, nil, { }, nil
initializepage()
- head = tonut(head)
collectranges(head)
if trace_tags then
for i=1,#ranges do
@@ -316,9 +295,8 @@ function nodeinjections.addtags(head)
finishpage()
-- can be separate feature
--
- -- injectspans(tonut(head)) -- does not work yet
+ -- injectspans(head) -- does not work yet
--
- head = tonode(head)
return head, true
end
diff --git a/tex/context/base/luat-cnf.lua b/tex/context/base/luat-cnf.lua
index 4ad6cd69d..3672c603e 100644
--- a/tex/context/base/luat-cnf.lua
+++ b/tex/context/base/luat-cnf.lua
@@ -23,7 +23,7 @@ texconfig.half_error_line = 50 -- 50 -- obsolete
texconfig.expand_depth = 10000 -- 10000
texconfig.hash_extra = 100000 -- 0
texconfig.nest_size = 1000 -- 50
-texconfig.max_in_open = 500 -- 15 -- in fact it's limited to 127
+texconfig.max_in_open = 500 -- 15
texconfig.max_print_line = 10000 -- 79
texconfig.max_strings = 500000 -- 15000
texconfig.param_size = 25000 -- 60
diff --git a/tex/context/base/luat-sto.lua b/tex/context/base/luat-sto.lua
index 041050fb8..7a11b7f5e 100644
--- a/tex/context/base/luat-sto.lua
+++ b/tex/context/base/luat-sto.lua
@@ -163,7 +163,6 @@ storage.register("storage/shared", storage.shared, "storage.shared")
local mark = storage.mark
if string.patterns then mark(string.patterns) end
-if string.formatters then mark(string.formatters) end
if lpeg.patterns then mark(lpeg.patterns) end
if os.env then mark(os.env) end
if number.dimenfactors then mark(number.dimenfactors) end
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index 8b34a96a3..3e10eb96d 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -746,11 +746,8 @@ local function _xmlconvert_(data, settings)
end
if errorstr and errorstr ~= "" then
result.error = true
- else
- errorstr = nil
end
result.statistics = {
- errormessage = errorstr,
entities = {
decimals = dcache,
hexadecimals = hcache,
@@ -1019,28 +1016,26 @@ local function verbose_document(e,handlers)
end
local function serialize(e,handlers,...)
- if e then
- local initialize = handlers.initialize
- local finalize = handlers.finalize
- local functions = handlers.functions
- if initialize then
- local state = initialize(...)
- if not state == true then
- return state
- end
- end
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers) -- dc ?
- end
- if finalize then
- return finalize()
+ local initialize = handlers.initialize
+ local finalize = handlers.finalize
+ local functions = handlers.functions
+ if initialize then
+ local state = initialize(...)
+ if not state == true then
+ return state
end
end
+ local etg = e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ -- elseif type(e) == "string" then
+ -- functions["@tx@"](e,handlers)
+ else
+ functions["@dc@"](e,handlers) -- dc ?
+ end
+ if finalize then
+ return finalize()
+ end
end
local function xserialize(e,handlers)
diff --git a/tex/context/base/m-oldbibtex.mkiv b/tex/context/base/m-oldbibtex.mkiv
deleted file mode 100644
index 08c23e7cc..000000000
--- a/tex/context/base/m-oldbibtex.mkiv
+++ /dev/null
@@ -1,16 +0,0 @@
-%D \module
-%D [ file=m-oldbibtex,
-%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
-%D title=Falback on old method,
-%D subtitle=Publications,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
-%D by \PRAGMA. See mreadme.pdf for details.
-
-\loadmarkfile{bibl-bib}
-\loadmarkfile{bibl-tra}
-
-\endinput
diff --git a/tex/context/base/math-dir.lua b/tex/context/base/math-dir.lua
index 525d07831..507a24e41 100644
--- a/tex/context/base/math-dir.lua
+++ b/tex/context/base/math-dir.lua
@@ -23,19 +23,8 @@ local trace_directions = false trackers.register("typesetters.directions.math
local report_directions = logs.reporter("typesetting","math directions")
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getlist = nuts.getlist
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
+local insert_node_before = nodes.insert_before
+local insert_node_after = nodes.insert_after
local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
@@ -44,7 +33,7 @@ local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_textdir = nodepool.textdir
@@ -72,9 +61,9 @@ local function processmath(head)
stop = nil
end
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local char = getchar(current)
+ local char = current.char
local cdir = chardirections[char]
if cdir == "en" or cdir == "an" then -- we could check for mathclass punctuation
if not start then
@@ -94,7 +83,7 @@ local function processmath(head)
if mirror then
local class = charclasses[char]
if class == "open" or class == "close" then
- setfield(current,"char",mirror)
+ current.char = mirror
if trace_directions then
report_directions("mirrored: %C to %C",char,mirror)
end
@@ -105,13 +94,6 @@ local function processmath(head)
end
elseif not start then
-- nothing
-if id == hlist_code or id == vlist_code then
- local list, d = processmath(getlist(current))
- setfield(current,"list",list)
- if d then
- done = true
- end
-end
elseif start == stop then
start = nil
else
@@ -119,14 +101,14 @@ end
-- math can pack things into hlists .. we need to make sure we don't process
-- too often: needs checking
if id == hlist_code or id == vlist_code then
- local list, d = processmath(getlist(current))
- setfield(current,"list",list)
+ local list, d = processmath(current.list)
+ current.list = list
if d then
done = true
end
end
end
- current = getnext(current)
+ current = current.next
end
if not start then
-- nothing
@@ -142,11 +124,9 @@ local enabled = false
function directions.processmath(head) -- style, penalties
if enabled then
- local h = tonut(head)
- local a = getattr(h,a_mathbidi)
+ local a = head[a_mathbidi]
if a and a > 0 then
- local head, done = processmath(h)
- return tonode(head), done
+ return processmath(head)
end
end
return head, false
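The math-dir.lua hunk replaces the indirect nuts accessors (getid, getchar, setfield, getnext) with direct field access on nodes (current.id, current.char = mirror, current.next). Outside LuaTeX there is no node userdata, so the sketch below models the list with plain Lua tables to show the same traversal-and-mirror pattern; glyph_code and the mirrors table are toy stand-ins, not ConTeXt's character data.

local glyph_code = 1                                   -- toy id; real code uses nodes.nodecodes.glyph
local mirrors    = { [0x28] = 0x29, [0x29] = 0x28 }    -- ( <-> ), tiny stand-in for mirror data

local function mirrorparens(head)
    local current, done = head, false
    while current do                      -- same walk as processmath above
        if current.id == glyph_code then  -- direct field access instead of getid()
            local mirror = mirrors[current.char]
            if mirror then
                current.char = mirror     -- direct assignment instead of setfield()
                done = true
            end
        end
        current = current.next            -- follow the .next link instead of getnext()
    end
    return head, done
end

local n3 = { id = glyph_code, char = 0x62 }            -- b
local n2 = { id = glyph_code, char = 0x28, next = n3 } -- (
local n1 = { id = glyph_code, char = 0x61, next = n2 } -- a
mirrorparens(n1)
print(string.char(n1.char, n2.char, n3.char))          -- a)b
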
diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua
index f4bd1348a..bd9a1d315 100644
--- a/tex/context/base/math-fbk.lua
+++ b/tex/context/base/math-fbk.lua
@@ -133,8 +133,10 @@ function fallbacks.apply(target,original)
else
-- something else
end
- if trace_fallbacks and characters[k] then
- report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
+ if trace_fallbacks then
+ if characters[k] then
+ report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
+ end
end
end
end
@@ -332,10 +334,9 @@ end
local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset)
local characters = target.characters
+ local addprivate = fonts.helpers.addprivate
local olddata = characters[oldchr]
- -- brrr ... pagella has only next
- if olddata and not olddata.commands and olddata.width > 0 then
- local addprivate = fonts.helpers.addprivate
+ if olddata and not olddata.commands then
if swap then
swap = characters[swap]
height = swap.depth
@@ -398,9 +399,9 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
end
end
end
- return glyphdata, true
+ return glyphdata
else
- return olddata, false
+ return olddata
end
end
@@ -444,9 +445,9 @@ addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mat
addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h" } )
addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h" } )
-virtualcharacters[0xFE3DF] = function(data) return data.target.characters[0x23DF] end
-virtualcharacters[0xFE3DD] = function(data) return data.target.characters[0x23DD] end
-virtualcharacters[0xFE3B5] = function(data) return data.target.characters[0x23B5] end
+virtualcharacters[0xFE3DF] = function(data) return data.original.characters[0x23DF] end
+virtualcharacters[0xFE3DD] = function(data) return data.original.characters[0x23DD] end
+virtualcharacters[0xFE3B5] = function(data) return data.original.characters[0x23B5] end
-- todo: add some more .. numbers might change
@@ -456,10 +457,8 @@ addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mat
local function smashed(data,unicode,private)
local target = data.target
local height = target.parameters.xheight / 2
- local c, done = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height)
- if done then
- c.top_accent = nil -- or maybe also all the others
- end
+ local c = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height)
+ c.top_accent = nil
return c
end
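A small sketch of the virtualcharacters change above: the resolvers now read the extensible glyph from data.original.characters rather than data.target.characters. The data table below is an invented stand-in for the font data that fallbacks.apply would pass in.

local virtualcharacters = { }
virtualcharacters[0xFE3DF] = function(data)
    return data.original.characters[0x23DF]   -- serve the private slot from the original font
end

local data = {
    original = { characters = { [0x23DF] = { width = 500, height = 200 } } },
    target   = { characters = { } },           -- the target need not carry the glyph itself
}
local c = virtualcharacters[0xFE3DF](data)
print(c and c.width)                           -- 500
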
diff --git a/tex/context/base/math-fen.mkiv b/tex/context/base/math-fen.mkiv
index fe959cc1e..94d93e4af 100644
--- a/tex/context/base/math-fen.mkiv
+++ b/tex/context/base/math-fen.mkiv
@@ -144,7 +144,7 @@
\definemathfence [mirroredangle] [mirrored] [\c!right="27E8,\c!left="27E9]
\definemathfence [mirroreddoubleangle] [mirrored] [\c!right="27EA,\c!left="27EB]
\definemathfence [mirroredsolidus] [mirrored] [\c!right="2044,\c!left="2044]
-\definemathfence [mirrorednothing] [mirrored]
+\definemathfence [mirrorednothing] [mirorred]
%D A bonus:
diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua
index 1351559a0..6be06e634 100644
--- a/tex/context/base/math-ini.lua
+++ b/tex/context/base/math-ini.lua
@@ -350,12 +350,10 @@ local utf8byte = lpeg.patterns.utf8byte * lpeg.P(-1)
local somechar = { }
table.setmetatableindex(somechar,function(t,k)
- if k then
- local b = lpegmatch(utf8byte,k)
- local v = b and chardata[b] or false
- t[k] = v
- return v
- end
+ local b = lpegmatch(utf8byte,k)
+ local v = b and chardata[b] or false
+ t[k] = v
+ return v
end)
local function utfmathclass(chr, default)
@@ -472,7 +470,6 @@ mathematics.utfmathclass = utfmathclass
mathematics.utfmathstretch = utfmathstretch
mathematics.utfmathcommand = utfmathcommand
mathematics.utfmathfiller = utfmathfiller
-mathematics.utfmathaccent = utfmathaccent
-- interfaced
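The somechar change above keeps a common memoization idiom: the __index metamethod decodes the UTF key once, stores the result (or false for a miss), and later lookups bypass the metamethod entirely. A standalone sketch of the same idiom, using Lua 5.3's utf8.codepoint and a toy chardata table in place of lpeg.patterns.utf8byte and the real character data:

local chardata = { [0x3B1] = { name = "alpha" } }            -- toy character data
local somechar = setmetatable({ }, { __index = function(t, k)
    local b = utf8 and utf8.codepoint and utf8.codepoint(k)  -- decode the first codepoint
    local v = b and chardata[b] or false                     -- false caches a real miss too
    t[k] = v                                                 -- store under the original key
    return v
end })

print(somechar["α"] and somechar["α"].name)                  -- alpha; second access hits the cache
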
diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua
index 4e25fe206..f3987c12f 100644
--- a/tex/context/base/math-noa.lua
+++ b/tex/context/base/math-noa.lua
@@ -54,35 +54,16 @@ local report_families = logs.reporter("mathematics","families")
local a_mathrendering = attributes.private("mathrendering")
local a_exportstatus = attributes.private("exportstatus")
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-local tonut = nuts.tonut
-local nutstring = nuts.tostring
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local free_node = nuts.free
-local new_node = nuts.new -- todo: pool: math_noad math_sub
-local copy_node = nuts.copy
-
-local mlist_to_hlist = nodes.mlist_to_hlist
-
+local mlist_to_hlist = node.mlist_to_hlist
local font_of_family = node.family_font
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
+local free_node = node.free
+local new_node = node.new -- todo: pool: math_noad math_sub
+local copy_node = node.copy
-local new_kern = nodepool.kern
-local new_rule = nodepool.rule
+local new_kern = nodes.pool.kern
+local new_rule = nodes.pool.rule
local topoints = number.points
@@ -145,23 +126,23 @@ local function process(start,what,n,parent)
if n then n = n + 1 else n = 0 end
local prev = nil
while start do
- local id = getid(start)
+ local id = start.id
if trace_processing then
if id == math_noad then
- report_processing("%w%S, class %a",n*2,nutstring(start),noadcodes[getsubtype(start)])
+ report_processing("%w%S, class %a",n*2,start,noadcodes[start.subtype])
elseif id == math_char then
- local char = getchar(start)
- local fam = getfield(start,"fam")
+ local char = start.char
+ local fam = start.fam
local font = font_of_family(fam)
- report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,nutstring(start),fam,font,char,char)
+ report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,start,fam,font,char,char)
else
- report_processing("%w%S",n*2,nutstring(start))
+ report_processing("%w%S",n*2,start)
end
end
local proc = what[id]
if proc then
-- report_processing("start processing")
- local done, newstart = proc(start,what,n,parent) -- prev is bugged: or getprev(start)
+ local done, newstart = proc(start,what,n,parent) -- prev is bugged: or start.prev
if newstart then
start = newstart
-- report_processing("stop processing (new start)")
@@ -173,55 +154,55 @@ local function process(start,what,n,parent)
elseif id == math_noad then
if prev then
-- we have no proper prev in math nodes yet
- setfield(start,"prev",prev)
+ start.prev = prev
end
-
- local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
elseif id == math_box or id == math_sub then
- local noad = getfield(start,"list") if noad then process(noad,what,n,start) end -- list (not getlist !)
+ -- local noad = start.list if noad then process(noad,what,n,start) end -- list
+ local noad = start.head if noad then process(noad,what,n,start) end -- list
elseif id == math_fraction then
- local noad = getfield(start,"num") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"denom") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"left") if noad then process(noad,what,n,start) end -- delimiter
- noad = getfield(start,"right") if noad then process(noad,what,n,start) end -- delimiter
+ local noad = start.num if noad then process(noad,what,n,start) end -- list
+ noad = start.denom if noad then process(noad,what,n,start) end -- list
+ noad = start.left if noad then process(noad,what,n,start) end -- delimiter
+ noad = start.right if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_choice then
- local noad = getfield(start,"display") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"text") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"script") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"scriptscript") if noad then process(noad,what,n,start) end -- list
+ local noad = start.display if noad then process(noad,what,n,start) end -- list
+ noad = start.text if noad then process(noad,what,n,start) end -- list
+ noad = start.script if noad then process(noad,what,n,start) end -- list
+ noad = start.scriptscript if noad then process(noad,what,n,start) end -- list
elseif id == math_fence then
- local noad = getfield(start,"delim") if noad then process(noad,what,n,start) end -- delimiter
+ local noad = start.delim if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_radical then
- local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"left") if noad then process(noad,what,n,start) end -- delimiter
- noad = getfield(start,"degree") if noad then process(noad,what,n,start) end -- list
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
+ noad = start.left if noad then process(noad,what,n,start) end -- delimiter
+ noad = start.degree if noad then process(noad,what,n,start) end -- list
elseif id == math_accent then
- local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"accent") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"bot_accent") if noad then process(noad,what,n,start) end -- list
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
+ noad = start.accent if noad then process(noad,what,n,start) end -- list
+ noad = start.bot_accent if noad then process(noad,what,n,start) end -- list
elseif id == math_style then
-- has a next
else
-- glue, penalty, etc
end
prev = start
- start = getnext(start)
+ start = start.next
end
end
local function processnoads(head,actions,banner)
if trace_processing then
report_processing("start %a",banner)
- process(tonut(head),actions)
+ process(head,actions)
report_processing("stop %a",banner)
else
- process(tonut(head),actions)
+ process(head,actions)
end
end
@@ -252,71 +233,37 @@ local familymap = { [0] =
"pseudobold",
}
--- families[math_char] = function(pointer)
--- if getfield(pointer,"fam") == 0 then
--- local a = getattr(pointer,a_mathfamily)
--- if a and a > 0 then
--- setattr(pointer,a_mathfamily,0)
--- if a > 5 then
--- local char = getchar(pointer)
--- local bold = boldmap[char]
--- local newa = a - 3
--- if bold then
--- setattr(pointer,a_exportstatus,char)
--- setfield(pointer,"char",bold)
--- if trace_families then
--- report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
--- end
--- else
--- if trace_families then
--- report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
--- end
--- end
--- setfield(pointer,"fam",newa)
--- else
--- if trace_families then
--- local char = getchar(pointer)
--- report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
--- end
--- setfield(pointer,"fam",a)
--- end
--- else
--- -- pointer.fam = 0
--- end
--- end
--- end
-
families[math_char] = function(pointer)
- if getfield(pointer,"fam") == 0 then
- local a = getattr(pointer,a_mathfamily)
+ if pointer.fam == 0 then
+ local a = pointer[a_mathfamily]
if a and a > 0 then
- setattr(pointer,a_mathfamily,0)
+ pointer[a_mathfamily] = 0
if a > 5 then
- local char = getchar(pointer)
+ local char = pointer.char
local bold = boldmap[char]
local newa = a - 3
if not bold then
if trace_families then
report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
end
- setfield(pointer,"fam",newa)
- elseif not fontcharacters[font_of_family(newa)][bold] then
+ pointer.fam = newa
+ elseif not fontcharacters[font_of_family(newa)][bold] then
if trace_families then
report_families("no bold character for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
end
if newa > 3 then
- setfield(pointer,"fam",newa-3)
+ pointer.fam = newa - 3
end
else
- setattr(pointer,a_exportstatus,char)
- setfield(pointer,"char",bold)
+ pointer[a_exportstatus] = char
+ pointer.char = bold
if trace_families then
report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
end
- setfield(pointer,"fam",newa)
+ pointer.fam = newa
end
else
- local char = getchar(pointer)
+ local char = pointer.char
if not fontcharacters[font_of_family(a)][char] then
if trace_families then
report_families("no bold replacement for %C",char)
@@ -325,7 +272,7 @@ families[math_char] = function(pointer)
if trace_families then
report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
end
- setfield(pointer,"fam",a)
+ pointer.fam = a
end
end
end
@@ -333,31 +280,31 @@ families[math_char] = function(pointer)
end
families[math_delim] = function(pointer)
- if getfield(pointer,"small_fam") == 0 then
- local a = getattr(pointer,a_mathfamily)
+ if pointer.small_fam == 0 then
+ local a = pointer[a_mathfamily]
if a and a > 0 then
- setattr(pointer,a_mathfamily,0)
+ pointer[a_mathfamily] = 0
if a > 5 then
-- no bold delimiters in unicode
a = a - 3
end
- local char = getfield(pointer,"small_char")
+ local char = pointer.small_char
local okay = fontcharacters[font_of_family(a)][char]
if okay then
- setfield(pointer,"small_fam",a)
+ pointer.small_fam = a
elseif a > 2 then
- setfield(pointer,"small_fam",a-3)
+ pointer.small_fam = a - 3
end
- local char = getfield(pointer,"large_char")
+ local char = pointer.large_char
local okay = fontcharacters[font_of_family(a)][char]
if okay then
- setfield(pointer,"large_fam",a)
+ pointer.large_fam = a
elseif a > 2 then
- setfield(pointer,"large_fam",a-3)
+ pointer.large_fam = a - 3
end
else
- setfield(pointer,"small_fam",0)
- setfield(pointer,"large_fam",0)
+ pointer.small_fam = 0
+ pointer.large_fam = 0
end
end
end
@@ -385,8 +332,8 @@ local fallbackstyleattr = mathematics.fallbackstyleattr
local setnodecolor = nodes.tracers.colors.set
local function checked(pointer)
- local char = getchar(pointer)
- local fam = getfield(pointer,"fam")
+ local char = pointer.char
+ local fam = pointer.fam
local id = font_of_family(fam)
local tc = fontcharacters[id]
if not tc[char] then
@@ -399,27 +346,27 @@ local function checked(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- setattr(pointer,a_exportstatus,char) -- testcase: exponentiale
- setfield(pointer,"char",newchar)
+ pointer[a_exportstatus] = char -- testcase: exponentiale
+ pointer.char = newchar
return true
end
end
end
processors.relocate[math_char] = function(pointer)
- local g = getattr(pointer,a_mathgreek) or 0
- local a = getattr(pointer,a_mathalphabet) or 0
+ local g = pointer[a_mathgreek] or 0
+ local a = pointer[a_mathalphabet] or 0
if a > 0 or g > 0 then
if a > 0 then
- setattr(pointer,a_mathgreek,0)
+ pointer[a_mathgreek] = 0
end
if g > 0 then
- setattr(pointer,a_mathalphabet,0)
+ pointer[a_mathalphabet] = 0
end
- local char = getchar(pointer)
+ local char = pointer.char
local newchar = remapalphabets(char,a,g)
if newchar then
- local fam = getfield(pointer,"fam")
+ local fam = pointer.fam
local id = font_of_family(fam)
local characters = fontcharacters[id]
if characters[newchar] then
@@ -429,7 +376,7 @@ processors.relocate[math_char] = function(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- setfield(pointer,"char",newchar)
+ pointer.char = newchar
return true
else
local fallback = fallbackstyleattr(a)
@@ -443,7 +390,7 @@ processors.relocate[math_char] = function(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- setfield(pointer,"char",newchar)
+ pointer.char = newchar
return true
elseif trace_remapping then
report_remap("char",id,char,newchar," fails (no fallback character)")
@@ -489,19 +436,19 @@ processors.render = { }
local rendersets = mathematics.renderings.numbers or { } -- store
processors.render[math_char] = function(pointer)
- local attr = getattr(pointer,a_mathrendering)
+ local attr = pointer[a_mathrendering]
if attr and attr > 0 then
- local char = getchar(pointer)
+ local char = pointer.char
local renderset = rendersets[attr]
if renderset then
local newchar = renderset[char]
if newchar then
- local fam = getfield(pointer,"fam")
+ local fam = pointer.fam
local id = font_of_family(fam)
local characters = fontcharacters[id]
if characters and characters[newchar] then
- setfield(pointer,"char",newchar)
- setattr(pointer,a_exportstatus,char)
+ pointer.char = newchar
+ pointer[a_exportstatus] = char
end
end
end
@@ -528,19 +475,19 @@ local mathsize = attributes.private("mathsize")
local resize = { } processors.resize = resize
resize[math_fence] = function(pointer)
- local subtype = getsubtype(pointer)
+ local subtype = pointer.subtype
if subtype == left_fence_code or subtype == right_fence_code then
- local a = getattr(pointer,mathsize)
+ local a = pointer[mathsize]
if a and a > 0 then
local method, size = div(a,100), a % 100
- setattr(pointer,mathsize,0)
- local delimiter = getfield(pointer,"delim")
- local chr = getfield(delimiter,"small_char")
+ pointer[mathsize] = 0
+ local delimiter = pointer.delim
+ local chr = delimiter.small_char
if chr > 0 then
- local fam = getfield(delimiter,"small_fam")
+ local fam = delimiter.small_fam
local id = font_of_family(fam)
if id > 0 then
- setfield(delimiter,"small_char",mathematics.big(fontdata[id],chr,size,method))
+ delimiter.small_char = mathematics.big(fontdata[id],chr,size,method)
end
end
end
@@ -552,6 +499,7 @@ function handlers.resize(head,style,penalties)
return true
end
+
local collapse = { } processors.collapse = collapse
local mathpairs = characters.mathpairs
@@ -590,20 +538,20 @@ local validpair = {
}
local function movesubscript(parent,current_nucleus,current_char)
- local prev = getfield(parent,"prev")
- if prev and getid(prev) == math_noad then
- if not getfield(prev,"sup") and not getfield(prev,"sub") then
- setfield(current_nucleus,"char",movesub[current_char or getchar(current_nucleus)])
+ local prev = parent.prev
+ if prev and prev.id == math_noad then
+ if not prev.sup and not prev.sub then
+ current_nucleus.char = movesub[current_char or current_nucleus.char]
-- {f} {'}_n => f_n^'
- local nucleus = getfield(parent,"nucleus")
- local sub = getfield(parent,"sub")
- local sup = getfield(parent,"sup")
- setfield(prev,"sup",nucleus)
- setfield(prev,"sub",sub)
+ local nucleus = parent.nucleus
+ local sub = parent.sub
+ local sup = parent.sup
+ prev.sup = nucleus
+ prev.sub = sub
local dummy = copy_node(nucleus)
- setfield(dummy,"char",0)
- setfield(parent,"nucleus",dummy)
- setfield(parent,"sub",nil)
+ dummy.char = 0
+ parent.nucleus = dummy
+ parent.sub = nil
if trace_collapsing then
report_collapsing("fixing subscript")
end
@@ -613,40 +561,40 @@ end
local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to turn in on and off
if parent then
- if validpair[getsubtype(parent)] then
- local current_nucleus = getfield(parent,"nucleus")
- if getid(current_nucleus) == math_char then
- local current_char = getchar(current_nucleus)
- if not getfield(parent,"sub") and not getfield(parent,"sup") then
+ if validpair[parent.subtype] then
+ local current_nucleus = parent.nucleus
+ if current_nucleus.id == math_char then
+ local current_char = current_nucleus.char
+ if not parent.sub and not parent.sup then
local mathpair = mathpairs[current_char]
if mathpair then
- local next_noad = getnext(parent)
- if next_noad and getid(next_noad) == math_noad then
- if validpair[getsubtype(next_noad)] then
- local next_nucleus = getfield(next_noad,"nucleus")
- if getid(next_nucleus) == math_char then
- local next_char = getchar(next_nucleus)
+ local next_noad = parent.next
+ if next_noad and next_noad.id == math_noad then
+ if validpair[next_noad.subtype] then
+ local next_nucleus = next_noad.nucleus
+ if next_nucleus.id == math_char then
+ local next_char = next_nucleus.char
local newchar = mathpair[next_char]
if newchar then
- local fam = getfield(current_nucleus,"fam")
+ local fam = current_nucleus.fam
local id = font_of_family(fam)
local characters = fontcharacters[id]
if characters and characters[newchar] then
if trace_collapsing then
report_collapsing("%U + %U => %U",current_char,next_char,newchar)
end
- setfield(current_nucleus,"char",newchar)
- local next_next_noad = getnext(next_noad)
+ current_nucleus.char = newchar
+ local next_next_noad = next_noad.next
if next_next_noad then
- setfield(parent,"next",next_next_noad)
- setfield(next_next_noad,"prev",parent)
+ parent.next = next_next_noad
+ next_next_noad.prev = parent
else
- setfield(parent,"next",nil)
+ parent.next = nil
end
- setfield(parent,"sup",getfield(next_noad,"sup"))
- setfield(parent,"sub",getfield(next_noad,"sub"))
- setfield(next_noad,"sup",nil)
- setfield(next_noad,"sub",nil)
+ parent.sup = next_noad.sup
+ parent.sub = next_noad.sub
+ next_noad.sup = nil
+ next_noad.sub = nil
free_node(next_noad)
collapsepair(pointer,what,n,parent,true)
if not nested and movesub[current_char] then
@@ -686,13 +634,13 @@ local replaced = { }
local function replace(pointer,what,n,parent)
pointer = parent -- we're following the parent list (chars trigger this)
- local next = getnext(pointer)
+ local next = pointer.next
local start_super, stop_super, start_sub, stop_sub
local mode = "unset"
- while next and getid(next) == math_noad do
- local nextnucleus = getfield(next,"nucleus")
- if nextnucleus and getid(nextnucleus) == math_char and not getfield(next,"sub") and not getfield(next,"sup") then
- local char = getchar(nextnucleus)
+ while next and next.id == math_noad do
+ local nextnucleus = next.nucleus
+ if nextnucleus and nextnucleus.id == math_char and not next.sub and not next.sup then
+ local char = nextnucleus.char
local s = superscripts[char]
if s then
if not start_super then
@@ -702,8 +650,8 @@ local function replace(pointer,what,n,parent)
break
end
stop_super = next
- next = getnext(next)
- setfield(nextnucleus,"char",s)
+ next = next.next
+ nextnucleus.char = s
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
report_normalizing("superscript %C becomes %C",char,s)
@@ -718,8 +666,8 @@ local function replace(pointer,what,n,parent)
break
end
stop_sub = next
- next = getnext(next)
- setfield(nextnucleus,"char",s)
+ next = next.next
+ nextnucleus.char = s
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
report_normalizing("subscript %C becomes %C",char,s)
@@ -734,29 +682,29 @@ local function replace(pointer,what,n,parent)
end
if start_super then
if start_super == stop_super then
- setfield(pointer,"sup",getfield(start_super,"nucleus"))
+ pointer.sup = start_super.nucleus
else
local list = new_node(math_sub) -- todo attr
- setfield(list,"list",start_super)
- setfield(pointer,"sup",list)
+ list.head = start_super
+ pointer.sup = list
end
if mode == "super" then
- setfield(pointer,"next",getnext(stop_super))
+ pointer.next = stop_super.next
end
- setfield(stop_super,"next",nil)
+ stop_super.next = nil
end
if start_sub then
if start_sub == stop_sub then
- setfield(pointer,"sub",getfield(start_sub,"nucleus"))
+ pointer.sub = start_sub.nucleus
else
local list = new_node(math_sub) -- todo attr
- setfield(list,"list",start_sub)
- setfield(pointer,"sub",list)
+ list.head = start_sub
+ pointer.sub = list
end
if mode == "sub" then
- setfield(pointer,"next",getnext(stop_sub))
+ pointer.next = stop_sub.next
end
- setfield(stop_sub,"next",nil)
+ stop_sub.next = nil
end
-- we could return stop
end
@@ -837,20 +785,20 @@ function mathematics.setalternate(fam,tag)
end
alternate[math_char] = function(pointer)
- local a = getattr(pointer,a_mathalternate)
+ local a = pointer[a_mathalternate]
if a and a > 0 then
- setattr(pointer,a_mathalternate,0)
- local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
+ pointer[a_mathalternate] = 0
+ local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
local mathalternatesattributes = tfmdata.shared.mathalternatesattributes
if mathalternatesattributes then
local what = mathalternatesattributes[a]
- local alt = getalternate(tfmdata,getchar(pointer),what.feature,what.value)
+ local alt = getalternate(tfmdata,pointer.char,what.feature,what.value)
if alt then
if trace_alternates then
report_alternates("alternate %a, value %a, replacing glyph %U by glyph %U",
- tostring(what.feature),tostring(what.value),getchar(pointer),alt)
+ tostring(what.feature),tostring(what.value),pointer.char,alt)
end
- setfield(pointer,"char",alt)
+ pointer.char = alt
end
end
end
@@ -937,14 +885,13 @@ end
local function insert_kern(current,kern)
local sub = new_node(math_sub) -- todo: pool
local noad = new_node(math_noad) -- todo: pool
- setfield(sub,"list",kern)
- setfield(kern,"next",noad)
- setfield(noad,"nucleus",current)
+ sub.head = kern
+ kern.next = noad
+ noad.nucleus = current
return sub
end
local setcolor = nodes.tracers.colors.set
-local resetcolor = nodes.tracers.colors.reset
local italic_kern = new_kern
local c_positive_d = "trace:db"
local c_negative_d = "trace:dr"
@@ -966,44 +913,44 @@ trackers.register("math.italics", function(v)
end)
italics[math_char] = function(pointer,what,n,parent)
- local method = getattr(pointer,a_mathitalics)
+ local method = pointer[a_mathitalics]
if method and method > 0 then
- local char = getchar(pointer)
- local font = font_of_family(getfield(pointer,"fam")) -- todo: table
+ local char = pointer.char
+ local font = font_of_family(pointer.fam) -- todo: table
local correction, visual = getcorrection(method,font,char)
if correction then
- local pid = getid(parent)
+ local pid = parent.id
local sub, sup
if pid == math_noad then
- sup = getfield(parent,"sup")
- sub = getfield(parent,"sub")
+ sup = parent.sup
+ sub = parent.sub
end
if sup or sub then
- local subtype = getsubtype(parent)
+ local subtype = parent.subtype
if subtype == noad_oplimits then
if sup then
- setfield(parent,"sup",insert_kern(sup,italic_kern(correction,font)))
+ parent.sup = insert_kern(sup,italic_kern(correction,font))
if trace_italics then
report_italics("method %a, adding %p italic correction for upper limit of %C",method,correction,char)
end
end
if sub then
local correction = - correction
- setfield(parent,"sub",insert_kern(sub,italic_kern(correction,font)))
+ parent.sub = insert_kern(sub,italic_kern(correction,font))
if trace_italics then
report_italics("method %a, adding %p italic correction for lower limit of %C",method,correction,char)
end
end
else
if sup then
- setfield(parent,"sup",insert_kern(sup,italic_kern(correction,font)))
+ parent.sup = insert_kern(sup,italic_kern(correction,font))
if trace_italics then
report_italics("method %a, adding %p italic correction before superscript after %C",method,correction,char)
end
end
end
else
- local next_noad = getnext(parent)
+ local next_noad = parent.next
if not next_noad then
if n== 1 then -- only at the outer level .. will become an option (always,endonly,none)
if trace_italics then
@@ -1011,12 +958,12 @@ italics[math_char] = function(pointer,what,n,parent)
end
insert_node_after(parent,parent,italic_kern(correction,font))
end
- elseif getid(next_noad) == math_noad then
- local next_subtype = getsubtype(next_noad)
+ elseif next_noad.id == math_noad then
+ local next_subtype = next_noad.subtype
if next_subtype == noad_punct or next_subtype == noad_ord then
- local next_nucleus = getfield(next_noad,"nucleus")
- if getid(next_nucleus) == math_char then
- local next_char = getchar(next_nucleus)
+ local next_nucleus = next_noad.nucleus
+ if next_nucleus.id == math_char then
+ local next_char = next_nucleus.char
local next_data = chardata[next_char]
local visual = next_data.visual
if visual == "it" or visual == "bi" then
@@ -1100,15 +1047,15 @@ local validvariants = { -- fast check on valid
}
variants[math_char] = function(pointer,what,n,parent) -- also set export value
- local char = getchar(pointer)
+ local char = pointer.char
local selector = validvariants[char]
if selector then
- local next = getnext(parent)
- if next and getid(next) == math_noad then
- local nucleus = getfield(next,"nucleus")
- if nucleus and getid(nucleus) == math_char and getchar(nucleus) == selector then
+ local next = parent.next
+ if next and next.id == math_noad then
+ local nucleus = next.nucleus
+ if nucleus and nucleus.id == math_char and nucleus.char == selector then
local variant
- local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
+ local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
local mathvariants = tfmdata.resources.variants -- and variantdata
if mathvariants then
mathvariants = mathvariants[selector]
@@ -1117,8 +1064,8 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
end
end
if variant then
- setfield(pointer,"char",variant)
- setattr(pointer,a_exportstatus,char) -- we don't export the variant as it's visual markup
+ pointer.char = variant
+ pointer[a_exportstatus] = char -- we don't export the variant as it's visual markup
if trace_variants then
report_variants("variant (%U,%U) replaced by %U",char,selector,variant)
end
@@ -1127,8 +1074,8 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
report_variants("no variant (%U,%U)",char,selector)
end
end
- setfield(next,"prev",pointer)
- setfield(parent,"next",getnext(next))
+ next.prev = pointer
+ parent.next = next.next
free_node(next)
end
end
@@ -1161,7 +1108,7 @@ local colors = {
}
classes[math_char] = function(pointer,what,n,parent)
- local color = colors[getsubtype(parent)]
+ local color = colors[parent.subtype]
if color then
setcolor(pointer,color)
else
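The rewritten process above is a dispatch-and-recurse walker: each node id is looked up in the handler table what, and noad sub-lists (nucleus, sup, sub, ...) are entered by direct field access. A compact plain-table sketch of that shape; the ids, the handler and the tiny demo tree are made up for illustration:

local math_noad, math_char = 1, 2          -- toy ids; real code gets these from nodecodes

local function process(start, what, n, parent)
    n = n and n + 1 or 0
    while start do
        local proc = what[start.id]        -- per-id handler, as in the hunk
        if proc then
            proc(start, what, n, parent)
        elseif start.id == math_noad then
            -- recurse into the sub-lists by direct field access
            local noad = start.nucleus if noad then process(noad, what, n, start) end
            noad = start.sup           if noad then process(noad, what, n, start) end
            noad = start.sub           if noad then process(noad, what, n, start) end
        end
        start = start.next
    end
end

local seen = { }
local what = { [math_char] = function(p) seen[#seen+1] = p.char end } -- 'families'-style handler

local x    = { id = math_char, char = 0x78 }                          -- x
local two  = { id = math_char, char = 0x32 }                          -- 2
local noad = { id = math_noad, nucleus = x, sup = two }
process(noad, what)
print(#seen)                               -- 2: nucleus and superscript were both visited
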
diff --git a/tex/context/base/math-tag.lua b/tex/context/base/math-tag.lua
index 3cd4cae16..ab5902dd4 100644
--- a/tex/context/base/math-tag.lua
+++ b/tex/context/base/math-tag.lua
@@ -11,22 +11,10 @@ if not modules then modules = { } end modules ['math-tag'] = {
local find, match = string.find, string.match
local insert, remove = table.insert, table.remove
-local attributes = attributes
-local nodes = nodes
+local attributes, nodes = attributes, nodes
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getchar = nuts.getchar
-local getlist = nuts.getlist
-local getfield = nuts.getfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-
-local set_attributes = nuts.setattributes
-local traverse_nodes = nuts.traverse
+local set_attributes = nodes.setattributes
+local traverse_nodes = node.traverse
local nodecodes = nodes.nodecodes
@@ -73,24 +61,22 @@ local function processsubsup(start)
-- At some point we might need to add an attribute signaling the
-- super- and subscripts because TeX and MathML use a different
-- order.
- local nucleus = getfield(start,"nucleus")
- local sup = getfield(start,"sup")
- local sub = getfield(start,"sub")
+ local nucleus, sup, sub = start.nucleus, start.sup, start.sub
if sub then
if sup then
- setattr(start,a_tagged,start_tagged("msubsup"))
+ start[a_tagged] = start_tagged("msubsup")
process(nucleus)
process(sub)
process(sup)
stop_tagged()
else
- setattr(start,a_tagged,start_tagged("msub"))
+ start[a_tagged] = start_tagged("msub")
process(nucleus)
process(sub)
stop_tagged()
end
elseif sup then
- setattr(start,a_tagged,start_tagged("msup"))
+ start[a_tagged] = start_tagged("msup")
process(nucleus)
process(sup)
stop_tagged()
@@ -107,11 +93,11 @@ local actionstack = { }
process = function(start) -- we cannot use the processor as we have no finalizers (yet)
while start do
- local id = getid(start)
+ local id = start.id
if id == math_char_code then
- local char = getchar(start)
+ local char = start.char
-- check for code
- local a = getattr(start,a_mathcategory)
+ local a = start[a_mathcategory]
if a then
a = { detail = a }
end
@@ -133,22 +119,22 @@ process = function(start) -- we cannot use the processor as we have no finalizer
else
tag = "mo"
end
- setattr(start,a_tagged,start_tagged(tag,a))
+ start[a_tagged] = start_tagged(tag,a)
stop_tagged()
break -- okay?
elseif id == math_textchar_code then
-- check for code
- local a = getattr(start,a_mathcategory)
+ local a = start[a_mathcategory]
if a then
- setattr(start,a_tagged,start_tagged("ms",{ detail = a }))
+ start[a_tagged] = start_tagged("ms",{ detail = a })
else
- setattr(start,a_tagged,start_tagged("ms"))
+ start[a_tagged] = start_tagged("ms")
end
stop_tagged()
break
elseif id == math_delim_code then
-- check for code
- setattr(start,a_tagged,start_tagged("mo"))
+ start[a_tagged] = start_tagged("mo")
stop_tagged()
break
elseif id == math_style_code then
@@ -157,14 +143,14 @@ process = function(start) -- we cannot use the processor as we have no finalizer
processsubsup(start)
elseif id == math_box_code or id == hlist_code or id == vlist_code then
-- keep an eye on math_box_code and see what ends up in there
- local attr = getattr(start,a_tagged)
+ local attr = start[a_tagged]
local last = attr and taglist[attr]
if last and find(last[#last],"formulacaption[:%-]") then
-- leave alone, will nicely move to the outer level
else
local text = start_tagged("mtext")
- setattr(start,a_tagged,text)
- local list = getfield(start,"list")
+ start[a_tagged] = text
+ local list = start.list
if not list then
-- empty list
elseif not attr then
@@ -180,8 +166,8 @@ process = function(start) -- we cannot use the processor as we have no finalizer
local function runner(list) -- quite inefficient
local cache = { } -- we can have nested unboxed mess so best local to runner
for n in traverse_nodes(list) do
- local id = getid(n)
- local aa = getattr(n,a_tagged)
+ local id = n.id
+ local aa = n[a_tagged]
if aa then
local ac = cache[aa]
if not ac then
@@ -199,12 +185,12 @@ process = function(start) -- we cannot use the processor as we have no finalizer
end
cache[aa] = ac
end
- setattr(n,a_tagged,ac)
+ n[a_tagged] = ac
else
- setattr(n,a_tagged,text)
+ n[a_tagged] = text
end
if id == hlist_code or id == vlist_code then
- runner(getlist(n))
+ runner(n.list)
end
end
end
@@ -213,53 +199,47 @@ process = function(start) -- we cannot use the processor as we have no finalizer
stop_tagged()
end
elseif id == math_sub_code then
- local list = getfield(start,"list")
+ local list = start.list
if list then
- local attr = getattr(start,a_tagged)
+ local attr = start[a_tagged]
local last = attr and taglist[attr]
local action = last and match(last[#last],"maction:(.-)%-")
if action and action ~= "" then
if actionstack[#actionstack] == action then
- setattr(start,a_tagged,start_tagged("mrow"))
+ start[a_tagged] = start_tagged("mrow")
process(list)
stop_tagged()
else
insert(actionstack,action)
- setattr(start,a_tagged,start_tagged("mrow",{ detail = action }))
+ start[a_tagged] = start_tagged("mrow",{ detail = action })
process(list)
stop_tagged()
remove(actionstack)
end
else
- setattr(start,a_tagged,start_tagged("mrow"))
+ start[a_tagged] = start_tagged("mrow")
process(list)
stop_tagged()
end
end
elseif id == math_fraction_code then
- local num = getfield(start,"num")
- local denom = getfield(start,"denom")
- local left = getfield(start,"left")
- local right = getfield(start,"right")
+ local num, denom, left, right = start.num, start.denom, start.left, start.right
if left then
- setattr(left,a_tagged,start_tagged("mo"))
+ left[a_tagged] = start_tagged("mo")
process(left)
stop_tagged()
end
- setattr(start,a_tagged,start_tagged("mfrac"))
+ start[a_tagged] = start_tagged("mfrac")
process(num)
process(denom)
stop_tagged()
if right then
- setattr(right,a_tagged,start_tagged("mo"))
+ right[a_tagged] = start_tagged("mo")
process(right)
stop_tagged()
end
elseif id == math_choice_code then
- local display = getfield(start,"display")
- local text = getfield(start,"text")
- local script = getfield(start,"script")
- local scriptscript = getfield(start,"scriptscript")
+ local display, text, script, scriptscript = start.display, start.text, start.script, start.scriptscript
if display then
process(display)
end
@@ -273,69 +253,67 @@ process = function(start) -- we cannot use the processor as we have no finalizer
process(scriptscript)
end
elseif id == math_fence_code then
- local delim = getfield(start,"delim")
- local subtype = getfield(start,"subtype")
- -- setattr(start,a_tagged,start_tagged("mfenced")) -- needs checking
+ local delim = start.delim
+ local subtype = start.subtype
if subtype == 1 then
-- left
+ start[a_tagged] = start_tagged("mfenced")
if delim then
- setattr(start,a_tagged,start_tagged("mleft"))
+ start[a_tagged] = start_tagged("mleft")
process(delim)
stop_tagged()
end
elseif subtype == 2 then
-- middle
if delim then
- setattr(start,a_tagged,start_tagged("mmiddle"))
+ start[a_tagged] = start_tagged("mmiddle")
process(delim)
stop_tagged()
end
elseif subtype == 3 then
if delim then
- setattr(start,a_tagged,start_tagged("mright"))
+ start[a_tagged] = start_tagged("mright")
process(delim)
stop_tagged()
end
+ stop_tagged()
else
-- can't happen
end
- -- stop_tagged()
elseif id == math_radical_code then
- local left = getfield(start,"left")
- local degree = getfield(start,"degree")
+ local left, degree = start.left, start.degree
if left then
start_tagged("")
process(left) -- root symbol, ignored
stop_tagged()
end
if degree then -- not good enough, can be empty mlist
- setattr(start,a_tagged,start_tagged("mroot"))
+ start[a_tagged] = start_tagged("mroot")
processsubsup(start)
process(degree)
stop_tagged()
else
- setattr(start,a_tagged,start_tagged("msqrt"))
+ start[a_tagged] = start_tagged("msqrt")
processsubsup(start)
stop_tagged()
end
elseif id == math_accent_code then
- local accent = getfield(start,"accent")
- local bot_accent = getfield(start,"bot_accent")
+ local accent, bot_accent = start.accent, start.bot_accent
if bot_accent then
if accent then
- setattr(start,a_tagged,start_tagged("munderover",{ detail = "accent" }))
+ start[a_tagged] = start_tagged("munderover",{ detail = "accent" })
processsubsup(start)
process(bot_accent)
process(accent)
stop_tagged()
else
- setattr(start,a_tagged,start_tagged("munder",{ detail = "accent" }))
+ start[a_tagged] = start_tagged("munder",{ detail = "accent" })
processsubsup(start)
process(bot_accent)
stop_tagged()
end
elseif accent then
- setattr(start,a_tagged,start_tagged("mover",{ detail = "accent" }))
+ start[a_tagged] = start_tagged("mover",{ detail = "accent" })
processsubsup(start)
process(accent)
stop_tagged()
@@ -343,23 +321,22 @@ process = function(start) -- we cannot use the processor as we have no finalizer
processsubsup(start)
end
elseif id == glue_code then
- setattr(start,a_tagged,start_tagged("mspace"))
+ start[a_tagged] = start_tagged("mspace")
stop_tagged()
else
- setattr(start,a_tagged,start_tagged("merror", { detail = nodecodes[i] }))
+ start[a_tagged] = start_tagged("merror", { detail = nodecodes[i] })
stop_tagged()
end
- start = getnext(start)
+ start = start.next
end
end
function noads.handlers.tags(head,style,penalties)
- head = tonut(head)
local v_math = start_tagged("math")
local v_mrow = start_tagged("mrow")
- local v_mode = getattr(head,a_mathmode)
- -- setattr(head,a_tagged,v_math)
- setattr(head,a_tagged,v_mrow)
+ local v_mode = head[a_mathmode]
+ head[a_tagged] = v_math
+ head[a_tagged] = v_mrow
tags.setattributehash(v_math,"mode",v_mode == 1 and "display" or "inline")
process(head)
stop_tagged()
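process and processsubsup lean on start_tagged/stop_tagged, which maintain a stack of open tags and return a number that ends up in the node's tagging attribute (a_tagged). The sketch below shows that stack discipline only; the taglist layout and the returned indices are simplified guesses, not the real structure-tags machinery:

local taglist, stack, n = { }, { }, 0

local function start_tagged(tag, detail)
    n = n + 1
    stack[#stack + 1] = tag
    local snapshot = { }                            -- copy of the currently open tag path
    for i = 1, #stack do snapshot[i] = stack[i] end
    snapshot.detail = detail
    taglist[n] = snapshot
    return n                                        -- the number stored in node[a_tagged]
end

local function stop_tagged()
    stack[#stack] = nil                             -- close the innermost tag
end

local head   = { }                                  -- stand-in for the math list head
local v_math = start_tagged("math")
local v_mrow = start_tagged("mrow")
head.tagged  = v_math                               -- the handler stores these in head[a_tagged]
head.tagged  = v_mrow
local v_msub = start_tagged("msub")
stop_tagged()
print(table.concat(taglist[v_msub], " > "))         -- math > mrow > msub
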
diff --git a/tex/context/base/mult-de.mkii b/tex/context/base/mult-de.mkii
index 0c4dae5b6..5f2714ce6 100644
--- a/tex/context/base/mult-de.mkii
+++ b/tex/context/base/mult-de.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-def.lua b/tex/context/base/mult-def.lua
index 65db8fd5e..afd466531 100644
--- a/tex/context/base/mult-def.lua
+++ b/tex/context/base/mult-def.lua
@@ -6613,9 +6613,6 @@ return {
["firstnamesep"]={
["en"]="firstnamesep",
},
- ["surnamefirstnamesep"]={
- ["en"]="surnamefirstnamesep",
- },
["vonsep"]={
["en"]="vonsep",
},
@@ -6625,9 +6622,6 @@ return {
["surnamesep"]={
["en"]="surnamesep",
},
- ["surnameinitialsep"]={
- ["en"]="surnameinitialsep",
- },
["lastnamesep"]={
["en"]="lastnamesep",
},
diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv
index 9206743f4..192a380ee 100644
--- a/tex/context/base/mult-def.mkiv
+++ b/tex/context/base/mult-def.mkiv
@@ -36,7 +36,6 @@
\def\c!fences {fences}
\def\c!keeptogether {keeptogether}
-\def\c!viewerprefix {viewerprefix}
\def\c!dataset {dataset}
\def\c!sectionblock {sectionblock}
@@ -54,7 +53,6 @@
\def\c!comma {comma}
\def\c!period {period}
\def\c!monthconversion {monthconversion}
-\def\c!authorconversion {authorconversion}
\def\c!comment {comment}
\def\c!textalign {textalign}
\def\c!up {up}
@@ -64,7 +62,6 @@
\def\c!group {group}
\def\c!groupsuffix {groupsuffix}
-\def\v!dataset {dataset}
\def\v!compressseparator{compressseparator}
\def\v!notation {notation}
\def\v!endnote {endnote}
@@ -81,7 +78,6 @@
\def\v!combination {combination}
\def\v!norepeat {norepeat}
\def\v!mixed {mixed}
-\def\v!centerlast {centerlast}
\def\s!lcgreek {lcgreek}
\def\s!ucgreek {ucgreek}
diff --git a/tex/context/base/mult-en.mkii b/tex/context/base/mult-en.mkii
index 00861c3be..97732dab7 100644
--- a/tex/context/base/mult-en.mkii
+++ b/tex/context/base/mult-en.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-fr.mkii b/tex/context/base/mult-fr.mkii
index 9afe371c2..520f8e1a6 100644
--- a/tex/context/base/mult-fr.mkii
+++ b/tex/context/base/mult-fr.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-fun.lua b/tex/context/base/mult-fun.lua
index 0f5bd8ace..2101b95e9 100644
--- a/tex/context/base/mult-fun.lua
+++ b/tex/context/base/mult-fun.lua
@@ -17,7 +17,7 @@ return {
--
"sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
"tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
- "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh",
+ "invsin", "invcos", "acosh", "asinh", "sinh", "cosh",
"paired", "tripled",
"unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
-- "halfcircle", "quartercircle",
diff --git a/tex/context/base/mult-it.mkii b/tex/context/base/mult-it.mkii
index 802cb840c..2b31e8e10 100644
--- a/tex/context/base/mult-it.mkii
+++ b/tex/context/base/mult-it.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-low.lua b/tex/context/base/mult-low.lua
index 250b20c22..f82be039c 100644
--- a/tex/context/base/mult-low.lua
+++ b/tex/context/base/mult-low.lua
@@ -279,9 +279,7 @@ return {
--
"dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith",
--
- "newconstant", "setnewconstant", "setconstant", "setconstantvalue",
- "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue",
- --
+ "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant",
"newmacro", "setnewmacro", "newfraction",
"newsignal",
--
@@ -367,7 +365,5 @@ return {
--
"lesshyphens", "morehyphens", "nohyphens", "dohyphens",
--
- "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath",
- --
}
}
diff --git a/tex/context/base/mult-nl.mkii b/tex/context/base/mult-nl.mkii
index a1f9742f1..9f91515cb 100644
--- a/tex/context/base/mult-nl.mkii
+++ b/tex/context/base/mult-nl.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixscheider}
\setinterfaceconstant{suffixstopper}{suffixafsluiter}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-pe.mkii b/tex/context/base/mult-pe.mkii
index 999b16cf5..240130cdf 100644
--- a/tex/context/base/mult-pe.mkii
+++ b/tex/context/base/mult-pe.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{پسوند}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-ro.mkii b/tex/context/base/mult-ro.mkii
index f577eabda..3b7206e44 100644
--- a/tex/context/base/mult-ro.mkii
+++ b/tex/context/base/mult-ro.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/node-acc.lua b/tex/context/base/node-acc.lua
index 59fa031bf..81ae496b2 100644
--- a/tex/context/base/node-acc.lua
+++ b/tex/context/base/node-acc.lua
@@ -11,27 +11,10 @@ local nodes, node = nodes, node
local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local tonut = nodes.tonut
-local tonode = nodes.tonode
-
-local getid = nuts.getid
-local getfield = nuts.getfield
-local getattr = nuts.getattr
-local getlist = nuts.getlist
-local getchar = nuts.getchar
-local getnext = nuts.getnext
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local traverse_nodes = nuts.traverse
-local traverse_id = nuts.traverse_id
-local copy_node = nuts.copy
-local free_nodelist = nuts.flush_list
-local insert_after = nuts.insert_after
-
-local new_gluespec = nuts.pool.gluespec -- temp hack
+local traverse_nodes = node.traverse
+local traverse_id = node.traverse_id
+local copy_node = node.copy
+local free_nodelist = node.flush_list
local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
@@ -46,72 +29,57 @@ local threshold = 65536
-- todo: nbsp etc
-- todo: collapse kerns
--- p_id
-
local function injectspaces(head)
- local p, p_id
+ local p
local n = head
while n do
- local id = getid(n)
+ local id = n.id
if id == glue_code then -- todo: check for subtype related to spacing (13/14 but most seems to be 0)
- -- if getfield(getfield(n,"spec"),"width") > 0 then -- threshold
--- if p and p_id == glyph_code then
- if p and getid(p) == glyph_code then
+ -- if n.spec.width > 0 then -- threshold
+ if p and p.id == glyph_code then
local g = copy_node(p)
- local c = getfield(g,"components")
+ local c = g.components
if c then -- it happens that we copied a ligature
free_nodelist(c)
- setfield(g,"components",nil)
- setfield(g,"subtype",256)
+ g.components = nil
+ g.subtype = 256
end
- local a = getattr(n,a_characters)
- -- local s = copy_node(getfield(n,"spec"))
- -- this will be fixed in luatex but for now a temp hack (zero test)
- local s = getfield(n,"spec")
- s = s == 0 and new_gluespec(0) or copy_node(s)
- --
- setfield(g,"char",32)
- setfield(n,"spec",s)
- -- insert_after(p,p,g)
- setfield(p,"next",g)
- setfield(g,"prev",p)
- setfield(g,"next",n)
- setfield(n,"prev",g)
- setfield(s,"width",getfield(s,"width") - getfield(g,"width"))
+ local a = n[a_characters]
+ local s = copy_node(n.spec)
+ g.char, n.spec = 32, s
+ p.next, g.prev = g, p
+ g.next, n.prev = n, g
+ s.width = s.width - g.width
if a then
- setattr(g,a_characters,a)
+ g[a_characters] = a
end
- setattr(s,a_characters,0)
- setattr(n,a_characters,0)
+ s[a_characters] = 0
+ n[a_characters] = 0
end
-- end
elseif id == hlist_code or id == vlist_code then
- injectspaces(getlist(n),attribute)
+ injectspaces(n.list,attribute)
-- elseif id == kern_code then -- the backend already collapses
-- local first = n
-- while true do
- -- local nn = getnext(n)
- -- if nn and getid(nn) == kern_code then
+ -- local nn = n.next
+ -- if nn and nn.id == kern_code then
-- -- maybe we should delete kerns but who cares at this stage
- -- setfield(first,"kern",getfield(first,"kern") + getfield(nn,"kern")
- -- setfield(nn,"kern",0)
+ -- first.kern = first.kern + nn.kern
+ -- nn.kern = 0
-- n = nn
-- else
-- break
-- end
-- end
end
- p_id = id
p = n
- n = getnext(n)
+ n = n.next
end
- return head, true -- always done anyway
+ return head, true
end
-nodes.handlers.accessibility = function(head)
- local head, done = injectspaces(tonut(head))
- return tonode(head), done
-end
+nodes.handlers.accessibility = injectspaces
-- todo:
@@ -122,18 +90,16 @@ end
-- local function compact(n)
-- local t = { }
-- for n in traverse_id(glyph_code,n) do
--- t[#t+1] = utfchar(getchar(n)) -- check for unicode
+-- t[#t+1] = utfchar(n.char) -- check for unicode
-- end
-- return concat(t,"")
-- end
--
-- local function injectspans(head)
--- local done = false
--- for n in traverse_nodes(tonuts(head)) do
--- local id = getid(n)
+-- for n in traverse_nodes(head) do
+-- local id = n.id
-- if id == disc then
--- local r = getfield(n,"replace")
--- local p = getfield(n,"pre")
+-- local r, p = n.replace, n.pre
-- if r and p then
-- local str = compact(r)
-- local hsh = hyphenated[str]
@@ -142,14 +108,13 @@ end
-- hyphenated[str] = hsh
-- codes[hsh] = str
-- end
--- setattr(n,a_hyphenated,hsh)
--- done = true
+-- n[a_hyphenated] = hsh
-- end
-- elseif id == hlist_code or id == vlist_code then
--- injectspans(getlist(n))
+-- injectspans(n.list)
-- end
-- end
--- return tonodes(head), done
+-- return head, true
-- end
--
-- nodes.injectspans = injectspans
@@ -157,22 +122,19 @@ end
-- tasks.appendaction("processors", "words", "nodes.injectspans")
--
-- local function injectspans(head)
--- local done = false
--- for n in traverse_nodes(tonut(head)) do
--- local id = getid(n)
+-- for n in traverse_nodes(head) do
+-- local id = n.id
-- if id == disc then
--- local a = getattr(n,a_hyphenated)
+-- local a = n[a_hyphenated]
-- if a then
-- local str = codes[a]
-- local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
-- local e = new_pdfliteral("EMC")
--- insert_before(head,n,b)
--- insert_after(head,n,e)
--- done = true
+-- node.insert_before(head,n,b)
+-- node.insert_after(head,n,e)
-- end
-- elseif id == hlist_code or id == vlist_code then
--- injectspans(getlist(n))
+-- injectspans(n.list)
-- end
-- end
--- return tonodes(head), done
-- end
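The new injectspaces splices a copied glyph, turned into a real space (char 32), between a glyph and the glue that follows it, and takes the space width out of the glue, all by rewriting prev/next links directly. A plain-table sketch of the splice; the widths, ids and the flat width field (instead of a glue spec) are simplifications:

local glyph_code, glue_code = 1, 2         -- toy ids

local function injectspaces(head)
    local p, n = nil, head
    while n do
        if n.id == glue_code and p and p.id == glyph_code then
            local g = { id = glyph_code, char = 32, width = 3 } -- copied "space" glyph
            p.next, g.prev = g, p          -- splice g between the glyph and the glue
            g.next, n.prev = n, g
            n.width = n.width - g.width    -- take the space width out of the glue
        end
        p = n
        n = n.next
    end
    return head, true
end

local glue  = { id = glue_code, width = 10 }                     -- interword glue
local glyph = { id = glyph_code, char = 0x61, width = 5, next = glue }
glue.prev = glyph
injectspaces(glyph)
print(glyph.next.char, glue.width)         -- 32  7: a real space now sits before the narrowed glue
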
diff --git a/tex/context/base/node-aux.lua b/tex/context/base/node-aux.lua
index 7f4b0342a..443c78547 100644
--- a/tex/context/base/node-aux.lua
+++ b/tex/context/base/node-aux.lua
@@ -22,108 +22,82 @@ local vlist_code = nodecodes.vlist
local attributelist_code = nodecodes.attributelist -- temporary
local math_code = nodecodes.math
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-local vianuts = nuts.vianuts
-
-local getbox = nuts.getbox
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local traverse_nodes = nuts.traverse
-local traverse_id = nuts.traverse_id
-local free_node = nuts.free
-local hpack_nodes = nuts.hpack
-local unset_attribute = nuts.unset_attribute
-local first_glyph = nuts.first_glyph
-local copy_node = nuts.copy
-local copy_node_list = nuts.copy_list
-local find_tail = nuts.tail
-local insert_node_after = nuts.insert_after
-local isnode = nuts.is_node
-
-local nodes_traverse_id = nodes.traverse_id
-local nodes_first_glyph = nodes.first_glyph
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
+
local new_glue = nodepool.glue
local new_glyph = nodepool.glyph
+local traverse_nodes = node.traverse
+local traverse_id = node.traverse_id
+local free_node = node.free
+local hpack_nodes = node.hpack
+local unset_attribute = node.unset_attribute
+local first_glyph = node.first_glyph or node.first_character
+local copy_node = node.copy
+local copy_node_list = node.copy_list
+local slide_nodes = node.slide
+local insert_node_after = node.insert_after
+local isnode = node.is_node
+
local unsetvalue = attributes.unsetvalue
local current_font = font.current
+local texgetbox = tex.getbox
+
local report_error = logs.reporter("node-aux:error")
-local function repackhlist(list,...)
+function nodes.repackhlist(list,...)
+--~ nodes.showsimplelist(list)
local temp, b = hpack_nodes(list,...)
- list = getlist(temp)
- setfield(temp,"list",nil)
+ list = temp.list
+ temp.list = nil
free_node(temp)
return list, b
end
-nuts.repackhlist = repackhlist
-
-function nodes.repackhlist(list,...)
- local list, b = repackhlist(tonut(list),...)
- return tonode(list), b
-end
-
local function set_attributes(head,attr,value)
for n in traverse_nodes(head) do
- setattr(n,attr,value)
- local id = getid(n)
+ n[attr] = value
+ local id = n.id
if id == hlist_node or id == vlist_node then
- set_attributes(getlist(n),attr,value)
+ set_attributes(n.list,attr,value)
end
end
end
local function set_unset_attributes(head,attr,value)
for n in traverse_nodes(head) do
- if not getattr(n,attr) then
- setattr(n,attr,value)
+ if not n[attr] then
+ n[attr] = value
end
- local id = getid(n)
+ local id = n.id
if id == hlist_code or id == vlist_code then
- set_unset_attributes(getlist(n),attr,value)
+ set_unset_attributes(n.list,attr,value)
end
end
end
local function unset_attributes(head,attr)
for n in traverse_nodes(head) do
- setattr(n,attr,unsetvalue)
- local id = getid(n)
+ n[attr] = unsetvalue
+ local id = n.id
if id == hlist_code or id == vlist_code then
- unset_attributes(getlist(n),attr)
+ unset_attributes(n.list,attr)
end
end
end
--- for old times sake
+nodes.setattribute = node.set_attribute
+nodes.getattribute = node.has_attribute
+nodes.unsetattribute = node.unset_attribute
+nodes.has_attribute = node.has_attribute
-nuts.setattribute = nuts.setattr nodes.setattribute = nodes.setattr
-nuts.getattribute = nuts.getattr nodes.getattribute = nodes.getattr
-nuts.unsetattribute = nuts.unset_attribute nodes.unsetattribute = nodes.unset_attribute
-nuts.has_attribute = nuts.has_attribute nodes.has_attribute = nodes.has_attribute
-nuts.firstglyph = nuts.first_glyph nodes.firstglyph = nodes.first_glyph
+nodes.firstglyph = first_glyph
+nodes.setattributes = set_attributes
+nodes.setunsetattributes = set_unset_attributes
+nodes.unsetattributes = unset_attributes
-nuts.setattributes = set_attributes nodes.setattributes = vianuts(set_attributes)
-nuts.setunsetattributes = set_unset_attributes nodes.setunsetattributes = vianuts(set_unset_attributes)
-nuts.unsetattributes = unset_attributes nodes.unsetattributes = vianuts(unset_attributes)
-
--- history:
---
-- function nodes.is_skipable(a,id) -- skipable nodes at the margins during character protrusion
-- return (
-- id ~= glyph_node
@@ -132,26 +106,29 @@ nuts.unsetattributes = unset_attributes nodes.unsetattribut
-- or id == adjust_node
-- or id == penalty_node
-- or (id == glue_node and a.spec.writable)
--- or (id == disc_node and getfield(a,"pre") == nil and getfield(a,"post") == nil and getfield(a,"replace") == nil)
--- or (id == math_node and getfield(a,"surround") == 0)
--- or (id == kern_node and (getfield(a,"kern") == 0 or getsubtype(subtype) == NORMAL))
--- or (id == hlist_node and getfield(a,"width") == 0 and getfield(a,"height") == 0 and getfield(a,"depth") == 0 and getlist(a) == nil)
--- or (id == whatsit_node and getsubtype(a) ~= pdf_refximage_node and getsubtype(a) ~= pdf_refxform_node)
+-- or (id == disc_node and a.pre == nil and a.post == nil and a.replace == nil)
+-- or (id == math_node and a.surround == 0)
+-- or (id == kern_node and (a.kern == 0 or a.subtype == NORMAL))
+-- or (id == hlist_node and a.width == 0 and a.height == 0 and a.depth == 0 and a.list == nil)
+-- or (id == whatsit_node and a.subtype ~= pdf_refximage_node and a.subtype ~= pdf_refxform_node)
-- )
-- end
+
+-- history:
+--
--
-- local function glyph_width(a)
--- local ch = chardata[getfont(a)][getchar(a)]
+-- local ch = chardata[a.font][a.char]
-- return (ch and ch.width) or 0
-- end
--
-- local function glyph_total(a)
--- local ch = chardata[getfont(a)][getchar(a)]
+-- local ch = chardata[a.font][a.char]
-- return (ch and (ch.height+ch.depth)) or 0
-- end
--
-- local function non_discardable(a) -- inline
--- return getid(id) < math_node -- brrrr
+-- return a.id < math_node -- brrrr
-- end
--
-- local function calculate_badness(t,s)
@@ -206,36 +183,8 @@ nuts.unsetattributes = unset_attributes nodes.unsetattribut
-- return -u
-- end
-- end
---
--- if not node.end_of_math then
--- function node.end_of_math(n)
--- for n in traverse_id(math_code,getnext(next)) do
--- return n
--- end
--- end
--- end
---
--- nodes.endofmath = node.end_of_math
---
--- local function firstline(n)
--- while n do
--- local id = getid(n)
--- if id == hlist_code then
--- if getsubtype(n) == line_code then
--- return n
--- else
--- return firstline(getlist(n))
--- end
--- elseif id == vlist_code then
--- return firstline(getlist(n))
--- end
--- n = getnext(n)
--- end
--- end
---
--- nodes.firstline = firstline
-function nuts.firstcharacter(n,untagged) -- tagged == subtype > 255
+function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
if untagged then
return first_glyph(n)
else
@@ -245,38 +194,44 @@ function nuts.firstcharacter(n,untagged) -- tagged == subtype > 255
end
end
--- function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
--- if untagged then
--- return nodes_first_glyph(n)
--- else
--- for g in nodes_traverse_id(glyph_code,n) do
--- return g
--- end
--- end
--- end
-
-local function firstcharinbox(n)
- local l = getlist(getbox(n))
+function nodes.firstcharinbox(n)
+ local l = texgetbox(n).list
if l then
for g in traverse_id(glyph_code,l) do
- return getchar(g)
+ return g.char
end
end
return 0
end
-nuts .firstcharinbox = firstcharinbox
-nodes.firstcharinbox = firstcharinbox
-nodes.firstcharacter = vianuts(firstcharacter)
-
-function commands.buildtextaccent(n)
- local char = firstcharinbox(n)
- if char > 0 then
- -- context.accent(false,char)
- context([[\accent%s\relax]],char)
+if not node.end_of_math then
+ function node.end_of_math(n)
+ for n in traverse_id(math_code,n.next) do
+ return n
+ end
end
end
+nodes.endofmath = node.end_of_math
+
+-- local function firstline(n)
+-- while n do
+-- local id = n.id
+-- if id == hlist_code then
+-- if n.subtype == line_code then
+-- return n
+-- else
+-- return firstline(n.list)
+-- end
+-- elseif id == vlist_code then
+-- return firstline(n.list)
+-- end
+-- n = n.next
+-- end
+-- end
+
+-- nodes.firstline = firstline
+
-- this depends on fonts, so we have a funny dependency ... will be
-- sorted out .. we could make tonodes a plugin into this
@@ -287,8 +242,10 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
local head, tail, space, fnt, template = nil, nil, nil, nil, nil
if not fnt then
fnt = current_font()
- elseif type(fnt) ~= "number" and getid(fnt) == glyph_code then -- so it has to be a real node
- fnt, template = nil, tonut(fnt)
+ elseif type(fnt) ~= "number" and fnt.id == "glyph" then
+ fnt, template = nil, fnt
+ -- else
+ -- already a number
end
for s in utfvalues(str) do
local n
@@ -302,12 +259,12 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
end
elseif template then
n = copy_node(template)
- setvalue(n,"char",s)
+ n.char = s
else
n = new_glyph(fnt,s)
end
if attr then -- normally false when template
- setfield(n,"attr",copy_node_list(attr))
+ n.attr = copy_node_list(attr)
end
if head then
insert_node_after(head,tail,n)
@@ -319,130 +276,69 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
return head, tail
end
-nuts.tonodes = tonodes
+nodes.tonodes = tonodes
-nodes.tonodes = function(str,fnt,attr)
- local head, tail = tonodes(str,fnt,attr)
- return tonode(head), tonode(tail)
-end
-
--- local function link(list,currentfont,currentattr,head,tail)
--- for i=1,#list do
--- local n = list[i]
--- if n then
--- local tn = isnode(n)
--- if not tn then
--- local tn = type(n)
--- if tn == "number" then
--- if not currentfont then
--- currentfont = current_font()
--- end
--- local h, t = tonodes(tostring(n),currentfont,currentattr)
--- if not h then
--- -- skip
--- elseif not head then
--- head = h
--- tail = t
--- else
--- setfield(tail,"next",h)
--- setfield(h,"prev",t)
--- tail = t
--- end
--- elseif tn == "string" then
--- if #tn > 0 then
--- if not currentfont then
--- currentfont = current_font()
--- end
--- local h, t = tonodes(n,currentfont,currentattr)
--- if not h then
--- -- skip
--- elseif not head then
--- head, tail = h, t
--- else
--- setfield(tail,"next",h)
--- setfield(h,"prev",t)
--- tail = t
--- end
--- end
--- elseif tn == "table" then
--- if #tn > 0 then
--- if not currentfont then
--- currentfont = current_font()
--- end
--- head, tail = link(n,currentfont,currentattr,head,tail)
--- end
--- end
--- elseif not head then
--- head = n
--- tail = find_tail(n)
--- elseif getid(n) == attributelist_code then
--- -- weird case
--- report_error("weird node type in list at index %s:",i)
--- for i=1,#list do
--- local l = list[i]
--- report_error("%3i: %s %S",i,getid(l) == attributelist_code and "!" or ">",l)
--- end
--- os.exit()
--- else
--- setfield(tail,"next",n)
--- setfield(n,"prev",tail)
--- if getnext(n) then
--- tail = find_tail(n)
--- else
--- tail = n
--- end
--- end
--- else
--- -- permitting nil is convenient
--- end
--- end
--- return head, tail
--- end
-
-local function link(list,currentfont,currentattr,head,tail) -- an oldie, might be replaced
+local function link(list,currentfont,currentattr,head,tail)
for i=1,#list do
local n = list[i]
if n then
- local tn = type(n)
- if tn == "string" then
- if #tn > 0 then
+ local tn = isnode(n)
+ if not tn then
+ local tn = type(n)
+ if tn == "number" then
if not currentfont then
currentfont = current_font()
end
- local h, t = tonodes(n,currentfont,currentattr)
+ local h, t = tonodes(tostring(n),currentfont,currentattr)
if not h then
-- skip
elseif not head then
head, tail = h, t
else
- setfield(tail,"next",h)
- setfield(h,"prev",t)
- tail = t
+ tail.next, h.prev, tail = h, t, t
end
- end
- elseif tn == "table" then
- if #tn > 0 then
- if not currentfont then
- currentfont = current_font()
+ elseif tn == "string" then
+ if #tn > 0 then
+ if not currentfont then
+ currentfont = current_font()
+ end
+ local h, t = tonodes(n,currentfont,currentattr)
+ if not h then
+ -- skip
+ elseif not head then
+ head, tail = h, t
+ else
+ tail.next, h.prev, tail = h, t, t
+ end
+ end
+ elseif tn == "table" then
+ if #tn > 0 then
+ if not currentfont then
+ currentfont = current_font()
+ end
+ head, tail = link(n,currentfont,currentattr,head,tail)
end
- head, tail = link(n,currentfont,currentattr,head,tail)
end
elseif not head then
head = n
- tail = find_tail(n)
- elseif getid(n) == attributelist_code then
+ if n.next then
+ tail = slide_nodes(n)
+ else
+ tail = n
+ end
+ elseif n.id == attributelist_code then
-- weird case
report_error("weird node type in list at index %s:",i)
for i=1,#list do
local l = list[i]
- report_error("%3i: %s %S",i,getid(l) == attributelist_code and "!" or ">",l)
+ report_error("%3i: %s %S",i,l.id == attributelist_code and "!" or ">",l)
end
os.exit()
else
- setfield(tail,"next",n)
- setfield(n,"prev",tail)
- if getnext(n) then
- tail = find_tail(n)
+ tail.next = n
+ n.prev = tail
+ if n.next then
+ tail = slide_nodes(n)
else
tail = n
end
@@ -454,22 +350,17 @@ local function link(list,currentfont,currentattr,head,tail) -- an oldie, might b
return head, tail
end
-nuts.link = link
-
-nodes.link = function(list,currentfont,currentattr,head,tail)
- local head, tail = link(list,currentfont,currentattr,tonut(head),tonut(tail))
- return tonode(head), tonode(tail)
-end
+nodes.link = link
local function locate(start,wantedid,wantedsubtype)
for n in traverse_nodes(start) do
- local id = getid(n)
+ local id = n.id
if id == wantedid then
- if not wantedsubtype or getsubtype(n) == wantedsubtype then
+ if not wantedsubtype or n.subtype == wantedsubtype then
return n
end
elseif id == hlist_code or id == vlist_code then
- local found = locate(getlist(n),wantedid,wantedsubtype)
+ local found = locate(n.list,wantedid,wantedsubtype)
if found then
return found
end
@@ -477,12 +368,7 @@ local function locate(start,wantedid,wantedsubtype)
end
end
-nuts.locate = locate
-
-nodes.locate = function(start,wantedid,wantedsubtype)
- local found = locate(tonut(start),wantedid,wantedsubtype)
- return found and tonode(found)
-end
+nodes.locate = locate
-- I have no use for this yet:
--
@@ -495,12 +381,10 @@ end
-- return (badness/100)^(1/3)
-- end
--
--- function tex.stretch_amount(skip,badness) -- node no nut
+-- function tex.stretch_amount(skip,badness)
-- if skip.id == gluespec_code then
-- return skip.width + (badness and (badness/100)^(1/3) or 1) * skip.stretch
-- else
-- return 0
-- end
-- end
-
-
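-- A small usage sketch of the helpers defined above (not part of the patch;
-- assumes a LuaTeX/ConTeXt run where node-aux.lua is loaded): a string is
-- turned into a glyph list with the current font and then hpacked to 50pt
-- and unpacked again, which is what nodes.repackhlist is for.

local head, tail = nodes.tonodes("some text", font.current())
local list, badness = nodes.repackhlist(head, 50*65536, "exactly")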
diff --git a/tex/context/base/node-bck.lua b/tex/context/base/node-bck.lua
index 4b7b4a064..feaa2c684 100644
--- a/tex/context/base/node-bck.lua
+++ b/tex/context/base/node-bck.lua
@@ -11,8 +11,6 @@ if not modules then modules = { } end modules ['node-bck'] = {
local attributes, nodes, node = attributes, nodes, node
-local tasks = nodes.tasks
-
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -21,25 +19,11 @@ local vlist_code = nodecodes.vlist
local glyph_code = nodecodes.glyph
local cell_code = listcodes.cell
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getsubtype = nuts.getsubtype
-
-local traverse = nuts.traverse
-local traverse_id = nuts.traverse_id
+local traverse = node.traverse
+local traverse_id = node.traverse_id
+local nodepool = nodes.pool
+local tasks = nodes.tasks
local new_rule = nodepool.rule
local new_glue = nodepool.glue
@@ -53,50 +37,50 @@ local a_alignbackground = attributes.private('alignbackground')
local function add_backgrounds(head) -- rather old code .. to be redone
local current = head
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code or id == vlist_code then
- local list = getlist(current)
+ local list = current.list
if list then
local head = add_backgrounds(list)
if head then
- setfield(current,"list",head)
+ current.list = head
list = head
end
end
- local width = getfield(current,"width")
+ local width = current.width
if width > 0 then
- local background = getattr(current,a_background)
+ local background = current[a_background]
if background then
-- direct to hbox
-- colorspace is already set so we can omit that and stick to color
- local mode = getattr(current,a_colorspace)
+ local mode = current[a_colorspace]
if mode then
- local height = getfield(current,"height")
- local depth = getfield(current,"depth")
+ local height = current.height
+ local depth = current.depth
local skip = id == hlist_code and width or (height + depth)
local glue = new_glue(-skip)
local rule = new_rule(width,height,depth)
- local color = getattr(current,a_color)
- local transparency = getattr(current,a_transparency)
- setattr(rule,a_colorspace,mode)
+ local color = current[a_color]
+ local transparency = current[a_transparency]
+ rule[a_colorspace] = mode
if color then
- setattr(rule,a_color,color)
+ rule[a_color] = color
end
if transparency then
- setattr(rule,a_transparency,transparency)
+ rule[a_transparency] = transparency
end
- setfield(rule,"next",glue)
- setfield(glue,"prev",rule)
+ rule.next = glue
+ glue.prev = rule
if list then
- setfield(glue,"next",list)
- setfield(list,"prev",glue)
+ glue.next = list
+ list.prev = glue
end
- setfield(current,"list",rule)
+ current.list = rule
end
end
end
end
- current = getnext(current)
+ current = current.next
end
return head, true
end
@@ -104,16 +88,16 @@ end
local function add_alignbackgrounds(head)
local current = head
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code then
- local list = getlist(current)
+ local list = current.list
if not list then
-- no need to look
- elseif getsubtype(current) == cell_code then
+ elseif current.subtype == cell_code then
local background = nil
local found = nil
-- for l in traverse(list) do
- -- background = getattr(l,a_alignbackground)
+ -- background = l[a_alignbackground]
-- if background then
-- found = l
-- break
@@ -122,7 +106,7 @@ local function add_alignbackgrounds(head)
-- we know that it's a fake hlist (could be user node)
-- but we cannot store tables in user nodes yet
for l in traverse_id(hpack_code,list) do
- background = getattr(l,a_alignbackground)
+ background = l[a_alignbackground]
if background then
found = l
end
@@ -131,28 +115,28 @@ local function add_alignbackgrounds(head)
--
if background then
-- current has subtype 5 (cell)
- local width = getfield(current,"width")
+ local width = current.width
if width > 0 then
- local mode = getattr(found,a_colorspace)
+ local mode = found[a_colorspace]
if mode then
local glue = new_glue(-width)
- local rule = new_rule(width,getfield(current,"height"),getfield(current,"depth"))
- local color = getattr(found,a_color)
- local transparency = getattr(found,a_transparency)
- setattr(rule,a_colorspace,mode)
+ local rule = new_rule(width,current.height,current.depth)
+ local color = found[a_color]
+ local transparency = found[a_transparency]
+ rule[a_colorspace] = mode
if color then
- setattr(rule,a_color,color)
+ rule[a_color] = color
end
if transparency then
- setattr(rule,a_transparency,transparency)
+ rule[a_transparency] = transparency
end
- setfield(rule,"next",glue)
- setfield(glue,"prev",rule)
+ rule.next = glue
+ glue.prev = rule
if list then
- setfield(glue,"next",list)
- setfield(list,"prev",glue)
+ glue.next = list
+ list.prev = glue
end
- setfield(current,"list",rule)
+ current.list = rule
end
end
end
@@ -160,23 +144,18 @@ local function add_alignbackgrounds(head)
add_alignbackgrounds(list)
end
elseif id == vlist_code then
- local list = getlist(current)
+ local list = current.list
if list then
add_alignbackgrounds(list)
end
end
- current = getnext(current)
+ current = current.next
end
return head, true
end
--- nodes.handlers.backgrounds = add_backgrounds
--- nodes.handlers.alignbackgrounds = add_alignbackgrounds
-
-nodes.handlers.backgrounds = function(head) local head, done = add_backgrounds (tonut(head)) return tonode(head), done end
-nodes.handlers.alignbackgrounds = function(head) local head, done = add_alignbackgrounds(tonut(head)) return tonode(head), done end
-
--- elsewhere: needs checking
+nodes.handlers.backgrounds = add_backgrounds
+nodes.handlers.alignbackgrounds = add_alignbackgrounds
tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds")
tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds")
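-- Sketch of the rule-plus-negative-glue idiom used by add_backgrounds above
-- (not part of the patch; assumes ConTeXt's nodes.pool): the rule covers the
-- box, the glue backs up over it, and both are chained in front of the list.

local function inject_background(box) -- box is an hlist node
    local rule = nodes.pool.rule(box.width, box.height, box.depth)
    local glue = nodes.pool.glue(-box.width)
    rule.next = glue
    glue.prev = rule
    if box.list then
        glue.next     = box.list
        box.list.prev = glue
    end
    box.list = rule
    return box
end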
diff --git a/tex/context/base/node-fin.lua b/tex/context/base/node-fin.lua
index 8476b47a6..63a5ef83e 100644
--- a/tex/context/base/node-fin.lua
+++ b/tex/context/base/node-fin.lua
@@ -8,54 +8,36 @@ if not modules then modules = { } end modules ['node-fin'] = {
-- this module is being reconstructed
-- local functions, only slightly slower
---
--- leaders are also triggers
local next, type, format = next, type, string.format
local attributes, nodes, node = attributes, nodes, node
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getleader = nuts.getleader
-local getattr = nuts.getattr
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local copy_node = nuts.copy
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
+local copy_node = node.copy
+local find_tail = node.slide
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
-local pdfliteral_code = whatcodes.pdfliteral
+local pdfliteral_code = whatcodes.pdfliteral
-local states = attributes.states
-local numbers = attributes.numbers
-local a_trigger = attributes.private('trigger')
-local triggering = false
+local states = attributes.states
+local numbers = attributes.numbers
+local a_trigger = attributes.private('trigger')
+local triggering = false
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local loadstripped = utilities.lua.loadstripped
-local unsetvalue = attributes.unsetvalue
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local loadstripped = utilities.lua.loadstripped
+local unsetvalue = attributes.unsetvalue
-- these two will be like trackers
@@ -120,14 +102,11 @@ function nodes.installattributehandler(plugin)
return loadstripped(template)()
end
--- for the moment:
-
-local function copied(n)
- return copy_node(tonut(n))
-end
-
-- the injectors
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger
local current, current_selector, done = 0, 0, false -- nb, stack has a local current !
local nsbegin, nsend
@@ -153,25 +132,23 @@ end
function states.finalize(namespace,attribute,head) -- is this one ok?
if current > 0 and nsnone then
- head = tonut(head)
- local id = getid(head)
+ local id = head.id
if id == hlist_code or id == vlist_code then
- local list = getlist(head)
+ local list = head.list
if list then
- list = insert_node_before(list,list,copied(nsnone)) -- two return values
- setfield(head,"list",list)
+ head.list = insert_node_before(list,list,copy_node(nsnone))
end
else
- head = insert_node_before(head,head,copied(nsnone))
+ head = insert_node_before(head,head,copy_node(nsnone))
end
- return tonode(head), true, true
+ return head, true, true
end
return head, false, false
end
-- disc nodes can be ignored
-- we need to deal with literals too (reset as well as oval)
--- if id == glyph_code or (id == whatsit_code and getsubtype(stack) == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
+-- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
local function process(namespace,attribute,head,inheritance,default) -- one attribute
local stack = head
@@ -179,57 +156,53 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
local check = false
local leader = nil
while stack do
- local id = getid(stack)
+ local id = stack.id
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = getleader(stack)
+ leader = stack.leader
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = getlist(stack)
+ local content = stack.list
if content then
-- begin nested --
- if nstrigger and getattr(stack,nstrigger) then
- local outer = getattr(stack,attribute)
+ local ok
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
if outer ~= inheritance then
- local list, ok = process(namespace,attribute,content,inheritance,outer)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = process(namespace,attribute,content,inheritance,outer)
else
- local list, ok = process(namespace,attribute,content,inheritance,default)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = process(namespace,attribute,content,inheritance,default)
end
else
- local list, ok = process(namespace,attribute,content,inheritance,default)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = process(namespace,attribute,content,inheritance,default)
end
-- end nested --
+ done = done or ok
end
elseif id == rule_code then
- check = getfield(stack,"width") ~= 0
+ check = stack.width ~= 0
end
-- much faster this way than using a check() and nested() function
if check then
- local c = getattr(stack,attribute)
+ local c = stack[attribute]
if c then
if default and c == inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copied(nsdata[default]))
+ head = insert_node_before(head,stack,copy_node(nsdata[default]))
current = default
done = true
end
elseif current ~= c then
- head = insert_node_before(head,stack,copied(nsdata[c]))
+ head = insert_node_before(head,stack,copy_node(nsdata[c]))
current = c
done = true
end
if leader then
local savedcurrent = current
- local ci = getid(leader)
+ local ci = leader.id
if ci == hlist_code or ci == vlist_code then
-- else we reset inside a box unneeded, okay, the downside is
-- that we trigger color in each repeated box, so there is room
@@ -237,48 +210,41 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
current = 0
end
-- begin nested --
- if nstrigger and getattr(stack,nstrigger) then
- local outer = getattr(stack,attribute)
+ local ok = false
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
if outer ~= inheritance then
- local list, ok = process(namespace,attribute,leader,inheritance,outer)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,outer)
else
- local list, ok = process(namespace,attribute,leader,inheritance,default)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
end
else
- local list, ok = process(namespace,attribute,leader,inheritance,default)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
end
-- end nested --
+ done = done or ok
current = savedcurrent
leader = false
end
elseif default and inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copied(nsdata[default]))
+ head = insert_node_before(head,stack,copy_node(nsdata[default]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copied(nsnone))
+ head = insert_node_before(head,stack,copy_node(nsnone))
current = 0
done = true
end
check = false
end
- stack = getnext(stack)
+ stack = stack.next
end
return head, done
end
-states.process = function(namespace,attribute,head,default)
- local head, done = process(namespace,attribute,tonut(head),default)
- return tonode(head), done
-end
+states.process = process
-- we can force a selector, e.g. document wide color spaces, saves a little
-- watch out, we need to check both the selector state (like colorspace) and
@@ -292,103 +258,93 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
local check = false
local leader = nil
while stack do
- local id = getid(stack)
+ local id = stack.id
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = getleader(stack)
+ leader = stack.leader
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = getlist(stack)
+ local content = stack.list
if content then
+ local ok = false
-- begin nested
- if nstrigger and getattr(stack,nstrigger) then
- local outer = getattr(stack,attribute)
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
if outer ~= inheritance then
- local list, ok = selective(namespace,attribute,content,inheritance,outer)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
else
- local list, ok = selective(namespace,attribute,content,inheritance,default)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = selective(namespace,attribute,content,inheritance,default)
end
else
- local list, ok = selective(namespace,attribute,content,inheritance,default)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = selective(namespace,attribute,content,inheritance,default)
end
-- end nested
+ done = done or ok
end
elseif id == rule_code then
- check = getfield(stack,"width") ~= 0
+ check = stack.width ~= 0
end
if check then
- local c = getattr(stack,attribute)
+ local c = stack[attribute]
if c then
if default and c == inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
current = default
done = true
end
else
- local s = getattr(stack,nsselector)
+ local s = stack[nsselector]
if current ~= c or current_selector ~= s then
local data = nsdata[c]
- head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
current = c
current_selector = s
done = true
end
end
if leader then
+ local ok = false
-- begin nested
- if nstrigger and getattr(stack,nstrigger) then
- local outer = getatribute(stack,attribute)
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
if outer ~= inheritance then
- local list, ok = selective(namespace,attribute,leader,inheritance,outer)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,outer)
else
- local list, ok = selective(namespace,attribute,leader,inheritance,default)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
end
else
- local list, ok = selective(namespace,attribute,leader,inheritance,default)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
end
-- end nested
- leader = false
+ done = done or ok
+ leader = false
end
elseif default and inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copied(nsnone))
+ head = insert_node_before(head,stack,copy_node(nsnone))
current, current_selector, done = 0, 0, true
end
check = false
end
- stack = getnext(stack)
+
+ stack = stack.next
end
return head, done
end
-states.selective = function(namespace,attribute,head,default)
- local head, done = selective(namespace,attribute,tonut(head),default)
- return tonode(head), done
-end
+states.selective = selective
-- Ideally the next one should be merged with the previous but keeping it separate is
-- safer. We deal with two situations: efficient boxwise (layoutareas) and mixed layers
@@ -407,80 +363,76 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
local check = false
local leader = false
while stack do
- local id = getid(stack)
+ local id = stack.id
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = getleader(stack)
+ leader = stack.leader
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = getlist(stack)
+ local content = stack.list
if content then
-- the problem is that broken lines gets the attribute which can be a later one
if nslistwise then
- local a = getattr(stack,attribute)
+ local a = stack[attribute]
if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
local p = current
- current = a
- head = insert_node_before(head,stack,copied(nsdata[a]))
- local list = stacked(namespace,attribute,content,current) -- two return values
- setfield(stack,"list",list)
- done = true
- head, stack = insert_node_after(head,stack,copied(nsnone))
+ current, done = a, true
+ head = insert_node_before(head,stack,copy_node(nsdata[a]))
+ stack.list = stacked(namespace,attribute,content,current)
+ head, stack = insert_node_after(head,stack,copy_node(nsnone))
current = p
else
- local list, ok = stacked(namespace,attribute,content,current)
- setfield(stack,"list",list) -- only if ok
+ local ok = false
+ stack.list, ok = stacked(namespace,attribute,content,current)
done = done or ok
end
else
- local list, ok = stacked(namespace,attribute,content,current)
- setfield(stack,"list",list) -- only if ok
+ local ok = false
+ stack.list, ok = stacked(namespace,attribute,content,current)
done = done or ok
end
end
elseif id == rule_code then
- check = getfield(stack,"width") ~= 0
+ check = stack.width ~= 0
end
if check then
- local a = getattr(stack,attribute)
+ local a = stack[attribute]
if a then
if current ~= a then
- head = insert_node_before(head,stack,copied(nsdata[a]))
+ head = insert_node_before(head,stack,copy_node(nsdata[a]))
depth = depth + 1
current, done = a, true
end
if leader then
- local list, ok = stacked(namespace,attribute,content,current)
- setfield(stack,"leader",list) -- only if ok
+ local ok = false
+ stack.leader, ok = stacked(namespace,attribute,content,current)
done = done or ok
leader = false
end
elseif default > 0 then
--
elseif current > 0 then
- head = insert_node_before(head,stack,copied(nsnone))
+ head = insert_node_before(head,stack,copy_node(nsnone))
depth = depth - 1
current, done = 0, true
end
check = false
end
- stack = getnext(stack)
+
+ stack = stack.next
end
while depth > 0 do
- head = insert_node_after(head,stack,copied(nsnone))
+ head = insert_node_after(head,stack,copy_node(nsnone))
depth = depth - 1
end
return head, done
end
-states.stacked = function(namespace,attribute,head,default)
- local head, done = stacked(namespace,attribute,tonut(head),default)
- return tonode(head), done
-end
+states.stacked = stacked
-- experimental
@@ -494,53 +446,52 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
local check = false
local leader = false
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = getleader(current)
+ leader = current.leader
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = getlist(current)
+ local content = current.list
if not content then
-- skip
elseif nslistwise then
- local a = getattr(current,attribute)
+ local a = current[attribute]
if a and attrib ~= a and nslistwise[a] then -- viewerlayer
- head = insert_node_before(head,current,copied(nsdata[a]))
- local list = stacker(namespace,attribute,content,a)
- setfield(current,"list",list)
done = true
- head, current = insert_node_after(head,current,copied(nsnone))
+ head = insert_node_before(head,current,copy_node(nsdata[a]))
+ current.list = stacker(namespace,attribute,content,a)
+ head, current = insert_node_after(head,current,copy_node(nsnone))
else
- local list, ok = stacker(namespace,attribute,content,attrib)
- setfield(current,"list",list)
+ local ok = false
+ current.list, ok = stacker(namespace,attribute,content,attrib)
done = done or ok
end
else
- local list, ok = stacker(namespace,attribute,content,default)
- setfield(current,"list",list)
+ local ok = false
+ current.list, ok = stacker(namespace,attribute,content,default)
done = done or ok
end
elseif id == rule_code then
- check = getfield(current,"width") ~= 0
+ check = current.width ~= 0
end
if check then
- local a = getattr(current,attribute) or unsetvalue
+ local a = current[attribute] or unsetvalue
if a ~= attrib then
local n = nsstep(a)
if n then
-- !!!! TEST CODE !!!!
- -- head = insert_node_before(head,current,copied(nsdata[tonumber(n)])) -- a
- head = insert_node_before(head,current,tonut(n)) -- a
+ -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
+ head = insert_node_before(head,current,n) -- a
end
attrib, done, okay = a, true, true
if leader then
-- tricky as a leader has to be a list so we cannot inject before
- local list, ok = stacker(namespace,attribute,leader,attrib)
+ local _, ok = stacker(namespace,attribute,leader,attrib)
done = done or ok
leader = false
end
@@ -549,23 +500,20 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
end
previous = current
- current = getnext(current)
+ current = current.next
end
if okay then
local n = nsend()
if n then
-- !!!! TEST CODE !!!!
- -- head = insert_node_after(head,previous,copied(nsdata[tostring(n)]))
- head = insert_node_after(head,previous,tonut(n))
+ -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
+ head = insert_node_after(head,previous,n)
end
end
return head, done
end
-states.stacker = function(namespace,attribute,head,default)
- local head, done = stacker(namespace,attribute,tonut(head),default)
- return tonode(head), done
-end
+states.stacker = stacker
-- -- --
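-- Minimal shape of a node-list handler after this change (a sketch, not the
-- patch's code): the state functions above take and return nodes directly,
-- and attributes are read by indexing the node with the attribute number.
-- The attribute number used here is hypothetical; attributes.unsetvalue is
-- the ConTeXt constant referenced above.

local a_demo = 200 -- hypothetical private attribute number

local function demo_handler(head)
    local done = false
    for n in node.traverse_id(node.id("glyph"), head) do
        if n[a_demo] then
            n[a_demo] = attributes.unsetvalue -- clear it again
            done = true
        end
    end
    return head, done
end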
diff --git a/tex/context/base/node-fnt.lua b/tex/context/base/node-fnt.lua
index 7000c4fd7..2f59d513c 100644
--- a/tex/context/base/node-fnt.lua
+++ b/tex/context/base/node-fnt.lua
@@ -23,24 +23,12 @@ local fontdata = fonthashes.identifiers
local otf = fonts.handlers.otf
+local traverse_id = node.traverse_id
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
-
local nodecodes = nodes.nodecodes
local handlers = nodes.handlers
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getattr = nuts.getattr
-local getid = nuts.getid
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getnext = nuts.getnext
-
-local traverse_id = nuts.traverse_id
-
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
@@ -121,25 +109,25 @@ function handlers.characters(head)
report_fonts()
report_fonts("checking node list, run %s",run)
report_fonts()
- local n = tonut(head)
+ local n = head
while n do
- local id = getid(n)
+ local id = n.id
if id == glyph_code then
- local font = getfont(n)
- local attr = getattr(n,0) or 0
- report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,getchar(n))
+ local font = n.font
+ local attr = n[0] or 0
+ report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,n.char)
elseif id == disc_code then
report_fonts("[disc] %s",nodes.listtoutf(n,true,false,n))
else
report_fonts("[%s]",nodecodes[id])
end
- n = getnext(n)
+ n = n.next
end
end
- for n in traverse_id(glyph_code,tonut(head)) do
- -- if getsubtype(n) <256 then -- all are 1
- local font = getfont(n)
- local attr = getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
+ for n in traverse_id(glyph_code,head) do
+ -- if n.subtype<256 then -- all are 1
+ local font = n.font
+ local attr = n[0] or 0 -- zero attribute is reserved for fonts in context
if font ~= prevfont or attr ~= prevattr then
if attr > 0 then
local used = attrfonts[font]
@@ -403,8 +391,5 @@ end
-- return head, true
-- end
-local d_protect_glyphs = nuts.protect_glyphs
-local d_unprotect_glyphs = nuts.unprotect_glyphs
-
-handlers.protectglyphs = function(n) return d_protect_glyphs (tonut(n)) end
-handlers.unprotectglyphs = function(n) return d_unprotect_glyphs(tonut(n)) end
+handlers.protectglyphs = node.protect_glyphs
+handlers.unprotectglyphs = node.unprotect_glyphs
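-- Sketch of the font-change detection loop that handlers.characters above
-- relies on (not part of the patch): walk the glyphs once and record each
-- font whenever n.font differs from the previous glyph's font.

local function fonts_in_list(head)
    local used, prevfont = { }, nil
    for n in node.traverse_id(node.id("glyph"), head) do
        local font = n.font
        if font ~= prevfont then
            used[font] = true
            prevfont = font
        end
    end
    return used
end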
diff --git a/tex/context/base/node-inj.lua b/tex/context/base/node-inj.lua
index f30070e9e..ae48150a6 100644
--- a/tex/context/base/node-inj.lua
+++ b/tex/context/base/node-inj.lua
@@ -11,7 +11,7 @@ if not modules then modules = { } end modules ['node-inj'] = {
-- test fonts. Btw, future versions of luatex will have extended glyph properties
-- that can be of help. Some optimizations can go away when we have faster machines.
--- todo: ignore kerns between disc and glyph
+-- todo: make a special one for context
local next = next
local utfchar = utf.char
@@ -30,32 +30,13 @@ local injections = nodes.injections
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
-
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
+local nodepool = nodes.pool
local newkern = nodepool.kern
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local traverse_id = nuts.traverse_id
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
local a_kernpair = attributes.private('kernpair')
local a_ligacomp = attributes.private('ligacomp')
@@ -90,8 +71,8 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
local ws, wn = tfmstart.width, tfmnext.width
local bound = #cursives + 1
- setattr(start,a_cursbase,bound)
- setattr(nxt,a_curscurs,bound)
+ start[a_cursbase] = bound
+ nxt[a_curscurs] = bound
cursives[bound] = { rlmode, dx, dy, ws, wn }
return dx, dy, bound
end
@@ -100,14 +81,14 @@ function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
-- dy = y - h
if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = getattr(current,a_kernpair)
+ local bound = current[a_kernpair]
if bound then
local kb = kerns[bound]
-- inefficient but singles have less, but weird anyway, needs checking
kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
else
bound = #kerns + 1
- setattr(current,a_kernpair,bound)
+ current[a_kernpair] = bound
kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
end
return x, y, w, h, bound
@@ -119,7 +100,7 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx = factor*x
if dx ~= 0 then
local bound = #kerns + 1
- setattr(current,a_kernpair,bound)
+ current[a_kernpair] = bound
kerns[bound] = { rlmode, dx }
return dx, bound
else
@@ -129,7 +110,7 @@ end
function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor
local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = getattr(base,a_markbase) -- fails again we should pass it
+ local bound = base[a_markbase] -- fails again we should pass it
local index = 1
if bound then
local mb = marks[bound]
@@ -137,19 +118,19 @@ function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) --
-- if not index then index = #mb + 1 end
index = #mb + 1
mb[index] = { dx, dy, rlmode }
- setattr(start,a_markmark,bound)
- setattr(start,a_markdone,index)
+ start[a_markmark] = bound
+ start[a_markdone] = index
return dx, dy, bound
else
- report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound)
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
end
end
-- index = index or 1
index = index or 1
bound = #marks + 1
- setattr(base,a_markbase,bound)
- setattr(start,a_markmark,bound)
- setattr(start,a_markdone,index)
+ base[a_markbase] = bound
+ start[a_markmark] = bound
+ start[a_markdone] = index
marks[bound] = { [index] = { dx, dy, rlmode, baseismark } }
return dx, dy, bound
end
@@ -161,15 +142,15 @@ end
local function trace(head)
report_injections("begin run")
for n in traverse_id(glyph_code,head) do
- if getsubtype(n) < 256 then
- local kp = getattr(n,a_kernpair)
- local mb = getattr(n,a_markbase)
- local mm = getattr(n,a_markmark)
- local md = getattr(n,a_markdone)
- local cb = getattr(n,a_cursbase)
- local cc = getattr(n,a_curscurs)
- local char = getchar(n)
- report_injections("font %s, char %U, glyph %c",getfont(n),char,char)
+ if n.subtype < 256 then
+ local kp = n[a_kernpair]
+ local mb = n[a_markbase]
+ local mm = n[a_markmark]
+ local md = n[a_markdone]
+ local cb = n[a_cursbase]
+ local cc = n[a_curscurs]
+ local char = n.char
+ report_injections("font %s, char %U, glyph %c",n.font,char,char)
if kp then
local k = kerns[kp]
if k[3] then
@@ -217,24 +198,22 @@ local function show_result(head)
local current = head
local skipping = false
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",
- getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
skipping = false
elseif id == kern_code then
- report_injections("kern: %p",getfield(current,"kern"))
+ report_injections("kern: %p",current.kern)
skipping = false
elseif not skipping then
report_injections()
skipping = true
end
- current = getnext(current)
+ current = current.next
end
end
function injections.handler(head,where,keep)
- head = tonut(head)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
if has_marks or has_cursives then
if trace_injections then
@@ -245,18 +224,17 @@ function injections.handler(head,where,keep)
if has_kerns then -- move outside loop
local nf, tm = nil, nil
for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if getsubtype(n) < 256 then
+ if n.subtype < 256 then
nofvalid = nofvalid + 1
valid[nofvalid] = n
- local f = getfont(n)
- if f ~= nf then
- nf = f
- tm = fontdata[nf].resources.marks -- other hash in ctx
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
end
if tm then
- mk[n] = tm[getchar(n)]
+ mk[n] = tm[n.char]
end
- local k = getattr(n,a_kernpair)
+ local k = n[a_kernpair]
if k then
local kk = kerns[k]
if kk then
@@ -276,16 +254,15 @@ function injections.handler(head,where,keep)
else
local nf, tm = nil, nil
for n in traverse_id(glyph_code,head) do
- if getsubtype(n) < 256 then
+ if n.subtype < 256 then
nofvalid = nofvalid + 1
valid[nofvalid] = n
- local f = getfont(n)
- if f ~= nf then
- nf = f
- tm = fontdata[nf].resources.marks -- other hash in ctx
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
end
if tm then
- mk[n] = tm[getchar(n)]
+ mk[n] = tm[n.char]
end
end
end
@@ -295,7 +272,7 @@ function injections.handler(head,where,keep)
local cx = { }
if has_kerns and next(ky) then
for n, k in next, ky do
- setfield(n,"yoffset",k)
+ n.yoffset = k
end
end
-- todo: reuse t and use maxt
@@ -306,9 +283,9 @@ function injections.handler(head,where,keep)
for i=1,nofvalid do -- valid == glyphs
local n = valid[i]
if not mk[n] then
- local n_cursbase = getattr(n,a_cursbase)
+ local n_cursbase = n[a_cursbase]
if p_cursbase then
- local n_curscurs = getattr(n,a_curscurs)
+ local n_curscurs = n[a_curscurs]
if p_cursbase == n_curscurs then
local c = cursives[n_curscurs]
if c then
@@ -333,20 +310,20 @@ function injections.handler(head,where,keep)
end
end
elseif maxt > 0 then
- local ny = getfield(n,"yoffset")
+ local ny = n.yoffset
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- setfield(ti,"yoffset",getfield(ti,"yoffset") + ny)
+ ti.yoffset = ti.yoffset + ny
end
maxt = 0
end
if not n_cursbase and maxt > 0 then
- local ny = getfield(n,"yoffset")
+ local ny = n.yoffset
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- setfield(ti,"yoffset",ny)
+ ti.yoffset = ny
end
maxt = 0
end
@@ -354,11 +331,11 @@ function injections.handler(head,where,keep)
end
end
if maxt > 0 then
- local ny = getfield(n,"yoffset")
+ local ny = n.yoffset
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- setfield(ti,"yoffset",ny)
+ ti.yoffset = ny
end
maxt = 0
end
@@ -369,83 +346,57 @@ function injections.handler(head,where,keep)
if has_marks then
for i=1,nofvalid do
local p = valid[i]
- local p_markbase = getattr(p,a_markbase)
+ local p_markbase = p[a_markbase]
if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,getnext(p)) do
- local n_markmark = getattr(n,a_markmark)
+ local mrks = marks[p_markbase]
+ local nofmarks = #mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark = n[a_markmark]
if p_markbase == n_markmark then
- local index = getattr(n,a_markdone) or 1
+ local index = n[a_markdone] or 1
local d = mrks[index]
if d then
local rlmode = d[3]
--
local k = wx[p]
- local px = getfield(p,"xoffset")
- local ox = 0
if k then
local x = k[2]
local w = k[4]
if w then
if rlmode and rlmode >= 0 then
-- kern(x) glyph(p) kern(w-x) mark(n)
- ox = px - getfield(p,"width") + d[1] - (w-x)
- -- report_injections("l2r case 1: %p",ox)
+ n.xoffset = p.xoffset - p.width + d[1] - (w-x)
else
-- kern(w-x) glyph(p) kern(x) mark(n)
- ox = px - d[1] - x
- -- report_injections("r2l case 1: %p",ox)
+ n.xoffset = p.xoffset - d[1] - x
end
else
if rlmode and rlmode >= 0 then
-- okay for husayni
- ox = px - getfield(p,"width") + d[1]
- -- report_injections("r2l case 2: %p",ox)
+ n.xoffset = p.xoffset - p.width + d[1]
else
-- needs checking: is x ok here?
- ox = px - d[1] - x
- -- report_injections("r2l case 2: %p",ox)
+ n.xoffset = p.xoffset - d[1] - x
end
end
else
- -- if rlmode and rlmode >= 0 then
- -- ox = px - getfield(p,"width") + d[1]
- -- -- report_injections("l2r case 3: %p",ox)
- -- else
- -- ox = px - d[1]
- -- -- report_injections("r2l case 3: %p",ox)
- -- end
- --
- -- we need to deal with fonts that have marks with width
- --
- local wp = getfield(p,"width")
- local wn = getfield(n,"width") -- in arial marks have widths
if rlmode and rlmode >= 0 then
- ox = px - wp + d[1]
- -- report_injections("l2r case 3: %p",ox)
+ n.xoffset = p.xoffset - p.width + d[1]
else
- ox = px - d[1]
- -- report_injections("r2l case 3: %p",ox)
+ n.xoffset = p.xoffset - d[1]
end
- if wn ~= 0 then
- -- bad: we should center
- insert_node_before(head,n,newkern(-wn/2))
- insert_node_after(head,n,newkern(-wn/2))
- -- wx[n] = { 0, -wn/2, 0, -wn }
+ local w = n.width
+ if w ~= 0 then
+ insert_node_before(head,n,newkern(-w/2))
+ insert_node_after(head,n,newkern(-w/2))
end
- -- so far
end
- setfield(n,"xoffset",ox)
- --
- local py = getfield(p,"yoffset")
- local oy = 0
+ -- --
if mk[p] then
- oy = py + d[2]
+ n.yoffset = p.yoffset + d[2]
else
- oy = getfield(n,"yoffset") + py + d[2]
+ n.yoffset = n.yoffset + p.yoffset + d[2]
end
- setfield(n,"yoffset",oy)
--
if nofmarks == 1 then
break
@@ -453,8 +404,6 @@ function injections.handler(head,where,keep)
nofmarks = nofmarks - 1
end
end
- elseif not n_markmark then
- break -- HH: added 2013-09-12: no need to deal with non marks
else
-- KE: there can be sequences in ligatures
end
@@ -516,7 +465,6 @@ function injections.handler(head,where,keep)
-- if trace_injections then
-- show_result(head)
-- end
-head = tonode(head)
return head, true
elseif not keep then
kerns, cursives, marks = { }, { }, { }
@@ -526,14 +474,14 @@ head = tonode(head)
trace(head)
end
for n in traverse_id(glyph_code,head) do
- if getsubtype(n) < 256 then
- local k = getattr(n,a_kernpair)
+ if n.subtype < 256 then
+ local k = n[a_kernpair]
if k then
local kk = kerns[k]
if kk then
local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
if y and y ~= 0 then
- setfield(n,"yoffset",y) -- todo: h ?
+ n.yoffset = y -- todo: h ?
end
if w then
-- copied from above
@@ -570,9 +518,9 @@ head = tonode(head)
-- if trace_injections then
-- show_result(head)
-- end
- return tonode(head), true
+ return head, true
else
-- no tracing needed
end
- return tonode(head), false
+ return head, false
end
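-- Sketch of the attribute-indexing idiom the injector above now uses (not
-- part of the patch): a pair index is written straight onto the glyph and
-- read back later when the kern is materialized. The attribute number and
-- the kern table below are hypothetical.

local a_demo_kernpair = 151 -- hypothetical attribute number
local kerns = { }

local function remember_kern(glyph, amount)
    local bound = #kerns + 1
    glyph[a_demo_kernpair] = bound -- attribute write via numeric index
    kerns[bound] = amount
end

local function apply_kerns(head)
    for n in node.traverse_id(node.id("glyph"), head) do
        local k = n[a_demo_kernpair]
        if k then
            local kern = node.new("kern")
            kern.kern = kerns[k]
            node.insert_after(head, n, kern)
        end
    end
    return head, true
end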
diff --git a/tex/context/base/node-ltp.lua b/tex/context/base/node-ltp.lua
index 9f2491cfa..c52e001df 100644
--- a/tex/context/base/node-ltp.lua
+++ b/tex/context/base/node-ltp.lua
@@ -18,6 +18,7 @@ if not modules then modules = { } end modules ['node-par'] = {
-- todo: add a couple of plugin hooks
-- todo: maybe split expansion code paths
-- todo: fix line numbers (cur_list.pg_field needed)
+-- todo: make kerns stretch an option and disable it by default (definitely not shrink)
-- todo: check and improve protrusion
-- todo: arabic etc (we could use pretty large scales there) .. marks and cursive
@@ -72,8 +73,7 @@ if not modules then modules = { } end modules ['node-par'] = {
To be honest, I slowly start to grasp the magic here as normally I start from scratch when implementing
something (as it's the only way I can understand things). This time I had a recently acquired stack of
- Porcupine Tree disks to get me through, although I must admit that watching their dvd's is more fun
- than coding.
+ Porcupine Tree disks to get me through.
Picking up this effort was inspired by discussions between Luigi Scarso and me about efficiency of Lua
code and we needed some stress tests to compare regular LuaTeX and LuajitTeX. One of the tests was
@@ -121,13 +121,6 @@ if not modules then modules = { } end modules ['node-par'] = {
is enabled, but in the Lua variant the extra overhead is way less significant. This means that when we
retrofit the same approach into the core, the overhead of expansion can be sort of nilled.
- In 2013 the expansion factor method became also used at the TeX end so then I could complete the code
- here, and indeed, expansions works quite well now (not compatible of course because we use floats at the
- Lua end. The Lua base variant is still slower but quite ok, especially if we go nuts.
-
- A next iteration will provide plug-ins and more control. I will also explore the possibility to avoid the
- redundant hpack calculations (easier now, although I've only done some quick and dirty experiments.)
-
]]--
local utfchar = utf.char
@@ -187,38 +180,22 @@ local chardata = fonthashes.characters
local quaddata = fonthashes.quads
local parameters = fonthashes.parameters
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getlist = nuts.getlist
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-
-local slide_nodelist = nuts.slide -- get rid of this, probably ok > 78.2
-local find_tail = nuts.tail
-local new_node = nuts.new
-local copy_node = nuts.copy
-local copy_nodelist = nuts.copy_list
-local flush_node = nuts.free
-local flush_nodelist = nuts.flush_list
-local hpack_nodes = nuts.hpack
-local xpack_nodes = nuts.hpack
-local replace_node = nuts.replace
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local traverse_by_id = nuts.traverse_id
+local slide_nodes = node.slide
+local new_node = node.new
+local copy_node = node.copy
+local copy_node_list = node.copy_list
+local flush_node = node.free
+local flush_node_list = node.flush_list
+local hpack_nodes = node.hpack
+local xpack_nodes = node.hpack
+local replace_node = nodes.replace
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
+local traverse_by_id = node.traverse_id
local setnodecolor = nodes.tracers.colors.set
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -310,8 +287,7 @@ local glyphdir_is_equal = nodes.glyphdir_is_equal
local dir_pops = nodes.dir_is_pop
local dir_negations = nodes.dir_negation
-local is_skipable = nuts.protrusion_skippable
-
+local is_skipable = node.protrusion_skippable
local a_fontkern = attributes.private('fontkern')
-- helpers --
@@ -332,12 +308,12 @@ local function checked_line_dir(stack,current)
local n = stack.n + 1
stack.n = n
stack[n] = current
- return getfield(current,"dir")
+ return current.dir
elseif n > 0 then
local n = stack.n
local dirnode = stack[n]
dirstack.n = n - 1
- return getfield(dirnode,"dir")
+ return dirnode.dir
else
report_parbuilders("warning: missing pop node (%a)",1) -- in line ...
end
@@ -352,8 +328,8 @@ local function inject_dirs_at_end_of_line(stack,current,start,stop)
local n = stack.n
local h = nil
while start and start ~= stop do
- if getid(start) == whatsit_code and getsubtype(start) == dir_code then
- if not dir_pops[getfield(start,"dir")] then -- weird, what is this #
+ if start.id == whatsit_code and start.subtype == dir_code then
+ if not dir_pops[start.dir] then
n = n + 1
stack[n] = start
elseif n > 0 then
@@ -362,10 +338,10 @@ local function inject_dirs_at_end_of_line(stack,current,start,stop)
report_parbuilders("warning: missing pop node (%a)",2) -- in line ...
end
end
- start = getnext(start)
+ start = start.next
end
for i=n,1,-1 do
- h, current = insert_node_after(current,current,new_dir(dir_negations[getfield(stack[i],"dir")]))
+ h, current = insert_node_after(current,current,new_dir(dir_negations[stack[i].dir]))
end
stack.n = n
return current
@@ -414,8 +390,8 @@ local whatsiters = {
local get_whatsit_width = whatsiters.get_width
local get_whatsit_dimensions = whatsiters.get_dimensions
-local function get_width (n,dir) return getfield(n,"width") end
-local function get_dimensions(n,dir) return getfield(n,"width"), getfield(n,"height"), getfield(n,"depth") end
+local function get_width (n) return n.width end
+local function get_dimensions(n) return n.width, n.height, n.depth end
get_whatsit_width[pdfrefximage_code] = get_width
get_whatsit_width[pdfrefxform_code ] = get_width
@@ -438,13 +414,13 @@ end
local function check_shrinkage(par,n)
-- called often, so maybe move inline -- use NORMAL
- if getfield(n,"shrink_order") ~= 0 and getfield(n,"shrink") ~= 0 then
+ if n.shrink_order ~= 0 and n.shrink ~= 0 then
if par.no_shrink_error_yet then
par.no_shrink_error_yet = false
report_parbuilders("infinite glue shrinkage found in a paragraph and removed")
end
n = copy_node(n)
- setfield(n,"shrink_order",0)
+ n.shrink_order = 0
end
return n
end
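-- A standalone sketch (plain Lua, glue modelled as a table) of what
-- check_shrinkage above guards against: glue with shrink_order > 0 (fil/fill
-- shrink) inside a paragraph is reported once and then demoted by copying the
-- node and resetting the copy's shrink_order to 0. The numbers are made up.
local function demote_infinite_shrink(glue)
    if glue.shrink_order ~= 0 and glue.shrink ~= 0 then
        return { width = glue.width, shrink = glue.shrink, shrink_order = 0 } -- acts like copy_node + reset
    end
    return glue
end
local g = demote_infinite_shrink { width = 65536, shrink = 65536, shrink_order = 1 }
print(g.shrink_order) -- 0: the shrink is now finite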
@@ -491,10 +467,48 @@ setmetatableindex(expansions,function(t,font) -- we can store this in tfmdata if
end
end)
+-- local function char_stretch_shrink(p)
+-- local data = expansions[p.font][p.char]
+-- if data then
+-- return data.glyphstretch, data.glyphshrink
+-- else
+-- return 0, 0
+-- end
+-- end
+--
+-- local cal_margin_kern_var = char_stretch_shrink
+
+-- local function kern_stretch_shrink(p,d)
+-- local l = p.prev
+-- if l and l.id == glyph_code then -- how about disc nodes?
+-- local r = p.next
+-- if r and r.id == glyph_code then
+-- local lf, rf = l.font, r.font
+-- if lf == rf then
+-- local data = expansions[lf][l.char]
+-- if data then
+-- local stretch = data.stretch
+-- local shrink = data.shrink
+-- if stretch ~= 0 then
+-- -- stretch = data.factor * (d * stretch - d)
+-- stretch = data.factor * d * (stretch - 1)
+-- end
+-- if shrink ~= 0 then
+-- -- shrink = data.factor * (d * shrink - d)
+-- shrink = data.factor * d * (shrink - 1)
+-- end
+-- return stretch, shrink
+-- end
+-- end
+-- end
+-- end
+-- return 0, 0
+-- end
+
local function kern_stretch_shrink(p,d)
- local left = getprev(p)
- if left and getid(left) == glyph_code then -- how about disc nodes?
- local data = expansions[getfont(left)][getchar(left)]
+ local left = p.prev
+ if left and left.id == glyph_code then -- how about disc nodes?
+ local data = expansions[left.font][left.char]
if data then
local stretch = data.stretch
local shrink = data.shrink
@@ -512,8 +526,14 @@ local function kern_stretch_shrink(p,d)
return 0, 0
end
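-- The two formulas in the commented-out variant above,
-- data.factor * (d * stretch - d) and data.factor * d * (stretch - 1), are the
-- same expression factored differently. A quick check in plain Lua with
-- exactly representable, invented values:
local factor, stretch, d = 0.5, 1.25, 8000
local a = factor * (d * stretch - d)   -- 0.5 * (10000 - 8000) = 1000
local b = factor * d * (stretch - 1)   -- 0.5 *  8000 * 0.25   = 1000
assert(a == b)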
+-- local function kern_stretch_shrink(p,d)
+-- -- maybe make it an option in luatex where we also need to check for attribute fontkern but in general
+-- -- it makes no sense to scale kerns
+-- return 0, 0
+-- end
+
local expand_kerns = false
------ expand_kerns = "both"
+-- local expand_kerns = "both"
directives.register("builders.paragraphs.adjusting.kerns",function(v)
if not v then
@@ -603,18 +623,18 @@ end
local function find(head) -- do we really want to recurse into an hlist?
while head do
- local id = getid(head)
+ local id = head.id
if id == glyph_code then
return head
elseif id == hlist_code then
- local found = find(getlist(head))
+ local found = find(head.list)
if found then
return found
else
- head = getnext(head)
+ head = head.next
end
elseif is_skipable(head) then
- head = getnext(head)
+ head = head.next
else
return head
end
@@ -623,38 +643,38 @@ local function find(head) -- do we really want to recurse into an hlist?
end
local function find_protchar_left(l) -- weird function
- local ln = getnext(l)
- if ln and getid(ln) == hlist_code and not getlist(ln) and getfield(ln,"width") == 0 and getfield(ln,"height") == 0 and getfield(ln,"depth") == 0 then
- l = getnext(l)
+ local ln = l.next
+ if ln and ln.id == hlist_code and not ln.list and ln.width == 0 and ln.height == 0 and ln.depth == 0 then
+ l = l.next
else -- if d then -- was always true
- local id = getid(l)
+ local id = l.id
while ln and not (id == glyph_code or id < math_code) do -- is there always a glyph?
l = ln
- ln = getnext(l)
- id = getid(ln)
+ ln = l.next
+ id = ln.id
end
end
- -- if getid(l) == glyph_code then
+ -- if l.id == glyph_code then
-- return l
-- end
return find(l) or l
end
local function find(head,tail)
- local tail = tail or find_tail(head)
+ local tail = tail or slide_nodes(head)
while tail do
- local id = getid(tail)
+ local id = tail.id
if id == glyph_code then
return tail
elseif id == hlist_code then
- local found = find(getlist(tail))
+ local found = find(tail.list)
if found then
return found
else
- tail = getprev(tail)
+ tail = tail.prev
end
elseif is_skipable(tail) then
- tail = getprev(tail)
+ tail = tail.prev
else
return tail
end
@@ -667,8 +687,8 @@ local function find_protchar_right(l,r)
end
local function left_pw(p)
- local font = getfont(p)
- local prot = chardata[font][getchar(p)].left_protruding
+ local font = p.font
+ local prot = chardata[font][p.char].left_protruding
if not prot or prot == 0 then
return 0
end
@@ -676,8 +696,8 @@ local function left_pw(p)
end
local function right_pw(p)
- local font = getfont(p)
- local prot = chardata[font][getchar(p)].right_protruding
+ local font = p.font
+ local prot = chardata[font][p.char].right_protruding
if not prot or prot == 0 then
return 0
end
@@ -701,17 +721,17 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
local adjust_stretch = 0
local adjust_shrink = 0
while s do
- local id = getid(s)
+ local id = s.id
if id == glyph_code then
if is_rotated[line_break_dir] then -- can be shared
- size = size + getfield(s,"height") + getfield(s,"depth")
+ size = size + s.height + s.depth
else
- size = size + getfield(s,"width")
+ size = size + s.width
end
if checked_expansion then
- local data = checked_expansion[getfont(s)]
+ local data = checked_expansion[s.font]
if data then
- data = data[getchar(s)]
+ data = data[s.char]
if data then
adjust_stretch = adjust_stretch + data.glyphstretch
adjust_shrink = adjust_shrink + data.glyphshrink
@@ -719,16 +739,16 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
end
end
elseif id == hlist_code or id == vlist_code then
- if is_parallel[getfield(s,"dir")][line_break_dir] then
- size = size + getfield(s,"width")
+ if is_parallel[s.dir][line_break_dir] then
+ size = size + s.width
else
- size = size + getfield(s,"height") + getfield(s,"depth")
+ size = size + s.depth + s.height
end
elseif id == kern_code then
- local kern = getfield(s,"kern")
- if kern ~= 0 then
- if checked_expansion and expand_kerns and (getsubtype(s) == kerning_code or getattr(a_fontkern)) then
- local stretch, shrink = kern_stretch_shrink(s,kern)
+ local d = s.kern
+ if d ~= 0 then
+ if checked_expansion and expand_kerns and (s.subtype == kerning_code or s[a_fontkern]) then
+ local stretch, shrink = kern_stretch_shrink(s,d)
if expand_kerns == "stretch" then
adjust_stretch = adjust_stretch + stretch
elseif expand_kerns == "shrink" then
@@ -738,14 +758,14 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
adjust_shrink = adjust_shrink + shrink
end
end
- size = size + kern
+ size = size + d
end
elseif id == rule_code then
- size = size + getfield(s,"width")
- elseif trace_unsupported then
+ size = size + s.width
+ else
report_parbuilders("unsupported node at location %a",6)
end
- s = getnext(s)
+ s = s.next
end
return size, adjust_stretch, adjust_shrink
end
@@ -759,14 +779,14 @@ local function compute_break_width(par,break_type,p) -- split in two
local break_size = break_width.size + disc_width.size
local break_adjust_stretch = break_width.adjust_stretch + disc_width.adjust_stretch
local break_adjust_shrink = break_width.adjust_shrink + disc_width.adjust_shrink
- local replace = getfield(p,"replace")
+ local replace = p.replace
if replace then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
break_size = break_size - size
break_adjust_stretch = break_adjust_stretch - adjust_stretch
break_adjust_shrink = break_adjust_shrink - adjust_shrink
end
- local post = getfield(p,"post")
+ local post = p.post
if post then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,post)
break_size = break_size + size
@@ -777,56 +797,56 @@ local function compute_break_width(par,break_type,p) -- split in two
break_width.adjust_stretch = break_adjust_stretch
break_width.adjust_shrink = break_adjust_shrink
if not post then
- p = getnext(p)
+ p = p.next
else
return
end
end
while p do -- skip spacing etc
- local id = getid(p)
+ local id = p.id
if id == glyph_code then
return -- happens often
elseif id == glue_code then
- local spec = getfield(p,"spec")
- local order = stretch_orders[getfield(spec,"stretch_order")]
- break_width.size = break_width.size - getfield(spec,"width")
- break_width[order] = break_width[order] - getfield(spec,"stretch")
- break_width.shrink = break_width.shrink - getfield(spec,"shrink")
+ local spec = p.spec
+ local order = stretch_orders[spec.stretch_order]
+ break_width.size = break_width.size - spec.width
+ break_width[order] = break_width[order] - spec.stretch
+ break_width.shrink = break_width.shrink - spec.shrink
elseif id == penalty_code then
-- do nothing
elseif id == kern_code then
- if getsubtype(p) == userkern_code then
- break_width.size = break_width.size - getfield(p,"kern")
+ if p.subtype == userkern_code then
+ break_width.size = break_width.size - p.kern
else
return
end
elseif id == math_code then
- break_width.size = break_width.size - getfield(p,"surround")
+ break_width.size = break_width.size - p.surround
else
return
end
- p = getnext(p)
+ p = p.next
end
end
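-- A simplified, standalone rendition (plain Lua, stretch orders ignored) of
-- the loop above: material that disappears at a line break, such as the
-- interword glue, user kerns and math surround, is subtracted from
-- break_width. All numbers are invented scaled-point values.
local break_width = { size = 120000, stretch = 30000, shrink = 20000 }
local glue_after_break = { width = 10000, stretch = 5000, shrink = 3000 }
break_width.size    = break_width.size    - glue_after_break.width    -- 110000
break_width.stretch = break_width.stretch - glue_after_break.stretch  --  25000
break_width.shrink  = break_width.shrink  - glue_after_break.shrink   --  17000
print(break_width.size, break_width.stretch, break_width.shrink)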
local function append_to_vlist(par, b)
local prev_depth = par.prev_depth
if prev_depth > par.ignored_dimen then
- if getid(b) == hlist_code then
- local d = getfield(par.baseline_skip,"width") - prev_depth - getfield(b,"height") -- deficiency of space between baselines
- local s = d < par.line_skip_limit and new_lineskip(par.lineskip) or new_baselineskip(d)
+ if b.id == hlist_code then
+ local d = par.baseline_skip.width - prev_depth - b.height -- deficiency of space between baselines
+ local s = d < par.line_skip_limit and new_lineskip(tex.lineskip) or new_baselineskip(d)
-- local s = d < par.line_skip_limit
-- if s then
-- s = new_lineskip()
- -- setfield(s,"spec",tex.lineskip)
+ -- s.spec = tex.lineskip
-- else
-- s = new_baselineskip(d)
-- end
local head_field = par.head_field
if head_field then
- local n = slide_nodelist(head_field) -- todo: find_tail
- setfield(n,"next",s)
- setfield(s,"prev",n)
+ local n = slide_nodes(head_field)
+ n.next = s
+ s.prev = n
else
par.head_field = s
end
@@ -834,14 +854,14 @@ local function append_to_vlist(par, b)
end
local head_field = par.head_field
if head_field then
- local n = slide_nodelist(head_field) -- todo: find_tail
- setfield(n,"next",b)
- setfield(b,"prev",n)
+ local n = slide_nodes(head_field)
+ n.next = b
+ b.prev = n
else
par.head_field = b
end
- if getid(b) == hlist_code then
- local pd = getfield(b,"depth")
+ if b.id == hlist_code then
+ local pd = b.depth
par.prev_depth = pd
texnest[texnest.ptr].prevdepth = pd
end
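-- The interline glue decision above, worked out with plain numbers (scaled
-- points, 65536sp = 1pt; the values are assumptions): d is the distance that
-- baselineskip leaves between the previous depth and the new box height, and
-- a lineskip is used instead once d drops below lineskiplimit.
local sp              = 65536
local baseline_skip   = 14.4 * sp
local line_skip_limit = 0
local prev_depth      = 2.5 * sp
local box_height      = 10.0 * sp
local d = baseline_skip - prev_depth - box_height
if d < line_skip_limit then
    print("insert a lineskip")
else
    print(("insert a baselineskip of %.1fpt"):format(d / sp)) -- 1.9pt
end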
@@ -850,9 +870,9 @@ end
local function append_list(par, b)
local head_field = par.head_field
if head_field then
- local n = slide_nodelist(head_field) -- todo: find_tail
- setfield(n,"next",b)
- setfield(b,"prev",n)
+ local n = slide_nodes(head_field)
+ n.next = b
+ b.prev = n
else
par.head_field = b
end
@@ -864,18 +884,14 @@ end
local hztolerance = 2500
local hzwarned = false
-local function used_skip(s)
- return s and (getfield(s,"width") ~= 0 or getfield(s,"stretch") ~= 0 or getfield(s,"shrink") ~= 0) and s or nil
-end
-
local function initialize_line_break(head,display)
local hang_indent = tex.hangindent or 0
local hsize = tex.hsize or 0
local hang_after = tex.hangafter or 0
local par_shape_ptr = tex.parshape
- local left_skip = tonut(tex.leftskip) -- nodes
- local right_skip = tonut(tex.rightskip) -- nodes
+ local left_skip = tex.leftskip -- nodes
+ local right_skip = tex.rightskip -- nodes
local pretolerance = tex.pretolerance
local tolerance = tex.tolerance
local adjust_spacing = tex.pdfadjustspacing
@@ -883,7 +899,7 @@ local function initialize_line_break(head,display)
local last_line_fit = tex.lastlinefit
local newhead = new_temp()
- setfield(newhead,"next",head)
+ newhead.next = head
local adjust_spacing_status = adjust_spacing > 1 and -1 or 0
@@ -950,13 +966,13 @@ local function initialize_line_break(head,display)
last_line_depth = tex.pdflastlinedepth or 0, -- this will go away
ignored_dimen = tex.pdfignoreddimen or 0, -- this will go away
- baseline_skip = tonut(tex.baselineskip),
- lineskip = tonut(tex.lineskip),
- line_skip_limit = tex.lineskiplimit,
+ baseline_skip = tex.baselineskip or 0,
+ lineskip = tex.lineskip or 0,
+ line_skip_limit = tex.lineskiplimit or 0,
prev_depth = texnest[texnest.ptr].prevdepth,
- final_par_glue = slide_nodelist(head), -- todo: we know tail already, slow
+ final_par_glue = slide_nodes(head), -- todo: we know tail already, slow
par_break_dir = tex.pardir,
line_break_dir = tex.pardir,
@@ -1025,13 +1041,6 @@ local function initialize_line_break(head,display)
}
- -- optimizers
-
- par.used_left_skip = used_skip(par.left_skip)
- par.used_right_skip = used_skip(par.right_skip)
-
- -- so far
-
if adjust_spacing > 1 then
local checked_expansion = { par = par }
setmetatableindex(checked_expansion,check_expand_pars)
@@ -1053,13 +1062,13 @@ local function initialize_line_break(head,display)
local l = check_shrinkage(par,left_skip)
local r = check_shrinkage(par,right_skip)
- local l_order = stretch_orders[getfield(l,"stretch_order")]
- local r_order = stretch_orders[getfield(r,"stretch_order")]
+ local l_order = stretch_orders[l.stretch_order]
+ local r_order = stretch_orders[r.stretch_order]
- background.size = getfield(l,"width") + getfield(r,"width")
- background.shrink = getfield(l,"shrink") + getfield(r,"shrink")
- background[l_order] = getfield(l,"stretch")
- background[r_order] = getfield(r,"stretch") + background[r_order]
+ background.size = l.width + r.width
+ background.shrink = l.shrink + r.shrink
+ background[l_order] = l.stretch
+ background[r_order] = r.stretch + background[r_order]
-- this will move up so that we can assign the whole par table
@@ -1139,192 +1148,185 @@ local function initialize_line_break(head,display)
return par
end
--- there are still all kind of artefacts in here (a side effect I guess of pdftex,
--- etex, omega and other extensions that got obscured by patching)
-
local function post_line_break(par)
local prevgraf = texnest[texnest.ptr].prevgraf
- local current_line = prevgraf + 1 -- the current line number being justified
+ local cur_line = prevgraf + 1 -- the current line number being justified
+ local cur_p = nil
local adjust_spacing = par.adjust_spacing
local protrude_chars = par.protrude_chars
local statistics = par.statistics
- local stack = new_dir_stack()
-
- local leftskip = par.used_left_skip -- used or normal ?
- local rightskip = par.right_skip
- local parshape = par.par_shape_ptr
- local ignored_dimen = par.ignored_dimen
-
- local adapt_width = par.adapt_width
+ local p, s, k, w -- check when local
- -- reverse the links of the relevant passive nodes, goto first breakpoint
+ local q = par.best_bet.break_node
+ repeat -- goto first breakpoint
+ local r = q
+ q = q.prev_break
+ r.prev_break = cur_p
+ cur_p = r
+ until not q
- local current_break = nil
+ local stack = new_dir_stack()
- local break_node = par.best_bet.break_node
repeat
- local first_break = break_node
- break_node = break_node.prev_break
- first_break.prev_break = current_break
- current_break = first_break
- until not break_node
-
- local head = par.head
-
- -- maybe : each_...
- while current_break do
+ inject_dirs_at_begin_of_line(stack,par.head)
- inject_dirs_at_begin_of_line(stack,head)
+ local q = nil
+ local r = cur_p.cur_break
local disc_break = false
local post_disc_break = false
local glue_break = false
- local lineend = nil -- q lineend refers to the last node of the line (and paragraph)
- local lastnode = current_break.cur_break -- r lastnode refers to the node after which the dir nodes should be closed
-
- if not lastnode then
- -- only at the end
- lastnode = slide_nodelist(head) -- todo: find_tail
- if lastnode == par.final_par_glue then
- lineend = lastnode
- lastnode = getprev(lastnode)
+ if not r then
+ r = slide_nodes(par.head)
+ if r == par.final_par_glue then
+ q = r -- q refers to the last node of the line (and paragraph)
+ r = r.prev -- r refers to the node after which the dir nodes should be closed
end
- else -- todo: use insert_list_after
- local id = getid(lastnode)
+ else
+ local id = r.id
if id == glue_code then
- -- lastnode is normal skip
- lastnode = replace_node(lastnode,new_rightskip(rightskip))
+ -- r is normal skip
+ r = replace_node(r,new_rightskip(par.right_skip))
glue_break = true
- lineend = lastnode
- lastnode = getprev(r)
+ q = r -- q refers to the last node of the line
+ r = r.prev -- r refers to the node after which the dir nodes should be closed
elseif id == disc_code then
- local prevlast = getprev(lastnode)
- local nextlast = getnext(lastnode)
- local subtype = getsubtype(lastnode)
- local pre = getfield(lastnode,"pre")
- local post = getfield(lastnode,"post")
- local replace = getfield(lastnode,"replace")
+ -- todo: use insert_before/after
+ local prev_r = r.prev
+ local next_r = r.next
+ local subtype = r.subtype
+ local pre = r.pre
+ local post = r.post
+ local replace = r.replace
if subtype == second_disc_code then
- if not (getid(prevlast) == disc_code and getsubtype(prevlast) == first_disc_code) then
+ if not (prev_r.id == disc_code and prev_r.subtype == first_disc_code) then
report_parbuilders('unsupported disc at location %a',3)
end
if pre then
- flush_nodelist(pre)
- setfield(lastnode,"pre",nil)
- pre = nil -- signal
+ flush_node_list(pre)
+ r.pre = nil
+ pre = nil -- signal
end
if replace then
- local n = find_tail(replace)
- setfield(prevlast,"next",replace)
- setfield(replace,"prev",prevlast)
- setfield(n,"next",lastnode)
- setfield(lastnode,"prev",n)
- setfield(lastnode,"replace",nil)
- replace = nil -- signal
+ local n = slide_nodes(replace)
+ prev_r.next = replace
+ replace.prev = prev_r
+ n.next = r
+ r.prev = n
+ r.replace = nil
+ replace = nil -- signal
end
- local pre = getfield(prevlast,"pre")
- local post = getfield(prevlast,"post")
- local replace = getfield(prevlast,"replace")
+ local pre = prev_r.pre
+ local post = prev_r.post
+ local replace = prev_r.replace
if pre then
- flush_nodelist(pre)
- setfield(prevlast,"pre",nil)
+ flush_node_list(pre)
+ prev_r.pre = nil
end
if replace then
- flush_nodelist(replace)
- setfield(prevlast,"replace",nil)
+ flush_node_list(replace)
+ prev_r.replace = nil
end
if post then
- flush_nodelist(post)
- setfield(prevlast,"post",nil)
+ flush_node_list(post)
+ prev_r.post = nil
end
elseif subtype == first_disc_code then
- if not (getid(v) == disc_code and getsubtype(v) == second_disc_code) then
+ if not (v.id == disc_code and v.subtype == second_disc_code) then
report_parbuilders('unsupported disc at location %a',4)
end
- setfield(nextlast,"subtype",regular_disc_code)
- setfield(nextlast,"replace",post)
- setfield(lastnode,"post",nil)
+ next_r.subtype = regular_disc_code
+ next_r.replace = post
+ r.post = nil
end
if replace then
- setfield(lastnode,"replace",nil) -- free
- flush_nodelist(replace)
+ r.replace = nil -- free
+ flush_node_list(replace)
end
if pre then
- local n = find_tail(pre)
- setfield(prevlast,"next",pre)
- setfield(pre,"prev",prevlast)
- setfield(n,"next",lastnode)
- setfield(lastnode,"prev",n)
- setfield(lastnode,"pre",nil)
+ local n = slide_nodes(pre)
+ prev_r.next = pre
+ pre.prev = prev_r
+ n.next = r
+ r.prev = n
+ r.pre = nil
end
if post then
- local n = find_tail(post)
- setfield(lastnode,"next",post)
- setfield(post,"prev",lastnode)
- setfield(n,"next",nextlast)
- setfield(nextlast,"prev",n)
- setfield(lastnode,"post",nil)
+ local n = slide_nodes(post)
+ r.next = post
+ post.prev = r
+ n.next = next_r
+ next_r.prev = n
+ r.post = nil
post_disc_break = true
end
disc_break = true
elseif id == kern_code then
- setfield(lastnode,"kern",0)
- elseif getid(lastnode) == math_code then
- setfield(lastnode,"surround",0)
+ r.kern = 0
+ elseif r.id == math_code then
+ r.surround = 0
end
end
- lastnode = inject_dirs_at_end_of_line(stack,lastnode,getnext(head),current_break.cur_break)
- local rightbox = current_break.passive_right_box
- if rightbox then
- lastnode = insert_node_after(lastnode,lastnode,copy_node(rightbox))
+ r = inject_dirs_at_end_of_line(stack,r,par.head.next,cur_p.cur_break)
+ local crb = cur_p.passive_right_box
+ if crb then
+ local s = copy_node(crb)
+ local e = r.next
+ r.next = s
+ s.prev = r
+ s.next = e
+ if e then
+ e.prev = s
+ end
+ r = s
end
- if not lineend then
- lineend = lastnode
+ if not q then
+ q = r
end
- if lineend and lineend ~= head and protrude_chars > 0 then
- local id = getid(lineend)
- local c = (disc_break and (id == glyph_code or id ~= disc_code) and lineend) or getprev(lineend)
- local p = find_protchar_right(getnext(head),c)
- if p and getid(p) == glyph_code then
+ if q and q ~= par.head and protrude_chars > 0 then
+ local id = q.id
+ local c = (disc_break and (id == glyph_code or id ~= disc_code) and q) or q.prev
+ local p = find_protchar_right(par.head.next,c)
+ if p and p.id == glyph_code then
local w, last_rightmost_char = right_pw(p)
if last_rightmost_char and w ~= 0 then
- -- so we inherit attributes, lineend is new pseudo head
- lineend, c = insert_node_after(lineend,c,new_rightmarginkern(copy_node(last_rightmost_char),-w))
+ -- so we inherit attributes, q is new pseudo head
+ q, c = insert_node_after(q,c,new_rightmarginkern(copy_node(last_rightmost_char),-w))
end
end
end
- -- we finish the line
- local r = getnext(lineend)
- setfield(lineend,"next",nil)
if not glue_break then
- if rightskip then
- insert_node_after(lineend,lineend,new_rightskip(right_skip)) -- lineend moves on as pseudo head
- end
- end
- -- each time ?
- local q = getnext(head)
- setfield(head,"next",r)
+ local h
+ h, q = insert_node_after(q,q,new_rightskip(par.right_skip)) -- q moves on as pseudo head
+ end
+ r = q.next
+ q.next = nil
+ local phead = par.head
+ q = phead.next
+ phead.next = r
if r then
- setfield(r,"prev",head)
- end
- -- insert leftbox (if needed after parindent)
- local leftbox = current_break.passive_left_box
- if leftbox then
- local first = getnext(q)
- if first and current_line == (par.first_line + 1) and getid(first) == hlist_code and not getlist(first) then
- insert_node_after(q,q,copy_node(leftbox))
- else
- q = insert_node_before(q,q,copy_node(leftbox))
+ r.prev = phead
+ end
+ local clb = cur_p.passive_left_box
+ if clb then -- here we miss some prev links
+ local s = copy_node(clb)
+ s = q.next
+ r.next = q
+ q = r
+ if s and cur_line == (par.first_line + 1) and s.id == hlist_code and not s.list then
+ q = q.next
+ r.next = s.next
+ s.next = r
end
end
if protrude_chars > 0 then
local p = find_protchar_left(q)
- if p and getid(p) == glyph_code then
+ if p and p.id == glyph_code then
local w, last_leftmost_char = left_pw(p)
if last_leftmost_char and w ~= 0 then
-- so we inherit attributes, q is pseudo head and moves back
@@ -1332,35 +1334,32 @@ local function post_line_break(par)
end
end
end
- if leftskip then
- q = insert_node_before(q,q,new_leftskip(leftskip))
+ local ls = par.left_skip
+ if ls and (ls.width ~= 0 or ls.stretch ~= 0 or ls.shrink ~= 0) then
+ q = insert_node_before(q,q,new_leftskip(ls))
end
- local cur_width, cur_indent
- if current_line > par.last_special_line then
+ local curwidth, cur_indent
+ if cur_line > par.last_special_line then
cur_indent = par.second_indent
cur_width = par.second_width
- elseif parshape then
- local shape = parshape[current_line]
- cur_indent = shape[1]
- cur_width = shape[2]
else
- cur_indent = par.first_indent
- cur_width = par.first_width
- end
-
- if adapt_width then -- extension
- local l, r = adapt_width(par,current_line)
- cur_indent = cur_indent + l
- cur_width = cur_width - l - r
+ local psp = par.par_shape_ptr
+ if psp then
+ cur_indent = psp[cur_line][1]
+ cur_width = psp[cur_line][2]
+ else
+ cur_indent = par.first_indent
+ cur_width = par.first_width
+ end
end
-
statistics.noflines = statistics.noflines + 1
- local finished_line = nil
if adjust_spacing > 0 then
statistics.nofadjustedlines = statistics.nofadjustedlines + 1
- finished_line = xpack_nodes(q,cur_width,"cal_expand_ratio",par.par_break_dir,par.first_line,current_line) -- ,current_break.analysis)
+ -- in the built-in hpack cal_expand_ratio will later on call subst_ext_font
+ -- in the alternative approach we can do both in one run
+ just_box = xpack_nodes(q,cur_width,"cal_expand_ratio",par.par_break_dir) -- ,cur_p.analysis)
else
- finished_line = xpack_nodes(q,cur_width,"exactly",par.par_break_dir,par.first_line,current_line) -- ,current_break.analysis)
+ just_box = xpack_nodes(q,cur_width,"exactly",par.par_break_dir) -- ,cur_p.analysis)
end
if protrude_chars > 0 then
statistics.nofprotrudedlines = statistics.nofprotrudedlines + 1
@@ -1369,42 +1368,39 @@ local function post_line_break(par)
local adjust_head = texlists.adjust_head
local pre_adjust_head = texlists.pre_adjust_head
--
- setfield(finished_line,"shift",cur_indent)
- -- this will probably go away:
- if par.each_line_height ~= ignored_dimen then
- setfield(finished_line,"height",par.each_line_height)
+ just_box.shift = cur_indent
+ if par.each_line_height ~= par.ignored_dimen then
+ just_box.height = par.each_line_height
end
- if par.each_line_depth ~= ignored_dimen then
- setfield(finished_line,"depth",par.each_line_depth)
+ if par.each_line_depth ~= par.ignored_dimen then
+ just_box.depth = par.each_line_depth
end
- if par.first_line_height ~= ignored_dimen and (current_line == par.first_line + 1) then
- setfield(finished_line,"height",par.first_line_height)
+ if par.first_line_height ~= par.ignored_dimen and (cur_line == par.first_line + 1) then
+ just_box.height = par.first_line_height
end
- if par.last_line_depth ~= ignored_dimen and current_line + 1 == par.best_line then
- setfield(finished_line,"depth",par.last_line_depth)
+ if par.last_line_depth ~= par.ignored_dimen and cur_line + 1 == par.best_line then
+ just_box.depth = par.last_line_depth
end
- --
if texlists.pre_adjust_head ~= pre_adjust_head then
append_list(par, texlists.pre_adjust_head)
texlists.pre_adjust_head = pre_adjust_head
end
- append_to_vlist(par,finished_line)
+ append_to_vlist(par, just_box)
if texlists.adjust_head ~= adjust_head then
append_list(par, texlists.adjust_head)
texlists.adjust_head = adjust_head
end
- --
local pen
- if current_line + 1 ~= par.best_line then
- if current_break.passive_pen_inter then
- pen = current_break.passive_pen_inter
+ if cur_line + 1 ~= par.best_line then
+ if cur_p.passive_pen_inter then
+ pen = cur_p.passive_pen_inter
else
pen = par.inter_line_penalty
end
- if current_line == prevgraf + 1 then
+ if cur_line == prevgraf + 1 then
pen = pen + par.club_penalty
end
- if current_line + 2 == par.best_line then
+ if cur_line + 2 == par.best_line then
if par.display then
pen = pen + par.display_widow_penalty
else
@@ -1412,58 +1408,56 @@ local function post_line_break(par)
end
end
if disc_break then
- if current_break.passive_pen_broken ~= 0 then
- pen = pen + current_break.passive_pen_broken
+ if cur_p.passive_pen_broken ~= 0 then
+ pen = pen + cur_p.passive_pen_broken
else
pen = pen + par.broken_penalty
end
end
if pen ~= 0 then
append_to_vlist(par,new_penalty(pen))
- end
+ end
end
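-- A standalone summary (plain Lua) of how the penalty between two lines is
-- assembled above: the base interline penalty, plus a club penalty after the
-- first line, a widow (or display widow) penalty before the last line, and a
-- broken penalty when the line ends at a discretionary. Register values are
-- invented.
local function interline_penalty(t)
    local pen = t.inter_line_penalty
    if t.after_first_line      then pen = pen + t.club_penalty   end
    if t.before_last_line      then pen = pen + t.widow_penalty  end
    if t.ends_at_discretionary then pen = pen + t.broken_penalty end
    return pen
end
print(interline_penalty {
    inter_line_penalty = 0, club_penalty = 150,
    widow_penalty = 150, broken_penalty = 100,
    after_first_line = true, ends_at_discretionary = true,
}) -- 250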
- current_line = current_line + 1
- current_break = current_break.prev_break
- if current_break and not post_disc_break then
- local current = head
- local next = nil
+ cur_line = cur_line + 1
+ cur_p = cur_p.prev_break
+ if cur_p and not post_disc_break then
+ local phead = par.head
+ local r = phead
while true do
- next = getnext(current)
- if next == current_break.cur_break or getid(next) == glyph_code then
+ q = r.next
+ if q == cur_p.cur_break or q.id == glyph_code then
break
end
- local id = getid(next)
- local subtype = getsubtype(next)
- if id == whatsit_code and subtype == localpar_code then
- -- nothing
- elseif id < math_code then
- -- messy criterium
- break
- elseif id == kern_code and (subtype ~= userkern_code and not getattr(next,a_fontkern)) then
- -- fontkerns and accent kerns as well as otf injections
- break
+ local id = q.id
+ if not (id == whatsit_code and q.subtype == localpar_code) then
+ if id < math_code or (id == kern_code and q.subtype ~= userkern_code) then
+ break
+ end
end
- current = next
+ r = q
end
- if current ~= head then
- setfield(current,"next",nil)
- flush_nodelist(getnext(head))
- setfield(head,"next",next)
- if next then
- setfield(next,"prev",head)
+ if r ~= phead then
+ r.next = nil
+ flush_node_list(phead.next)
+ phead.next = q
+ if q then
+ q.prev = phead
end
end
end
+ until not cur_p
+ if cur_line ~= par.best_line then -- or not par.head.next then
+ report_parbuilders("line breaking")
end
- -- if current_line ~= par.best_line then
- -- report_parbuilders("line breaking")
- -- end
- par.head = nil -- needs checking
- current_line = current_line - 1
+ if par.head then -- added
+-- flush_node(par.head) -- the localpar_code whatsit
+ par.head = nil
+ end
+ cur_line = cur_line - 1
if trace_basic then
- report_parbuilders("paragraph broken into %a lines",current_line)
+ report_parbuilders("paragraph broken into %a lines",cur_line)
end
- texnest[texnest.ptr].prevgraf = current_line
+ texnest[texnest.ptr].prevgraf = cur_line
end
local function wrap_up(par)
@@ -1481,11 +1475,11 @@ local function wrap_up(par)
par.do_last_line_fit = false
else
local glue = par.final_par_glue
- local spec = copy_node(getfield(glue,"spec"))
- setfield(spec,"width",getfield(spec,"width") + active_short - active_glue)
- setfield(spec,"stretch",0)
- -- flush_node(getfield(glue,"spec")) -- brrr, when we do this we can get an "invalid id stretch message", maybe dec refcount
- setfield(glue,"spec",spec)
+ local spec = copy_node(glue.spec)
+ spec.width = spec.width + active_short - active_glue
+ spec.stretch = 0
+ -- flush_node(glue.spec) -- brrr, when we do this we can get an "invalid id stretch message", maybe dec refcount
+ glue.spec = spec
if trace_lastlinefit then
report_parbuilders("applying last line fit, short %a, glue %p",active_short,active_glue)
end
@@ -1493,8 +1487,8 @@ local function wrap_up(par)
end
-- we have a bunch of glue and and temp nodes not freed
local head = par.head
- if getid(head) == temp_code then
- par.head = getnext(head)
+ if head.id == temp_code then
+ par.head = head.next
flush_node(head)
end
post_line_break(par)
@@ -1504,8 +1498,7 @@ local function wrap_up(par)
end
-- we could do active nodes differently ... table instead of linked list or a list
--- with prev nodes but it doesn't save much (as we still need to keep indices then
--- in next)
+-- with prev nodes
local function deactivate_node(par,prev_prev_r,prev_r,r,cur_active_width,checked_expansion) -- no need for adjust if disabled
local active = par.active
@@ -1623,26 +1616,18 @@ local function lastlinecrap(shortfall,active_short,active_glue,cur_active_width,
end
end
--- todo: statistics .. count tries and so
-
-local trialcount = 0
-
-local function try_break(pi, break_type, par, first_p, current, checked_expansion)
-
--- trialcount = trialcount + 1
--- print(trialcount,pi,break_type,current,nuts.tostring(current))
+local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
- if pi >= infinite_penalty then -- this breakpoint is inhibited by infinite penalty
- local p_active = par.active
- return p_active, p_active and p_active.next
- elseif pi <= -infinite_penalty then -- this breakpoint will be forced
- pi = eject_penalty
+ if pi >= infinite_penalty then
+ return -- this breakpoint is inhibited by infinite penalty
+ elseif pi <= -infinite_penalty then
+ pi = eject_penalty -- this breakpoint will be forced
end
local prev_prev_r = nil -- a step behind prev_r, if type(prev_r)=delta_code
local prev_r = par.active -- stays a step behind r
local r = nil -- runs through the active list
- local no_break_yet = true -- have we found a feasible break at current?
+ local no_break_yet = true -- have we found a feasible break at cur_p?
local node_r_stays_active = false -- should node r remain in the active list?
local line_width = 0 -- the current line will be justified to this width
local line_number = 0 -- line number of current active node
@@ -1663,10 +1648,6 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
local tracing_paragraphs = par.tracing_paragraphs
-- local par_active = par.active
- local adapt_width = par.adapt_width
-
- local parshape = par.par_shape_ptr
-
local cur_active_width = checked_expansion and { -- distance from current active node
size = active_width.size,
stretch = active_width.stretch,
@@ -1721,8 +1702,8 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
break_width.adjust_stretch = 0
break_width.adjust_shrink = 0
end
- if current then
- compute_break_width(par,break_type,current)
+ if cur_p then
+ compute_break_width(par,break_type,cur_p)
end
end
if prev_r.id == delta_code then
@@ -1788,14 +1769,14 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
end
for fit_class = fit_very_loose_class, fit_tight_class do
if minimal_demerits[fit_class] <= minimum_demerits then
- -- insert a new active node from best_place[fit_class] to current
+ -- insert a new active node from best_place[fit_class] to cur_p
par.pass_number = par.pass_number + 1
local prev_break = best_place[fit_class]
local passive = {
id = passive_code,
subtype = nosubtype_code,
next = par.passive,
- cur_break = current,
+ cur_break = cur_p,
serial = par.pass_number,
prev_break = prev_break,
passive_pen_inter = par.internal_pen_inter,
@@ -1830,7 +1811,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
prev_r.next = q
prev_r = q
if tracing_paragraphs then
- diagnostics.break_node(par,q,fit_class,break_type,current)
+ diagnostics.break_node(par,q,fit_class,break_type,cur_p)
end
end
minimal_demerits[fit_class] = awful_badness
@@ -1869,7 +1850,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
end
end
if r == par.active then
- return r, r and r.next -- p_active, n_active
+ return
end
if line_number > par.easy_line then
old_line_number = max_halfword - 1
@@ -1878,16 +1859,12 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
old_line_number = line_number
if line_number > par.last_special_line then
line_width = par.second_width
- elseif parshape then
- line_width = parshape[line_number][2]
+ elseif par.par_shape_ptr then
+ line_width = par.par_shape_ptr[line_number][2]
else
line_width = par.first_width
end
end
- if adapt_width then
- local l, r = adapt_width(par,line_number)
- line_width = line_width - l - r
- end
end
local artificial_demerits = false -- has d been forced to zero
local shortfall = line_width - cur_active_width.size - par.internal_right_box_width -- used in badness calculations
@@ -1901,17 +1878,17 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
-- this is quite time consuming
local b = r.break_node
local l = b and b.cur_break or first_p
- local o = current and getprev(current)
- if current and getid(current) == disc_code and getfield(current,"pre") then
- o = find_tail(getfield(current,"pre"))
+ local o = cur_p and cur_p.prev
+ if cur_p and cur_p.id == disc_code and cur_p.pre then
+ o = slide_nodes(cur_p.pre)
else
o = find_protchar_right(l,o)
end
- if o and getid(o) == glyph_code then
+ if o and o.id == glyph_code then
pw, rp = right_pw(o)
shortfall = shortfall + pw
end
- local id = getid(l)
+ local id = l.id
if id == glyph_code then
-- ok ?
elseif id == disc_code and l.post then
@@ -1919,7 +1896,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
else
l = find_protchar_left(l)
end
- if l and getid(l) == glyph_code then
+ if l and l.id == glyph_code then
pw, lp = left_pw(l)
shortfall = shortfall + pw
end
@@ -1929,23 +1906,27 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
local margin_kern_shrink = 0
if protrude_chars > 1 then
if lp then
- local data = expansions[getfont(lp)][getchar(lp)]
- if data then
- margin_kern_stretch, margin_kern_shrink = data.glyphstretch, data.glyphshrink
- end
+-- margin_kern_stretch, margin_kern_shrink = cal_margin_kern_var(lp)
+local data = expansions[lp.font][lp.char]
+if data then
+ margin_kern_stretch, margin_kern_shrink = data.glyphstretch, data.glyphshrink
+end
end
if rp then
- local data = expansions[getfont(lp)][getchar(lp)]
- if data then
- margin_kern_stretch = margin_kern_stretch + data.glyphstretch
- margin_kern_shrink = margin_kern_shrink + data.glyphshrink
- end
+-- local mka, mkb = cal_margin_kern_var(rp)
+-- margin_kern_stretch = margin_kern_stretch + mka
+-- margin_kern_shrink = margin_kern_shrink + mkb
+local data = expansions[lp.font][lp.char]
+if data then
+ margin_kern_stretch = margin_kern_stretch + data.glyphstretch
+ margin_kern_shrink = margin_kern_shrink + data.glyphshrink
+end
end
end
local total = cur_active_width.adjust_stretch + margin_kern_stretch
if shortfall > 0 and total > 0 then
if total > shortfall then
- shortfall = total / (par.max_stretch_ratio / par.cur_font_step) / 2
+ shortfall = total / (par.max_stretch_ratio / par.cur_font_step) / 2 -- to be adapted
else
shortfall = shortfall - total
end
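-- The expansion-aware correction above, replayed in plain Lua with invented
-- values: when the accumulated glyph/kern stretch ("total") already covers the
-- shortfall, the shortfall is replaced by a scaled remainder; otherwise the
-- stretch is simply subtracted. max_stretch_ratio and cur_font_step stand in
-- for the par fields of the same name.
local shortfall         = 20000
local total             = 30000
local max_stretch_ratio = 2000
local cur_font_step     = 5
if shortfall > 0 and total > 0 then
    if total > shortfall then
        shortfall = total / (max_stretch_ratio / cur_font_step) / 2  -- 37.5
    else
        shortfall = shortfall - total
    end
end
print(shortfall)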
@@ -1953,7 +1934,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
total = cur_active_width.adjust_shrink + margin_kern_shrink
if shortfall < 0 and total > 0 then
if total > - shortfall then
- shortfall = - total / (par.max_shrink_ratio / par.cur_font_step) / 2
+ shortfall = - total / (par.max_shrink_ratio / par.cur_font_step) / 2 -- to be adapted
else
shortfall = shortfall + total
end
@@ -1968,7 +1949,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
if cur_active_width.fi ~= 0 or cur_active_width.fil ~= 0 or cur_active_width.fill ~= 0 or cur_active_width.filll ~= 0 then
if not do_last_line_fit then
-- okay
- elseif not current then
+ elseif not cur_p then
found, shortfall, fit_class, g, b = lastlinecrap(shortfall,r.active_short,r.active_glue,cur_active_width,par.fill_width,par.last_line_fit)
else
shortfall = 0
@@ -2003,7 +1984,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
end
end
if do_last_line_fit and not found then
- if not current then
+ if not cur_p then
-- g = 0
shortfall = 0
elseif shortfall > 0 then
@@ -2051,7 +2032,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
d = d - pi * pi
end
if break_type == hyphenated_code and r.id == hyphenated_code then
- if current then
+ if cur_p then
d = d + par.double_hyphen_demerits
else
d = d + par.final_hyphen_demerits
@@ -2063,9 +2044,9 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
end
end
if tracing_paragraphs then
- diagnostics.feasible_break(par,current,r,b,pi,d,artificial_demerits)
+ diagnostics.feasible_break(par,cur_p,r,b,pi,d,artificial_demerits)
end
- d = d + r.total_demerits -- this is the minimum total demerits from the beginning to current via r
+ d = d + r.total_demerits -- this is the minimum total demerits from the beginning to cur_p via r
if d <= minimal_demerits[fit_class] then
minimal_demerits[fit_class] = d
best_place [fit_class] = r.break_node
@@ -2089,16 +2070,25 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
end
end
+local function kern_break(par, cur_p, first_p, checked_expansion) -- move inline if needed
+ local v = cur_p.next
+ if par.auto_breaking and v.id == glue_code then
+ try_break(0, unhyphenated_code, par, first_p, cur_p, checked_expansion)
+ end
+ local active_width = par.active_width
+ if cur_p.id ~= math_code then
+ active_width.size = active_width.size + cur_p.kern
+ else
+ active_width.size = active_width.size + cur_p.surround
+ end
+end
+
-- we can call the normal one for simple box building in the otr so we need
-- frequent enabling/disabling
-local dcolor = { [0] = "red", "green", "blue", "magenta", "cyan", "gray" }
-
local temp_head = new_temp()
function constructors.methods.basic(head,d)
- head = tonut(head)
-
if trace_basic then
report_parbuilders("starting at %a",head)
end
@@ -2150,27 +2140,24 @@ function constructors.methods.basic(head,d)
par.passive = nil -- = 0
par.printed_node = temp_head -- only when tracing, shared
+ par.printed_node.next = head
par.pass_number = 0
--- par.auto_breaking = true
-
- setfield(temp_head,"next",head)
-
- local current = head
- local first_p = current
+ par.auto_breaking = true
- local auto_breaking = true
+ local cur_p = head
+ local first_p = cur_p
par.font_in_short_display = 0
- if current and getid(current) == whatsit_code and getsubtype(current) == localpar_code then
- par.init_internal_left_box = getfield(current,"box_left")
- par.init_internal_left_box_width = getfield(current,"box_left_width")
- par.internal_pen_inter = getfield(current,"pen_inter")
- par.internal_pen_broken = getfield(current,"pen_broken")
+ if cur_p and cur_p.id == whatsit_code and cur_p.subtype == localpar_code then
+ par.init_internal_left_box = cur_p.box_left
+ par.init_internal_left_box_width = cur_p.box_left_width
+ par.internal_pen_inter = cur_p.pen_inter
+ par.internal_pen_broken = cur_p.pen_broken
par.internal_left_box = par.init_internal_left_box
par.internal_left_box_width = par.init_internal_left_box_width
- par.internal_right_box = getfield(current,"box_right")
- par.internal_right_box_width = getfield(current,"box_right_width")
+ par.internal_right_box = cur_p.box_right
+ par.internal_right_box_width = cur_p.box_right_width
end
-- all passes are combined in this loop so maybe we should split this into
@@ -2182,34 +2169,23 @@ function constructors.methods.basic(head,d)
local fontexp, lastfont -- we can pass fontexp to calculate width if needed
- -- i flattened the inner loop over glyphs .. it looks nicer and the extra p_active ~= n_active
- -- test is fast enough (and try_break now returns the updated values); the kern helper has been
- -- inlined as it did a double check on id so in fact we had hardly any code to share
-
- local p_active = par.active
- local n_active = p_active and p_active.next
- local second_pass = par.second_pass
-
- trialcount = 0
-
- while current and p_active ~= n_active do
- local id = getid(current)
- if id == glyph_code then
+ while cur_p and par.active.next ~= par.active do
+ while cur_p and cur_p.id == glyph_code do
if is_rotated[par.line_break_dir] then
- active_width.size = active_width.size + getfield(current,"height") + getfield(current,"depth")
+ active_width.size = active_width.size + cur_p.height + cur_p.depth
else
- active_width.size = active_width.size + getfield(current,"width")
+ active_width.size = active_width.size + cur_p.width
end
if checked_expansion then
- local currentfont = getfont(current)
- local data= checked_expansion[currentfont]
+ local data= checked_expansion[cur_p.font]
if data then
+ local currentfont = cur_p.font
if currentfont ~= lastfont then
fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
lastfont = currentfont
end
if fontexps then
- local expansion = fontexps[getchar(current)]
+ local expansion = fontexps[cur_p.char]
if expansion then
active_width.adjust_stretch = active_width.adjust_stretch + expansion.glyphstretch
active_width.adjust_shrink = active_width.adjust_shrink + expansion.glyphshrink
@@ -2217,45 +2193,51 @@ function constructors.methods.basic(head,d)
end
end
end
- elseif id == hlist_code or id == vlist_code then
- if is_parallel[getfield(current,"dir")][par.line_break_dir] then
- active_width.size = active_width.size + getfield(current,"width")
+ cur_p = cur_p.next
+ end
+ if not cur_p then -- TODO
+ report_parbuilders("problems with linebreak_tail")
+ os.exit()
+ end
+ local id = cur_p.id
+ if id == hlist_code or id == vlist_code then
+ if is_parallel[cur_p.dir][par.line_break_dir] then
+ active_width.size = active_width.size + cur_p.width
else
- active_width.size = active_width.size + getfield(current,"depth") + getfield(current,"height")
+ active_width.size = active_width.size + cur_p.depth + cur_p.height
end
elseif id == glue_code then
--- if par.auto_breaking then
- if auto_breaking then
- local prev_p = getprev(current)
+ if par.auto_breaking then
+ local prev_p = cur_p.prev
if prev_p and prev_p ~= temp_head then
- local id = getid(prev_p)
+ local id = prev_p.id
if id == glyph_code or
- (id < math_code and (id ~= whatsit_code or getsubtype(prev_p) ~= dir_code)) or -- was: precedes_break(prev_p)
- (id == kern_code and getsubtype(prev_p) ~= userkern_code) then
- p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
+ (id < math_code and (id ~= whatsit_code or prev_p.subtype ~= dir_code)) or -- was: precedes_break(prev_p)
+ (id == kern_code and prev_p.subtype ~= userkern_code) then
+ try_break(0, unhyphenated_code, par, first_p, cur_p, checked_expansion)
end
end
end
- local spec = check_shrinkage(par,getfield(current,"spec"))
- local order = stretch_orders[getfield(spec,"stretch_order")]
- setfield(current,"spec",spec)
- active_width.size = active_width.size + getfield(spec,"width")
- active_width[order] = active_width[order] + getfield(spec,"stretch")
- active_width.shrink = active_width.shrink + getfield(spec,"shrink")
+ local spec = check_shrinkage(par,cur_p.spec)
+ local order = stretch_orders[spec.stretch_order]
+ cur_p.spec = spec
+ active_width.size = active_width.size + spec.width
+ active_width[order] = active_width[order] + spec.stretch
+ active_width.shrink = active_width.shrink + spec.shrink
elseif id == disc_code then
- local subtype = getsubtype(current)
- if subtype ~= second_disc_code then
+ local subtype = cur_p.subtype
+ if subtype ~= second_disc_code then -- are there still second_disc_code in luatex
local line_break_dir = par.line_break_dir
- if second_pass or subtype <= automatic_disc_code then
+ if par.second_pass then -- todo: make second pass local
local actual_pen = subtype == automatic_disc_code and par.ex_hyphen_penalty or par.hyphen_penalty
- local pre = getfield(current,"pre")
+ local pre = cur_p.pre
if not pre then -- trivial pre-break
disc_width.size = 0
if checked_expansion then
disc_width.adjust_stretch = 0
disc_width.adjust_shrink = 0
end
- p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, current, checked_expansion)
+ try_break(actual_pen, hyphenated_code, par, first_p, cur_p, checked_expansion)
else
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre)
disc_width.size = size
@@ -2269,13 +2251,13 @@ function constructors.methods.basic(head,d)
-- disc_width.adjust_stretch = 0
-- disc_width.adjust_shrink = 0
end
- p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, current, checked_expansion)
+ try_break(actual_pen, hyphenated_code, par, first_p, cur_p, checked_expansion)
if subtype == first_disc_code then
- local cur_p_next = getnext(current)
- if getid(cur_p_next) ~= disc_code or getsubtype(cur_p_next) ~= second_disc_code then
+ local cur_p_next = cur_p.next
+ if cur_p_next.id ~= disc_code or cur_p_next.subtype ~= second_disc_code then
report_parbuilders("unsupported disc at location %a",1)
else
- local pre = getfield(cur_p_next,"pre")
+ local pre = cur_p_next.pre
if pre then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre)
disc_width.size = disc_width.size + size
@@ -2283,16 +2265,16 @@ function constructors.methods.basic(head,d)
disc_width.adjust_stretch = disc_width.adjust_stretch + adjust_stretch
disc_width.adjust_shrink = disc_width.adjust_shrink + adjust_shrink
end
- p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, cur_p_next, checked_expansion)
+ try_break(actual_pen, hyphenated_code, par, first_p, cur_p_next, checked_expansion)
--
-- I will look into this some day ... comment in linebreak.w says that this fails,
-- maybe this is what Taco means with his comment in the luatex manual.
--
-- do_one_seven_eight(sub_disc_width_from_active_width);
-- do_one_seven_eight(reset_disc_width);
- -- s = vlink_no_break(vlink(current));
+ -- s = vlink_no_break(vlink(cur_p));
-- add_to_widths(s, line_break_dir, pdf_adjust_spacing,disc_width);
- -- ext_try_break(...,first_p,vlink(current));
+ -- ext_try_break(...,first_p,vlink(cur_p));
--
else
report_parbuilders("unsupported disc at location %a",2)
@@ -2307,7 +2289,7 @@ function constructors.methods.basic(head,d)
end
end
end
- local replace = getfield(current,"replace")
+ local replace = cur_p.replace
if replace then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
active_width.size = active_width.size + size
@@ -2318,20 +2300,14 @@ function constructors.methods.basic(head,d)
end
end
elseif id == kern_code then
- if getsubtype(current) == userkern_code then
- local v = getnext(current)
--- if par.auto_breaking and getid(v) == glue_code then
- if auto_breaking and getid(v) == glue_code then
- p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
- end
- local active_width = par.active_width
- active_width.size = active_width.size + getfield(current,"kern")
+ if cur_p.subtype == userkern_code then
+ kern_break(par,cur_p,first_p, checked_expansion)
else
- local kern = getfield(current,"kern")
- if kern ~= 0 then
- active_width.size = active_width.size + kern
- if checked_expansion and expand_kerns and (getsubtype(current) == kerning_code or getattr(current,a_fontkern)) then
- local stretch, shrink = kern_stretch_shrink(current,kern)
+ local d = cur_p.kern
+ if d ~= 0 then
+ active_width.size = active_width.size + d
+ if checked_expansion and expand_kerns and (cur_p.subtype == kerning_code or cur_p[a_fontkern]) then
+ local stretch, shrink = kern_stretch_shrink(cur_p,d)
if expand_kerns == "stretch" then
active_width.adjust_stretch = active_width.adjust_stretch + stretch
elseif expand_kerns == "shrink" then
@@ -2344,47 +2320,40 @@ function constructors.methods.basic(head,d)
end
end
elseif id == math_code then
--- par.auto_breaking = getsubtype(current) == endmath_code
- auto_breaking = getsubtype(current) == endmath_code
- local v = getnext(current)
--- if par.auto_breaking and getid(v) == glue_code then
- if auto_breaking and getid(v) == glue_code then
- p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
- end
- local active_width = par.active_width
- active_width.size = active_width.size + getfield(current,"surround")
+ par.auto_breaking = cur_p.subtype == endmath_code
+ kern_break(par,cur_p, first_p, checked_expansion)
elseif id == rule_code then
- active_width.size = active_width.size + getfield(current,"width")
+ active_width.size = active_width.size + cur_p.width
elseif id == penalty_code then
- p_active, n_active = try_break(getfield(current,"penalty"), unhyphenated_code, par, first_p, current, checked_expansion)
+ try_break(cur_p.penalty, unhyphenated_code, par, first_p, cur_p, checked_expansion)
elseif id == whatsit_code then
- local subtype = getsubtype(current)
+ local subtype = cur_p.subtype
if subtype == localpar_code then
- par.internal_pen_inter = getfield(current,"pen_inter")
- par.internal_pen_broken = getfield(current,"pen_broken")
- par.internal_left_box = getfield(current,"box_left")
- par.internal_left_box_width = getfield(current,"box_left_width")
- par.internal_right_box = getfield(current,"box_right")
- par.internal_right_box_width = getfield(current,"box_right_width")
+ par.internal_pen_inter = cur_p.pen_inter
+ par.internal_pen_broken = cur_p.pen_broken
+ par.internal_left_box = cur_p.box_left
+ par.internal_left_box_width = cur_p.box_left_width
+ par.internal_right_box = cur_p.box_right
+ par.internal_right_box_width = cur_p.box_right_width
elseif subtype == dir_code then
par.line_break_dir = checked_line_dir(dirstack) or par.line_break_dir
else
local get_width = get_whatsit_width[subtype]
if get_width then
- active_width.size = active_width.size + get_width(current,par.line_break_dir)
+ active_width.size = active_width.size + get_width(cur_p)
end
end
- elseif trace_unsupported then
- if id == mark_code or id == ins_code or id == adjust_code then
- -- skip
- else
- report_parbuilders("node of type %a found in paragraph",type(id))
- end
+ elseif id == mark_code or id == ins_code or id == adjust_code then
+ -- skip
+ else
+ report_parbuilders("node of type %a found in paragraph",type(id))
end
- current = getnext(current)
+ cur_p = cur_p.next
end
- if not current then
- local p_active, n_active = try_break(eject_penalty, hyphenated_code, par, first_p, current, checked_expansion)
+ if not cur_p then
+ try_break(eject_penalty, hyphenated_code, par, first_p, cur_p, checked_expansion)
+ local p_active = par.active
+ local n_active = p_active.next
if n_active ~= p_active then
local r = n_active
par.fewest_demerits = awful_badness
@@ -2398,7 +2367,7 @@ function constructors.methods.basic(head,d)
par.best_line = par.best_bet.line_number
local asked_looseness = par.looseness
if asked_looseness == 0 then
- return tonode(wrap_up(par))
+ return wrap_up(par)
end
local r = n_active
local actual_looseness = 0
@@ -2418,30 +2387,30 @@ function constructors.methods.basic(head,d)
end
end
r = r.next
- until r == p_active
+ until r == p_active -- weird, loop list?
par.best_line = par.best_bet.line_number
if actual_looseness == asked_looseness or par.final_pass then
- return tonode(wrap_up(par))
+ return wrap_up(par)
end
end
end
reset_meta(par) -- clean up the memory by removing the break nodes
- if not second_pass then
+ if not par.second_pass then
if tracing_paragraphs then
diagnostics.current_pass(par,"secondpass")
end
- par.threshold = par.tolerance
+ par.threshold = par.tolerance
par.second_pass = true
- par.final_pass = par.emergency_stretch <= 0
+ par.final_pass = par.emergency_stretch <= 0
else
if tracing_paragraphs then
diagnostics.current_pass(par,"emergencypass")
end
par.background.stretch = par.background.stretch + par.emergency_stretch
- par.final_pass = true
+ par.final_pass = true
end
end
- return tonode(wrap_up(par))
+ return wrap_up(par)
end
-- standard tex logging .. will be adapted ..
@@ -2466,58 +2435,48 @@ function diagnostics.current_pass(par,what)
write_nl("log",format("@%s",what))
end
-local verbose = false -- true
-
-local function short_display(target,a,font_in_short_display)
+local function short_display(a,font_in_short_display)
while a do
- local id = getid(a)
+ local id = a.id
if id == glyph_code then
- local font = getfont(a)
+ local font = a.font
if font ~= font_in_short_display then
- write(target,tex.fontidentifier(font) .. ' ')
+ write("log",tex.fontidentifier(font) .. ' ')
font_in_short_display = font
end
- if getsubtype(a) == ligature_code then
- font_in_short_display = short_display(target,getfield(a,"components"),font_in_short_display)
+ if a.subtype == ligature_code then
+ font_in_short_display = short_display(a.components,font_in_short_display)
else
- write(target,utfchar(getchar(a)))
+ write("log",utfchar(a.char))
end
+-- elseif id == rule_code then
+-- write("log","|")
+-- elseif id == glue_code then
+-- if a.spec.writable then
+-- write("log"," ")
+-- end
+-- elseif id == math_code then
+-- write("log","$")
elseif id == disc_code then
- font_in_short_display = short_display(target,getfield(a,"pre"),font_in_short_display)
- font_in_short_display = short_display(target,getfield(a,"post"),font_in_short_display)
- elseif verbose then
- write(target,format("[%s]",nodecodes[id]))
- elseif id == rule_code then
- write(target,"|")
- elseif id == glue_code then
- if getfield(getfield(a,"spec"),"writable") then
- write(target," ")
- end
- elseif id == kern_code and (getsubtype(a) == userkern_code or getattr(a,a_fontkern)) then
- if verbose then
- write(target,"[|]")
- else
- write(target,"")
- end
- elseif id == math_code then
- write(target,"$")
- else
- write(target,"[]")
+ font_in_short_display = short_display(a.pre,font_in_short_display)
+ font_in_short_display = short_display(a.post,font_in_short_display)
+ else -- no explicit checking
+ write("log",format("[%s]",nodecodes[id]))
end
- a = getnext(a)
+ a = a.next
end
return font_in_short_display
end
diagnostics.short_display = short_display
-function diagnostics.break_node(par, q, fit_class, break_type, current) -- %d ?
+function diagnostics.break_node(par, q, fit_class, break_type, cur_p) -- %d ?
local passive = par.passive
local typ_ind = break_type == hyphenated_code and '-' or ""
if par.do_last_line_fit then
local s = number.toscaled(q.active_short)
local g = number.toscaled(q.active_glue)
- if current then
+ if cur_p then
write_nl("log",format("@@%d: line %d.%d%s t=%s s=%s g=%s",
passive.serial or 0,q.line_number-1,fit_class,typ_ind,q.total_demerits,s,g))
else
@@ -2535,26 +2494,26 @@ function diagnostics.break_node(par, q, fit_class, break_type, current) -- %d ?
end
end
-function diagnostics.feasible_break(par, current, r, b, pi, d, artificial_demerits)
+function diagnostics.feasible_break(par, cur_p, r, b, pi, d, artificial_demerits)
local printed_node = par.printed_node
- if printed_node ~= current then
+ if printed_node ~= cur_p then
write_nl("log","")
- if not current then
- par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
+ if not cur_p then
+ par.font_in_short_display = short_display(printed_node.next,par.font_in_short_display)
else
- local save_link = getnext(current)
- setfield(cur_p,"next",nil)
+ local save_link = cur_p.next
+ cur_p.next = nil
write_nl("log","")
- par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
- setfield(cur_p,"next",save_link)
+ par.font_in_short_display = short_display(printed_node.next,par.font_in_short_display)
+ cur_p.next = save_link
end
- par.printed_node = current
+ par.printed_node = cur_p
end
write_nl("log","@")
- if not current then
+ if not cur_p then
write_esc("par")
else
- local id = getid(current)
+ local id = cur_p.id
if id == glue_code then
-- print nothing
elseif id == penalty_code then
@@ -2603,54 +2562,49 @@ end)
-- with the glyph.
local function glyph_width_height_depth(curdir,pdir,p)
- local wd = getfield(p,"width")
- local ht = getfield(p,"height")
- local dp = getfield(p,"depth")
if is_rotated[curdir] then
if is_parallel[curdir][pdir] then
- local half = (ht + dp) / 2
- return wd, half, half
+ local half = (p.height + p.depth) / 2
+ return p.width, half, half
else
- local half = wd / 2
- return ht + dp, half, half
+ local half = p.width / 2
+ return p.height + p.depth, half, half
end
elseif is_rotated[pdir] then
if is_parallel[curdir][pdir] then
- local half = (ht + dp) / 2
- return wd, half, half
+ local half = (p.height + p.depth) / 2
+ return p.width, half, half
else
- return ht + dp, wd, 0 -- weird
+ return p.height + p.depth, p.width, 0 -- weird
end
else
if glyphdir_is_equal[curdir][pdir] then
- return wd, ht, dp
+ return p.width, p.height, p.depth
elseif is_opposite[curdir][pdir] then
- return wd, dp, ht
+ return p.width, p.depth, p.height
else -- can this happen?
- return ht + dp, wd, 0
+ return p.height + p.depth, p.width, 0 -- weird
end
end
end
local function pack_width_height_depth(curdir,pdir,p)
- local wd = getfield(p,"width")
- local ht = getfield(p,"height")
- local dp = getfield(p,"depth")
if is_rotated[curdir] then
if is_parallel[curdir][pdir] then
- local half = (ht + dp) / 2
- return wd, half, half
+ local half = (p.height + p.depth) / 2
+ return p.width, half, half
else -- can this happen?
- local half = wd / 2
- return ht + dp, half, half
+ local half = p.width / 2
+ return p.height + p.depth, half, half
end
else
if pardir_is_equal[curdir][pdir] then
- return wd, ht, dp
+ return p.width, p.height, p.depth
elseif is_opposite[curdir][pdir] then
- return wd, dp, ht
+ return p.width, p.depth, p.height
else -- weird dimensions, can this happen?
- return ht + dp, wd, 0
+ -- return p.width, p.depth, p.height
+ return p.height + p.depth, p.width, 0
end
end
end
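-- a small worked example of the two direction helpers above (the numbers are only an
-- assumed illustration): for a rotated current direction and a parallel pdir, a box
-- that is 10pt wide with height 8pt and depth 2pt contributes 10pt to the natural
-- width and (8pt+2pt)/2 = 5pt to both height and depth; when the directions are not
-- parallel the roles flip and it contributes 8pt+2pt = 10pt to the width and
-- 10pt/2 = 5pt to height and depth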
@@ -2668,17 +2622,17 @@ end
--
-- local hlist = new_node("hlist")
--
--- setfield(hlist,"list",head)
--- setfield(hlist,"dir",direction or tex.textdir)
--- setfield(hlist,"width",width)
--- setfield(hlist,"height",height)
--- setfield(hlist,"depth",depth)
+-- hlist.list = head
+-- hlist.dir = direction or tex.textdir
+-- hlist.width = width
+-- hlist.height = height
+-- hlist.depth = depth
--
-- if delta == 0 then
--
--- setfield(hlist,"glue_sign",0)
--- setfield(hlist,"glue_order",0)
--- setfield(hlist,"glue_set",0)
+-- hlist.glue_sign = 0
+-- hlist.glue_order = 0
+-- hlist.glue_set = 0
--
-- else
--
@@ -2694,15 +2648,16 @@ end
-- else
-- local stretch = analysis.stretch
-- if stretch ~= 0 then
--- setfield(hlist,"glue_sign",1) -- stretch
--- setfield(hlist,"glue_order",order)
--- setfield(hlist,"glue_set",delta/stretch)
+-- hlist.glue_sign = 1 -- stretch
+-- hlist.glue_order = order
+-- hlist.glue_set = delta/stretch
-- else
--- setfield(hlist,"glue_sign",0) -- nothing
--- setfield(hlist,"glue_order",order)
--- setfield(hlist,"glue_set",0)
+-- hlist.glue_sign = 0 -- nothing
+-- hlist.glue_order = order
+-- hlist.glue_set = 0
-- end
-- end
+-- print("stretch",hlist.glue_sign,hlist.glue_order,hlist.glue_set)
--
-- else
--
@@ -2711,15 +2666,16 @@ end
-- else
-- local shrink = analysis.shrink
-- if shrink ~= 0 then
--- setfield(hlist,"glue_sign",2) -- shrink
--- setfield(hlist,"glue_order",order)
--- setfield(hlist,"glue_set",-delta/stretch)
+-- hlist.glue_sign = 2 -- shrink
+-- hlist.glue_order = order
+-- hlist.glue_set = - delta/shrink
-- else
--- setfield(hlist,"glue_sign",0) -- nothing
--- setfield(hlist,"glue_order",order)
--- setfield(hlist,"glue_set",0)
+-- hlist.glue_sign = 0 -- nothing
+-- hlist.glue_order = order
+-- hlist.glue_set = 0
-- end
-- end
+-- print("shrink",hlist.glue_sign,hlist.glue_order,hlist.glue_set)
--
-- end
--
@@ -2733,7 +2689,7 @@ end
-- end
-- local current = head
-- while current do
--- local id = getid(current)
+-- local id = current.id
-- if id == glyph_code then
-- local stretch, shrink = char_stretch_shrink(current) -- get only one
-- if stretch then
@@ -2743,12 +2699,12 @@ end
-- current.expansion_factor = font_expand_ratio * stretch
-- end
-- elseif id == kern_code then
--- local kern = getfield(current,"kern")
--- if kern ~= 0 and getsubtype(current) == kerning_code then
--- setfield(current,"kern",font_expand_ratio * kern)
+-- local kern = current.kern
+-- if kern ~= 0 and current.subtype == kerning_code then
+-- current.kern = font_expand_ratio * current.kern
-- end
-- end
--- current = getnext(current)
+-- current = current.next
-- end
-- elseif font_expand_ratio < 0 then
-- if font_expand_ratio < -1000 then
@@ -2756,7 +2712,7 @@ end
-- end
-- local current = head
-- while current do
--- local id = getid(current)
+-- local id = current.id
-- if id == glyph_code then
-- local stretch, shrink = char_stretch_shrink(current) -- get only one
-- if shrink then
@@ -2766,31 +2722,26 @@ end
-- current.expansion_factor = font_expand_ratio * shrink
-- end
-- elseif id == kern_code then
--- local kern = getfield(current,"kern")
--- if kern ~= 0 and getsubtype(current) == kerning_code then
--- setfield(current,"kern",font_expand_ratio * kern)
+-- local kern = current.kern
+-- if kern ~= 0 and current.subtype == kerning_code then
+-- current.kern = font_expand_ratio * current.kern
-- end
-- end
--- current = getnext(current)
+-- current = current.next
-- end
-- end
-- return hlist, 0
-- end
-local function hpack(head,width,method,direction,firstline,line) -- fast version when head = nil
+local function hpack(head,width,method,direction) -- fast version when head = nil
-- we can pass the adjust_width and adjust_height so that we don't need to recalculate them but
- -- with the glue mess it's less trivial as we lack detail .. challenge
+ -- with the glue mess it's less trivial as we lack detail
local hlist = new_node("hlist")
- setfield(hlist,"dir",direction)
-
if head == nil then
- setfield(hlist,"width",width)
return hlist, 0
- else
- setfield(hlist,"list",head)
end
local cal_expand_ratio = method == "cal_expand_ratio" or method == "subst_ex_font"
@@ -2806,6 +2757,8 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
local font_shrink = 0
local font_expand_ratio = 0
local last_badness = 0
+ local disc_stack = { }
+ local disc_level = 0
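+    -- the disc stack records where to resume after stepping into a discretionary's
+    -- replace list: the node following the disc is pushed here and popped again at
+    -- the end of the main while loop once the replace sublist is exhausted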
local expansion_stack = cal_expand_ratio and { } -- todo: optionally pass this
local expansion_index = 0
local total_stretch = { [0] = 0, 0, 0, 0, 0 }
@@ -2815,8 +2768,11 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
local adjust_head = texlists.adjust_head
local pre_adjust_head = texlists.pre_adjust_head
- local adjust_tail = adjust_head and slide_nodelist(adjust_head) -- todo: find_tail
- local pre_adjust_tail = pre_adjust_head and slide_nodelist(pre_adjust_head) -- todo: find_tail
+ local adjust_tail = adjust_head and slide_nodes(adjust_head)
+ local pre_adjust_tail = pre_adjust_head and slide_nodes(pre_adjust_head)
+
+ hlist.list = head
+ hlist.dir = hpack_dir
new_dir_stack(hpack_dir)
@@ -2831,205 +2787,225 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
local fontexps, lastfont
- local function process(current) -- called nested in disc replace
+ local current = head
- while current do
- local id = getid(current)
- if id == glyph_code then
- if cal_expand_ratio then
- local currentfont = getfont(current)
- if currentfont ~= lastfont then
- fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
- lastfont = currentfont
- end
- if fontexps then
- local expansion = fontexps[getchar(current)]
- if expansion then
- font_stretch = font_stretch + expansion.glyphstretch
- font_shrink = font_shrink + expansion.glyphshrink
- expansion_index = expansion_index + 1
- expansion_stack[expansion_index] = current
- end
- end
- end
- -- use inline
- local wd, ht, dp = glyph_width_height_depth(hpack_dir,"TLT",current) -- was TRT ?
- natural = natural + wd
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
- end
- elseif id == kern_code then
- local kern = getfield(current,"kern")
- if kern == 0 then
- -- no kern
- elseif getsubtype(current) == kerning_code then -- check getfield(p,"kern")
- if cal_expand_ratio then
- local stretch, shrink = kern_stretch_shrink(current,kern)
- font_stretch = font_stretch + stretch
- font_shrink = font_shrink + shrink
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ if cal_expand_ratio then
+ local currentfont = current.font
+ if currentfont ~= lastfont then
+ fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
+ lastfont = currentfont
+ end
+ if fontexps then
+ local expansion = fontexps[current.char]
+ if expansion then
+ font_stretch = font_stretch + expansion.glyphstretch
+ font_shrink = font_shrink + expansion.glyphshrink
expansion_index = expansion_index + 1
expansion_stack[expansion_index] = current
end
- natural = natural + kern
- else
- natural = natural + kern
end
- elseif id == disc_code then
- local subtype = getsubtype(current)
- if subtype ~= second_disc_code then
- -- todo : local stretch, shrink = char_stretch_shrink(s)
- local replace = getfield(current,"replace")
- if replace then
- process(replace)
- end
- end
- elseif id == glue_code then
- local spec = getfield(current,"spec")
- natural = natural + getfield(spec,"width")
- local op = getfield(spec,"stretch_order")
- local om = getfield(spec,"shrink_order")
- total_stretch[op] = total_stretch[op] + getfield(spec,"stretch")
- total_shrink [om] = total_shrink [om] + getfield(spec,"shrink")
- if getsubtype(current) >= leaders_code then
- local leader = getleader(current)
- local ht = getfield(leader,"height")
- local dp = getfield(leader,"depth")
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
+ end
+ -- use inline if no expansion
+ local wd, ht, dp = glyph_width_height_depth(hpack_dir,"TLT",current) -- was TRT ?
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ current = current.next
+ elseif id == kern_code then
+ local kern = current.kern
+ if kern == 0 then
+ -- no kern
+ else
+ if cal_expand_ratio and expand_kerns and current.subtype == kerning_code or current[a_fontkern] then -- check p.kern
+ local stretch, shrink = kern_stretch_shrink(current,kern)
+ if expand_kerns == "stretch" then
+ font_stretch = font_stretch + stretch
+ elseif expand_kerns == "shrink" then
+ font_shrink = font_shrink + shrink
+ else
+ font_stretch = font_stretch + stretch
+ font_shrink = font_shrink + shrink
end
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = current
end
- elseif id == hlist_code or id == vlist_code then
- local sh = getfield(current,"shift")
- local wd, ht, dp = pack_width_height_depth(hpack_dir,getfield(current,"dir") or hpack_dir,current) -- added: or pack_dir
- local hs, ds = ht - sh, dp + sh
- natural = natural + wd
- if hs > height then
- height = hs
- end
- if ds > depth then
- depth = ds
+ natural = natural + kern
+ end
+ current = current.next
+ elseif id == disc_code then
+ if current.subtype ~= second_disc_code then
+ -- we follow the end of line disc chain
+ local replace = current.replace
+ if replace then
+ disc_level = disc_level + 1
+ disc_stack[disc_level] = current.next
+ current = replace
+ else
+ current = current.next
end
- elseif id == rule_code then
- local wd = getfield(current,"width")
- local ht = getfield(current,"height")
- local dp = getfield(current,"depth")
- natural = natural + wd
+ else
+ current = current.next
+ end
+ elseif id == glue_code then
+ local spec = current.spec
+ natural = natural + spec.width
+ local op = spec.stretch_order
+ local om = spec.shrink_order
+ total_stretch[op] = total_stretch[op] + spec.stretch
+ total_shrink [om] = total_shrink [om] + spec.shrink
+ if current.subtype >= leaders_code then
+ local leader = current.leader
+ local ht = leader.height
+ local dp = leader.depth
if ht > height then
height = ht
end
if dp > depth then
depth = dp
end
- elseif id == math_code then
- natural = natural + getfield(current,"surround")
- elseif id == unset_code then
- local wd = getfield(current,"width")
- local ht = getfield(current,"height")
- local dp = getfield(current,"depth")
- local sh = getfield(current,"shift")
- local hs = ht - sh
- local ds = dp + sh
- natural = natural + wd
- if hs > height then
- height = hs
+ end
+ current = current.next
+ elseif id == hlist_code or id == vlist_code then
+ local sh = current.shift
+ local wd, ht, dp = pack_width_height_depth(hpack_dir,current.dir or hpack_dir,current) -- added: or pack_dir
+ local hs, ds = ht - sh, dp + sh
+ natural = natural + wd
+ if hs > height then
+ height = hs
+ end
+ if ds > depth then
+ depth = ds
+ end
+ current = current.next
+ elseif id == rule_code then
+ local wd = current.width
+ local ht = current.height
+ local dp = current.depth
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ current = current.next
+ elseif id == math_code then
+ natural = natural + current.surround
+ current = current.next
+ elseif id == unset_code then
+ local wd = current.width
+ local ht = current.height
+ local dp = current.depth
+ local sh = current.shift
+ local hs = ht - sh
+ local ds = dp + sh
+ natural = natural + wd
+ if hs > height then
+ height = hs
+ end
+ if ds > depth then
+ depth = ds
+ end
+ current = current.next
+ elseif id == ins_code or id == mark_code then
+ local prev = current.prev
+ local next = current.next
+ if adjust_tail then -- todo
+ if next then
+ next.prev = prev
end
- if ds > depth then
- depth = ds
+ if prev then
+ prev.next = next
end
- elseif id == ins_code or id == mark_code then
- local prev = getprev(current)
- local next = getnext(current)
- if adjust_tail then -- todo
- if next then
- setfield(next,"prev",prev)
- end
- if prev then
- setfield(prev,"next",next)
+ current.prev = adjust_tail
+ current.next = nil
+ adjust_tail.next = current
+ adjust_tail = current
+ else
+ adjust_head = current
+ adjust_tail = current
+ current.prev = nil
+ current.next = nil
+ end
+ current = next
+ elseif id == adjust_code then
+ local list = current.list
+ if adjust_tail then
+ adjust_tail.next = list
+ adjust_tail = slide_nodes(list)
+ else
+ adjust_head = list
+ adjust_tail = slide_nodes(list)
+ end
+ current = current.next
+ elseif id == whatsit_code then
+ local subtype = current.subtype
+ if subtype == dir_code then
+ hpack_dir = checked_line_dir(stack,current) or hpack_dir
+ else
+ local get_dimensions = get_whatsit_dimensions[subtype]
+ if get_dimensions then
+ local wd, ht, dp = get_dimensions(current)
+ natural = natural + wd
+ if ht > height then
+ height = ht
end
- setfield(current,"prev",adjust_tail)
- setfield(current,"next",nil)
- adjust_setfield(tail,"next",current)
- adjust_tail = current
- else
- adjust_head = current
- adjust_tail = current
- setfield(current,"prev",nil)
- setfield(current,"next",nil)
- end
- elseif id == adjust_code then
- local list = getlist(current)
- if adjust_tail then
- adjust_setfield(tail,"next",list)
- else
- adjust_head = list
- end
- adjust_tail = slide_nodelist(list) -- find_tail(list)
- elseif id == whatsit_code then
- local subtype = getsubtype(current)
- if subtype == dir_code then
- hpack_dir = checked_line_dir(stack,current) or hpack_dir
- else
- local get_dimensions = get_whatsit_dimensions[subtype]
- if get_dimensions then
- local wd, ht, dp = get_dimensions(current,hpack_dir)
- natural = natural + wd
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
- end
+ if dp > depth then
+ depth = dp
end
end
- elseif id == marginkern_code then
- local width = getfield(current,"width")
- if cal_expand_ratio then
- -- is this ok?
- local glyph = getfield(current,"glyph")
- local char_pw = getsubtype(current) == leftmargin_code and left_pw or right_pw
- font_stretch = font_stretch - width - char_pw(glyph)
- font_shrink = font_shrink - width - char_pw(glyph)
- expansion_index = expansion_index + 1
- expansion_stack[expansion_index] = glyph
- end
- natural = natural + width
end
- current = getnext(current)
+ current = current.next
+ elseif id == marginkern_code then
+ if cal_expand_ratio then
+ local glyph = current.glyph
+ local char_pw = current.subtype == leftmargin_code and left_pw or right_pw
+ font_stretch = font_stretch - current.width - char_pw(glyph)
+ font_shrink = font_shrink - current.width - char_pw(glyph)
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = glyph
+ end
+ natural = natural + current.width
+ current = current.next
+ else
+ current = current.next
+ end
+ if not current and disc_level > 0 then
+ current = disc_stack[disc_level]
+ disc_level = disc_level - 1
end
-
end
-
- process(head)
-
if adjust_tail then
adjust_tail.next = nil -- todo
end
if pre_adjust_tail then
pre_adjust_tail.next = nil -- todo
end
- if method == "additional" then
+    if method == "additional" then
width = width + natural
end
- setfield(hlist,"width",width)
- setfield(hlist,"height",height)
- setfield(hlist,"depth",depth)
+ hlist.width = width
+ hlist.height = height
+ hlist.depth = depth
local delta = width - natural
if delta == 0 then
- setfield(hlist,"glue_sign",0)
- setfield(hlist,"glue_order",0)
- setfield(hlist,"glue_set",0)
+ hlist.glue_sign = 0
+ hlist.glue_order = 0
+ hlist.glue_set = 0
elseif delta > 0 then
-- natural width smaller than requested width
local order = (total_stretch[4] ~= 0 and 4 or total_stretch[3] ~= 0 and 3) or
(total_stretch[2] ~= 0 and 2 or total_stretch[1] ~= 0 and 1) or 0
+-- local correction = 0
if cal_expand_ratio and order == 0 and font_stretch > 0 then -- check sign of font_stretch
font_expand_ratio = delta/font_stretch
@@ -3041,38 +3017,41 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
for i=1,expansion_index do
local g = expansion_stack[i]
local e
- if getid(g) == glyph_code then
- local currentfont = getfont(g)
+ if g.id == glyph_code then
+ local currentfont = g.font
if currentfont ~= lastfont then
fontexps = expansions[currentfont]
lastfont = currentfont
end
- local data = fontexps[getchar(g)]
+ local data = fontexps[g.char]
if trace_expansion then
setnodecolor(g,"hz:positive")
end
e = font_expand_ratio * data.glyphstretch / 1000
+-- correction = correction + (e / 1000) * g.width
else
- local kern = getfield(g,"kern")
+ local kern = g.kern
local stretch, shrink = kern_stretch_shrink(g,kern)
e = font_expand_ratio * stretch / 1000
+-- correction = correction + (e / 1000) * kern
end
- setfield(g,"expansion_factor",e)
+ g.expansion_factor = e
end
end
+-- delta = delta - correction
local tso = total_stretch[order]
if tso ~= 0 then
- setfield(hlist,"glue_sign",1)
- setfield(hlist,"glue_order",order)
- setfield(hlist,"glue_set",delta/tso)
+ hlist.glue_sign = 1
+ hlist.glue_order = order
+ hlist.glue_set = delta/tso
else
- setfield(hlist,"glue_sign",0)
- setfield(hlist,"glue_order",order)
- setfield(hlist,"glue_set",0)
+ hlist.glue_sign = 0
+ hlist.glue_order = order
+ hlist.glue_set = 0
end
if font_expand_ratio ~= 0 then
-- todo
- elseif order == 0 then -- and getlist(hlist) then
+ elseif order == 0 then -- and hlist.list then
last_badness = calculate_badness(delta,total_stretch[0])
if last_badness > tex.hbadness then
if last_badness > 100 then
@@ -3086,6 +3065,7 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
-- natural width larger than requested width
local order = total_shrink[4] ~= 0 and 4 or total_shrink[3] ~= 0 and 3
or total_shrink[2] ~= 0 and 2 or total_shrink[1] ~= 0 and 1 or 0
+-- local correction = 0
if cal_expand_ratio and order == 0 and font_shrink > 0 then -- check sign of font_shrink
font_expand_ratio = delta/font_shrink
@@ -3097,47 +3077,54 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
for i=1,expansion_index do
local g = expansion_stack[i]
local e
- if getid(g) == glyph_code then
- local currentfont = getfont(g)
+ if g.id == glyph_code then
+ local currentfont = g.font
if currentfont ~= lastfont then
fontexps = expansions[currentfont]
lastfont = currentfont
end
- local data = fontexps[getchar(g)]
+ local data = fontexps[g.char]
if trace_expansion then
setnodecolor(g,"hz:negative")
end
e = font_expand_ratio * data.glyphshrink / 1000
+ -- local d = (e / 1000) * 1000
+ -- local eps = g.width - (1 + d / 1000000) * g.width
+ -- correction = correction + eps
+ -- e = d
+-- correction = correction + (e / 1000) * g.width
else
- local kern = getfield(g,"kern")
+ local kern = g.kern
local stretch, shrink = kern_stretch_shrink(g,kern)
e = font_expand_ratio * shrink / 1000
+-- correction = correction + (e / 1000) * kern
end
- setfield(g,"expansion_factor",e)
+ g.expansion_factor = e
end
end
+-- delta = delta - correction
local tso = total_shrink[order]
if tso ~= 0 then
- setfield(hlist,"glue_sign",2)
- setfield(hlist,"glue_order",order)
- setfield(hlist,"glue_set",-delta/tso)
+ hlist.glue_sign = 2
+ hlist.glue_order = order
+ hlist.glue_set = -delta/tso
else
- setfield(hlist,"glue_sign",0)
- setfield(hlist,"glue_order",order)
- setfield(hlist,"glue_set",0)
+ hlist.glue_sign = 0
+ hlist.glue_order = order
+ hlist.glue_set = 0
end
if font_expand_ratio ~= 0 then
-- todo
- elseif tso < -delta and order == 0 then -- and getlist(hlist) then
+ elseif tso < -delta and order == 0 then -- and hlist.list then
last_badness = 1000000
- setfield(hlist,"glue_set",1)
+ hlist.glue_set = 1
local fuzz = - delta - total_shrink[0]
local hfuzz = tex.hfuzz
if fuzz > hfuzz or tex.hbadness < 100 then
local overfullrule = tex.overfullrule
if fuzz > hfuzz and overfullrule > 0 then
-- weird, this is always called and no rule shows up
- setfield(slide_nodelist(list),"next",new_rule(overfullrule,nil,nil,hlist.dir)) -- todo: find_tail
+ slide_nodes(list).next = new_rule(overfullrule,nil,nil,hlist.dir)
end
diagnostics.overfull_hbox(hlist,line,-delta)
end
@@ -3148,7 +3135,7 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
return hlist, last_badness
end
-xpack_nodes = hpack -- comment this for old fashioned expansion (we need to fix float mess)
+xpack_nodes = hpack -- comment this for old fashioned expansion
local function common_message(hlist,line,str)
write_nl("")
@@ -3186,3 +3173,20 @@ end
function diagnostics.loose_hbox(hlist,line,b)
common_message(hlist,line,format("Loose \\hbox (badness %i)",b))
end
+
+-- e = font_expand_ratio * data.glyphstretch / 1000
+-- local stretch = data.stretch
+-- if e >= stretch then
+-- e = stretch
+-- else
+-- local step = 5
+-- e = math.round(e/step) * step
+-- end
+
+-- local shrink = - data.shrink
+-- if e <= shrink then
+-- e = shrink
+-- else
+-- local step = 5
+-- e = math.round(e/step) * step
+-- end
diff --git a/tex/context/base/node-met.lua b/tex/context/base/node-met.lua
index d52349b4a..c85a53c8e 100644
--- a/tex/context/base/node-met.lua
+++ b/tex/context/base/node-met.lua
@@ -332,28 +332,6 @@ function nodes.writable_spec(n) -- not pool
return spec
end
-function nodes.copy_spec(old,free) -- also frees
- if not old then
- return n_new_node("glue_spec")
- else
- local new = n_copy_node(old)
- if free and old.writable then
- free_node(old)
- end
- return new
- end
-end
-
-function nodes.free_spec(old)
- if not old then
- -- skip
- elseif old.writable then
- free_node(old)
- else
- -- skip
- end
-end
-
if gonuts then
function nodes.reference(n)
@@ -690,34 +668,3 @@ end
nodes.keys = keys -- [id][subtype]
nodes.fields = nodefields -- (n)
-
--- one issue solved in flush_node:
---
--- case glue_spec_node:
--- if (glue_ref_count(p)!=null) {
--- decr(glue_ref_count(p));
--- return ;
--- /*
--- } else if (! valid_node(p)) {
--- return ;
--- */
--- /*
--- } else {
--- free_node(p, get_node_size(type(p), subtype(p)));
--- return ;
--- */
--- }
--- break ;
---
--- or:
---
--- case glue_spec_node:
--- if (glue_ref_count(p)!=null) {
--- decr(glue_ref_count(p));
--- return ;
--- } else if (valid_node(p)) {
--- free_node(p, get_node_size(type(p), subtype(p)));
--- return ;
--- } else {
--- break ;
--- }
diff --git a/tex/context/base/node-mig.lua b/tex/context/base/node-mig.lua
index 41f95be45..9fc35a048 100644
--- a/tex/context/base/node-mig.lua
+++ b/tex/context/base/node-mig.lua
@@ -6,32 +6,15 @@ if not modules then modules = { } end modules ['node-mig'] = {
license = "see context related readme files"
}
--- todo: insert_after
-
local format = string.format
-local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
+local attributes, nodes, node = attributes, nodes, node
-local report_nodes = logs.reporter("nodes","migrations")
+local remove_nodes = nodes.remove
-local attributes = attributes
-local nodes = nodes
+local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local remove_node = nuts.remove
-
-local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local insert_code = nodecodes.ins
@@ -39,6 +22,10 @@ local mark_code = nodecodes.mark
local a_migrated = attributes.private("migrated")
+local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
+
+local report_nodes = logs.reporter("nodes","migrations")
+
local migrate_inserts, migrate_marks, inserts_too
local t_inserts, t_marks, t_sweeps = 0, 0, 0
@@ -46,42 +33,32 @@ local t_inserts, t_marks, t_sweeps = 0, 0, 0
local function locate(head,first,last,ni,nm)
local current = head
while current do
- local id = getid(current)
+ local id = current.id
if id == vlist_code or id == hlist_code then
- local list = getlist(current)
- if list then
- list, first, last, ni, nm = locate(list,first,last,ni,nm)
- setfield(current,"list",list)
- end
- current = getnext(current)
+ current.list, first, last, ni, nm = locate(current.list,first,last,ni,nm)
+ current = current.next
elseif migrate_inserts and id == insert_code then
local insert
- head, current, insert = remove_node(head,current)
- setfield(insert,"next",nil)
+ head, current, insert = remove_nodes(head,current)
+ insert.next = nil
if first then
- setfield(insert,"prev",last)
- setfield(last,"next",insert)
+ insert.prev, last.next = last, insert
else
- setfield(insert,"prev",nil)
- first = insert
+ insert.prev, first = nil, insert
end
- last = insert
- ni = ni + 1
+ last, ni = insert, ni + 1
elseif migrate_marks and id == mark_code then
local mark
- head, current, mark = remove_node(head,current)
- setfield(mark,"next",nil)
+ head, current, mark = remove_nodes(head,current)
+ mark.next = nil
if first then
- setfield(mark,"prev",last)
- setfield(last,"next",mark)
+ mark.prev, last.next = last, mark
else
- setfield(mark,"prev",nil)
- first = mark
+ mark.prev, first = nil, mark
end
- last = mark
- nm = nm + 1
+ last, nm = mark, nm + 1
else
- current = getnext(current)
+            current = current.next
end
end
return head, first, last, ni, nm
@@ -93,43 +70,39 @@ function nodes.handlers.migrate(head,where)
if trace_migrations then
report_nodes("migration sweep %a",where)
end
- local current = tonut(head)
+ local current = head
while current do
- local id = getid(current)
+ local id = current.id
-- inserts_too is a temp hack, we should only do them when it concerns
-- newly placed (flushed) inserts
- if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not getattr(current,a_migrated) then
- setattr(current,a_migrated,1)
+ if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not current[a_migrated] then
+ current[a_migrated] = 1
t_sweeps = t_sweeps + 1
- local h = getlist(current)
+ local h = current.list
local first, last, ni, nm
while h do
- local id = getid(h)
+ local id = h.id
if id == vlist_code or id == hlist_code then
h, first, last, ni, nm = locate(h,first,last,0,0)
end
- h = getnext(h)
+ h = h.next
end
if first then
- t_inserts = t_inserts + ni
- t_marks = t_marks + nm
+ t_inserts, t_marks = t_inserts + ni, t_marks + nm
if trace_migrations and (ni > 0 or nm > 0) then
report_nodes("sweep %a, container %a, %s inserts and %s marks migrated outwards during %a",
t_sweeps,nodecodes[id],ni,nm,where)
end
- -- inserts after head, use insert_after
- local n = getnext(current)
+ -- inserts after head
+ local n = current.next
if n then
- setfield(last,"next",n)
- setfield(n,"prev",last)
+ last.next, n.prev = n, last
end
- setfield(current,"next",first)
- setfield(first,"prev",current)
- done = true
- current = last
+ current.next, first.prev = first, current
+ done, current = true, last
end
end
- current = getnext(next)
+ current = current.next
end
return head, done
end
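-- the splice at the end of the handler above moves the collected inserts and marks to
-- just after the box they migrated out of; schematically, for a chain ... box -> n ...
-- and a collected chain first .. last, the result is ... box -> first .. last -> n ...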
diff --git a/tex/context/base/node-nut.lua b/tex/context/base/node-nut.lua
deleted file mode 100644
index 4732b09eb..000000000
--- a/tex/context/base/node-nut.lua
+++ /dev/null
@@ -1,650 +0,0 @@
-if not modules then modules = { } end modules ['node-met'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Here starts some more experimental code that Luigi and I use in the next stage of
--- exploring and testing potential speedups in the engines. This code is not meant
--- for users and can change (or be removed) at any moment. During the experiments I'll
--- do my best to keep the code as fast as possible by using two codebases. See
--- about-fast.pdf for some more info about the impact. Although key-based access has
--- more charm, function-based access is somewhat faster and has more potential for
--- future speedups.
-
--- This next iteration is flagged direct because we avoid user data which has a price
--- in allocation and metatable tagging. Although in this stage we pass numbers around
--- future versions might use light user data, so never depend on what direct functions
--- return. Using the direct approach had some speed advantages but you lose the key
--- based access. The speed gain is only measurable in cases with lots of access. For
--- instance when typesetting arabic with advanced fonts, we're talking about many millions
--- of function calls and there we can get a 30\% or more speedup. On average complex
--- \CONTEXT\ runs the gain can be 10\% to 15\%. Because mixing the two models (here we
--- call them nodes and nuts) is not possible, you need to cast either way, which has a
--- penalty. Also, error messages in nuts mode are less clear, and \LUATEX\ will often
--- simply abort when you make mistakes or mix the models. So, development (at least
--- in \CONTEXT) can be done in node mode and not in nuts mode. Only robust code will
--- be turned nuts afterwards and quite likely not all code. The official \LUATEX\ api
--- to nodes is userdata!
---
--- Listening to 'lunatic soul' at the same time helped me wrap my mind around the mixed
--- usage of both models. Just for the record: the potential of the direct approach only
--- became clear after experimenting for weeks and partly adapting code. It is one of those
--- (sub)projects where you afterwards wonder if it was worth the trouble, but users that
--- rely on lots of complex functionality and font support will probably notice the speedup.
---
--- luatex luajittex
--- ------------- ----- -------------------- ---------------------------------
--- name pages old new pct old new pct
--- ------------- ----- -------------------- ---------------------------------
--- fonts-mkiv 166 9.3 7.7/7.4 17.2 7.4 (37.5) 5.9/5.7 (55.6) 20.3
--- about 60 3.3 2.7/2.6 20.4 2.5 (39.5) 2.1 (57.0) 23.4
--- arabic-001 61 25.3 15.8 18.2 15.3 (46.7) 6.8 (54.7) 16.0
--- torture-001 300 21.4 11.4 24.2 13.9 (35.0) 6.3 (44.7) 22.2
---
--- so:
---
--- - we run around 20% faster on documents of average complexity and gain more when
--- dealing with scripts like arabic and such
--- - luajittex benefits a bit more so a luajittex job can (in principle) now be much
--- faster
--- - if we reason backwards, and take luajittex as norm we get 1:2:3 on some jobs for
--- luajittex direct:luatex direct:luatex normal i.e. we can be 3 times faster
--- - keep in mind that these are tex/lua runs so the real gain at the lua end is much
--- larger
---
--- Because we can fake direct mode a little bit by using the fast getfield and setfield
--- at the cost of wrapped getid and the like, we are still running quite ok. As we could gain
--- some 5% with fast mode, we can sacrifice some on wrappers when we use a few fast core
--- functions. This means that simulated direct mode runs font-mkiv in 9.1 seconds (we could
--- get down to 8.7 seconds in fast mode) and that we can migrate slowly to direct mode.
---
--- The following measurements are from 2013-07-05 after adapting some 47 files to nuts. Keep
--- in mind that the old binary can fake a fast getfield and setfield but that the other
--- getters are wrapped functions. The more we have, the slower it gets.
---
--- fonts about arabic
--- old mingw, indexed plus some functions : 8.9 3.2 20.3
--- old mingw, fake functions : 9.9 3.5 27.4
--- new mingw, node functions : 9.0 3.1 20.8
--- new mingw, indexed plus some functions : 8.6 3.1 19.6
--- new mingw, direct functions : 7.5 2.6 14.4
---
--- \starttext \dorecurse{1000}{test\page} \stoptext :
---
--- luatex 560 pps
--- luajittex 600 pps
---
--- \setupbodyfont[pagella]
---
--- \edef\zapf{\cldcontext{context(io.loaddata(resolvers.findfile("zapf.tex")))}}
---
--- \starttext \dorecurse{1000}{\zapf\par} \stoptext
---
--- luatex 3.9 sec / 54 pps
--- luajittex 2.3 sec / 93 pps
-
-local nodes = nodes
-local gonuts = nodes.gonuts
-local direct = node.direct
-
-if type(direct) ~= "table" then
- return
-elseif gonuts then
- statistics.register("running in nuts mode", function() return "yes" end)
-else
- statistics.register("running in nuts mode", function() return "no" end)
- return
-end
-
-local texget = tex.get
-
-local nodecodes = nodes.nodecodes
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-
-local nuts = nodes.nuts or { }
-nodes.nuts = nuts
-
-nodes.is_node = direct.is_node or function() return true end
-nodes.is_direct = direct.is_direct or function() return false end
-nodes.is_nut = nodes.is_direct
-
--- casters
-
-local tonode = direct.tonode or function(n) return n end
-local tonut = direct.todirect or function(n) return n end
-
-nuts.tonode = tonode
-nuts.tonut = tonut
-
-nodes.tonode = tonode
-nodes.tonut = tonut
-
--- getters
-
-nuts.getfield = direct.getfield
-nuts.getnext = direct.getnext
-nuts.getprev = direct.getprev
-nuts.getid = direct.getid
-nuts.getattr = direct.getfield
-nuts.getchar = direct.getchar
-nuts.getfont = direct.getfont
-nuts.getsubtype = direct.getsubtype
-nuts.getlist = direct.getlist -- only hlist and vlist !
-nuts.getleader = direct.getleader
-
--- local dgf = direct.getfield function nuts.getlist(n) return dgf(n,"list") end
-
--- setters
-
-nuts.setfield = direct.setfield
-nuts.setattr = direct.setfield
-
-nuts.getbox = direct.getbox
-nuts.setbox = direct.setbox
-nuts.getskip = direct.getskip or function(s) return tonut(texget(s)) end
-
--- helpers
-
-nuts.tostring = direct.tostring
-nuts.copy = direct.copy
-nuts.copy_list = direct.copy_list
-nuts.delete = direct.delete
-nuts.dimensions = direct.dimensions
-nuts.end_of_math = direct.end_of_math
-nuts.flush_list = direct.flush_list
-nuts.flush_node = direct.flush_node
-nuts.free = direct.free
-nuts.insert_after = direct.insert_after
-nuts.insert_before = direct.insert_before
-nuts.hpack = direct.hpack
-nuts.new = direct.new
-nuts.tail = direct.tail
-nuts.traverse = direct.traverse
-nuts.traverse_id = direct.traverse_id
-nuts.slide = direct.slide
-nuts.writable_spec = direct.writable_spec
-nuts.vpack = direct.vpack
-nuts.is_node = direct.is_node
-nuts.is_direct = direct.is_direct
-nuts.is_nut = direct.is_direct
-nuts.first_glyph = direct.first_glyph
-nuts.first_character = direct.first_character
-nuts.has_glyph = direct.has_glyph or direct.first_glyph
-
-nuts.current_attr = direct.current_attr
-nuts.do_ligature_n = direct.do_ligature_n
-nuts.has_field = direct.has_field
-nuts.last_node = direct.last_node
-nuts.usedlist = direct.usedlist
-nuts.protrusion_skippable = direct.protrusion_skippable
-nuts.write = direct.write
-
-nuts.has_attribute = direct.has_attribute
-nuts.set_attribute = direct.set_attribute
-nuts.unset_attribute = direct.unset_attribute
-
-nuts.protect_glyphs = direct.protect_glyphs
-nuts.unprotect_glyphs = direct.unprotect_glyphs
-
--- placeholders
-
-if not direct.kerning then
-
- local n_kerning = node.kerning
-
- function nuts.kerning(head)
- return tonode(n_kerning(tonut(head)))
- end
-
-end
-
-if not direct.ligaturing then
-
- local n_ligaturing = node.ligaturing
-
- function nuts.ligaturing(head)
- return tonode(n_ligaturing(tonut(head)))
- end
-
-end
-
-if not direct.mlist_to_hlist then
-
- local n_mlist_to_hlist = node.mlist_to_hlist
-
- function nuts.mlist_to_hlist(head)
- return tonode(n_mlist_to_hlist(tonut(head)))
- end
-
-end
-
---
-
-local d_remove_node = direct.remove
-local d_free_node = direct.free
-local d_getfield = direct.getfield
-local d_setfield = direct.setfield
-local d_getnext = direct.getnext
-local d_getprev = direct.getprev
-local d_getid = direct.getid
-local d_getlist = direct.getlist
-local d_find_tail = direct.tail
-local d_insert_after = direct.insert_after
-local d_insert_before = direct.insert_before
-local d_slide = direct.slide
-local d_copy_node = direct.copy
-local d_traverse = direct.traverse
-
-local function remove(head,current,free_too)
- local t = current
- head, current = d_remove_node(head,current)
- if not t then
- -- forget about it
- elseif free_too then
- d_free_node(t)
- t = nil
- else
- d_setfield(t,"next",nil) -- not that much needed (slows down unless we check the source on this)
- d_setfield(t,"prev",nil) -- not that much needed (slows down unless we check the source on this)
- end
- return head, current, t
-end
-
--- bad: we can have prev's being glue_spec
-
--- local function remove(head,current,free_too) -- d_remove_node does a slide which can fail
--- local prev = d_getprev(current) -- weird
--- local next = d_getnext(current)
--- if next then
--- -- print("!!!!!!!! prev is gluespec",
--- -- nodes.nodecodes[d_getid(current)],
--- -- nodes.nodecodes[d_getid(next)],
--- -- nodes.nodecodes[d_getid(prev)])
--- d_setfield(prev,"next",next)
--- d_setfield(next,"prev",prev)
--- else
--- d_setfield(prev,"next",nil)
--- end
--- if free_too then
--- d_free_node(current)
--- current = nil
--- else
--- d_setfield(current,"next",nil) -- use this fact !
--- d_setfield(current,"prev",nil) -- use this fact !
--- end
--- if head == current then
--- return next, next, current
--- else
--- return head, next, current
--- end
--- end
-
-nuts.remove = remove
-
-function nuts.delete(head,current)
- return remove(head,current,true)
-end
-
-function nuts.replace(head,current,new) -- no head returned if false
- if not new then
- head, current, new = false, head, current
- end
- local prev = d_getprev(current)
- local next = d_getnext(current)
- if next then
- d_setfield(new,"next",next)
- d_setfield(next,"prev",new)
- end
- if prev then
- d_setfield(new,"prev",prev)
- d_setfield(prev,"next",new)
- end
- if head then
- if head == current then
- head = new
- end
- d_free_node(current)
- return head, new
- else
- d_free_node(current)
- return new
- end
-end
-
-local function count(stack,flat)
- local n = 0
- while stack do
- local id = d_getid(stack)
- if not flat and id == hlist_code or id == vlist_code then
- local list = d_getlist(stack)
- if list then
- n = n + 1 + count(list) -- self counts too
- else
- n = n + 1
- end
- else
- n = n + 1
- end
- stack = d_getnext(stack)
- end
- return n
-end
-
-nuts.count = count
-
-function nuts.append(head,current,...)
- for i=1,select("#",...) do
- head, current = d_insert_after(head,current,(select(i,...)))
- end
- return head, current
-end
-
-function nuts.prepend(head,current,...)
- for i=1,select("#",...) do
- head, current = d_insert_before(head,current,(select(i,...)))
- end
- return head, current
-end
-
-function nuts.linked(...)
- local head, last
- for i=1,select("#",...) do
- local next = select(i,...)
- if next then
- if head then
- d_setfield(last,"next",next)
- d_setfield(next,"prev",last)
- else
- head = next
- end
- last = d_find_tail(next) -- we could skip the last one
- end
- end
- return head
-end
-
-function nuts.concat(list) -- consider tail instead of slide
- local head, tail
- for i=1,#list do
- local li = list[i]
- if li then
- if head then
- d_setfield(tail,"next",li)
- d_setfield(li,"prev",tail)
- else
- head = li
- end
- tail = d_slide(li)
- end
- end
- return head, tail
-end
-
-function nuts.writable_spec(n) -- not pool
- local spec = d_getfield(n,"spec")
- if not spec then
- spec = d_copy_node(glue_spec)
- d_setfield(n,"spec",spec)
- elseif not d_getfield(spec,"writable") then
- spec = d_copy_node(spec)
- d_setfield(n,"spec",spec)
- end
- return spec
-end
-
-function nuts.reference(n)
- return n or ""
-end
-
--- quick and dirty tracing of nuts
-
--- for k, v in next, nuts do
--- if string.find(k,"box") then
--- nuts[k] = function(...) print(k,...) return v(...) end
--- end
--- end
-
-function nodes.vianuts (f) return function(n,...) return tonode(f(tonut (n),...)) end end
-function nodes.vianodes(f) return function(n,...) return tonut (f(tonode(n),...)) end end
-
-nuts.vianuts = nodes.vianuts
-nuts.vianodes = nodes.vianodes
-
--- for k, v in next, nuts do
--- if type(v) == "function" then
--- if not string.find(k,"^[sg]et") and not string.find(k,"^to") then
--- local f = v
--- nuts[k] = function(...) print("d",k,...) return f(...) end
--- end
--- end
--- end
-
--- for k, v in next, nodes do
--- if type(v) == "function" then
--- if not string.find(k,"^[sg]et") and not string.find(k,"^to") then
--- local f = v
--- nodes[k] = function(...) print("n",k,...) return f(...) end
--- end
--- end
--- end
-
--- function nodes.insert_before(h,c,n)
--- if c then
--- if c == h then
--- n_setfield(n,"next",h)
--- n_setfield(n,"prev",nil)
--- n_setfield(h,"prev",n)
--- else
--- local cp = n_getprev(c)
--- n_setfield(n,"next",c)
--- n_setfield(n,"prev",cp)
--- if cp then
--- n_setfield(cp,"next",n)
--- end
--- n_setfield(c,"prev",n)
--- return h, n
--- end
--- end
--- return n, n
--- end
-
--- function nodes.insert_after(h,c,n)
--- if c then
--- local cn = n_getnext(c)
--- if cn then
--- n_setfield(n,"next",cn)
--- n_setfield(cn,"prev",n)
--- else
--- n_setfield(n,"next",nil)
--- end
--- n_setfield(c,"next",n)
--- n_setfield(n,"prev",c)
--- return h, n
--- end
--- return n, n
--- end
-
-function nodes.insert_list_after(h,c,n)
- local t = n_tail(n)
- if c then
- local cn = n_getnext(c)
- if cn then
- n_setfield(t,"next",cn)
- n_setfield(cn,"prev",t)
- else
- n_setfield(t,"next",nil)
- end
- n_setfield(c,"next",n)
- n_setfield(n,"prev",c)
- return h, n
- end
- return n, t
-end
-
--- function nuts.insert_before(h,c,n)
--- if c then
--- if c == h then
--- d_setfield(n,"next",h)
--- d_setfield(n,"prev",nil)
--- d_setfield(h,"prev",n)
--- else
--- local cp = d_getprev(c)
--- d_setfield(n,"next",c)
--- d_setfield(n,"prev",cp)
--- if cp then
--- d_setfield(cp,"next",n)
--- end
--- d_setfield(c,"prev",n)
--- return h, n
--- end
--- end
--- return n, n
--- end
-
--- function nuts.insert_after(h,c,n)
--- if c then
--- local cn = d_getnext(c)
--- if cn then
--- d_setfield(n,"next",cn)
--- d_setfield(cn,"prev",n)
--- else
--- d_setfield(n,"next",nil)
--- end
--- d_setfield(c,"next",n)
--- d_setfield(n,"prev",c)
--- return h, n
--- end
--- return n, n
--- end
-
-function nuts.insert_list_after(h,c,n)
- local t = d_tail(n)
- if c then
- local cn = d_getnext(c)
- if cn then
- d_setfield(t,"next",cn)
- d_setfield(cn,"prev",t)
- else
- d_setfield(t,"next",nil)
- end
- d_setfield(c,"next",n)
- d_setfield(n,"prev",c)
- return h, n
- end
- return n, t
-end
-
--- test code only
-
--- collectranges and mix
-
-local report = logs.reporter("sliding")
-
-local function message(detail,head,current,previous)
- report("error: %s, current: %s:%s, previous: %s:%s, list: %s, text: %s",
- detail,
- nodecodes[d_getid(current)],
- current,
- nodecodes[d_getid(previous)],
- previous,
- nodes.idstostring(head),
- nodes.listtoutf(head)
- )
- utilities.debugger.showtraceback(report)
-end
-
-local function warn()
- report()
- report("warning: the slide tracer is enabled")
- report()
- warn = false
-end
-
-local function tracedslide(head)
- if head then
- if warn then
- warn()
- end
- local next = d_getnext(head)
- if next then
- local prev = head
- for n in d_traverse(next) do
- local p = d_getprev(n)
- if not p then
- message("unset",head,n,prev)
- -- break
- elseif p ~= prev then
- message("wrong",head,n,prev)
- -- break
- end
- prev = n
- end
- end
- return d_slide(head)
- end
-end
-
-local function nestedtracedslide(head,level) -- no sliding !
- if head then
- if warn then
- warn()
- end
- local id = d_getid(head)
- local next = d_getnext(head)
- if next then
- report("%whead:%s",level or 0,nodecodes[id])
- local prev = head
- for n in d_traverse(next) do
- local p = d_getprev(n)
- if not p then
- message("unset",head,n,prev)
- -- break
- elseif p ~= prev then
- message("wrong",head,n,prev)
- -- break
- end
- prev = n
- local id = d_getid(n)
- if id == hlist_code or id == vlist_code then
- nestedtracedslide(d_getlist(n),(level or 0) + 1)
- end
- end
- elseif id == hlist_code or id == vlist_code then
- report("%wlist:%s",level or 0,nodecodes[id])
- nestedtracedslide(d_getlist(head),(level or 0) + 1)
- end
- -- return d_slide(head)
- end
-end
-
-local function untracedslide(head)
- if head then
- if warn then
- warn()
- end
- local next = d_getnext(head)
- if next then
- local prev = head
- for n in d_traverse(next) do
- local p = d_getprev(n)
- if not p then
- return "unset", d_getid(n)
- elseif p ~= prev then
- return "wrong", d_getid(n)
- end
- prev = n
- end
- end
- return d_slide(head)
- end
-end
-
-nuts.tracedslide = tracedslide
-nuts.untracedslide = untracedslide
-nuts.nestedtracedslide = nestedtracedslide
-
--- nuts.slide = tracedslide
diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua
index 2cc00601c..aa6692d7b 100644
--- a/tex/context/base/node-pro.lua
+++ b/tex/context/base/node-pro.lua
@@ -13,15 +13,15 @@ local trace_callbacks = false trackers.register("nodes.callbacks", function(v)
local report_nodes = logs.reporter("nodes","processors")
-local nodes = nodes
+local nodes, node = nodes, node
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local first_glyph = nodes.first_glyph
-local has_glyph = nodes.has_glyph
+local free_node = node.free
+local first_glyph = node.first_glyph or node.first_character
+local has_attribute = node.has_attribute
nodes.processors = nodes.processors or { }
local processors = nodes.processors
@@ -31,53 +31,43 @@ local processors = nodes.processors
local actions = tasks.actions("processors")
-do
+local n = 0
- local tonut = nuts.tonut
- local getid = nuts.getid
- local getchar = nuts.getchar
- local getnext = nuts.getnext
-
- local n = 0
-
- local function reconstruct(head) -- we probably have a better one
- local t, n, h = { }, 0, tonut(head)
- while h do
- n = n + 1
- local id = getid(h)
- if id == glyph_code then -- todo: disc etc
- t[n] = utfchar(getchar(h))
- else
- t[n] = "[]"
- end
- h = getnext(h)
- end
- return concat(t)
- end
-
- local function tracer(what,state,head,groupcode,before,after,show)
- if not groupcode then
- groupcode = "unknown"
- elseif groupcode == "" then
- groupcode = "mvl"
- end
+local function reconstruct(head) -- we probably have a better one
+ local t, n, h = { }, 0, head
+ while h do
n = n + 1
- if show then
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
+ local id = h.id
+ if id == glyph_code then -- todo: disc etc
+ t[n] = utfchar(h.char)
else
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
+ t[n] = "[]"
end
+ h = h.next
end
+ return concat(t)
+end
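+
+-- reconstruct is only used for tracing; a minimal sketch of its output, assuming a
+-- head whose list starts with the glyphs of "hi" followed by a glue node:
+--
+-- print(reconstruct(head)) -- -> "hi[]"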
- processors.tracer = tracer
-
+local function tracer(what,state,head,groupcode,before,after,show)
+ if not groupcode then
+ groupcode = "unknown"
+ elseif groupcode == "" then
+ groupcode = "mvl"
+ end
+ n = n + 1
+ if show then
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
+ else
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
+ end
end
+processors.tracer = tracer
+
processors.enabled = true -- this will become a proper state (like trackers)
function processors.pre_linebreak_filter(head,groupcode) -- ,size,packtype,direction
- -- local first, found = first_glyph(head) -- they really need to be glyphs
- local found = has_glyph(head)
+ local first, found = first_glyph(head) -- they really need to be glyphs
if found then
if trace_callbacks then
local before = nodes.count(head,true)
@@ -104,8 +94,10 @@ local enabled = true
function processors.hpack_filter(head,groupcode,size,packtype,direction)
if enabled then
- -- local first, found = first_glyph(head) -- they really need to be glyphs
- local found = has_glyph(head)
+ -- if not head.next and head.id ~= glyph_code then -- happens often but not faster
+ -- return true
+ -- end
+ local first, found = first_glyph(head) -- they really need to be glyphs
if found then
if trace_callbacks then
local before = nodes.count(head,true)
@@ -129,36 +121,15 @@ function processors.hpack_filter(head,groupcode,size,packtype,direction)
return true
end
-do
-
- local setfield = nodes.setfield
- local hpack = nodes.hpack
-
- function nodes.fasthpack(...) -- todo: pass explicit arguments
- enabled = false
- local hp, b = hpack(...)
- setfield(hp,"prev",nil)
- setfield(hp,"next",nil)
- enabled = true
- return hp, b
- end
-
-end
-
-do
-
- local setfield = nuts.setfield
- local hpack = nuts.hpack
-
- function nuts.fasthpack(...) -- todo: pass explicit arguments
- enabled = false
- local hp, b = hpack(...)
- setfield(hp,"prev",nil)
- setfield(hp,"next",nil)
- enabled = true
- return hp, b
- end
+local hpack = node.hpack
+function nodes.fasthpack(...) -- todo: pass explicit arguments
+ enabled = false
+ local hp, b = hpack(...)
+ hp.prev = nil
+ hp.next = nil
+ enabled = true
+ return hp, b
end
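-- a minimal usage sketch, assuming a node list in head and a target width in sp;
-- fasthpack just wraps node.hpack while the hpack_filter callback is disabled via
-- the local 'enabled' flag, so no processors run on the packed material
--
-- local box, badness = nodes.fasthpack(head,width,"exactly")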
callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kind of horizontal manipulations (before par break)")
diff --git a/tex/context/base/node-ref.lua b/tex/context/base/node-ref.lua
index 7cfbde849..aa864fb1c 100644
--- a/tex/context/base/node-ref.lua
+++ b/tex/context/base/node-ref.lua
@@ -21,6 +21,7 @@ local attributes, nodes, node = attributes, nodes, node
local allocate = utilities.storage.allocate, utilities.storage.mark
local mark = utilities.storage.allocate, utilities.storage.mark
+
local nodeinjections = backends.nodeinjections
local codeinjections = backends.codeinjections
@@ -32,6 +33,9 @@ local colors = attributes.colors
local references = structures.references
local tasks = nodes.tasks
+local hpack_list = node.hpack
+local list_dimensions = node.dimensions
+
local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end)
local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
@@ -40,27 +44,6 @@ local report_reference = logs.reporter("backend","references")
local report_destination = logs.reporter("backend","destinations")
local report_area = logs.reporter("backend","areas")
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getsubtype = nuts.getsubtype
-
-local hpack_list = nuts.hpack
-local list_dimensions = nuts.dimensions
-local traverse = nuts.traverse
-local find_node_tail = nuts.tail
-
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
local whatcodes = nodes.whatcodes
@@ -80,18 +63,21 @@ local dir_code = whatcodes.dir
local line_code = listcodes.line
-local new_rule = nodepool.rule
+local nodepool = nodes.pool
+
local new_kern = nodepool.kern
+local traverse = node.traverse
+local find_node_tail = node.tail or node.slide
local tosequence = nodes.tosequence
-- local function dimensions(parent,start,stop)
--- stop = stop and getnext(stop)
+-- stop = stop and stop.next
-- if parent then
-- if stop then
--- return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),start,stop)
+-- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop)
-- else
--- return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign",getfield(parent,"glue_order"),start)
+-- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start)
-- end
-- else
-- if stop then
@@ -106,9 +92,9 @@ local tosequence = nodes.tosequence
local function dimensions(parent,start,stop)
if parent then
- return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),start,stop and getnext(stop))
+ return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop and stop.next)
else
- return list_dimensions(start,stop and getnext(stop))
+ return list_dimensions(start,stop and stop.next)
end
end
@@ -125,25 +111,25 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
if trace_backend then
report_area("head: %04i %s %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
end
- setfield(result,"next",first)
- setfield(first,"prev",result)
+ result.next = first
+ first.prev = result
return result, last
else
if trace_backend then
report_area("middle: %04i %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
end
- local prev = getprev(first)
+ local prev = first.prev
if prev then
- setfield(result,"next",first)
- setfield(result,"prev",prev)
- setfield(prev,"next",result)
- setfield(first,"prev",result)
+ result.next = first
+ result.prev = prev
+ prev.next = result
+ first.prev = result
else
- setfield(result,"next",first)
- setfield(first,"prev",result)
+ result.next = first
+ first.prev = result
end
- if first == getnext(head) then
- setfield(head,"next",result) -- hm, weird
+ if first == head.next then
+ head.next = result -- hm, weird
end
return head, last
end
@@ -153,9 +139,9 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
end
local function inject_list(id,current,reference,make,stack,pardir,txtdir)
- local width, height, depth, correction = getfield(current,"width"), getfield(current,"height"), getfield(current,"depth"), 0
+ local width, height, depth, correction = current.width, current.height, current.depth, 0
local moveright = false
- local first = getlist(current)
+ local first = current.list
if id == hlist_code then -- box_code line_code
-- can be either an explicit hbox or a line and there is no way
-- to recognize this; anyway only if ht/dp (then inline)
@@ -163,17 +149,17 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
if first then
if sr and sr[2] then
local last = find_node_tail(first)
- if getid(last) == glue_code and getsubtype(last) == rightskip_code then
- local prev = getprev(last)
- moveright = getid(first) == glue_code and getsubtype(first) == leftskip_code
- if prev and getid(prev) == glue_code and getsubtype(prev) == parfillskip_code then
- width = dimensions(current,first,getprev(prev)) -- maybe not current as we already take care of it
+ if last.id == glue_code and last.subtype == rightskip_code then
+ local prev = last.prev
+ moveright = first.id == glue_code and first.subtype == leftskip_code
+ if prev and prev.id == glue_code and prev.subtype == parfillskip_code then
+ width = dimensions(current,first,prev.prev) -- maybe not current as we already take care of it
else
- if moveright and getfield(first,"writable") then
- width = width - getfield(getfield(first,"spec"),"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
+ if moveright and first.writable then
+ width = width - first.spec.stretch*current.glue_set * current.glue_sign
end
- if getfield(last,"writable") then
- width = width - getfield(getfield(last,"spec"),"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
+ if last.writable then
+ width = width - last.spec.stretch*current.glue_set * current.glue_sign
end
end
end
@@ -198,21 +184,19 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
report_area("box: %04i %s %s: w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
end
if not first then
- setfield(current,"list",result)
+ current.list = result
elseif moveright then -- brr no prevs done
-- result after first
- local n = getnext(first)
- setfield(result,"next",n)
- setfield(first,"next",result)
- setfield(result,"prev",first)
- if n then
- setfield(n,"prev",result)
- end
+ local n = first.next
+ result.next = n
+ first.next = result
+ result.prev = first
+ if n then n.prev = result end
else
-- first after result
- setfield(result,"next",first)
- setfield(first,"prev",result)
- setfield(current,"list",result)
+ result.next = first
+ first.prev = result
+ current.list = result
end
end
end
@@ -225,9 +209,9 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
pardir = pardir or "==="
txtdir = txtdir or "==="
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code or id == vlist_code then
- local r = getattr(current,attribute)
+ local r = current[attribute]
-- somehow reference is true so the following fails (second one not done) in
-- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
-- so let's wait till this fails again
@@ -238,33 +222,32 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
if r then
done[r] = (done[r] or 0) + 1
end
- local list = getlist(current)
+ local list = current.list
if list then
- local h, ok
- h, ok , pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
- setfield(current,"list",h)
+ local _
+ current.list, _, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
end
if r then
done[r] = done[r] - 1
end
elseif id == whatsit_code then
- local subtype = getsubtype(current)
+ local subtype = current.subtype
if subtype == localpar_code then
- pardir = getfield(current,"dir")
+ pardir = current.dir
elseif subtype == dir_code then
- txtdir = getfield(current,"dir")
+ txtdir = current.dir
end
- elseif id == glue_code and getsubtype(current) == leftskip_code then -- any glue at the left?
+ elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
--
else
- local r = getattr(current,attribute)
+ local r = current[attribute]
if not r then
-- just go on, can be kerns
elseif not reference then
reference, first, last, firstdir = r, current, current, txtdir
elseif r == reference then
last = current
- elseif (done[reference] or 0) == 0 then -- or id == glue_code and getsubtype(current) == right_skip_code
+ elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code
if not skip or r > skip then -- maybe no > test
head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
reference, first, last, firstdir = nil, nil, nil, nil
@@ -273,7 +256,7 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
reference, first, last, firstdir = r, current, current, txtdir
end
end
- current = getnext(current)
+ current = current.next
end
if reference and (done[reference] or 0) == 0 then
head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
@@ -288,32 +271,32 @@ local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir)
txtdir = txtdir or "==="
local current = head
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code or id == vlist_code then
- local r = getattr(current,attribute)
+ local r = current[attribute]
if r and not done[r] then
done[r] = true
inject_list(id,current,r,make,stack,pardir,txtdir)
end
- local list = getlist(current)
+ local list = current.list
if list then
- setfield(current,"list",inject_area(list,attribute,make,stack,done,current,pardir,txtdir))
+ current.list = inject_area(list,attribute,make,stack,done,current,pardir,txtdir)
end
elseif id == whatsit_code then
- local subtype = getsubtype(current)
+ local subtype = current.subtype
if subtype == localpar_code then
- pardir = getfield(current,"dir")
+ pardir = current.dir
elseif subtype == dir_code then
- txtdir = getfield(current,"dir")
+ txtdir = current.dir
end
else
- local r = getattr(current,attribute)
+ local r = current[attribute]
if r and not done[r] then
done[r] = true
head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
end
end
- current = getnext(current)
+ current = current.next
end
end
return head, true
@@ -321,6 +304,12 @@ end
-- tracing
+local nodepool = nodes.pool
+
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+
+local set_attribute = node.set_attribute
local register_color = colors.register
local a_color = attributes.private('color')
@@ -357,15 +346,15 @@ local function colorize(width,height,depth,n,reference,what)
height = 65536/2
depth = height
end
- local rule = new_rule(width,height,depth) -- todo: use tracer rule
- setattr(rule,a_colormodel,1) -- gray color model
- setattr(rule,a_color,u_color)
- setattr(rule,a_transparency,u_transparency)
+ local rule = new_rule(width,height,depth)
+ rule[a_colormodel] = 1 -- gray color model
+ rule[a_color] = u_color
+ rule[a_transparency] = u_transparency
if width < 0 then
local kern = new_kern(width)
- setfield(rule,"width",-width)
- setfield(kern,"next",rule)
- setfield(rule,"prev",kern)
+ rule.width = -width
+ kern.next = rule
+ rule.prev = kern
return kern
else
return rule
@@ -374,6 +363,9 @@ end
-- references:
+local nodepool = nodes.pool
+local new_kern = nodepool.kern
+
local texsetattribute = tex.setattribute
local texsetcount = tex.setcount
@@ -418,25 +410,22 @@ local function makereference(width,height,depth,reference)
end
local annot = nodeinjections.reference(width,height,depth,set)
if annot then
-annot = tonut(annot)
nofreferences = nofreferences + 1
local result, current
if trace_references then
local step = 65536
result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference")) -- step subtracted so that we can see separate links
- setfield(result,"width",0)
+ result.width = 0
current = result
end
if current then
- setfield(current,"next",annot)
+ current.next = annot
else
result = annot
end
references.registerpage(n)
result = hpack_list(result,0)
- setfield(result,"width",0)
- setfield(result,"height",0)
- setfield(result,"depth",0)
+ result.width, result.height, result.depth = 0, 0, 0
if cleanupreferences then stack[reference] = nil end
return result, resolved
elseif trace_references then
@@ -447,19 +436,9 @@ annot = tonut(annot)
end
end
--- function nodes.references.handler(head)
--- if topofstack > 0 then
--- return inject_areas(head,attribute,makereference,stack,done)
--- else
--- return head, false
--- end
--- end
-
function nodes.references.handler(head)
if topofstack > 0 then
- head = tonut(head)
- local head, done = inject_areas(head,attribute,makereference,stack,done)
- return tonode(head), done
+ return inject_areas(head,attribute,makereference,stack,done)
else
return head, false
end
@@ -505,12 +484,12 @@ local function makedestination(width,height,depth,reference)
end
for n=1,#name do
local rule = hpack_list(colorize(width,height,depth,3,reference,"destination"))
- setfield(rule,"width",0)
+ rule.width = 0
if not result then
result, current = rule, rule
else
- setfield(current,"next",rule)
- setfield(rule,"prev",current)
+ current.next = rule
+ rule.prev = current
current = rule
end
width, height = width - step, height - step
@@ -520,12 +499,12 @@ local function makedestination(width,height,depth,reference)
for n=1,#name do
local annot = nodeinjections.destination(width,height,depth,name[n],view)
if annot then
-annot = tonut(annot) -- obsolete soon
+ -- probably duplicate
if not result then
result = annot
else
- setfield(current,"next",annot)
- setfield(annot,"prev",current)
+ current.next = annot
+ annot.prev = current
end
current = find_node_tail(annot)
end
@@ -533,9 +512,7 @@ annot = tonut(annot) -- obsolete soon
if result then
-- some internal error
result = hpack_list(result,0)
- setfield(result,"width",0)
- setfield(result,"height",0)
- setfield(result,"depth",0)
+ result.width, result.height, result.depth = 0, 0, 0
end
if cleanupdestinations then stack[reference] = nil end
return result, resolved
@@ -544,25 +521,14 @@ annot = tonut(annot) -- obsolete soon
end
end
--- function nodes.destinations.handler(head)
--- if topofstack > 0 then
--- return inject_area(head,attribute,makedestination,stack,done) -- singular
--- else
--- return head, false
--- end
--- end
-
function nodes.destinations.handler(head)
if topofstack > 0 then
- head = tonut(head)
- local head, done = inject_areas(head,attribute,makedestination,stack,done)
- return tonode(head), done
+ return inject_area(head,attribute,makedestination,stack,done) -- singular
else
return head, false
end
end
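
-- Editorial sketch (not part of this patch): handlers like the two above are
-- meant to be plugged into ConTeXt's node processing tasks, conceptually:
--
--   nodes.tasks.appendaction("shipouts","normalizers","nodes.references.handler")
--   nodes.tasks.appendaction("shipouts","normalizers","nodes.destinations.handler")
--
-- (the task and group names here are illustrative assumptions, not taken from this diff)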
-
-- will move
function references.mark(reference,h,d,view)
diff --git a/tex/context/base/node-res.lua b/tex/context/base/node-res.lua
index 968283745..ca9d67f91 100644
--- a/tex/context/base/node-res.lua
+++ b/tex/context/base/node-res.lua
@@ -18,8 +18,13 @@ local report_nodes = logs.reporter("nodes","housekeeping")
local nodes, node = nodes, node
+local copy_node = node.copy
+local free_node = node.free
+local free_list = node.flush_list
+local new_node = node.new
+
nodes.pool = nodes.pool or { }
-local nodepool = nodes.pool
+local pool = nodes.pool
local whatsitcodes = nodes.whatsitcodes
local skipcodes = nodes.skipcodes
@@ -30,453 +35,400 @@ local glyph_code = nodecodes.glyph
local allocate = utilities.storage.allocate
+local texgetbox = tex.getbox
local texgetcount = tex.getcount
local reserved, nofreserved = { }, 0
--- user nodes
+local function register_node(n)
+ nofreserved = nofreserved + 1
+ reserved[nofreserved] = n
+ return n
+end
-local userids = allocate()
-local lastid = 0
+pool.register = register_node
-setmetatable(userids, {
- __index = function(t,k)
- if type(k) == "string" then
- lastid = lastid + 1
- rawset(userids,lastid,k)
- rawset(userids,k,lastid)
- return lastid
- else
- rawset(userids,k,k)
- return k
- end
- end,
- __call = function(t,k)
- return t[k]
+function pool.cleanup(nofboxes) -- todo
+ if nodes.tracers.steppers then -- to be resolved
+ nodes.tracers.steppers.reset() -- todo: make a registration subsystem
end
-} )
-
--- nuts overload
-
-local nuts = nodes.nuts
-local nutpool = { }
-nuts.pool = nutpool
-
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getbox = nuts.getbox
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getid = nuts.getid
-
-local copy_nut = nuts.copy
-local new_nut = nuts.new
-local free_nut = nuts.free
-
--- at some point we could have a dual set (the overhead of tonut is not much larger than
--- metatable associations at the lua/c end esp if we also take assignments into account
-
--- table.setmetatableindex(nodepool,function(t,k,v)
--- -- report_nodes("defining nodepool[%s] instance",k)
--- local f = nutpool[k]
--- local v = function(...)
--- return tonode(f(...))
--- end
--- t[k] = v
--- return v
--- end)
---
--- -- we delay one step because that permits us a forward reference
--- -- e.g. in pdfsetmatrix
-
-table.setmetatableindex(nodepool,function(t,k,v)
- -- report_nodes("defining nodepool[%s] instance",k)
- local v = function(...)
- local f = nutpool[k]
- local v = function(...)
- return tonode(f(...))
+ local nl, nr = 0, nofreserved
+ for i=1,nofreserved do
+ local ri = reserved[i]
+ -- if not (ri.id == glue_spec and not ri.is_writable) then
+ free_node(reserved[i])
+ -- end
+ end
+ if nofboxes then
+ for i=0,nofboxes do
+ local l = texgetbox(i)
+ if l then
+ free_node(l) -- also list ?
+ nl = nl + 1
+ end
end
- t[k] = v
- return v(...)
end
- t[k] = v
- return v
-end)
-
-local function register_nut(n)
- nofreserved = nofreserved + 1
- reserved[nofreserved] = n
- return n
+ reserved = { }
+ nofreserved = 0
+ return nr, nl, nofboxes -- can be nil
end
-local function register_node(n)
- nofreserved = nofreserved + 1
- if type(n) == "number" then -- isnut(n)
- reserved[nofreserved] = n
- else
- reserved[nofreserved] = tonut(n)
+function pool.usage()
+ local t = { }
+ for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
+ t[tag] = n
end
- return n
+ return t
end
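
-- Editorial sketch (not part of this patch): pool.usage parses the
-- status.node_mem_usage string into a table keyed by node type, e.g.
--
--   local u = pool.usage()
--   -- u.glyph == "12", u.glue_spec == "4", ... (gmatch captures stay strings)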
-nodepool.userids = userids
-nodepool.register = register_node
-
-nutpool.userids = userids
-nutpool.register = register_node -- could be register_nut
-
--- so far
-
-local disc = register_nut(new_nut("disc"))
-local kern = register_nut(new_nut("kern",kerncodes.userkern))
-local fontkern = register_nut(new_nut("kern",kerncodes.fontkern))
-local penalty = register_nut(new_nut("penalty"))
-local glue = register_nut(new_nut("glue")) -- glue.spec = nil
-local glue_spec = register_nut(new_nut("glue_spec"))
-local glyph = register_nut(new_nut("glyph",0))
-local textdir = register_nut(new_nut("whatsit",whatsitcodes.dir))
-local latelua = register_nut(new_nut("whatsit",whatsitcodes.latelua))
-local special = register_nut(new_nut("whatsit",whatsitcodes.special))
-local user_n = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_n,"type",100) -- 44
-local user_l = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_l,"type",110) -- 44
-local user_s = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_s,"type",115) -- 44
-local user_t = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_t,"type",116) -- 44
-local left_margin_kern = register_nut(new_nut("margin_kern",0))
-local right_margin_kern = register_nut(new_nut("margin_kern",1))
-local lineskip = register_nut(new_nut("glue",skipcodes.lineskip))
-local baselineskip = register_nut(new_nut("glue",skipcodes.baselineskip))
-local leftskip = register_nut(new_nut("glue",skipcodes.leftskip))
-local rightskip = register_nut(new_nut("glue",skipcodes.rightskip))
-local temp = register_nut(new_nut("temp",0))
-local noad = register_nut(new_nut("noad"))
+local disc = register_node(new_node("disc"))
+local kern = register_node(new_node("kern",kerncodes.userkern))
+local fontkern = register_node(new_node("kern",kerncodes.fontkern))
+local penalty = register_node(new_node("penalty"))
+local glue = register_node(new_node("glue")) -- glue.spec = nil
+local glue_spec = register_node(new_node("glue_spec"))
+local glyph = register_node(new_node("glyph",0))
+local textdir = register_node(new_node("whatsit",whatsitcodes.dir))
+local latelua = register_node(new_node("whatsit",whatsitcodes.latelua))
+local special = register_node(new_node("whatsit",whatsitcodes.special))
+local user_n = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_n.type = 100 -- 44
+local user_l = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_l.type = 110 -- 44
+local user_s = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_s.type = 115 -- 44
+local user_t = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_t.type = 116 -- 44
+local left_margin_kern = register_node(new_node("margin_kern",0))
+local right_margin_kern = register_node(new_node("margin_kern",1))
+local lineskip = register_node(new_node("glue",skipcodes.lineskip))
+local baselineskip = register_node(new_node("glue",skipcodes.baselineskip))
+local leftskip = register_node(new_node("glue",skipcodes.leftskip))
+local rightskip = register_node(new_node("glue",skipcodes.rightskip))
+local temp = register_node(new_node("temp",0))
+local noad = register_node(new_node("noad"))
-- the dir field needs to be set otherwise crash:
-local rule = register_nut(new_nut("rule")) setfield(rule, "dir","TLT")
-local hlist = register_nut(new_nut("hlist")) setfield(hlist,"dir","TLT")
-local vlist = register_nut(new_nut("vlist")) setfield(vlist,"dir","TLT")
-
-function nutpool.zeroglue(n)
- local s = getfield(n,"spec")
- return
- getfield(s,"width") == 0 and
- getfield(s,"stretch") == 0 and
- getfield(s,"shrink") == 0 and
- getfield(s,"stretch_order") == 0 and
- getfield(s,"shrink_order") == 0
-end
-
-function nutpool.glyph(fnt,chr)
- local n = copy_nut(glyph)
- if fnt then setfield(n,"font",fnt) end
- if chr then setfield(n,"char",chr) end
+local rule = register_node(new_node("rule")) rule .dir = "TLT"
+local hlist = register_node(new_node("hlist")) hlist.dir = "TLT"
+local vlist = register_node(new_node("vlist")) vlist.dir = "TLT"
+
+function pool.zeroglue(n)
+ local s = n.spec
+ return not writable or (
+ s.width == 0
+ and s.stretch == 0
+ and s.shrink == 0
+ and s.stretch_order == 0
+ and s.shrink_order == 0
+ )
+end
+
+function pool.glyph(fnt,chr)
+ local n = copy_node(glyph)
+ if fnt then n.font = fnt end
+ if chr then n.char = chr end
return n
end
-function nutpool.penalty(p)
- local n = copy_nut(penalty)
- setfield(n,"penalty",p)
+function pool.penalty(p)
+ local n = copy_node(penalty)
+ n.penalty = p
return n
end
-function nutpool.kern(k)
- local n = copy_nut(kern)
- setfield(n,"kern",k)
+function pool.kern(k)
+ local n = copy_node(kern)
+ n.kern = k
return n
end
-function nutpool.fontkern(k)
- local n = copy_nut(fontkern)
- setfield(n,"kern",k)
+function pool.fontkern(k)
+ local n = copy_node(fontkern)
+ n.kern = k
return n
end
-function nutpool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
- local s = copy_nut(glue_spec)
- if width then setfield(s,"width",width) end
- if stretch then setfield(s,"stretch",stretch) end
- if shrink then setfield(s,"shrink",shrink) end
- if stretch_order then setfield(s,"stretch_order",stretch_order) end
- if shrink_order then setfield(s,"shrink_order",shrink_order) end
+function pool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
+ local s = copy_node(glue_spec)
+ if width then s.width = width end
+ if stretch then s.stretch = stretch end
+ if shrink then s.shrink = shrink end
+ if stretch_order then s.stretch_order = stretch_order end
+ if shrink_order then s.shrink_order = shrink_order end
return s
end
local function someskip(skip,width,stretch,shrink,stretch_order,shrink_order)
- local n = copy_nut(skip)
+ local n = copy_node(skip)
if not width then
-- no spec
elseif width == false or tonumber(width) then
- local s = copy_nut(glue_spec)
- if width then setfield(s,"width",width) end
- if stretch then setfield(s,"stretch",stretch) end
- if shrink then setfield(s,"shrink",shrink) end
- if stretch_order then setfield(s,"stretch_order",stretch_order) end
- if shrink_order then setfield(s,"shrink_order",shrink_order) end
- setfield(n,"spec",s)
+ local s = copy_node(glue_spec)
+ if width then s.width = width end
+ if stretch then s.stretch = stretch end
+ if shrink then s.shrink = shrink end
+ if stretch_order then s.stretch_order = stretch_order end
+ if shrink_order then s.shrink_order = shrink_order end
+ n.spec = s
else
-- shared
- setfield(n,"spec",copy_nut(width))
+ n.spec = copy_node(width)
end
return n
end
-function nutpool.stretch(a,b)
- local n = copy_nut(glue)
- local s = copy_nut(glue_spec)
+function pool.stretch(a,b)
+ local n = copy_node(glue)
+ local s = copy_node(glue_spec)
if b then
- setfield(s,"stretch",a)
- setfield(s,"stretch_order",b)
+ s.stretch = a
+ s.stretch_order = b
else
- setfield(s,"stretch",1)
- setfield(s,"stretch_order",a or 1)
+ s.stretch = 1
+ s.stretch_order = a or 1
end
- setfield(n,"spec",s)
+ n.spec = s
return n
end
-function nutpool.shrink(a,b)
- local n = copy_nut(glue)
- local s = copy_nut(glue_spec)
+function pool.shrink(a,b)
+ local n = copy_node(glue)
+ local s = copy_node(glue_spec)
if b then
- setfield(s,"shrink",a)
- setfield(s,"shrink_order",b)
+ s.shrink = a
+ s.shrink_order = b
else
- setfield(s,"shrink",1)
- setfield(s,"shrink_order",a or 1)
+ s.shrink = 1
+ s.shrink_order = a or 1
end
- setfield(n,"spec",s)
+ n.spec = s
return n
end
-function nutpool.glue(width,stretch,shrink,stretch_order,shrink_order)
+
+function pool.glue(width,stretch,shrink,stretch_order,shrink_order)
return someskip(glue,width,stretch,shrink,stretch_order,shrink_order)
end
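
-- Editorial sketch (not part of this patch): someskip either builds a fresh
-- glue_spec from numbers or copies a spec node that is passed in, e.g.
--
--   local g1 = pool.glue(65536,32768)   -- 1pt of glue with 0.5pt stretch (fresh spec)
--   local g2 = pool.glue(somespec)      -- 'somespec' (a glue_spec node, assumption) is copied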
-function nutpool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
+function pool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(leftskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function nutpool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
+function pool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(rightskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function nutpool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
+function pool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(lineskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function nutpool.baselineskip(width,stretch,shrink)
+function pool.baselineskip(width,stretch,shrink)
return someskip(baselineskip,width,stretch,shrink)
end
-function nutpool.disc()
- return copy_nut(disc)
+function pool.disc()
+ return copy_node(disc)
end
-function nutpool.textdir(dir)
- local t = copy_nut(textdir)
- setfield(t,"dir",dir)
+function pool.textdir(dir)
+ local t = copy_node(textdir)
+ t.dir = dir
return t
end
-function nutpool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
- local n = copy_nut(rule)
- if width then setfield(n,"width",width) end
- if height then setfield(n,"height",height) end
- if depth then setfield(n,"depth",depth) end
- if dir then setfield(n,"dir",dir) end
+function pool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
+ local n = copy_node(rule)
+ if width then n.width = width end
+ if height then n.height = height end
+ if depth then n.depth = depth end
+ if dir then n.dir = dir end
return n
end
--- if node.has_field(latelua,'string') then
- function nutpool.latelua(code)
- local n = copy_nut(latelua)
- setfield(n,"string",code)
+if node.has_field(latelua,'string') then
+ function pool.latelua(code)
+ local n = copy_node(latelua)
+ n.string = code
+ return n
+ end
+else
+ function pool.latelua(code)
+ local n = copy_node(latelua)
+ n.data = code
return n
end
--- else
--- function nutpool.latelua(code)
--- local n = copy_nut(latelua)
--- setfield(n,"data",code)
--- return n
--- end
--- end
-
-function nutpool.leftmarginkern(glyph,width)
- local n = copy_nut(left_margin_kern)
+end
+
+function pool.leftmarginkern(glyph,width)
+ local n = copy_node(left_margin_kern)
if not glyph then
report_nodes("invalid pointer to left margin glyph node")
- elseif getid(glyph) ~= glyph_code then
+ elseif glyph.id ~= glyph_code then
report_nodes("invalid node type %a for %s margin glyph node",nodecodes[glyph],"left")
else
- setfield(n,"glyph",glyph)
+ n.glyph = glyph
end
if width then
- setfield(n,"width",width)
+ n.width = width
end
return n
end
-function nutpool.rightmarginkern(glyph,width)
- local n = copy_nut(right_margin_kern)
+function pool.rightmarginkern(glyph,width)
+ local n = copy_node(right_margin_kern)
if not glyph then
report_nodes("invalid pointer to right margin glyph node")
- elseif getid(glyph) ~= glyph_code then
+ elseif glyph.id ~= glyph_code then
report_nodes("invalid node type %a for %s margin glyph node",nodecodes[p],"right")
else
- setfield(n,"glyph",glyph)
+ n.glyph = glyph
end
if width then
- setfield(n,"width",width)
+ n.width = width
end
return n
end
-function nutpool.temp()
- return copy_nut(temp)
+function pool.temp()
+ return copy_node(temp)
end
-function nutpool.noad()
- return copy_nut(noad)
+function pool.noad()
+ return copy_node(noad)
end
-function nutpool.hlist(list,width,height,depth)
- local n = copy_nut(hlist)
+function pool.hlist(list,width,height,depth)
+ local n = copy_node(hlist)
if list then
- setfield(n,"list",list)
+ n.list = list
end
if width then
- setfield(n,"width",width)
+ n.width = width
end
if height then
- setfield(n,"height",height)
+ n.height = height
end
if depth then
- setfield(n,"depth",depth)
+ n.depth = depth
end
return n
end
-function nutpool.vlist(list,width,height,depth)
- local n = copy_nut(vlist)
+function pool.vlist(list,width,height,depth)
+ local n = copy_node(vlist)
if list then
- setfield(n,"list",list)
+ n.list = list
end
if width then
- setfield(n,"width",width)
+ n.width = width
end
if height then
- setfield(n,"height",height)
+ n.height = height
end
if depth then
- setfield(n,"depth",depth)
+ n.depth = depth
end
return n
end
+--[[
+At some point we ran into a problem that the glue specification
+of the zeropoint dimension was overwritten when adapting a glue spec
+node. This is a side effect of glue specs being shared. After a
+couple of hours tracing and debugging Taco and I came to the
+conclusion that it made no sense to complicate the spec allocator
+and settled on a writable flag. This all is a side effect of the
+fact that some glues use reserved memory slots (with the zeropoint
+glue being a noticeable one). So, next we wrap this into a function
+and hide it for the user. And yes, LuaTeX now gives a warning as
+well.
+]]--
+
+function nodes.writable_spec(n) -- not pool
+ local spec = n.spec
+ if not spec then
+ spec = copy_node(glue_spec)
+ n.spec = spec
+ elseif not spec.writable then
+ spec = copy_node(spec)
+ n.spec = spec
+ end
+ return spec
+end
+
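-- Editorial sketch (not part of this patch): the intended calling pattern is to
-- fetch a safe spec before touching any of its fields, e.g.
--
--   local spec = nodes.writable_spec(g)   -- 'g' being some glue node (assumption)
--   spec.width = spec.width + 65536       -- safe now: a shared spec got copied first
--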
-- local num = userids["my id"]
-- local str = userids[num]
-function nutpool.usernumber(id,num)
- local n = copy_nut(user_n)
+local userids = allocate() pool.userids = userids
+local lastid = 0
+
+setmetatable(userids, {
+ __index = function(t,k)
+ if type(k) == "string" then
+ lastid = lastid + 1
+ rawset(userids,lastid,k)
+ rawset(userids,k,lastid)
+ return lastid
+ else
+ rawset(userids,k,k)
+ return k
+ end
+ end,
+ __call = function(t,k)
+ return t[k]
+ end
+} )
+
+function pool.usernumber(id,num)
+ local n = copy_node(user_n)
if num then
- setfield(n,"user_id",id)
- setfield(n,"value",num)
+ n.user_id, n.value = id, num
elseif id then
- setfield(n,"value",id)
+ n.value = id
end
return n
end
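
-- Editorial sketch (not part of this patch): combined with the userids table
-- above, a module can allocate an id by name and tag a user node with it, e.g.
--
--   local id = pool.userids["mymodule:marker"]   -- hypothetical key, auto-numbered
--   local n  = pool.usernumber(id,123)           -- whatsit with user_id = id, value = 123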
-function nutpool.userlist(id,list)
- local n = copy_nut(user_l)
+function pool.userlist(id,list)
+ local n = copy_node(user_l)
if list then
- setfield(n,"user_id",id)
- setfield(n,"value",list)
+ n.user_id, n.value = id, list
else
- setfield(n,"value",id)
+ n.value = id
end
return n
end
-function nutpool.userstring(id,str)
- local n = copy_nut(user_s)
+function pool.userstring(id,str)
+ local n = copy_node(user_s)
if str then
- setfield(n,"user_id",id)
- setfield(n,"value",str)
+ n.user_id, n.value = id, str
else
- setfield(n,"value",id)
+ n.value = id
end
return n
end
-function nutpool.usertokens(id,tokens)
- local n = copy_nut(user_t)
+function pool.usertokens(id,tokens)
+ local n = copy_node(user_t)
if tokens then
- setfield(n,"user_id",id)
- setfield(n,"value",tokens)
+ n.user_id, n.value = id, tokens
else
- setfield(n,"value",id)
+ n.value = id
end
return n
end
-function nutpool.special(str)
- local n = copy_nut(special)
- setfield(n,"data",str)
+function pool.special(str)
+ local n = copy_node(special)
+ n.data = str
return n
end
--- housekeeping
-
-local function cleanup(nofboxes) -- todo
- if nodes.tracers.steppers then -- to be resolved
- nodes.tracers.steppers.reset() -- todo: make a registration subsystem
- end
- local nl, nr = 0, nofreserved
- for i=1,nofreserved do
- local ri = reserved[i]
- -- if not (getid(ri) == glue_spec and not getfield(ri,"is_writable")) then
- free_nut(reserved[i])
- -- end
- end
- if nofboxes then
- for i=0,nofboxes do
- local l = getbox(i)
- if l then
- free_nut(l) -- also list ?
- nl = nl + 1
- end
- end
- end
- reserved = { }
- nofreserved = 0
- return nr, nl, nofboxes -- can be nil
-end
-
-
-local function usage()
- local t = { }
- for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
- t[tag] = n
- end
- return t
-end
-
-nutpool .cleanup = cleanup
-nodepool.cleanup = cleanup
-
-nutpool .usage = usage
-nodepool.usage = usage
-
--- end
-
statistics.register("cleaned up reserved nodes", function()
- return format("%s nodes, %s lists of %s", cleanup(texgetcount("c_syst_last_allocated_box")))
+ return format("%s nodes, %s lists of %s", pool.cleanup(texgetcount("c_syst_last_allocated_box")))
end) -- \topofboxstack
statistics.register("node memory usage", function() -- comes after cleanup !
return status.node_mem_usage
end)
-lua.registerfinalizer(cleanup, "cleanup reserved nodes")
+lua.registerfinalizer(pool.cleanup, "cleanup reserved nodes")
diff --git a/tex/context/base/node-rul.lua b/tex/context/base/node-rul.lua
index 6f3bc9df9..96d6bdf41 100644
--- a/tex/context/base/node-rul.lua
+++ b/tex/context/base/node-rul.lua
@@ -13,28 +13,12 @@ if not modules then modules = { } end modules ['node-rul'] = {
local attributes, nodes, node = attributes, nodes, node
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getlist = nuts.getlist
-
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local rule_code = nodecodes.rule
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local rule_code = nodecodes.rule
function nodes.striprange(first,last) -- todo: dir
if first and last then -- just to be sure
@@ -42,11 +26,11 @@ function nodes.striprange(first,last) -- todo: dir
return first, last
end
while first and first ~= last do
- local id = getid(first)
+ local id = first.id
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
- first = getnext(first)
+ first = first.next
end
end
if not first then
@@ -55,13 +39,13 @@ function nodes.striprange(first,last) -- todo: dir
return first, last
end
while last and last ~= first do
- local id = getid(last)
+ local id = last.id
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
- local prev = getprev(last) -- luatex < 0.70 has italic correction kern not prev'd
+ local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd
if prev then
- last = prev
+ last = last.prev
else
break
end
@@ -89,12 +73,12 @@ local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local list_dimensions = nuts.dimensions
-local hpack_nodes = nuts.hpack
-
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
local striprange = nodes.striprange
+local list_dimensions = node.dimensions
+
+local hpack_nodes = node.hpack
local fontdata = fonts.hashes.identifiers
local variables = interfaces.variables
@@ -127,7 +111,7 @@ local dir_code = whatcodes.dir
local kerning_code = kerncodes.kern
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_rule = nodepool.rule
local new_kern = nodepool.kern
@@ -157,9 +141,9 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
local f, l, a, d, i, class
local continue, done, strip, level = false, false, true, -1
while n do
- local id = getid(n)
+ local id = n.id
if id == glyph_code or id == rule_code then
- local aa = getattr(n,attribute)
+ local aa = n[attribute]
if aa then
if aa == a then
if not f then -- ?
@@ -188,13 +172,13 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
end
f, l, a = nil, nil, nil
end
--- elseif f and (id == disc_code or (id == kern_code and getsubtype(n) == kerning_code)) then
+-- elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then
-- l = n
elseif id == disc_code then
if f then
l = n
end
- elseif id == kern_code and getsubtype(n) == kerning_code then
+ elseif id == kern_code and n.subtype == kerning_code then
if f then
l = n
end
@@ -203,11 +187,11 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
head, done = flush(head,f,l,d,level,parent,strip), true
f, l, a = nil, nil, nil
end
- local list = getlist(n)
+ local list = n.list
if list then
- setfield(n,"list",(processwords(attribute,data,flush,list,n))) -- watch ()
+ n.list = processwords(attribute,data,flush,list,n)
end
- elseif checkdir and id == whatsit_code and getsubtype(n) == dir_code then -- only changes in dir, we assume proper boundaries
+ elseif checkdir and id == whatsit_code and n.subtype == dir_code then -- only changes in dir, we assume proper boundaries
if f and a then
l = n
end
@@ -219,8 +203,8 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
-- l = n
elseif id == glue_code then
-- catch \underbar{a} \underbar{a} (subtype test is needed)
- local subtype = getsubtype(n)
- if getattr(n,attribute) and (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
+ local subtype = n.subtype
+ if n[attribute] and (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
l = n
else
head, done = flush(head,f,l,d,level,parent,strip), true
@@ -232,7 +216,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
f, l, a = nil, nil, nil
end
end
- n = getnext(n)
+ n = n.next
end
if f then
head, done = flush(head,f,l,d,level,parent,strip), true
@@ -243,16 +227,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
end
end
--- nodes.processwords = processwords
-
-nodes.processwords = function(attribute,data,flush,head,parent) -- we have hlistdir and local dir
- head = tonut(head)
- if parent then
- parent = tonut(parent)
- end
- local head, done = processwords(attribute,data,flush,head,parent)
- return tonode(head), done
-end
+nodes.processwords = processwords
--
@@ -271,7 +246,7 @@ end
local a_viewerlayer = attributes.private("viewerlayer")
local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose
- if getid(f) ~= glyph_code then
+ if f.id ~= glyph_code then
-- saveguard ... we need to deal with rules and so (math)
return head
end
@@ -289,16 +264,16 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
if not f then
return head
end
- local w = list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),f,getnext(l))
+ local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next)
local method, offset, continue, dy, order, max = d.method, d.offset, d.continue, d.dy, d.order, d.max
local rulethickness, unit = d.rulethickness, d.unit
local ma, ca, ta = d.ma, d.ca, d.ta
- local colorspace = ma > 0 and ma or getattr(f,a_colorspace) or 1
- local color = ca > 0 and ca or getattr(f,a_color)
- local transparency = ta > 0 and ta or getattr(f,a_transparency)
+ local colorspace = ma > 0 and ma or f[a_colorspace] or 1
+ local color = ca > 0 and ca or f[a_color]
+ local transparency = ta > 0 and ta or f[a_transparency]
local foreground = order == v_foreground
- local e = dimenfactor(unit,getfont(f)) -- what if no glyph node
+ local e = dimenfactor(unit,f.font) -- what if no glyph node
local rt = tonumber(rulethickness)
if rt then
@@ -306,7 +281,7 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
else
local n, u = splitdimen(rulethickness)
if n and u then -- we need to intercept ex and em and % and ...
- rulethickness = n * dimenfactor(u,fontdata[getfont(f)]) / 2
+ rulethickness = n * dimenfactor(u,fontdata[f.font]) / 2
else
rulethickness = 1/5
end
@@ -325,18 +300,18 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
local ht = (offset+(i-1)*dy)*e + rulethickness - m
local dp = -(offset+(i-1)*dy)*e + rulethickness + m
local r = new_rule(w,ht,dp)
- local v = getattr(f,a_viewerlayer)
+ local v = f[a_viewerlayer]
-- quick hack
if v then
- setattr(r,a_viewerlayer,v)
+ r[a_viewerlayer] = v
end
--
if color then
- setattr(r,a_colorspace,colorspace)
- setattr(r,a_color,color)
+ r[a_colorspace] = colorspace
+ r[a_color] = color
end
if transparency then
- setattr(r,a_transparency,transparency)
+ r[a_transparency] = transparency
end
local k = new_kern(-w)
if foreground then
@@ -390,27 +365,21 @@ local function flush_shifted(head,first,last,data,level,parent,strip) -- not tha
if true then
first, last = striprange(first,last)
end
- local prev = getprev(first)
- local next = getnext(last)
- setfield(first,"prev",nil)
- setfield(last,"next",nil)
- local width, height, depth = list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),first,next)
+ local prev, next = first.prev, last.next
+ first.prev, last.next = nil, nil
+ local width, height, depth = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,first,next)
local list = hpack_nodes(first,width,"exactly")
if first == head then
head = list
end
if prev then
- setfield(prev,"next",list)
- setfield(list,"prev",prev)
+ prev.next, list.prev = list, prev
end
if next then
- setfield(next,"prev",list)
- setfield(list,"next",next)
+ next.prev, list.next = list, next
end
- local raise = data.dy * dimenfactor(data.unit,fontdata[getfont(first)])
- setfield(list,"shift",raise)
- setfield(list,"height",height)
- setfield(list,"depth",depth)
+ local raise = data.dy * dimenfactor(data.unit,fontdata[first.font])
+ list.shift, list.height, list.depth = raise, height, depth
if trace_shifted then
report_shifted("width %p, nodes %a, text %a",width,n_tostring(first,last),n_tosequence(first,last,true))
end
diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua
index 081107277..9617f7476 100644
--- a/tex/context/base/node-tra.lua
+++ b/tex/context/base/node-tra.lua
@@ -34,30 +34,9 @@ nodes.handlers = handlers
local injections = nodes.injections or { }
nodes.injections = injections
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getchar = nuts.getchar
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-
-local setattr = nuts.setattr
-
-local flush_list = nuts.flush_list
-local count_nodes = nuts.count
-local used_nodes = nuts.usedlist
-
-local traverse_by_id = nuts.traverse_id
-local traverse_nodes = nuts.traverse
-local d_tostring = nuts.tostring
-
-local nutpool = nuts.pool
-local new_rule = nutpool.rule
+local traverse_nodes = node.traverse
+local traverse_by_id = node.traverse_id
+local count_nodes = nodes.count
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -77,6 +56,9 @@ local gluespec_code = nodecodes.gluespec
local localpar_code = whatcodes.localpar
local dir_code = whatcodes.dir
+local nodepool = nodes.pool
+local new_rule = nodepool.rule
+
local dimenfactors = number.dimenfactors
local formatters = string.formatters
@@ -86,16 +68,15 @@ function nodes.showlist(head, message)
if message then
report_nodes(message)
end
- for n in traverse_nodes(tonut(head)) do
- report_nodes(d_tostring(n))
+ for n in traverse_nodes(head) do
+ report_nodes(tostring(n))
end
end
function nodes.handlers.checkglyphs(head,message)
- local h = tonut(head)
local t = { }
- for g in traverse_by_id(glyph_code,h) do
- t[#t+1] = formatters["%U:%s"](getchar(g),getsubtype(g))
+ for g in traverse_by_id(glyph_code,head) do
+ t[#t+1] = formatters["%U:%s"](g.char,g.subtype)
end
if #t > 0 then
if message and message ~= "" then
@@ -109,12 +90,12 @@ end
function nodes.handlers.checkforleaks(sparse)
local l = { }
- local q = used_nodes()
- for p in traverse_nodes(q) do
- local s = table.serialize(nodes.astable(p,sparse),nodecodes[getid(p)])
+ local q = node.usedlist()
+ for p in traverse(q) do
+ local s = table.serialize(nodes.astable(p,sparse),nodecodes[p.id])
l[s] = (l[s] or 0) + 1
end
- flush_list(q)
+ node.flush_list(q)
for k, v in next, l do
report_nodes("%s * %s",v,k)
end
@@ -124,40 +105,39 @@ local f_sequence = formatters["U+%04X:%s"]
local function tosequence(start,stop,compact)
if start then
- start = tonut(start)
- stop = stop and tonut(stop)
local t = { }
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- local c = getchar(start)
+ local c = start.char
if compact then
- local components = getfield(start,"components")
- if components then
- t[#t+1] = tosequence(components,nil,compact)
+ if start.components then
+ t[#t+1] = tosequence(start.components,nil,compact)
else
t[#t+1] = utfchar(c)
end
else
t[#t+1] = f_sequence(c,utfchar(c))
end
+ elseif id == whatsit_code and start.subtype == localpar_code or start.subtype == dir_code then
+ t[#t+1] = "[" .. start.dir .. "]"
elseif id == rule_code then
if compact then
t[#t+1] = "|"
else
t[#t+1] = nodecodes[id]
end
- elseif id == whatsit_code and getsubtype(start) == localpar_code or getsubtype(start) == dir_code then
- t[#t+1] = "[" .. getfield(start,"dir") .. "]"
- elseif compact then
- t[#t+1] = "[]"
else
- t[#t+1] = nodecodes[id]
+ if compact then
+ t[#t+1] = "[]"
+ else
+ t[#t+1] = nodecodes[id]
+ end
end
if start == stop then
break
else
- start = getnext(start)
+ start = start.next
end
end
if compact then
@@ -173,23 +153,21 @@ end
nodes.tosequence = tosequence
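
-- Editorial sketch (not part of this patch): tosequence renders a node range in
-- a readable form, roughly:
--
--   tosequence(first,last)        -- e.g. "U+0048:H U+0069:i glue U+0021:!"
--   tosequence(first,last,true)   -- e.g. "Hi[]!" (compact: glyphs as characters)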
function nodes.report(t,done)
- report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(tonut(t)))
+ report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(t))
end
function nodes.packlist(head)
local t = { }
- for n in traverse_nodes(tonut(head)) do
- t[#t+1] = d_tostring(n)
+ for n in traverse(head) do
+ t[#t+1] = tostring(n)
end
return t
end
function nodes.idstostring(head,tail)
- head = tonut(head)
- tail = tail and tonut(tail)
local t, last_id, last_n = { }, nil, 0
for n in traverse_nodes(head,tail) do -- hm, does not stop at tail
- local id = getid(n)
+ local id = n.id
if not last_id then
last_id, last_n = id, 1
elseif last_id == id then
@@ -217,8 +195,6 @@ function nodes.idstostring(head,tail)
end
-- function nodes.xidstostring(head,tail) -- only for special tracing of backlinks
--- head = tonut(head)
--- tail = tonut(tail)
-- local n = head
-- while n.next do
-- n = n.next
@@ -241,7 +217,7 @@ end
-- if n == head then
-- break
-- end
--- n = getprev(n)
+-- n = n.prev
-- end
-- if not last_id then
-- t[#t+1] = "no nodes"
@@ -254,56 +230,51 @@ end
-- end
local function showsimplelist(h,depth,n)
- h = h and tonut(h)
while h do
report_nodes("% w%s",n,d_tostring(h))
if not depth or n < depth then
- local id = getid(h)
+ local id = h.id
if id == hlist_code or id == vlist_code then
- showsimplelist(getlist(h),depth,n+1)
+ showsimplelist(h.list,depth,n+1)
end
end
- h = getnext(h)
+ h = h.next
end
end
--- \startluacode
--- callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
--- \stopluacode
--- \vbox{b\footnote{n}a}
--- \startluacode
--- callback.register('buildpage_filter',nil)
--- \stopluacode
+--~ \startluacode
+--~ callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
+--~ \stopluacode
+--~ \vbox{b\footnote{n}a}
+--~ \startluacode
+--~ callback.register('buildpage_filter',nil)
+--~ \stopluacode
nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end
local function listtoutf(h,joiner,textonly,last)
+ local joiner = (joiner == true and utfchar(0x200C)) or joiner -- zwnj
local w = { }
while h do
- local id = getid(h)
+ local id = h.id
if id == glyph_code then -- always true
- local c = getchar(h)
+ local c = h.char
w[#w+1] = c >= 0 and utfchar(c) or formatters["<%i>"](c)
if joiner then
w[#w+1] = joiner
end
elseif id == disc_code then
- local pre = getfield(h,"pre")
- local pos = getfield(h,"post")
- local rep = getfield(h,"replace")
+ local pre = h.pre
+ local pos = h.post
+ local rep = h.replace
w[#w+1] = formatters["[%s|%s|%s]"] (
pre and listtoutf(pre,joiner,textonly) or "",
pos and listtoutf(pos,joiner,textonly) or "",
rep and listtoutf(rep,joiner,textonly) or ""
)
elseif textonly then
- if id == glue_code then
- local spec = getfield(h,"spec")
- if spec and getfield(spec,"width") > 0 then
- w[#w+1] = " "
- end
- elseif id == hlist_code or id == vlist_code then
- w[#w+1] = "[]"
+ if id == glue_code and h.spec and h.spec.width > 0 then
+ w[#w+1] = " "
end
else
w[#w+1] = "[-]"
@@ -311,28 +282,24 @@ local function listtoutf(h,joiner,textonly,last)
if h == last then
break
else
- h = getnext(h)
+ h = h.next
end
end
return concat(w)
end
-function nodes.listtoutf(h,joiner,textonly,last)
- local joiner = joiner == true and utfchar(0x200C) or joiner -- zwnj
- return listtoutf(tonut(h),joiner,textonly,last and tonut(last))
-end
+nodes.listtoutf = listtoutf
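
-- Editorial sketch (not part of this patch): listtoutf flattens a node list to a
-- string, roughly:
--
--   listtoutf(head,nil,true)   -- e.g. "some words" (textonly: wide glue becomes a space)
--   listtoutf(head)            -- discs render as "[pre|post|replace]", other nodes as "[-]"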
local what = { [0] = "unknown", "line", "box", "indent", "row", "cell" }
local function showboxes(n,symbol,depth)
- depth = depth or 0
- symbol = symbol or "."
- for n in traverse_nodes(tonut(n)) do
- local id = getid(n)
+ depth, symbol = depth or 0, symbol or "."
+ for n in traverse_nodes(n) do
+ local id = n.id
if id == hlist_code or id == vlist_code then
- local s = getsubtype(n)
+ local s = n.subtype
report_nodes(rep(symbol,depth) .. what[s] or s)
- showboxes(getlist(n),symbol,depth+1)
+ showboxes(n.list,symbol,depth+1)
end
end
end
@@ -355,8 +322,15 @@ local stripper = lpeg.patterns.stripzeros
local dimenfactors = number.dimenfactors
-local function nodetodimen(d,unit,fmt,strip)
- d = tonut(d) -- tricky: direct nuts are an issue
+local function numbertodimen(d,unit,fmt,strip)
+ if not d then
+ local str = formatters[fmt](0,unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ local t = type(d)
+ if t == 'string' then
+ return d
+ end
if unit == true then
unit = "pt"
fmt = "%0.5f%s"
@@ -368,23 +342,27 @@ local function nodetodimen(d,unit,fmt,strip)
fmt = "%0.5f%s"
end
end
- local id = getid(d)
+ if t == "number" then
+ local str = formatters[fmt](d*dimenfactors[unit],unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ local id = d.id
if id == kern_code then
- local str = formatters[fmt](getfield(d,"width")*dimenfactors[unit],unit)
+ local str = formatters[fmt](d.width*dimenfactors[unit],unit)
return strip and lpegmatch(stripper,str) or str
end
if id == glue_code then
- d = getfield(d,"spec")
+ d = d.spec
end
- if not d or not getid(d) == gluespec_code then
+ if not d or not d.id == gluespec_code then
local str = formatters[fmt](0,unit)
return strip and lpegmatch(stripper,str) or str
end
- local width = getfield(d,"width")
- local plus = getfield(d,"stretch_order")
- local minus = getfield(d,"shrink_order")
- local stretch = getfield(d,"stretch")
- local shrink = getfield(d,"shrink")
+ local width = d.width
+ local plus = d.stretch_order
+ local minus = d.shrink_order
+ local stretch = d.stretch
+ local shrink = d.shrink
if plus ~= 0 then
plus = " plus " .. stretch/65536 .. fillcodes[plus]
elseif stretch ~= 0 then
@@ -401,39 +379,11 @@ local function nodetodimen(d,unit,fmt,strip)
else
minus = ""
end
- local str = formatters[fmt](getfield(d,"width")*dimenfactors[unit],unit)
+ local str = formatters[fmt](d.width*dimenfactors[unit],unit)
return (strip and lpegmatch(stripper,str) or str) .. plus .. minus
end
-local function numbertodimen(d,unit,fmt,strip)
- if not d then
- local str = formatters[fmt](0,unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local t = type(d)
- if t == 'string' then
- return d
- elseif t == "number" then
- if unit == true then
- unit = "pt"
- fmt = "%0.5f%s"
- else
- unit = unit or 'pt'
- if not fmt then
- fmt = "%s%s"
- elseif fmt == true then
- fmt = "%0.5f%s"
- end
- end
- local str = formatters[fmt](d*dimenfactors[unit],unit)
- return strip and lpegmatch(stripper,str) or str
- else
- return nodetodimen(d,unit,fmt,strip) -- real node
- end
-end
-
number.todimen = numbertodimen
-nodes .todimen = nodetodimen
function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end
function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end
@@ -448,19 +398,6 @@ function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end
function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end
function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end
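
-- Editorial sketch (not part of this patch): with 65536 scaled points per TeX
-- point, these helpers behave roughly like:
--
--   number.topoints(65536)        -- "1pt"
--   number.topoints(32768,true)   -- "0.50000pt" (fmt == true selects "%0.5f%s")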
-function nodes.topoints (n,fmt) return nodetodimen(n,"pt",fmt) end
-function nodes.toinches (n,fmt) return nodetodimen(n,"in",fmt) end
-function nodes.tocentimeters (n,fmt) return nodetodimen(n,"cm",fmt) end
-function nodes.tomillimeters (n,fmt) return nodetodimen(n,"mm",fmt) end
-function nodes.toscaledpoints(n,fmt) return nodetodimen(n,"sp",fmt) end
-function nodes.toscaledpoints(n) return n .. "sp" end
-function nodes.tobasepoints (n,fmt) return nodetodimen(n,"bp",fmt) end
-function nodes.topicas (n,fmt) return nodetodimen(n "pc",fmt) end
-function nodes.todidots (n,fmt) return nodetodimen(n,"dd",fmt) end
-function nodes.tociceros (n,fmt) return nodetodimen(n,"cc",fmt) end
-function nodes.tonewdidots (n,fmt) return nodetodimen(n,"nd",fmt) end
-function nodes.tonewciceros (n,fmt) return nodetodimen(n,"nc",fmt) end
-
-- stop redefinition
local points = function(n)
@@ -506,13 +443,8 @@ number.basepoints = basepoints
number.pts = pts
number.nopts = nopts
-nodes.points = function(n) return numbertodimen(n,"pt",true,true) end
-nodes.basepoints = function(n) return numbertodimen(n,"bp",true,true) end
-nodes.pts = function(n) return numbertodimen(n,"pt",true) end
-nodes.nopts = function(n) return format("%.5f",n*ptfactor) end
-
-local colors = { }
-tracers.colors = colors
+local colors = { }
+tracers.colors = colors
local unsetvalue = attributes.unsetvalue
@@ -522,34 +454,36 @@ local m_color = attributes.list[a_color] or { }
function colors.set(n,c,s)
local mc = m_color[c]
- local nn = tonut(n)
- if mc then
- local mm = s or texgetattribute(a_colormodel)
- setattr(nn,a_colormodel,mm <= 0 and mm or 1)
- setattr(nn,a_color,mc)
+ if not mc then
+ n[a_color] = unsetvalue
else
- setattr(nn,a_color,unsetvalue)
+ if not n[a_colormodel] then
+ n[a_colormodel] = s or 1
+ end
+ n[a_color] = mc
end
return n
end
function colors.setlist(n,c,s)
- local nn = tonut(n)
- local mc = m_color[c] or unsetvalue
- local mm = s or texgetattribute(a_colormodel)
- if mm <= 0 then
- mm = 1
- end
- while nn do
- setattr(nn,a_colormodel,mm)
- setattr(nn,a_color,mc)
- nn = getnext(nn)
+ local f = n
+ while n do
+ local mc = m_color[c]
+ if not mc then
+ n[a_color] = unsetvalue
+ else
+ if not n[a_colormodel] then
+ n[a_colormodel] = s or 1
+ end
+ n[a_color] = mc
+ end
+ n = n.next
end
- return n
+ return f
end
function colors.reset(n)
- setattr(tonut(n),a_color,unsetvalue)
+ n[a_color] = unsetvalue
return n
end
@@ -562,22 +496,31 @@ local a_transparency = attributes.private('transparency')
local m_transparency = attributes.list[a_transparency] or { }
function transparencies.set(n,t)
- setattr(tonut(n),a_transparency,m_transparency[t] or unsetvalue)
+ local mt = m_transparency[t]
+ if not mt then
+ n[a_transparency] = unsetvalue
+ else
+ n[a_transparency] = mt
+ end
return n
end
function transparencies.setlist(n,c,s)
- local nn = tonut(n)
- local mt = m_transparency[c] or unsetvalue
- while nn do
- setattr(nn,a_transparency,mt)
- nn = getnext(nn)
+ local f = n
+ while n do
+ local mt = m_transparency[c]
+ if not mt then
+ n[a_transparency] = unsetvalue
+ else
+ n[a_transparency] = mt
+ end
+ n = n.next
end
- return n
+ return f
end
function transparencies.reset(n)
- setattr(n,a_transparency,unsetvalue)
+ n[a_transparency] = unsetvalue
return n
end
@@ -594,76 +537,52 @@ end
-- although tracers are used seldom
local function setproperties(n,c,s)
- local nn = tonut(n)
local mm = texgetattribute(a_colormodel)
- setattr(nn,a_colormodel,mm > 0 and mm or 1)
- setattr(nn,a_color,m_color[c])
- setattr(nn,a_transparency,m_transparency[c])
+ n[a_colormodel] = mm > 0 and mm or 1
+ n[a_color] = m_color[c]
+ n[a_transparency] = m_transparency[c]
return n
end
tracers.setproperties = setproperties
-function tracers.setlist(n,c,s)
- local nn = tonut(n)
+function tracers.setlistv(n,c,s)
+ local f = n
local mc = m_color[c]
local mt = m_transparency[c]
local mm = texgetattribute(a_colormodel)
if mm <= 0 then
mm = 1
end
- while nn do
- setattr(nn,a_colormodel,mm)
- setattr(nn,a_color,mc)
- setattr(nn,a_transparency,mt)
- nn = getnext(nn)
+ while n do
+ n[a_colormodel] = mm
+ n[a_color] = mc
+ n[a_transparency] = mt
+ n = n.next
end
- return n
+ return f
end
function tracers.resetproperties(n)
- local nn = tonut(n)
- setattr(nn,a_color,unsetvalue)
- setattr(nn,a_transparency,unsetvalue)
+ n[a_color] = unsetvalue
+ n[a_transparency] = unsetvalue
return n
end
--- this one returns a nut
+function tracers.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
+ return setproperties(new_rule(w,h,d),c,s)
+end
+
+-- only nodes
local nodestracerpool = { }
-local nutstracerpool = { }
tracers.pool = {
nodes = nodestracerpool,
- nuts = nutstracerpool,
}
-table.setmetatableindex(nodestracerpool,function(t,k,v)
- local f = nutstracerpool[k]
- local v = function(...)
- return tonode(f(...))
- end
- t[k] = v
- return v
-end)
-
-function nutstracerpool.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
+function nodestracerpool.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
return setproperties(new_rule(w,h,d),c,s)
end
tracers.rule = nodestracerpool.rule -- for a while
-
--- local function show(head,n,message)
--- print("START",message or "")
--- local i = 0
--- for current in traverse(head) do
--- local prev = getprev(current)
--- local next = getnext(current)
--- i = i + 1
--- print(i, prev and nodecodes[getid(prev)],nodecodes[getid(current)],next and nodecodes[getid(next)])
--- if i == n then
--- break
--- end
--- end
--- print("STOP", message or "")
--- end
diff --git a/tex/context/base/node-tst.lua b/tex/context/base/node-tst.lua
index 7f5102d5f..bfe0051bd 100644
--- a/tex/context/base/node-tst.lua
+++ b/tex/context/base/node-tst.lua
@@ -24,26 +24,17 @@ local rightskip_code = skipcodes.rightskip
local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip
local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
-local nuts = nodes.nuts
+local find_node_tail = node.tail or node.slide
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getchar = nuts.getchar
-local getsubtype = nuts.getsubtype
-
-local find_node_tail = nuts.tail
-
-function nuts.leftmarginwidth(n) -- todo: three values
+function nodes.leftmarginwidth(n) -- todo: three values
while n do
- local id = getid(n)
+ local id = n.id
if id == glue_code then
- return getsubtype(n) == leftskip_code and getfield(getfield(n,"spec"),"width") or 0
+ return n.subtype == leftskip_code and n.spec.width or 0
elseif id == whatsit_code then
- n = getnext(n)
+ n = n.next
elseif id == hlist_code then
- return getfield(n,"width")
+ return n.width
else
break
end
@@ -51,15 +42,15 @@ function nuts.leftmarginwidth(n) -- todo: three values
return 0
end
-function nuts.rightmarginwidth(n)
+function nodes.rightmarginwidth(n)
if n then
n = find_node_tail(n)
while n do
- local id = getid(n)
+ local id = n.id
if id == glue_code then
- return getsubtype(n) == rightskip_code and getfield(getfield(n,"spec"),"width") or 0
+ return n.subtype == rightskip_code and n.spec.width or 0
elseif id == whatsit_code then
- n = getprev(n)
+ n = n.prev
else
break
end
@@ -68,15 +59,15 @@ function nuts.rightmarginwidth(n)
return false
end
-function nuts.somespace(n,all)
+function nodes.somespace(n,all)
if n then
- local id = getid(n)
+ local id = n.id
if id == glue_code then
- return (all or (getfield(getfield(n,"spec"),"width") ~= 0)) and glue_code
+ return (all or (n.spec.width ~= 0)) and glue_code
elseif id == kern_code then
- return (all or (getfield(n,"kern") ~= 0)) and kern
+ return (all or (n.kern ~= 0)) and kern
elseif id == glyph_code then
- local category = chardata[getchar(n)].category
+ local category = chardata[n.char].category
-- maybe more category checks are needed
return (category == "zs") and glyph_code
end
@@ -84,12 +75,12 @@ function nuts.somespace(n,all)
return false
end
-function nuts.somepenalty(n,value)
+function nodes.somepenalty(n,value)
if n then
- local id = getid(n)
+ local id = n.id
if id == penalty_code then
if value then
- return getfield(n,"penalty") == value
+ return n.penalty == value
else
return true
end
@@ -98,38 +89,32 @@ function nuts.somepenalty(n,value)
return false
end
-function nuts.is_display_math(head)
- local n = getprev(head)
+function nodes.is_display_math(head)
+ local n = head.prev
while n do
- local id = getid(n)
+ local id = n.id
if id == penalty_code then
elseif id == glue_code then
- if getsubtype(n) == abovedisplayshortskip_code then
+ if n.subtype == abovedisplayshortskip_code then
return true
end
else
break
end
- n = getprev(n)
+ n = n.prev
end
- n = getnext(head)
+ n = head.next
while n do
- local id = getid(n)
+ local id = n.id
if id == penalty_code then
elseif id == glue_code then
- if getsubtype(n) == belowdisplayshortskip_code then
+ if n.subtype == belowdisplayshortskip_code then
return true
end
else
break
end
- n = getnext(n)
+ n = n.next
end
return false
end
-
-nodes.leftmarginwidth = nodes.vianuts(nuts.leftmarginwidth)
-nodes.rightmarginwidth = nodes.vianuts(nuts.rightmarginwidth)
-nodes.somespace = nodes.vianuts(nuts.somespace)
-nodes.somepenalty = nodes.vianuts(nuts.somepenalty)
-nodes.is_display_math = nodes.vianuts(nuts.is_display_math)
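After this hunk the helpers take plain nodes and live directly in the nodes namespace. A hedged usage sketch; the wrapper and the penalty value are illustrative, only the two helper calls come from the file.

-- hypothetical usage: does a list end in a (possibly zero width) space or
-- in a penalty with the given value?
local function endswithspaceorpenalty(head,penalty)
    local tail = node.slide(head) -- plain node library call, returns the tail
    return nodes.somespace(tail,true) or nodes.somepenalty(tail,penalty or 10000)
end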
diff --git a/tex/context/base/node-typ.lua b/tex/context/base/node-typ.lua
index 4c33e3199..4a2ef8d49 100644
--- a/tex/context/base/node-typ.lua
+++ b/tex/context/base/node-typ.lua
@@ -8,38 +8,26 @@ if not modules then modules = { } end modules ['node-typ'] = {
-- code has been moved to blob-ini.lua
-local typesetters = nodes.typesetters or { }
-nodes.typesetters = typesetters
+local typesetters = nodes.typesetters or { }
+nodes.typesetters = typesetters
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
+local hpack_node_list = nodes.hpack
+local vpack_node_list = nodes.vpack
+local fast_hpack_list = nodes.fasthpack
-local setfield = nuts.setfield
-local getfont = nuts.getfont
-
-local hpack_node_list = nuts.hpack
-local vpack_node_list = nuts.vpack
-local fast_hpack_list = nuts.fasthpack
-local copy_node = nuts.copy
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_glyph = nodepool.glyph
local new_glue = nodepool.glue
local utfvalues = utf.values
-local currentfont = font.current
-local fontparameters = fonts.hashes.parameters
+local currentfont = font.current
+local fontparameters = fonts.hashes.parameters
-local function tonodes(str,fontid,spacing,templateglyph) -- quick and dirty
+local function tonodes(str,fontid,spacing) -- quick and dirty
local head, prev = nil, nil
if not fontid then
- if templateglyph then
- fontid = getfont(templateglyph)
- else
- fontid = currentfont()
- end
+ fontid = currentfont()
end
local fp = fontparameters[fontid]
local s, p, m
@@ -56,10 +44,6 @@ local function tonodes(str,fontid,spacing,templateglyph) -- quick and dirty
next = new_glue(s,p,m)
spacedone = true
end
- elseif templateglyph then
- next = copy_glyph(templateglyph)
- setfield(next,"char",c)
- spacedone = false
else
next = new_glyph(fontid or 1,c)
spacedone = false
@@ -69,8 +53,8 @@ local function tonodes(str,fontid,spacing,templateglyph) -- quick and dirty
elseif not head then
head = next
else
- setfield(prev,"next",next)
- setfield(next,"prev",prev)
+ prev.next = next
+ next.prev = prev
end
prev = next
end
@@ -93,30 +77,17 @@ end
local tovpackfast = tovpack
-local tnuts = { }
-nuts.typesetters = tnuts
-
-tnuts.tonodes = tonodes
-tnuts.tohpack = tohpack
-tnuts.tohpackfast = tohpackfast
-tnuts.tovpack = tovpack
-tnuts.tovpackfast = tovpackfast
-
-tnuts.hpack = tohpack -- obsolete
-tnuts.fast_hpack = tohpackfast -- obsolete
-tnuts.vpack = tovpack -- obsolete
-
-typesetters.tonodes = function(...) local h, b = tonodes (...) return tonode(h), b end
-typesetters.tohpack = function(...) local h, b = tohpack (...) return tonode(h), b end
-typesetters.tohpackfast = function(...) local h, b = tohpackfast(...) return tonode(h), b end
-typesetters.tovpack = function(...) local h, b = tovpack (...) return tonode(h), b end
-typesetters.tovpackfast = function(...) local h, b = tovpackfast(...) return tonode(h), b end
+typesetters.tonodes = tonodes
+typesetters.tohpack = tohpack
+typesetters.tohpackfast = tohpackfast
+typesetters.tovpack = tovpack
+typesetters.tovpackfast = tovpackfast
-typesetters.hpack = typesetters.tohpack -- obsolete
-typesetters.fast_hpack = typesetters.tofasthpack -- obsolete
-typesetters.vpack = typesetters.tovpack -- obsolete
+typesetters.hpack = tohpack
+typesetters.fast_hpack = tohpackfast
+typesetters.vpack = tovpack
-- node.write(nodes.typestters.hpack("Hello World!"))
-- node.write(nodes.typestters.hpack("Hello World!",1,100*1024*10))
-string.tonodes = function(...) return tonode(tonodes(...)) end -- quite convenient
+string.tonodes = tonodes -- quite convenient
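The typesetter helpers now take and return plain nodes. A minimal usage sketch along the lines of the commented example at the end of the file; the string is arbitrary and a nil fontid falls back to the current font.

-- build a node list from a string and flush it as a packed hbox
local head = nodes.typesetters.tonodes("Hello World!",nil,nil)
node.write(node.hpack(head))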
diff --git a/tex/context/base/pack-rul.lua b/tex/context/base/pack-rul.lua
index c8ed0722b..329ea63b8 100644
--- a/tex/context/base/pack-rul.lua
+++ b/tex/context/base/pack-rul.lua
@@ -21,25 +21,15 @@ local line_code = nodes.listcodes.line
local texsetdimen = tex.setdimen
local texsetcount = tex.setcount
-
-local nuts = nodes.nuts
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getlist = nuts.getlist
-local getsubtype = nuts.getsubtype
-local getbox = nuts.getbox
-
-local hpack = nuts.hpack
-local free = nuts.free
-local copy = nuts.copy_list
-local traverse_id = nuts.traverse_id
-local node_dimensions = nuts.dimensions
+local texgetbox = tex.getbox
+local hpack = nodes.hpack
+local free = nodes.free
+local copy = nodes.copy_list
+local traverse_id = nodes.traverse_id
+local node_dimensions = nodes.dimensions
function commands.doreshapeframedbox(n)
- local box = getbox(n)
+ local box = texgetbox(n)
local noflines = 0
local firstheight = nil
local lastdepth = nil
@@ -48,27 +38,27 @@ function commands.doreshapeframedbox(n)
local maxwidth = 0
local totalwidth = 0
local averagewidth = 0
- local boxwidth = getfield(box,"width")
+ local boxwidth = box.width
if boxwidth ~= 0 then -- and h.subtype == vlist_code
- local list = getlist(box)
+ local list = box.list
if list then
local function check(n,repack)
if not firstheight then
- firstheight = getfield(n,"height")
+ firstheight = n.height
end
- lastdepth = getfield(n,"depth")
+ lastdepth = n.depth
noflines = noflines + 1
- local l = getlist(n)
+ local l = n.list
if l then
if repack then
- local subtype = getsubtype(n)
+ local subtype = n.subtype
if subtype == box_code or subtype == line_code then
- lastlinelength = node_dimensions(l,getfield(n,"dir")) -- used to be: hpack(copy(l)).width
+ lastlinelength = node_dimensions(l,n.dir) -- used to be: hpack(copy(l)).width
else
- lastlinelength = getfield(n,"width")
+ lastlinelength = n.width
end
else
- lastlinelength = getfield(n,"width")
+ lastlinelength = n.width
end
if lastlinelength > maxwidth then
maxwidth = lastlinelength
@@ -94,27 +84,28 @@ function commands.doreshapeframedbox(n)
elseif maxwidth ~= 0 then
if hdone then
for h in traverse_id(hlist_code,list) do
- local l = getlist(h)
+ local l = h.list
if l then
- local subtype = getsubtype(h)
+ local subtype = h.subtype
if subtype == box_code or subtype == line_code then
- l = hpack(l,maxwidth,'exactly',getfield(h,"dir")) -- multiple return values
- setfield(h,"list",l)
- setfield(h,"shift",0) -- needed for display math, so no width check possible
+ h.list = hpack(l,maxwidth,'exactly',h.dir)
+ h.shift = 0 -- needed for display math
end
- setfield(h,"width",maxwidth)
+ h.width = maxwidth
end
end
+ box.width = maxwidth -- moved
+ averagewidth = noflines > 0 and totalwidth/noflines or 0
end
-- if vdone then
-- for v in traverse_id(vlist_code,list) do
- -- local width = getfield(n,"width")
+ -- local width = n.width
-- if width > maxwidth then
- -- setfield(v,"width",maxwidth)
+ -- v.width = maxwidth
-- end
-- end
-- end
- setfield(box,"width",maxwidth)
+ box.width = maxwidth
averagewidth = noflines > 0 and totalwidth/noflines or 0
end
end
@@ -128,18 +119,18 @@ function commands.doreshapeframedbox(n)
end
function commands.doanalyzeframedbox(n)
- local box = getbox(n)
+ local box = texgetbox(n)
local noflines = 0
local firstheight = nil
local lastdepth = nil
- if getfield(box,"width") ~= 0 then
- local list = getlist(box)
+ if box.width ~= 0 then
+ local list = box.list
if list then
local function check(n)
if not firstheight then
- firstheight = getfield(n,"height")
+ firstheight = n.height
end
- lastdepth = getfield(n,"depth")
+ lastdepth = n.depth
noflines = noflines + 1
end
for h in traverse_id(hlist_code,list) do
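commands.doreshapeframedbox measures every line and then repacks the lines to the widest one. A simplified sketch of that core loop using the direct field access shown above; it leaves out the subtype checks and bookkeeping of the real function, and n is assumed to be a box register number.

local hlist_code = node.id("hlist")

-- find the widest hlist in box register n and repack every line to it
local function reshapetowidest(n)
    local box  = tex.getbox(n)
    local list = box and box.list
    if not list then
        return
    end
    local maxwidth = 0
    for h in node.traverse_id(hlist_code,list) do
        if h.width > maxwidth then
            maxwidth = h.width
        end
    end
    if maxwidth > 0 then
        for h in node.traverse_id(hlist_code,list) do
            if h.list then
                h.list  = node.hpack(h.list,maxwidth,"exactly",h.dir)
                h.shift = 0
                h.width = maxwidth
            end
        end
        box.width = maxwidth
    end
end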
diff --git a/tex/context/base/pack-rul.mkiv b/tex/context/base/pack-rul.mkiv
index 8fcf8f548..377d39499 100644
--- a/tex/context/base/pack-rul.mkiv
+++ b/tex/context/base/pack-rul.mkiv
@@ -2564,25 +2564,10 @@
\inheritedframedtextframed\bgroup
\let\\=\endgraf
\framedtextparameter\c!inner % oud spul
- \edef\p_framed_text_depthcorrection{\framedtextparameter\c!depthcorrection}%
- \ifx\p_framed_text_depthcorrection\v!on
- \pack_framed_text_start_depth_correction
- \else
- \bgroup
- \fi
-\vskip-\strutdp % brrr why is this needed ... needs to be sorted out, see testcase 1
+ \doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_start_depth_correction
\doinhibitblank
\setupindenting[\framedtextparameter\c!indenting]%
- \useframedtextstyleandcolor\c!style\c!color
- \ignorespaces}
-
-% testcase 1:
-%
-% \showstruts
-% \startframedtext[align={normal,tolerant},offset=0pt] \input tufte \stopframedtext
-% \startframedtext[align={normal,tolerant},offset=0pt,depthcorrection=off] \input tufte \stopframedtext
-% \startframedtext[align={normal,tolerant},offset=0pt,depthcorrection=off] \inframed{x} \stopframedtext
-% \framed[align={normal,tolerant},offset=0pt]{\input tufte }
+ \useframedtextstyleandcolor\c!style\c!color}
%D The \type {none} option is handy for nested usage, as in the presentation
%D styles, where we don't want interference.
@@ -2592,11 +2577,7 @@
\unexpanded\def\pack_framed_text_stop % no \baselinecorrection, see faq docs
{\endgraf
\removelastskip
- \ifx\p_framed_text_depthcorrection\v!on
- \pack_framed_text_stop_depth_correction
- \else
- \egroup
- \fi
+ \doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_stop_depth_correction
\stopboxedcontent
\ifconditional\c_framed_text_location_none
\egroup
diff --git a/tex/context/base/page-brk.mkiv b/tex/context/base/page-brk.mkiv
index 11dc04bfd..cc9a9b4d2 100644
--- a/tex/context/base/page-brk.mkiv
+++ b/tex/context/base/page-brk.mkiv
@@ -316,204 +316,75 @@
%D Test page breaks.
-% \newdimen \d_page_tests_test
-% \newconstant\c_page_tests_mode
+\newdimen \d_page_tests_test
+\newconstant\c_page_tests_mode
-\newconstant\testpagemethod % old
-\newconstant\testpagetrigger % old
+\newconstant\testpagemethod % todo: \testnewpage[method=,lines=,voffset=]
+\newconstant\testpagetrigger
-% \unexpanded\def\testpage {\c_page_tests_mode\plusone \dodoubleempty\page_tests_test} %
-% \unexpanded\def\testpageonly{\c_page_tests_mode\plustwo \dodoubleempty\page_tests_test} % no penalties added to the mvl
-% \unexpanded\def\testpagesync{\c_page_tests_mode\plusthree\dodoubleempty\page_tests_test} % force sync
-%
-% \def\page_tests_test[#1][#2]% don't change, only add more methods
-% {\relax % needed before \if
-% \ifconditional\c_page_breaks_enabled
-% % new from here
-% \ifcase\testpagetrigger
-% \endgraf
-% \or\ifvmode
-% \dosomebreak\allowbreak
-% \else % indeed?
-% \vadjust{\allowbreak}%
-% \endgraf
-% \fi\fi
-% % till here
-% \ifdim\pagegoal<\maxdimen \relax
-% \ifdim\pagetotal<\pagegoal \relax
-% \d_page_tests_test\dimexpr
-% #1\lineheight
-% +\pagetotal
-% \ifdim\lastskip<\parskip+\parskip\fi
-% \ifsecondargument+#2\fi
-% \relax
-% \ifcase\testpagemethod
-% \ifdim\d_page_tests_test>.99\pagegoal
-% \penalty-\plustenthousand
-% \fi
-% \or
-% \ifdim\dimexpr\d_page_tests_test-\pagegoal\relax>-\lineheight
-% \penalty-\plustenthousand
-% \fi
-% \or
-% \getnoflines\pagegoal
-% \ifdim\dimexpr\d_page_tests_test-\noflines\lineheight\relax>-\lineheight
-% \penalty-\plustenthousand
-% \fi
-% \or % same as 0 but more accurate
-% \ifdim\dimexpr\d_page_tests_test-10\scaledpoint\relax>\pagegoal
-% \penalty-\plustenthousand
-% \fi
-% \fi
-% \else\ifnum\c_page_tests_mode=\plusthree
-% \page_tests_flush_so_far
-% \fi\fi
-% \else\ifnum\c_page_tests_mode=\plusone
-% \goodbreak
-% \fi\fi
-% \else
-% \endgraf
-% \fi}
-%
-% \def\page_tests_flush_so_far
-% {\endgraf
-% \ifdim\pagetotal>\pagegoal
-% \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
-% \goodbreak
-% \else
-% \page
-% \fi
-% \fi}
-
-\installcorenamespace {pagechecker}
-\installcorenamespace {pagecheckermethod}
-
-\installcommandhandler \??pagechecker {pagechecker} \??pagechecker
-
-\setuppagechecker
- [\c!method=1,
- \c!before=,
- \c!after=,
- \c!inbetween=,
- \c!lines=\plusthree,
- \c!offset=\zeropoint]
-
-\def\page_check_amount
- {\dimexpr
- \pagecheckerparameter\c!lines\lineheight
- +\pagetotal
- \ifdim\lastskip<\parskip+\parskip\fi
- +\pagecheckerparameter\c!offset
- \relax}
-
-\unexpanded\def\checkpage
- {\dodoubleempty\page_check}
-
-\def\page_check[#1][#2]%
+\unexpanded\def\testpage {\c_page_tests_mode\plusone \dodoubleempty\page_tests_test} %
+\unexpanded\def\testpageonly{\c_page_tests_mode\plustwo \dodoubleempty\page_tests_test} % no penalties added to the mvl
+\unexpanded\def\testpagesync{\c_page_tests_mode\plusthree\dodoubleempty\page_tests_test} % force sync
+
+\def\page_tests_test[#1][#2]% don't change, only add more methods
{\relax % needed before \if
- \endgraf
\ifconditional\c_page_breaks_enabled
- \begingroup
- \edef\currentpagechecker{#1}%
- \ifsecondargument\setupcurrentpagechecker[#2]\fi
- \csname\??pagecheckermethod\pagecheckerparameter\c!method\endcsname
- \endgroup
- \fi}
-
-\setvalue{\??pagecheckermethod 0}%
- {\ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \ifdim\page_check_amount>.99\pagegoal
- \pagecheckerparameter\c!before
- \penalty-\plustenthousand
- \pagecheckerparameter\c!after
- \else
- \pagecheckerparameter\c!inbetween
- \fi
- \else
- \pagecheckerparameter\c!inbetween
- \fi
- \else
- \pagecheckerparameter\c!inbetween
- \fi}
-
-\setvalue{\??pagecheckermethod 1}%
- {\ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \ifdim\dimexpr\page_check_amount-\pagegoal\relax>-\lineheight
- \pagecheckerparameter\c!before
- \penalty-\plustenthousand
- \pagecheckerparameter\c!after
- \else
- \pagecheckerparameter\c!inbetween
- \fi
- \else
- \pagecheckerparameter\c!inbetween
- \fi
- \else
- \goodbreak
- \pagecheckerparameter\c!inbetween
- \fi}
-
-\setvalue{\??pagecheckermethod 2}%
- {\ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \getnoflines\pagegoal
- \ifdim\dimexpr\page_check_amount-\noflines\lineheight\relax>-\lineheight
- \pagecheckparameter\c!before
- \penalty-\plustenthousand
- \pagecheckerparameter\c!after
- \else
- \pagecheckerparameter\c!inbetween
- \fi
- \else
- \pagecheckerparameter\c!inbetween
- \fi
+ % new from here
+ \ifcase\testpagetrigger
+ \endgraf
+ \or\ifvmode
+ \dosomebreak\allowbreak
+ \else % indeed?
+ \vadjust{\allowbreak}%
+ \endgraf
+ \fi\fi
+ % till here
+ \ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \d_page_tests_test\dimexpr
+ #1\lineheight
+ +\pagetotal
+ \ifdim\lastskip<\parskip+\parskip\fi
+ \ifsecondargument+#2\fi
+ \relax
+ \ifcase\testpagemethod
+ \ifdim\d_page_tests_test>.99\pagegoal
+ \penalty-\plustenthousand
+ \fi
+ \or
+ \ifdim\dimexpr\d_page_tests_test-\pagegoal\relax>-\lineheight
+ \penalty-\plustenthousand
+ \fi
+ \or
+ \getnoflines\pagegoal
+ \ifdim\dimexpr\d_page_tests_test-\noflines\lineheight\relax>-\lineheight
+ \penalty-\plustenthousand
+ \fi
+ \or % same as 0 but more accurate
+ \ifdim\dimexpr\d_page_tests_test-10\scaledpoint\relax>\pagegoal
+ \penalty-\plustenthousand
+ \fi
+ \fi
+ \else\ifnum\c_page_tests_mode=\plusthree
+ \page_tests_flush_so_far
+ \fi\fi
+ \else\ifnum\c_page_tests_mode=\plusone
+ \goodbreak
+ \fi\fi
\else
- \pagecheckerparameter\c!inbetween
+ \endgraf
\fi}
-\setvalue{\??pagecheckermethod 3}%
- {\ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \ifdim\dimexpr\page_check_amount-10\scaledpoint\relax>\pagegoal
- \pagecheckerparameter\c!before
- \penalty-\plustenthousand
- \pagecheckerparameter\c!after
- \else
- \pagecheckerparameter\c!inbetween
- \fi
+\def\page_tests_flush_so_far
+ {\endgraf
+ \ifdim\pagetotal>\pagegoal
+ \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
+ \goodbreak
\else
- \ifdim\pagetotal>\pagegoal
- \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
- \goodbreak
- \pagecheckerparameter\c!inbetween
- \else
- \pagecheckerparameter\c!before
- \page
- \pagecheckerparameter\c!after
- \fi
- \else
- \pagecheckerparameter\c!inbetween
- \fi
+ \page
\fi
- \else
- \pagecheckerparameter\c!inbetween
\fi}
-\definepagechecker[\s!unknown:0] [\c!method=0,\c!before=,\c!after=,\c!inbetween=]
-\definepagechecker[\s!unknown:1][\s!unknown:0][\c!method=1]
-\definepagechecker[\s!unknown:2][\s!unknown:0][\c!method=2]
-\definepagechecker[\s!unknown:3][\s!unknown:0][\c!method=3]
-
-\def\page_tests_test_a[#1][#2]{\normalexpanded{\checkpage[\s!unknown:1][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
-\def\page_tests_test_b[#1][#2]{\normalexpanded{\checkpage[\s!unknown:2][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
-\def\page_tests_test_c[#1][#2]{\normalexpanded{\checkpage[\s!unknown:3][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
-
-\unexpanded\def\testpage {\dodoubleempty\page_tests_test_a} %
-\unexpanded\def\testpageonly{\dodoubleempty\page_tests_test_b} % no penalties added to the mvl
-\unexpanded\def\testpagesync{\dodoubleempty\page_tests_test_c} % force sync
-
%D Test column breaks.
\unexpanded\def\testcolumn
diff --git a/tex/context/base/page-lay.mkiv b/tex/context/base/page-lay.mkiv
index 19f237242..81eb0423c 100644
--- a/tex/context/base/page-lay.mkiv
+++ b/tex/context/base/page-lay.mkiv
@@ -1026,12 +1026,12 @@
\unexpanded\def\startlayout[#1]%
{\page
- \globalpushmacro\currentlayout
+ \pushmacro\currentlayout
\doiflayoutdefinedelse{#1}{\setuplayout[#1]}\donothing} % {\setuplayout[\currentlayout]}}
\unexpanded\def\stoplayout
{\page
- \globalpopmacro\currentlayout
+ \popmacro\currentlayout
\setuplayout[\currentlayout]}
% NOG EENS NAGAAN WANNEER NU GLOBAL EN WANNEER NIET
@@ -1275,7 +1275,7 @@
{\globalpopmacro\currentlayout
\globalpopmacro\page_paper_restore
\page_paper_restore
- \setuplayout[\currentlayout]\relax} % explicit !
+ \setuplayout\relax}
%D \macros
%D {showprint, showframe, showlayout, showsetups}
diff --git a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua
index 66b7e4684..7e8e9ad8a 100644
--- a/tex/context/base/page-lin.lua
+++ b/tex/context/base/page-lin.lua
@@ -8,35 +8,31 @@ if not modules then modules = { } end modules ['page-lin'] = {
-- experimental -> will become builders
--- if there is demand for it, we can support multiple numbering streams
--- and use more than one attibute
+local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
-local next, tonumber = next, tonumber
+local report_lines = logs.reporter("lines")
-local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
+local attributes, nodes, node, context = attributes, nodes, node, context
-local report_lines = logs.reporter("lines")
+nodes.lines = nodes.lines or { }
+local lines = nodes.lines
-local attributes = attributes
-local nodes = nodes
-local context = context
+lines.data = lines.data or { } -- start step tag
+local data = lines.data
+local last = #data
-nodes.lines = nodes.lines or { }
-local lines = nodes.lines
+local texgetbox = tex.getbox
-lines.data = lines.data or { } -- start step tag
-local data = lines.data
-local last = #data
+lines.scratchbox = lines.scratchbox or 0
-lines.scratchbox = lines.scratchbox or 0
+local leftmarginwidth = nodes.leftmarginwidth
-storage.register("lines/data", data, "nodes.lines.data")
+storage.register("lines/data", lines.data, "nodes.lines.data")
-local variables = interfaces.variables
+-- if there is demand for it, we can support multiple numbering streams
+-- and use more than one attribute
-local v_next = variables.next
-local v_page = variables.page
-local v_no = variables.no
+local variables = interfaces.variables
local nodecodes = nodes.nodecodes
@@ -53,25 +49,12 @@ local current_list = { }
local cross_references = { }
local chunksize = 250 -- not used in boxed
-local nuts = nodes.nuts
-
-local getid = nuts.getid
-local getnext = nuts.getnext
-local getattr = nuts.getattr
-local getlist = nuts.getlist
-local getbox = nuts.getbox
-local getfield = nuts.getfield
-
-local setfield = nuts.setfield
-
-local traverse_id = nuts.traverse_id
-local traverse = nuts.traverse
-local copy_node = nuts.copy
-local hpack_node = nuts.hpack
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local is_display_math = nuts.is_display_math
-local leftmarginwidth = nuts.leftmarginwidth
+local traverse_id = node.traverse_id
+local traverse = node.traverse
+local copy_node = node.copy
+local hpack_node = node.hpack
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
-- cross referencing
@@ -84,16 +67,16 @@ end
local function resolve(n,m) -- we can now check the 'line' flag (todo)
while n do
- local id = getid(n)
+ local id = n.id
if id == whatsit_code then -- why whatsit
- local a = getattr(n,a_linereference)
+ local a = n[a_linereference]
if a then
cross_references[a] = m
end
elseif id == hlist_code or id == vlist_code then
- resolve(getlist(n),m)
+ resolve(n.list,m)
end
- n = getnext(n)
+ n = n.next
end
end
@@ -182,20 +165,20 @@ local function check_number(n,a,skip,sameline)
if sameline then
skipflag = 0
if trace_numbers then
- report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
+ report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
end
elseif not skip and s % d.step == 0 then
skipflag, d.start = 1, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
+ report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
end
else
skipflag, d.start = 0, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
+ report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
end
end
- context.makelinenumber(tag,skipflag,s,getfield(n,"shift"),getfield(n,"width"),leftmarginwidth(getlist(n)),getfield(n,"dir"))
+ context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir)
end
end
@@ -206,26 +189,26 @@ end
local function identify(list)
if list then
for n in traverse_id(hlist_code,list) do
- if getattr(n,a_linenumber) then
+ if n[a_linenumber] then
return list
end
end
local n = list
while n do
- local id = getid(n)
+ local id = n.id
if id == hlist_code or id == vlist_code then
- local ok = identify(getlist(n))
+ local ok = identify(n.list)
if ok then
return ok
end
end
- n = getnext(n)
+ n = n.next
end
end
end
function boxed.stage_zero(n)
- return identify(getlist(getbox(n)))
+ return identify(texgetbox(n).list)
end
-- reset ranges per page
@@ -234,39 +217,39 @@ end
function boxed.stage_one(n,nested)
current_list = { }
- local box = getbox(n)
+ local box = texgetbox(n)
if box then
- local list = getlist(box)
+ local list = box.list
if nested then
list = identify(list)
end
local last_a, last_v, skip = nil, -1, false
for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found
- if getfield(n,"height") == 0 and getfield(n,"depth") == 0 then
+ if n.height == 0 and n.depth == 0 then
-- skip funny hlists -- todo: check line subtype
else
- local list = getlist(n)
- local a = getattr(list,a_linenumber)
+ local list = n.list
+ local a = list[a_linenumber]
if a and a > 0 then
if last_a ~= a then
local da = data[a]
local ma = da.method
- if ma == v_next then
+ if ma == variables.next then
skip = true
- elseif ma == v_page then
+ elseif ma == variables.page then
da.start = 1 -- eventually we will have a normal counter
end
last_a = a
if trace_numbers then
- report_lines("starting line number range %s: start %s, continue %s",a,da.start,da.continue or v_no)
+ report_lines("starting line number range %s: start %s, continue",a,da.start,da.continue or "no")
end
end
- if getattr(n,a_displaymath) then
- if is_display_math(n) then
+ if n[a_displaymath] then
+ if nodes.is_display_math(n) then
check_number(n,a,skip)
end
else
- local v = getattr(list,a_verbatimline)
+ local v = list[a_verbatimline]
if not v or v ~= last_v then
last_v = v
check_number(n,a,skip)
@@ -285,7 +268,7 @@ function boxed.stage_two(n,m)
if #current_list > 0 then
m = m or lines.scratchbox
local t, tn = { }, 0
- for l in traverse_id(hlist_code,getlist(getbox(m))) do
+ for l in traverse_id(hlist_code,texgetbox(m).list) do
tn = tn + 1
t[tn] = copy_node(l)
end
@@ -293,8 +276,7 @@ function boxed.stage_two(n,m)
local li = current_list[i]
local n, m, ti = li[1], li[2], t[i]
if ti then
- setfield(ti,"next",getlist(n))
- setfield(n,"list",ti)
+ ti.next, n.list = n.list, ti
resolve(n,m)
else
report_lines("error in linenumbering (1)")
diff --git a/tex/context/base/page-mak.mkvi b/tex/context/base/page-mak.mkvi
index c910f281d..71af520a1 100644
--- a/tex/context/base/page-mak.mkvi
+++ b/tex/context/base/page-mak.mkvi
@@ -91,60 +91,13 @@
\def\page_makeup_start_yes[#name]% [#settings]%
{\doifelsecommandhandler\??makeup{#name}\page_makeup_start_indeed\page_makeup_start_nop[#name]}%
-% case 1:
-%
-% \setuplayout[height=5cm]
-%
-% case 2:
-%
-% \definelayout[crap][height=10cm]
-% \definelayout[standard][crap]
-%
-% case 3:
-%
-% \setuplayout[standard][height=15cm]
-%
-% case 4:
-%
-% \definelayout[whatever][height=2cm]
-% \setuplayout[whatever]
-
\def\page_makeup_start_indeed[#name][#settings]%
- {% the next grouping hack is somewhat messy:
- \begingroup
- % we need to figure out the current layout
- \xdef\m_page_makeup_name{#name}%
- \let\currentmakeup\m_page_makeup_name
- \let\currentlayout\m_page_makeup_name
- \xdef\m_page_makeup_layout_parent{\layoutparameter\s!parent}%
- \setupcurrentmakeup[#settings]%
- \edef\p_page{\makeupparameter\c!page}%
- \ifx\p_page\empty
- \endgroup
- \page % new, so best not have dangling mess here like references (we could capture then and flush embedded)
- \else\ifx\p_page\v!no
- % nothing
- \endgroup
- \else
- \endgroup
- \page[\p_page]%
- \fi\fi
- % some dirty trickery (sorry) for determining if we have
- % - a layout definition at all
- % - inherit from the parent of that definition
- % - inherit from the current layout otherwise
- \ifx\m_page_makeup_name\currentlayout
- % we already use the layout
- \else\ifx\m_page_makeup_layout_parent\??layout
- % we inherit from the current layout
- \normalexpanded{\setuplayout[#name][\s!parent=\??layout\currentlayout]}% is remembered but checked later anyway
- % \else
- % we have an inherited layout
- \fi\fi
+ {\doifelsenothing{\namedmakeupparameter{#name}\c!page}
+ {\page}% new, so best not have dangling mess here like references (we could capture then and flush embedded)
+ {\page[\namedmakeupparameter{#name}\c!page]}%
\startlayout[#name]% includes \page
\bgroup
- %\edef\currentmakeup{#name}%
- \let\currentmakeup\m_page_makeup_name
+ \edef\currentmakeup{#name}%
\setupcurrentmakeup[#settings]%
\setsystemmode\v!makeup
\the\t_page_makeup_every_setup
@@ -184,12 +137,7 @@
\fi \fi
\strc_pagenumbers_page_state_pop % new
\egroup
- \stoplayout % includes \page
- \ifx\m_page_makeup_name\currentlayout
- \else\ifx\m_page_makeup_layout_parent\??layout
- \normalexpanded{\setuplayout[\m_page_makeup_name][\s!parent=\??layout]}% is remembered but checked later anyway
- % \else
- \fi\fi}
+ \stoplayout} % includes \page
\setvalue{\??makeupdoublesided\v!yes}%
{\emptyhbox
@@ -236,7 +184,6 @@
\c!headerstate=\v!stop,
\c!footerstate=\v!stop,
\c!pagestate=\v!stop] % in manual ! ! !
-% \c!pagestate=\v!start]
\definemakeup
[\v!standard]
diff --git a/tex/context/base/page-mix.lua b/tex/context/base/page-mix.lua
index 30a1fdccd..7d13d9e4e 100644
--- a/tex/context/base/page-mix.lua
+++ b/tex/context/base/page-mix.lua
@@ -15,73 +15,46 @@ if not modules then modules = { } end modules ["page-mix"] = {
local concat = table.concat
+local nodecodes = nodes.nodecodes
+local gluecodes = nodes.gluecodes
+local nodepool = nodes.pool
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local insert_code = nodecodes.ins
+local mark_code = nodecodes.mark
+
+local new_hlist = nodepool.hlist
+local new_vlist = nodepool.vlist
+local new_glue = nodepool.glue
+
+local hpack = node.hpack
+local vpack = node.vpack
+local freenode = node.free
+local concatnodes = nodes.concat
+
+local texgetbox = tex.getbox
+local texsetbox = tex.setbox
+local texgetskip = tex.getskip
+
+local points = number.points
+
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local variables = interfaces.variables
+local v_yes = variables.yes
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_columns = variables.columns
+
local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end)
local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end)
local report_state = logs.reporter("mixed columns")
-local nodecodes = nodes.nodecodes
-local gluecodes = nodes.gluecodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local insert_code = nodecodes.ins
-local mark_code = nodecodes.mark
-local rule_code = nodecodes.rule
-
-local topskip_code = gluecodes.topskip
-local lineskip_code = gluecodes.lineskip
-local baselineskip_code = gluecodes.baselineskip
-local userskip_code = gluecodes.userskip
-
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local nodetostring = nuts.tostring
-local listtoutf = nodes.listtoutf
-
-local hpack = nuts.hpack
-local vpack = nuts.vpack
-local freenode = nuts.free
-local concatnodes = nuts.concat
-local slidenodes = nuts.slide -- ok here as we mess with prev links intermediately
-local traversenodes = nuts.traverse
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getsubtype = nuts.getsubtype
-local getbox = nuts.getbox
-local setbox = nuts.setbox
-local getskip = nuts.getskip
-local getattribute = nuts.getattribute
-
-local nodepool = nuts.pool
-
-local new_hlist = nodepool.hlist
-local new_vlist = nodepool.vlist
-local new_glue = nodepool.glue
-
-local points = number.points
-
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local variables = interfaces.variables
-local v_yes = variables.yes
-local v_global = variables["global"]
-local v_local = variables["local"]
-local v_columns = variables.columns
-local v_fixed = variables.fixed
-local v_auto = variables.auto
-local v_none = variables.none
-local v_more = variables.more
-local v_less = variables.less
-
pagebuilders = pagebuilders or { }
pagebuilders.mixedcolumns = pagebuilders.mixedcolumns or { }
local mixedcolumns = pagebuilders.mixedcolumns
@@ -104,13 +77,13 @@ local function collectinserts(result,nxt,nxtid)
local inserts, currentskips, nextskips, inserttotal = { }, 0, 0, 0
while nxt do
if nxtid == insert_code then
- inserttotal = inserttotal + getfield(nxt,"height") + getfield(nxt,"depth")
- local s = getsubtype(nxt)
+ inserttotal = inserttotal + nxt.height + nxt.depth
+ local s = nxt.subtype
local c = inserts[s]
if not c then
c = { }
inserts[s] = c
- local width = getfield(getskip(s),"width")
+ local width = texgetskip(s).width
if not result.inserts[s] then
currentskips = currentskips + width
end
@@ -127,9 +100,9 @@ local function collectinserts(result,nxt,nxtid)
else
break
end
- nxt = getnext(nxt)
+ nxt = nxt.next
if nxt then
- nxtid = getid(nxt)
+ nxtid = nxt.id
else
break
end
@@ -155,30 +128,30 @@ end
local function discardtopglue(current,discarded)
local size = 0
while current do
- local id = getid(current)
+ local id = current.id
if id == glue_code then
- size = size + getfield(getfield(current,"spec"),"width")
+ size = size + current.spec.width
discarded[#discarded+1] = current
- current = getnext(current)
+ current = current.next
elseif id == penalty_code then
- if getfield(current,"penalty") == forcedbreak then
+ if current.penalty == forcedbreak then
discarded[#discarded+1] = current
- current = getnext(current)
- while current and getid(current) == glue_code do
- size = size + getfield(getfield(current,"spec"),"width")
+ current = current.next
+ while current and current.id == glue_code do
+ size = size + current.spec.width
discarded[#discarded+1] = current
- current = getnext(current)
+ current = current.next
end
else
discarded[#discarded+1] = current
- current = getnext(current)
+ current = current.next
end
else
break
end
end
if current then
- setfield(current,"prev",nil) -- prevent look back
+ current.prev = nil
end
return current, size
end
@@ -189,13 +162,13 @@ local function stripbottomglue(results,discarded)
local r = results[i]
local t = r.tail
while t and t ~= r.head do
- local prev = getprev(t)
+ local prev = t.prev
if not prev then
break
end
- local id = getid(t)
+ local id = t.id
if id == penalty_code then
- if getfield(t,"penalty") == forcedbreak then
+ if t.penalty == forcedbreak then
break
else
discarded[#discarded+1] = t
@@ -204,7 +177,7 @@ local function stripbottomglue(results,discarded)
end
elseif id == glue_code then
discarded[#discarded+1] = t
- local width = getfield(getfield(t,"spec"),"width")
+ local width = t.spec.width
if trace_state then
report_state("columns %s, discarded bottom glue %p",i,width)
end
@@ -228,21 +201,20 @@ local function setsplit(specification) -- a rather large function
report_state("fatal error, no box")
return
end
- local list = getbox(box)
+ local list = texgetbox(box)
if not list then
report_state("fatal error, no list")
return
end
- local head = getlist(list) or specification.originalhead
+ local head = list.head or specification.originalhead
if not head then
report_state("fatal error, no head")
return
end
- slidenodes(head) -- we can have set prev's to nil to prevent backtracking
local discarded = { }
local originalhead = head
- local originalwidth = specification.originalwidth or getfield(list,"width")
- local originalheight = specification.originalheight or getfield(list,"height")
+ local originalwidth = specification.originalwidth or list.width
+ local originalheight = specification.originalheight or list.height
local current = head
local skipped = 0
local height = 0
@@ -305,20 +277,20 @@ local function setsplit(specification) -- a rather large function
local current = start
-- first skip over glue and penalty
while current do
- local id = getid(current)
+ local id = current.id
if id == glue_code or id == penalty_code then
- current = getprev(current)
+ current = current.prev
else
break
end
end
-- then skip over content
while current do
- local id = getid(current)
+ local id = current.id
if id == glue_code or id == penalty_code then
break
else
- current = getprev(current)
+ current = current.prev
end
end
if not current then
@@ -352,7 +324,7 @@ local function setsplit(specification) -- a rather large function
if current == head then
result.tail = head
else
- result.tail = getprev(current)
+ result.tail = current.prev
end
result.height = height
result.depth = depth
@@ -372,9 +344,6 @@ local function setsplit(specification) -- a rather large function
report_state("setting collector to column %s",column)
end
current, skipped = discardtopglue(current,discarded)
- if trace_detail and skipped ~= 0 then
- report_state("check > column 1, discarded %p",skipped)
- end
head = current
return true, skipped
end
@@ -397,7 +366,7 @@ local function setsplit(specification) -- a rather large function
end
end
if trace_detail then
- report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p => %a (height %p, depth %p, skip %p)",
+ report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p, discarded %p => %a (height %p, depth %p, skip %p)",
where,curcol,delta,threshold,advance,total,target,state,skipped,height,depth,skip)
end
return state, skipped
@@ -418,7 +387,7 @@ local function setsplit(specification) -- a rather large function
head = current
local function process_skip(current,nxt)
- local advance = getfield(getfield(current,"spec"),"width")
+ local advance = current.spec.width
if advance ~= 0 then
local state, skipped = checked(advance,"glue")
if trace_state then
@@ -442,7 +411,7 @@ local function setsplit(specification) -- a rather large function
end
local function process_kern(current,nxt)
- local advance = getfield(current,"kern")
+ local advance = current.kern
if advance ~= 0 then
local state, skipped = checked(advance,"kern")
if trace_state then
@@ -465,10 +434,10 @@ local function setsplit(specification) -- a rather large function
local function process_rule(current,nxt)
-- simple variant of h|vlist
- local advance = getfield(current,"height") -- + getfield(current,"depth")
+ local advance = current.height -- + current.depth
local state, skipped = checked(advance+currentskips,"rule")
if trace_state then
- report_state("%-7s > column %s, state %a, rule, advance %p, height %p","rule",column,state,advance,inserttotal,height)
+ report_state("%-7s > column %s, state %a, rule, advance %p, height %p","line",column,state,advance,inserttotal,height)
if skipped ~= 0 then
report_state("%-7s > column %s, discarded %p","rule",column,skipped)
end
@@ -482,7 +451,7 @@ local function setsplit(specification) -- a rather large function
else
height = height + currentskips
end
- depth = getfield(current,"depth")
+ depth = current.depth
skip = 0
end
@@ -493,12 +462,12 @@ local function setsplit(specification) -- a rather large function
-- [chapter] [penalty] [section] [penalty] [first line]
local function process_penalty(current,nxt)
- local penalty = getfield(current,"penalty")
+ local penalty = current.penalty
if penalty == 0 then
lastlocked = nil
lastcurrent = nil
elseif penalty == forcedbreak then
- local needed = getattribute(current,a_checkedbreak)
+ local needed = current[a_checkedbreak]
local proceed = not needed or needed == 0
if not proceed then
local available = target - height
@@ -546,12 +515,12 @@ local function setsplit(specification) -- a rather large function
end
local function process_list(current,nxt)
- local nxtid = nxt and getid(nxt)
+ local nxtid = nxt and nxt.id
line = line + 1
local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0
- local advance = getfield(current,"height") -- + getfield(current,"depth")
+ local advance = current.height -- + current.depth
if trace_state then
- report_state("%-7s > column %s, content: %s","line",column,listtoutf(getlist(current),true,true))
+ report_state("%-7s > column %s, content: %s","line",column,listtoutf(current.list,true,true))
end
if nxt and (nxtid == insert_code or nxtid == mark_code) then
nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid)
@@ -572,7 +541,7 @@ local function setsplit(specification) -- a rather large function
else
height = height + currentskips
end
- depth = getfield(current,"depth")
+ depth = current.depth
skip = 0
if inserts then
-- so we already collect them ... makes backtracking tricky ... alternatively
@@ -586,8 +555,8 @@ local function setsplit(specification) -- a rather large function
while current do
- local id = getid(current)
- local nxt = getnext(current)
+ local id = current.id
+ local nxt = current.next
backtracked = false
@@ -660,7 +629,7 @@ local function setsplit(specification) -- a rather large function
specification.overflow = overflow
specification.discarded = discarded
- setfield(getbox(specification.box),"list",nil)
+ texgetbox(specification.box).list = nil
return specification
end
@@ -672,12 +641,12 @@ function mixedcolumns.finalize(result)
local r = results[i]
local h = r.head
if h then
- setfield(h,"prev",nil)
+ h.prev = nil
local t = r.tail
if t then
- setfield(t,"next",nil)
+ t.next = nil
else
- setfield(h,"next",nil)
+ h.next = nil
r.tail = h
end
for c, list in next, r.inserts do
@@ -686,13 +655,13 @@ function mixedcolumns.finalize(result)
local l = list[i]
local h = new_hlist()
t[i] = h
- setfield(h,"list",getfield(l,"head"))
- setfield(h,"height",getfield(l,"height"))
- setfield(h,"depth",getfield(l,"depth"))
- setfield(l,"head",nil)
+ h.head = l.head
+ h.height = l.height
+ h.depth = l.depth
+ l.head = nil
end
- setfield(t[1],"prev",nil) -- needs checking
- setfield(t[#t],"next",nil) -- needs checking
+ t[1].prev = nil -- needs checking
+ t[#t].next = nil -- needs checking
r.inserts[c] = t
end
end
@@ -764,13 +733,13 @@ function mixedcolumns.getsplit(result,n)
return new_glue(result.originalwidth)
end
- setfield(h,"prev",nil) -- move up
+ h.prev = nil -- move up
local strutht = result.strutht
local strutdp = result.strutdp
local lineheight = strutht + strutdp
local v = new_vlist()
- setfield(v,"list",h)
+ v.head = h
-- local v = vpack(h,"exactly",height)
@@ -792,14 +761,14 @@ function mixedcolumns.getsplit(result,n)
dp = result.depth
end
- setfield(v,"width",wd)
- setfield(v,"height",ht)
- setfield(v,"depth",dp)
+ v.width = wd
+ v.height = ht
+ v.depth = dp
if trace_state then
- local id = getid(h)
+ local id = h.id
if id == hlist_code then
- report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",listtoutf(getlist(h)))
+ report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",nodes.toutf(h.list))
else
report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"head node",nodecodes[id])
end
@@ -808,8 +777,8 @@ function mixedcolumns.getsplit(result,n)
for c, list in next, r.inserts do
local l = concatnodes(list)
local b = vpack(l) -- multiple arguments, todo: fastvpack
- -- setbox("global",c,b)
- setbox(c,b)
+ -- texsetbox("global",c,b)
+ texsetbox(c,b)
r.inserts[c] = nil
end
@@ -853,7 +822,7 @@ end
function commands.mixgetsplit(n)
if result then
- context(tonode(mixedcolumns.getsplit(result,n)))
+ context(mixedcolumns.getsplit(result,n))
end
end
@@ -865,13 +834,13 @@ end
function commands.mixflushrest()
if result then
- context(tonode(mixedcolumns.getrest(result)))
+ context(mixedcolumns.getrest(result))
end
end
function commands.mixflushlist()
if result then
- context(tonode(mixedcolumns.getlist(result)))
+ context(mixedcolumns.getlist(result))
end
end
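discardtopglue drops leading glue and break penalties so a new column starts with real content. A simplified standalone sketch of that step (it treats every penalty as discardable, unlike the forcedbreak test above); glue_code and penalty_code come from node.id, and glue widths are read via the spec field as in this code base.

local glue_code    = node.id("glue")
local penalty_code = node.id("penalty")

-- skip glue and penalties at the top of a column, remember them for later
-- freeing, and report the skipped height
local function discardtop(current,discarded)
    local size = 0
    while current do
        local id = current.id
        if id == glue_code then
            size = size + current.spec.width
            discarded[#discarded+1] = current
            current = current.next
        elseif id == penalty_code then
            discarded[#discarded+1] = current
            current = current.next
        else
            break
        end
    end
    if current then
        current.prev = nil -- prevent looking back past the new column head
    end
    return current, size
end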
diff --git a/tex/context/base/page-mix.mkiv b/tex/context/base/page-mix.mkiv
index d2bb38ca0..5d1c54a71 100644
--- a/tex/context/base/page-mix.mkiv
+++ b/tex/context/base/page-mix.mkiv
@@ -517,8 +517,7 @@
%D footnotes. Eventually we will have multiple strategies available.
\unexpanded\def\page_mix_routine_construct#1%
- {\d_page_mix_max_height\mixedcolumnsparameter\c!maxheight % can have changed due to header=high
- \ctxcommand{mixsetsplit {
+ {\ctxcommand{mixsetsplit {
box = \number\b_page_mix_collected,
nofcolumns = \number\c_page_mix_n_of_columns,
maxheight = \number\d_page_mix_max_height,
diff --git a/tex/context/base/page-mul.mkiv b/tex/context/base/page-mul.mkiv
index 73d84fe14..a874cd116 100644
--- a/tex/context/base/page-mul.mkiv
+++ b/tex/context/base/page-mul.mkiv
@@ -1605,11 +1605,9 @@
\else
\balancecolumnsfalse
\fi
- % % this won't work (blocked by check for overloading; too fuzzy anyway)
- % \installalign\v!yes {\page_columns_align_option_yes }% \stretchcolumnstrue \inheritcolumnsfalse
- % \installalign\v!no {\page_columns_align_option_no }% \stretchcolumnsfalse\inheritcolumnsfalse
- % \installalign\v!text{\page_columns_align_option_text}% \stretchcolumnsfalse\inheritcolumnstrue
- % %
+ \installalign\v!yes {\page_columns_align_option_yes }%
+ \installalign\v!no {\page_columns_align_option_no }%
+ \installalign\v!text{\page_columns_align_option_text}%
\stretchcolumnsfalse
\inheritcolumnstrue
\edef\p_align{\columnsparameter\c!align}%
diff --git a/tex/context/base/page-str.lua b/tex/context/base/page-str.lua
index f2ac27cd9..35ce85609 100644
--- a/tex/context/base/page-str.lua
+++ b/tex/context/base/page-str.lua
@@ -20,7 +20,7 @@ local tasks = nodes.tasks
local new_kern = nodepool.kern
local new_glyph = nodepool.glyph
-local slide_nodelist = node.slide
+local find_tail = node.slide
local write_node = node.write
local free_node = node.free
local copy_nodelist = node.copy_list
@@ -73,7 +73,7 @@ function streams.collect(head,where)
end
local last = dana[#dana]
if last then
- local tail = slide_nodelist(last)
+ local tail = find_tail(last)
tail.next, head.prev = head, tail
elseif last == false then
dana[#dana] = head
@@ -202,7 +202,7 @@ function streams.synchronize(list) -- this is an experiment !
else
-- this is not yet ok as we also need to keep an eye on vertical spacing
-- so we might need to do some splitting or whatever
- local tail = vbox.list and slide_nodelist(vbox.list)
+ local tail = vbox.list and find_tail(vbox.list)
local n, delta = 0, delta_height -- for tracing
while delta > 0 do
-- we need to add some interline penalties
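streams.collect appends newly gathered material to the tail of what a stream already holds, with node.slide as the tail finder. A minimal sketch of that append, assuming both arguments are heads of well formed node lists; the helper name is illustrative.

local find_tail = node.slide -- slides to the end and fixes prev pointers on the way

-- append the list starting at head to the list starting at first and
-- return the start of the combined list
local function appendlist(first,head)
    if not first then
        return head
    end
    local tail = find_tail(first)
    tail.next, head.prev = head, tail -- double link the junction, as in streams.collect
    return first
end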
diff --git a/tex/context/base/page-str.mkiv b/tex/context/base/page-str.mkiv
index a8fab9c6c..200a71377 100644
--- a/tex/context/base/page-str.mkiv
+++ b/tex/context/base/page-str.mkiv
@@ -29,6 +29,8 @@
%D
%D Remark: marknotes are gone, at least for a while.
+\writestatus{loading}{ConTeXt Page Macros / Page Streams}
+
\registerctxluafile{page-str}{1.001}
\unprotect
diff --git a/tex/context/base/publ-aut.lua b/tex/context/base/publ-aut.lua
deleted file mode 100644
index ba492a93b..000000000
--- a/tex/context/base/publ-aut.lua
+++ /dev/null
@@ -1,550 +0,0 @@
-if not modules then modules = { } end modules ['publ-aut'] = {
- version = 1.001,
- comment = "this module part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if not characters then
- dofile(resolvers.findfile("char-def.lua"))
- dofile(resolvers.findfile("char-ini.lua"))
-end
-
-local context = context
-local chardata = characters.data
-
-local tostring = tostring
-local concat = table.concat
-local lpeg = lpeg
-local utfchar = utf.char
-
-local publications = publications or { }
-
-local datasets = publications.datasets or { }
-publications.datasets = datasets
-
-publications.authors = publications.authors or { }
-local authors = publications.authors
-
-local P, C, V, Cs, Ct, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Ct, lpeg.match, lpeg.patterns
-
--- local function makesplitter(separator)
--- return Ct { "start",
--- start = (Cs((V("outer") + (1-separator))^1) + separator^1)^1,
--- start = Cs(V("outer")) + (Cs((V("inner") + (1-separator))^1) + separator^1)^1,
--- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^0) * (P("}")/""),
--- inner = P("{") * ((V("inner") + P(1-P("}")))^0) * P("}"),
--- }
--- end
-
-local space = P(" ")
-local comma = P(",")
-local firstcharacter = lpegpatterns.utf8byte
-
--- local andsplitter = lpeg.tsplitat(space^1 * "and" * space^1)
--- local commasplitter = lpeg.tsplitat(space^0 * comma * space^0)
--- local spacesplitter = lpeg.tsplitat(space^1)
-
-local p_and = space^1 * "and" * space^1
-local p_comma = space^0 * comma * space^0
-local p_space = space^1
-
-local andsplitter = Ct { "start",
- start = (Cs((V("inner") + (1-p_and))^1) + p_and)^1,
- inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
-}
-
-local commasplitter = Ct { "start",
- start = Cs(V("outer")) + (Cs((V("inner") + (1-p_comma))^1) + p_comma)^1,
- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
- inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
-}
-
-local spacesplitter = Ct { "start",
- start = Cs(V("outer")) + (Cs((V("inner") + (1-p_space))^1) + p_space)^1,
- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
- inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
-}
-
-local function is_upper(str)
- local first = lpegmatch(firstcharacter,str)
- local okay = chardata[first]
- return okay and okay.category == "lu"
-end
-
-local cache = { } -- 33% reuse on tugboat.bib
-local nofhits = 0
-local nofused = 0
-
-local function splitauthorstring(str)
- if not str then
- return
- end
- nofused = nofused + 1
- local authors = cache[str]
- if authors then
- -- hit 1
- -- print("hit 1",author,nofhits,nofused,math.round(100*nofhits/nofused))
- return { authors } -- we assume one author
- end
- local authors = lpegmatch(andsplitter,str)
- for i=1,#authors do
- local author = authors[i]
- local detail = cache[author]
- if detail then
- -- hit 2
- -- print("hit 2",author,nofhits,nofused,math.round(100*nofhits/nofused))
- end
- if not detail then
- local firstnames, vons, surnames, initials, juniors
- local split = lpegmatch(commasplitter,author)
--- inspect(split)
- local n = #split
- if n == 1 then
- -- First von Last
- local words = lpegmatch(spacesplitter,author)
- firstnames, vons, surnames = { }, { }, { }
- local i, n = 1, #words
- while i <= n do
- local w = words[i]
- if is_upper(w) then
- firstnames[#firstnames+1], i = w, i + 1
- else
- break
- end
- end
- while i <= n do
- local w = words[i]
- if is_upper(w) then
- break
- else
- vons[#vons+1], i = w, i + 1
- end
- end
- if i <= n then
- while i <= n do
- surnames[#surnames+1], i = words[i], i + 1
- end
- elseif #vons == 0 then
- surnames[1] = firstnames[#firstnames]
- firstnames[#firstnames] = nil
- else
- -- mess
- end
- -- safeguard
- if #surnames == 0 then
- firstnames = { }
- vons = { }
- surnames = { author }
- end
- elseif n == 2 then
- -- von Last, First
- firstnames, vons, surnames = { }, { }, { }
- local words = lpegmatch(spacesplitter,split[1])
- local i, n = 1, #words
- while i <= n do
- local w = words[i]
- if is_upper(w) then
- break
- else
- vons[#vons+1], i = w, i + 1
- end
- end
- while i <= n do
- surnames[#surnames+1], i = words[i], i + 1
- end
- --
- local words = lpegmatch(spacesplitter,split[2])
- local i, n = 1, #words
- while i <= n do
- local w = words[i]
- if is_upper(w) then
- firstnames[#firstnames+1], i = w, i + 1
- else
- break
- end
- end
- while i <= n do
- vons[#vons+1], i = words[i], i + 1
- end
- else
- -- von Last, Jr ,First
- firstnames = lpegmatch(spacesplitter,split[1])
- juniors = lpegmatch(spacesplitter,split[2])
- surnames = lpegmatch(spacesplitter,split[3])
- if n > 3 then
- -- error
- end
- end
- if #surnames == 0 then
- surnames[1] = firstnames[#firstnames]
- firstnames[#firstnames] = nil
- end
- if firstnames then
- initials = { }
- for i=1,#firstnames do
- initials[i] = utfchar(lpegmatch(firstcharacter,firstnames[i]))
- end
- end
- detail = {
- original = author,
- firstnames = firstnames,
- vons = vons,
- surnames = surnames,
- initials = initials,
- juniors = juniors,
- }
- cache[author] = detail
- nofhits = nofhits + 1
- end
- authors[i] = detail
- end
- return authors
-end
-
--- local function splitauthors(dataset,tag,field)
--- local entries = datasets[dataset]
--- local luadata = entries.luadata
--- if not luadata then
--- return { }
--- end
--- local entry = luadata[tag]
--- if not entry then
--- return { }
--- end
--- return splitauthorstring(entry[field])
--- end
-
-local function the_initials(initials,symbol)
- local t, symbol = { }, symbol or "."
- for i=1,#initials do
- t[i] = initials[i] .. symbol
- end
- return t
-end
-
--- authors
-
-local settings = { }
-
--- local defaultsettings = {
--- firstnamesep = " ",
--- vonsep = " ",
--- surnamesep = " ",
--- juniorsep = " ",
--- surnamejuniorsep = ", ",
--- juniorjuniorsep = ", ",
--- surnamefirstnamesep = ", ",
--- surnameinitialsep = ", ",
--- namesep = ", ",
--- lastnamesep = " and ",
--- finalnamesep = " and ",
--- etallimit = 1000,
--- etaldisplay = 1000,
--- etaltext = "",
--- }
-
-local defaultsettings = {
- firstnamesep = [[\btxlistvariantparameter{firstnamesep}]],
- vonsep = [[\btxlistvariantparameter{vonsep}]],
- surnamesep = [[\btxlistvariantparameter{surnamesep}]],
- juniorsep = [[\btxlistvariantparameter{juniorsep}]],
- surnamejuniorsep = [[\btxlistvariantparameter{surnamejuniorsep}]],
- juniorjuniorsep = [[\btxlistvariantparameter{juniorjuniorsep}]],
- surnamefirstnamesep = [[\btxlistvariantparameter{surnamefirstnamesep}]],
- surnameinitialsep = [[\btxlistvariantparameter{surnameinitialsep}]],
- namesep = [[\btxlistvariantparameter{namesep}]],
- lastnamesep = [[\btxlistvariantparameter{lastnamesep}]],
- finalnamesep = [[\btxlistvariantparameter{finalnamesep}]],
- --
- etaltext = [[\btxlistvariantparameter{etaltext}]],
- --
- etallimit = 1000,
- etaldisplay = 1000,
-}
-
-function authors.setsettings(s)
-end
-
-authors.splitstring = splitauthorstring
-
--- [firstnames] [firstnamesep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (Taco, von Hoekwater, jr)
-
-function authors.normal(author,settings)
- local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors
- local result, settings = { }, settings or defaultsettings
- if firstnames and #firstnames > 0 then
- result[#result+1] = concat(firstnames," ")
- result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep
- end
- if vons and #vons > 0 then
- result[#result+1] = concat(vons," ")
- result[#result+1] = settings.vonsep or defaultsettings.vonsep
- end
- if surnames and #surnames > 0 then
- result[#result+1] = concat(surnames," ")
- if juniors and #juniors > 0 then
- result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
- result[#result+1] = concat(juniors," ")
- end
- elseif juniors and #juniors > 0 then
- result[#result+1] = concat(juniors," ")
- end
- return concat(result)
-end
-
--- [initials] [initialsep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (T, von Hoekwater, jr)
-
-function authors.normalshort(author,settings)
- local initials, vons, surnames, juniors = author.initials, author.vons, author.surnames, author.juniors
- local result, settings = { }, settings or defaultsettings
- if initials and #initials > 0 then
- result[#result+1] = concat(initials," ")
- result[#result+1] = settings.initialsep or defaultsettings.initialsep
- end
- if vons and #vons > 0 then
- result[#result+1] = concat(vons," ")
- result[#result+1] = settings.vonsep or defaultsettings.vonsep
- end
- if surnames and #surnames > 0 then
- result[#result+1] = concat(surnames," ")
- if juniors and #juniors > 0 then
- result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
- result[#result+1] = concat(juniors," ")
- end
- elseif juniors and #juniors > 0 then
- result[#result+1] = concat(juniors," ")
- end
- return concat(result)
-end
-
--- vons surnames juniors, firstnames
-
--- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [firstnames] (von Hoekwater jr, Taco)
-
-function authors.inverted(author,settings)
- local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors
- local result, settings = { }, settings or defaultsettings
- if vons and #vons > 0 then
- result[#result+1] = concat(vons," ")
- result[#result+1] = settings.vonsep or defaultsettings.vonsep
- end
- if surnames and #surnames > 0 then
- result[#result+1] = concat(surnames," ")
- if juniors and #juniors > 0 then
- result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
- result[#result+1] = concat(juniors," ")
- end
- elseif juniors and #juniors > 0 then
- result[#result+1] = concat(juniors," ")
- end
- if firstnames and #firstnames > 0 then
- result[#result+1] = settings.surnamefirstnamesep or defaultsettings.surnamefirstnamesep
- result[#result+1] = concat(firstnames," ")
- end
- return concat(result)
-end
-
--- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [initials] (von Hoekwater jr, T)
-
-function authors.invertedshort(author,settings)
- local vons, surnames, initials, juniors = author.vons, author.surnames, author.initials, author.juniors
- local result, settings = { }, settings or defaultsettings
- if vons and #vons > 0 then
- result[#result+1] = concat(vons," ")
- result[#result+1] = settings.vonsep or defaultsettings.vonsep
- end
- if surnames and #surnames > 0 then
- result[#result+1] = concat(surnames," ")
- if juniors and #juniors > 0 then
- result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
- result[#result+1] = concat(juniors," ")
- end
- elseif juniors and #juniors > 0 then
- result[#result+1] = concat(juniors," ")
- end
- if initials and #initials > 0 then
- result[#result+1] = settings.surnameinitialsep or defaultsettings.surnameinitialsep
- result[#result+1] = concat(the_initials(initials)," ")
- end
- return concat(result)
-end
-
-local lastconcatsize = 1
-
-local function concatnames(t,settings)
- local namesep = settings.namesep
- local lastnamesep = settings.lastnamesep
- local finalnamesep = settings.finalnamesep
- lastconcatsize = #t -- also update the module level value declared above
- if lastconcatsize > 2 then
- local s = { }
- for i=1,lastconcatsize-2 do
- s[i] = t[i] .. namesep
- end
- s[lastconcatsize-1], s[lastconcatsize] = t[lastconcatsize-1] .. finalnamesep, t[lastconcatsize]
- return concat(s)
- elseif lastconcatsize > 1 then
- return concat(t,lastnamesep)
- elseif lastconcatsize > 0 then
- return t[1]
- else
- return ""
- end
-end
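
-- A quick sketch of the separator logic, assuming plain string separators
-- instead of the \btxlistvariantparameter defaults (settings table made up):

local demo_seps = { namesep = ", ", lastnamesep = " and ", finalnamesep = " and " }
-- concatnames({ "Hagen" },                       demo_seps) -- "Hagen"
-- concatnames({ "Hagen", "Hoekwater" },          demo_seps) -- "Hagen and Hoekwater"
-- concatnames({ "Hagen", "Hoekwater", "Otten" }, demo_seps) -- "Hagen, Hoekwater and Otten"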
-
-function authors.concat(dataset,tag,field,settings)
- table.setmetatableindex(settings,defaultsettings)
- local combiner = settings.combiner
- if not combiner or type(combiner) == "string" then
- combiner = authors[combiner or "normal"] or authors.normal
- end
- local split = datasets[dataset].details[tag][field]
- local etallimit = settings.etallimit or 1000
- local etaldisplay = settings.etaldisplay or etallimit
- local total = split and #split or 0
- local max = total
- if max == 0 then
- -- error
- end
- if max > etallimit and etaldisplay < max then
- max = etaldisplay
- end
- local combined = { }
- for i=1,max do
- combined[i] = combiner(split[i],settings)
- end
- local result = concatnames(combined,settings)
- if total <= max then
- return result
- else
- return result .. settings.etaltext
- end
-end
-
-function commands.btxauthor(...)
- context(authors.concat(...))
-end
-
-function authors.short(author,year)
- -- todo
--- local result = { }
--- if author then
--- local authors = splitauthors(author)
--- for a=1,#authors do
--- local aa = authors[a]
--- local initials = aa.initials
--- for i=1,#initials do
--- result[#result+1] = initials[i]
--- end
--- local surnames = aa.surnames
--- for s=1,#surnames do
--- result[#result+1] = utfchar(lpegmatch(firstcharacter,surnames[s]))
--- end
--- end
--- end
--- if year then
--- result[#result+1] = year
--- end
--- return concat(result)
-end
-
--- We can consider creating a hashtable key -> entry but I wonder if
--- it pays off.
-
-local compare = sorters.comparers.basic -- (a,b)
-local strip = sorters.strip
-local splitter = sorters.splitters.utf
-
-function authors.preparedsort(dataset,list,sorttype_a,sorttype_b,sorttype_c)
- local luadata = datasets[dataset].luadata
- local details = datasets[dataset].details
- local valid = { }
- local splitted = { }
- table.setmetatableindex(splitted,function(t,k) -- could be done in the sorter but seldom that many shared
- local v = splitter(k,true) -- in other cases
- t[k] = v
- return v
- end)
- local snippets = { }
- for i=1,#list do
- -- either { tag, tag, ... } or { { tag, index }, { tag, index } }
- local li = list[i]
- local tag = type(li) == "string" and li or li[1]
- local entry = luadata[tag]
- local detail = details[tag]
- local suffix = tostring(i)
- local year = nil
- local assembled = nil
- if entry and detail then
- local key = detail[sorttype_a] or detail[sorttype_b] or detail[sorttype_c]
- if key then
- -- maybe an option is to also sort the authors first
- local n = #key
- local s = 0
- for i=1,n do
- local k = key[i]
- local vons = k.vons
- local surnames = k.surnames
- local initials = k.initials
- if vons and #vons > 0 then
- s = s + 1 ; snippets[s] = concat(vons," ")
- end
- if surnames and #surnames > 0 then
- s = s + 1 ; snippets[s] = concat(surnames," ")
- end
- if initials and #initials > 0 then
- s = s + 1 ; snippets[s] = concat(initials," ")
- end
- end
- assembled = concat(snippets," ",1,s)
- else
- assembled = ""
- end
- year = entry.year or "9998"
- else
- assembled = ""
- year = "9999"
- end
- valid[i] = {
- index = i,
- split = {
- splitted[strip(assembled)],
- splitted[year],
- splitted[suffix],
- },
--- names = assembled,
--- year = year,
--- suffix = suffix,
- }
- end
- return valid
-end
-
-function authors.sorted(dataset,list,sorttype) -- experimental
- local valid = authors.preparedsort(dataset,list,sorttype)
- if #valid == 0 or #valid ~= #list then
- return list
- else
- sorters.sort(valid,compare)
- for i=1,#valid do
- valid[i] = valid[i].index
- end
- return valid
- end
-end
-
--- local dataset = publications.datasets.test
---
--- local function add(str)
--- dataset.details[str] = { author = publications.authors.splitstring(str) }
--- end
---
--- add("Hagen, Hans and Hoekwater, Taco Whoever T. Ex. and Henkel Hut, Hartmut Harald von der")
--- add("Hans Hagen and Taco Whoever T. Ex. Hoekwater and Hartmut Harald von der Henkel Hut")
--- add("de Gennes, P. and Gennes, P. de")
--- add("van't Hoff, J. H. and {van't Hoff}, J. H.")
---
--- local list = table.keys(dataset.details)
--- local sort = publications.authors.sorted("test",list,"author")
--- local test = { } for i=1,#sort do test[i] = dataset.details[list[sort[i]]] end
diff --git a/tex/context/base/publ-dat.lua b/tex/context/base/publ-dat.lua
deleted file mode 100644
index 8fce94822..000000000
--- a/tex/context/base/publ-dat.lua
+++ /dev/null
@@ -1,529 +0,0 @@
-if not modules then modules = { } end modules ['publ-dat'] = {
- version = 1.001,
- comment = "this module part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: strip the @ in the lpeg instead of on do_definition and do_shortcut
--- todo: store bibroot and bibrootdt
-
---[[ldx--
-This is a prelude to integrated bibliography support. This file just loads
-bibtex files and converts them to xml so that we can access the content
-in a convenient way. Actually handling the data takes place elsewhere.
---ldx]]--
-
-if not characters then
- dofile(resolvers.findfile("char-def.lua"))
- dofile(resolvers.findfile("char-ini.lua"))
- dofile(resolvers.findfile("char-tex.lua"))
-end
-
-local chardata = characters.data
-local lowercase = characters.lower
-
-local lower, gsub, concat = string.lower, string.gsub, table.concat
-local next, type = next, type
-local utfchar = utf.char
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local textoutf = characters and characters.tex.toutf
-local settings_to_hash, settings_to_array = utilities.parsers.settings_to_hash, utilities.parsers.settings_to_array
-local formatters = string.formatters
-local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
-local xmlcollected, xmltext, xmlconvert = xml.collected, xml.text, xml.convert
-local setmetatableindex = table.setmetatableindex
-
--- todo: more allocate
-
-local P, R, S, V, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
-
-local trace = false trackers.register("publications", function(v) trace = v end)
-local report = logs.reporter("publications")
-
-publications = publications or { }
-local publications = publications
-
-local datasets = publications.datasets or { }
-publications.datasets = datasets
-
-publications.statistics = publications.statistics or { }
-local publicationsstats = publications.statistics
-
-publicationsstats.nofbytes = 0
-publicationsstats.nofdefinitions = 0
-publicationsstats.nofshortcuts = 0
-publicationsstats.nofdatasets = 0
-
-local xmlplaceholder = "<?xml version='1.0' standalone='yes' ?>\n<bibtex></bibtex>"
-
-local defaultshortcuts = {
- jan = "1",
- feb = "2",
- mar = "3",
- apr = "4",
- may = "5",
- jun = "6",
- jul = "7",
- aug = "8",
- sep = "9",
- oct = "10",
- nov = "11",
- dec = "12",
-}
-
-function publications.new(name)
- publicationsstats.nofdatasets = publicationsstats.nofdatasets + 1
- local dataset = {
- name = name or "dataset " .. publicationsstats.nofdatasets,
- nofentries = 0,
- shortcuts = { },
- luadata = { },
- xmldata = xmlconvert(xmlplaceholder),
- -- details = { },
- nofbytes = 0,
- entries = nil, -- empty == all
- sources = { },
- loaded = { },
- fields = { },
- userdata = { },
- used = { },
- commands = { }, -- for statistical purposes
- status = {
- resources = false,
- userdata = false,
- },
- }
- setmetatableindex(dataset,function(t,k)
- -- will become a plugin
- if k == "details" and publications.enhance then
- dataset.details = { }
- publications.enhance(dataset.name)
- return dataset.details
- end
- end)
- return dataset
-end
-
-function publications.markasupdated(name)
- if type(name) == "string" then
- datasets[name].details = nil
- else
- name.details = nil
- end
-end
-
-setmetatableindex(datasets,function(t,k)
- if type(k) == "table" then
- return k -- so we can use this accessor as checker
- else
- local v = publications.new(k)
- datasets[k] = v
- return v
- end
-end)
-
--- we apply some normalization
-
-local space = S(" \t\n\r\f") -- / " "
-
------ command = P("\\") * Cc("btxcmd{") * (R("az","AZ")^1) * Cc("}")
------ command = P("\\") * (Carg(1) * C(R("az","AZ")^1) / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
-local command = P("\\") * (Carg(1) * C(R("az","AZ")^1) * space^0 / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
-local somemath = P("$") * ((1-P("$"))^1) * P("$") -- let's not assume nested math
-local any = P(1)
-local done = P(-1)
-local one_l = P("{") / ""
-local one_r = P("}") / ""
-local two_l = P("{{") / ""
-local two_r = P("}}") / ""
-local special = P("#") / "\\letterhash"
-
-local filter_0 = S('\\{}')
-local filter_1 = (1-filter_0)^0 * filter_0
-local filter_2 = Cs(
--- {{...}} ... {{...}}
--- two_l * (command + special + any - two_r - done)^0 * two_r * done +
--- one_l * (command + special + any - one_r - done)^0 * one_r * done +
- (somemath + command + special + any )^0
-)
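
-- A quick sketch of what this filter does to a field value (the input string
-- is made up); the extra table is the per dataset command counter passed in
-- via Carg(1):

local demo_counts = { }
local demo_clean  = lpegmatch(filter_2,[[\TeX{} is #1 in $x^2$]],1,demo_counts)
-- demo_clean  : \btxcmd{TeX}{} is \letterhash1 in $x^2$
-- demo_counts : { TeX = 1 }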
-
--- Currently we expand shortcuts and for large ones (like the acknowledgements
--- in tugboat.bib) this is not that efficient. However, eventually strings get
--- hashed again.
-
-local function do_shortcut(key,value,dataset)
- publicationsstats.nofshortcuts = publicationsstats.nofshortcuts + 1
- dataset.shortcuts[key] = value
-end
-
-local function getindex(dataset,luadata,tag)
- local found = luadata[tag]
- if found then
- return found.index or 0
- else
- local index = dataset.nofentries + 1
- dataset.nofentries = index
- return index
- end
-end
-
-publications.getindex = getindex
-
--- todo: categories : metatable that lowers and also counts
--- todo: fields : metatable that lowers
-
-local function do_definition(category,tag,tab,dataset)
- publicationsstats.nofdefinitions = publicationsstats.nofdefinitions + 1
- local fields = dataset.fields
- local luadata = dataset.luadata
- local found = luadata[tag]
- local index = getindex(dataset,luadata,tag)
- local entries = {
- category = lower(category),
- tag = tag,
- index = index,
- }
- for i=1,#tab,2 do
- local original = tab[i]
- local normalized = fields[original]
- if not normalized then
- normalized = lower(original) -- we assume ascii fields
- fields[original] = normalized
- end
- local value = tab[i+1]
- value = textoutf(value)
- if lpegmatch(filter_1,value) then
- value = lpegmatch(filter_2,value,1,dataset.commands) -- we need to start at 1 for { }
- end
- if normalized == "crossref" then
- local parent = luadata[value]
- if parent then
- setmetatableindex(entries,parent)
- else
- -- warning
- end
- end
- entries[normalized] = value
- end
- luadata[tag] = entries
-end
-
-local function resolve(s,dataset)
- return dataset.shortcuts[s] or defaultshortcuts[s] or s -- can be number
-end
-
-local percent = P("%")
-local start = P("@")
-local comma = P(",")
-local hash = P("#")
-local escape = P("\\")
-local single = P("'")
-local double = P('"')
-local left = P('{')
-local right = P('}')
-local both = left + right
-local lineending = S("\n\r")
-local space = S(" \t\n\r\f") -- / " "
-local spacing = space^0
-local equal = P("=")
------ collapsed = (space^1)/ " "
-local collapsed = (lpegpatterns.whitespace^1)/ " "
-
------ balanced = lpegpatterns.balanced
-local balanced = P {
- [1] = ((escape * (left+right)) + (collapsed + 1 - (left+right)) + V(2))^0,
- [2] = left * V(1) * right
-}
-
-local keyword = C((R("az","AZ","09") + S("@_:-"))^1)
-local key = C((1-space-equal)^1)
-local tag = C((1-space-comma)^1)
-local reference = keyword
-local category = P("@") * C((1-space-left)^1)
-local s_quoted = ((escape*single) + collapsed + (1-single))^0
-local d_quoted = ((escape*double) + collapsed + (1-double))^0
-
-local b_value = (left /"") * balanced * (right /"")
-local s_value = (single/"") * (b_value + s_quoted) * (single/"")
-local d_value = (double/"") * (b_value + d_quoted) * (double/"")
-local r_value = reference * Carg(1) /resolve
-
-local somevalue = s_value + d_value + b_value + r_value
-local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0))
-
-local assignment = spacing * key * spacing * equal * spacing * value * spacing
-local shortcut = P("@") * (P("string") + P("STRING")) * spacing * left * ((assignment * Carg(1))/do_shortcut * comma^0)^0 * spacing * right
-local definition = category * spacing * left * spacing * tag * spacing * comma * Ct((assignment * comma^0)^0) * spacing * right * Carg(1) / do_definition
-local comment = keyword * spacing * left * (1-right)^0 * spacing * right
-local forget = percent^1 * (1-lineending)^0
-
--- todo \%
-
-local bibtotable = (space + forget + shortcut + definition + comment + 1)^0
-
--- loadbibdata -> dataset.luadata
--- loadtexdata -> dataset.luadata
--- loadluadata -> dataset.luadata
-
--- converttoxml -> dataset.xmldata from dataset.luadata
-
-function publications.loadbibdata(dataset,content,source,kind)
- dataset = datasets[dataset]
- statistics.starttiming(publications)
- publicationsstats.nofbytes = publicationsstats.nofbytes + #content
- dataset.nofbytes = dataset.nofbytes + #content
- if source then
- table.insert(dataset.sources, { filename = source, checksum = md5.HEX(content) })
- dataset.loaded[source] = kind or true
- end
- dataset.newtags = #dataset.luadata > 0 and { } or dataset.newtags
- publications.markasupdated(dataset)
- lpegmatch(bibtotable,content or "",1,dataset)
- statistics.stoptiming(publications)
-end
-
--- we could use xmlescape again
-
-local cleaner_0 = S('<>&')
-local cleaner_1 = (1-cleaner_0)^0 * cleaner_0
-local cleaner_2 = Cs ( (
- P("<") / "<" +
- P(">") / ">" +
- P("&") / "&" +
- P(1)
-)^0)
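
-- A quick sketch: values that contain one of < > & (detected with cleaner_1)
-- are escaped by cleaner_2 before they end up in the xml blob:

local demo_escaped = lpegmatch(cleaner_2,"AT&T <etc>") -- "AT&amp;T &lt;etc&gt;"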
-
-local compact = false -- can be a directive but then we also need to deal with newlines ... not now
-
-function publications.converttoxml(dataset,nice) -- we have fields !
- dataset = datasets[dataset]
- local luadata = dataset and dataset.luadata
- if luadata then
- statistics.starttiming(publications)
- statistics.starttiming(xml)
- --
- local result, r = { }, 0
- --
- r = r + 1 ; result[r] = "<?xml version='1.0' standalone='yes' ?>"
- r = r + 1 ; result[r] = "<bibtex>"
- --
- if nice then
- local f_entry_start = formatters[" <entry tag='%s' category='%s' index='%s'>"]
- local f_entry_stop = " </entry>"
- local f_field = formatters["  <field name='%s'>%s</field>"]
- for tag, entry in sortedhash(luadata) do
- r = r + 1 ; result[r] = f_entry_start(tag,entry.category,entry.index)
- for key, value in sortedhash(entry) do
- if key ~= "tag" and key ~= "category" and key ~= "index" then
- if lpegmatch(cleaner_1,value) then
- value = lpegmatch(cleaner_2,value)
- end
- if value ~= "" then
- r = r + 1 ; result[r] = f_field(key,value)
- end
- end
- end
- r = r + 1 ; result[r] = f_entry_stop
- end
- else
- local f_entry_start = formatters["<entry tag='%s' category='%s' index='%s'>"]
- local f_entry_stop = "</entry>"
- local f_field = formatters["<field name='%s'>%s</field>"]
- for tag, entry in next, luadata do
- r = r + 1 ; result[r] = f_entry_start(entry.tag,entry.category,entry.index)
- for key, value in next, entry do
- if key ~= "tag" and key ~= "category" and key ~= "index" then
- if lpegmatch(cleaner_1,value) then
- value = lpegmatch(cleaner_2,value)
- end
- if value ~= "" then
- r = r + 1 ; result[r] = f_field(key,value)
- end
- end
- end
- r = r + 1 ; result[r] = f_entry_stop
- end
- end
- --
- r = r + 1 ; result[r] = "</bibtex>"
- --
- result = concat(result,nice and "\n" or nil)
- --
- dataset.xmldata = xmlconvert(result, {
- resolve_entities = true,
- resolve_predefined_entities = true, -- in case we have escaped entities
--- unify_predefined_entities = true, -- &#038; -> &amp;
- utfize_entities = true,
- } )
- --
- statistics.stoptiming(xml)
- statistics.stoptiming(publications)
- if lxml then
- lxml.register(formatters["btx:%s"](dataset.name),dataset.xmldata)
- end
- end
-end
-
-local loaders = publications.loaders or { }
-publications.loaders = loaders
-
-function loaders.bib(dataset,filename,kind)
- dataset = datasets[dataset]
- local data = io.loaddata(filename) or ""
- if data == "" then
- report("empty file %a, nothing loaded",filename)
- elseif trace then
- report("loading file",filename)
- end
- publications.loadbibdata(dataset,data,filename,kind)
-end
-
-function loaders.lua(dataset,filename) -- if filename is a table we load that one
- dataset = datasets[dataset]
- if type(dataset) == "table" then
- dataset = datasets[dataset]
- end
- local data = type(filename) == "table" and filename or table.load(filename)
- if data then
- local luadata = dataset.luadata
- for tag, entry in next, data do
- if type(entry) == "table" then
- entry.index = getindex(dataset,luadata,tag)
- luadata[tag] = entry -- no cleaning yet
- end
- end
- end
-end
-
-function loaders.xml(dataset,filename)
- dataset = datasets[dataset]
- local luadata = dataset.luadata
- local root = xml.load(filename)
- for xmlentry in xmlcollected(root,"/bibtex/entry") do
- local attributes = xmlentry.at
- local tag = attributes.tag
- local entry = {
- category = attributes.category
- }
- for field in xmlcollected(xmlentry,"/field") do
- -- entry[field.at.name] = xmltext(field)
- entry[field.at.name] = field.dt[1] -- no cleaning yet
- end
- -- local edt = entry.dt
- -- for i=1,#edt do
- -- local e = edt[i]
- -- local a = e.at
- -- if a and a.name then
- -- t[a.name] = e.dt[1] -- no cleaning yet
- -- end
- -- end
- entry.index = getindex(dataset,luadata,tag)
- luadata[tag] = entry
- end
-end
-
-setmetatableindex(loaders,function(t,filetype)
- local v = function(dataset,filename)
- report("no loader for file %a with filetype %a",filename,filetype)
- end
- t[filetype] = v
- return v
-end)
-
-function publications.load(dataset,filename,kind)
- dataset = datasets[dataset]
- statistics.starttiming(publications)
- local files = settings_to_array(filename)
- for i=1,#files do
- local filetype, filename = string.splitup(files[i],"::")
- if not filename then
- filename = filetype
- filetype = file.suffix(filename)
- end
- local fullname = resolvers.findfile(filename,"bib")
- if dataset.loaded[fullname] then -- will become better
- -- skip
- elseif fullname == "" then
- report("no file %a",filename)
- else
- loaders[filetype](dataset,fullname)
- end
- if kind then
- dataset.loaded[fullname] = kind
- end
- end
- statistics.stoptiming(publications)
- return dataset
-end
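
-- A hedged usage sketch (dataset name and the extra xml file are made up):
-- entries in the list can carry an explicit filetype prefix, otherwise the
-- suffix of the filename decides which loader is used.

-- local demo = publications.load("demo","tugboat.bib,xml::extra.xml","current")
-- publications.converttoxml(demo,true) -- demo.xmldata, registered as btx:demo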
-
-local checked = function(s,d) d[s] = (d[s] or 0) + 1 end
-local checktex = ( (1-P("\\"))^1 + P("\\") * ((C(R("az","AZ")^1) * Carg(1))/checked))^0
-
-function publications.analyze(dataset)
- dataset = datasets[dataset]
- local data = dataset.luadata
- local categories = { }
- local fields = { }
- local commands = { }
- for k, v in next, data do
- categories[v.category] = (categories[v.category] or 0) + 1
- for k, v in next, v do
- fields[k] = (fields[k] or 0) + 1
- lpegmatch(checktex,v,1,commands)
- end
- end
- dataset.analysis = {
- categories = categories,
- fields = fields,
- commands = commands,
- }
-end
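
-- A quick sketch of the resulting analysis table (the counts shown are of
-- course data dependent and made up here):

-- publications.analyze("demo")
-- local analysis = datasets["demo"].analysis
-- -- analysis.categories : { article = 10, book = 2, ... }
-- -- analysis.fields     : { author = 12, title = 12, year = 11, ... }
-- -- analysis.commands   : tex commands seen in field values, e.g. { TeX = 3 }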
-
--- str = [[
--- @COMMENT { CRAP }
--- @STRING{ hans = "h a n s" }
--- @STRING{ taco = "t a c o" }
--- @SOMETHING{ key1, abc = "t a c o" , def = "h a n s" }
--- @SOMETHING{ key2, abc = hans # taco }
--- @SOMETHING{ key3, abc = "hans" # taco }
--- @SOMETHING{ key4, abc = hans # "taco" }
--- @SOMETHING{ key5, abc = hans # taco # "hans" # "taco"}
--- @SOMETHING{ key6, abc = {oeps {oeps} oeps} }
--- ]]
-
--- local dataset = publications.new()
--- publications.tolua(dataset,str)
--- publications.toxml(dataset)
--- publications.toxml(dataset)
--- print(dataset.xmldata)
--- inspect(dataset.luadata)
--- inspect(dataset.xmldata)
--- inspect(dataset.shortcuts)
--- print(dataset.nofbytes,statistics.elapsedtime(publications))
-
--- local dataset = publications.new()
--- publications.load(dataset,"IEEEabrv.bib")
--- publications.load(dataset,"IEEEfull.bib")
--- publications.load(dataset,"IEEEexample.bib")
--- publications.toxml(dataset)
--- print(dataset.nofbytes,statistics.elapsedtime(publications))
-
--- local dataset = publications.new()
--- publications.load(dataset,"gut.bib")
--- publications.load(dataset,"komoedie.bib")
--- publications.load(dataset,"texbook1.bib")
--- publications.load(dataset,"texbook2.bib")
--- publications.load(dataset,"texbook3.bib")
--- publications.load(dataset,"texgraph.bib")
--- publications.load(dataset,"texjourn.bib")
--- publications.load(dataset,"texnique.bib")
--- publications.load(dataset,"tugboat.bib")
--- publications.toxml(dataset)
--- print(dataset.nofbytes,statistics.elapsedtime(publications))
-
--- print(table.serialize(dataset.luadata))
--- print(table.serialize(dataset.xmldata))
--- print(table.serialize(dataset.shortcuts))
--- print(xml.serialize(dataset.xmldata))
diff --git a/tex/context/base/publ-imp-apa.mkiv b/tex/context/base/publ-imp-apa.mkiv
deleted file mode 100644
index 3f7b119af..000000000
--- a/tex/context/base/publ-imp-apa.mkiv
+++ /dev/null
@@ -1,547 +0,0 @@
-%D \module
-%D [ file=publ-imp-apa,
-%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
-%D title=APA bibliography style,
-%D subtitle=Publications,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
-%D by \PRAGMA. See mreadme.pdf for details.
-
-% common
-
-% \loadbtxdefinitionfile[def]
-
-\startsetups btx:apa:common:wherefrom
- \btxdoifelse {address} {
- \getvariable{btx:temp}{left}
- \btxdoifelse {country} {
- \btxdoifelse {\getvariable{btx:temp}{label}} {
- \btxflush{address}\btxcomma\btxflush{country}: \btxflush{\getvariable{btx:temp}{label}}
- } {
- \btxflush{address}\btxcomma\btxflush{country}
- }
- } {
- \btxdoifelse {\getvariable{btx:temp}{label}} {
- \btxflush{address}\btxcomma\btxflush{\getvariable{btx:temp}{label}}
- } {
- \btxflush{address}
- }
- }
- \getvariable{btx:temp}{right}
- } {
- \btxdoifelse {country} {
- \getvariable{btx:temp}{left}
- \btxdoifelse {\getvariable{btx:temp}{label}} {
- \btxflush{country}: \btxflush{\getvariable{btx:temp}{label}}
- } {
- \btxflush{country}
- }
- \getvariable{btx:temp}{right}
- } {
- \btxdoifelse {\getvariable{btx:temp}{label}} {
- \getvariable{btx:temp}{left}
- \btxflush{\getvariable{btx:temp}{label}}
- \getvariable{btx:temp}{right}
- } {
- \getvariable{btx:temp}{otherwise}
- }
- }
- }
-\stopsetups
-
-% \setvariables[btx:temp][label=,left=,right=,otherwise=]
-
-\startsetups btx:apa:common:publisher
- \begingroup
- \setvariables[btx:temp][label=publisher]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:organization
- \begingroup
- \setvariables[btx:temp][label=organization]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:school
- \begingroup
- \setvariables[btx:temp][label=school]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:institution
- \begingroup
- \setvariables[btx:temp][label=institution]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:school:subsentence
- \begingroup
- \setvariables[btx:temp][label=school,left=\btxcomma,right=\btxperiod,otherwise=\btxperiod]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:institution:subsentence
- \begingroup
- \setvariables[btx:temp][label=institution,left=\btxcomma,right=\btxperiod,otherwise=\btxperiod]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:publisher:sentence
- \begingroup
- \setvariables[btx:temp][label=publisher,left=\btxspace,right=\btxperiod]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:organization:sentence
- \begingroup
- \setvariables[btx:temp][label=organization,left=\btxspace,right=\btxperiod]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:title-and-series
- \btxdoif {title} {
- \btxflush{title}
- \btxdoif {series} {
- \btxlparent\btxflush{series}\btxrparent
- }
- \btxperiod
- }
-\stopsetups
-
-\startsetups btx:apa:common:title-it-and-series
- \btxdoif {title} {
- \bgroup\it\btxflush{title}\/\egroup
- \btxdoif {series} {
- \btxlparent\btxflush{series}\btxrparent
- }
- \btxperiod
- }
-\stopsetups
-
-\disablemode[btx:apa:edited-book]
-
-\startsetups btx:apa:common:author-and-year
- \btxdoif {author} {
- \btxflushauthor{author}
- }
- \btxdoif {year} {
- \btxlparent\btxflush{year}\btxrparent
- }
- \btxperiod
-\stopsetups
-
-\startsetups btx:apa:common:author-or-key-and-year
- \btxdoifelse {author} {
- \btxflushauthor{author}
- } {
- \btxdoif {key} {
- \btxlbracket\btxsetup{btx:format:key}\btxrbracket
- }
- }
- \btxdoif {year} {
- \btxlparent\btxflush{year}\btxrparent
- }
- \btxperiod
-\stopsetups
-
-\startsetups btx:apa:common:author-editors-crossref-year
- \btxdoifelse {author} {
- \btxflushauthor{author}
- } {
- \btxdoifelse {editor} {
- \enablemode[btx:apa:edited-book]
- \btxflushauthor{editor}
- \btxcomma\btxsingularplural{editor}{editor}{editors}
- } {
- % weird period
- \btxdoif {crossref} {
- \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket\btxperiod
- }
- }
- }
- \btxdoif {year} {
- \btxlparent\btxflush{year}\btxrparent
- }
- \btxperiod
-\stopsetups
-
-\startsetups btx:apa:common:editor-or-key-and-year
- \btxdoifelse {editor} {
- \enablemode[btx:apa:edited-book]
- \btxflushauthor{editor}
- \btxcomma\btxsingularplural{editor}{editor}{editors}
- } {
- \btxdoif {key} {
- \btxlbracket\btxsetup{btx:format:key}\btxrbracket
- }
- }
- \btxspace
- \btxdoif {year} {
- \btxlparent\btxflush{year}\btxrparent
- }
- \btxperiod
-\stopsetups
-
-\startsetups btx:apa:common:note
- \btxdoif {note} {
- \btxspace\btxflush{note}\btxperiod
- }
-\stopsetups
-
-\startsetups btx:apa:common:comment
- \btxdoif {comment} {
- \btxspace\btxflush{comment}\btxperiod
- }
-\stopsetups
-
-\startsetups btx:apa:common:pages:p
- \btxdoif {pages} {
- \btxspace\btxflush{pages}\btxspace p\btxperiod
- }
-\stopsetups
-
-\startsetups btx:apa:common:pages:pp
- \btxdoif {pages} {
- \btxspace\btxflush{pages}\btxspace pp\btxperiod
- }
-\stopsetups
-
-\startsetups btx:apa:common:pages:pages
- \btxdoif {pages} {
- \btxcomma pages~\btxflush{pages}
- }
-\stopsetups
-
-\startsetups btx:apa:common:edition:sentence
- \btxdoif {edition} {
- \btxspace\btxflush{edition}\btxspace edition\btxperiod
- }
-\stopsetups
-
-% check when the next is used (no period)
-
-% \startsetups btx:apa:common:edition
-% \btxdoif {edition} {
-% \btxspace\btxflush{edition}\btxspace edition
-% }
-% \stopsetups
-
-% we can share more, todo
-
-% specific
-
-\startsetups btx:apa:article
- \btxsetup{btx:apa:common:author-or-key-and-year}
- \btxdoif {title} {
- \btxflush{title}\btxperiod
- }
- \btxdoifelse {journal} {
- \bgroup\it\btxflush{journal}\/\egroup
- } {
- \btxdoif {crossref} {
- In\btxspace\btxflush{crossref}
- }
- }
- \btxdoifelse {volume} {
- \btxcomma\bgroup\it\btxflush{volume}\/\egroup
- \btxdoif {issue} {
- \btxlparent\btxflush{issue}\btxrparent
- }
- \btxdoif {pages} {
- \btxcomma\btxflush{pages}
- }
- \btxperiod
- } {
- \btxsetup{btx:apa:common:pages:pp}
- }
- \btxsetup{btx:apa:common:note}
- \btxsetup{btx:apa:common:comment}
-\stopsetups
-
-\startsetups btx:apa:book
- \btxsetup{btx:apa:common:author-editors-crossref-year}
- \btxdoif {title} {
- \bgroup\it\btxflush{title}\/\egroup
- \doifmodeelse {btx:apa:edited-book} {
- \btxdoifelse {volume} {
- \btxspace Number\nonbreakablespace\btxflush{volume}
- \btxdoifelse {series} {
- \btxspace in\nonbreakablespace\btxflush{series}\btxperiod
- } {
- \btxdoifelse {crossref} {
- \btxspace in\btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
- } {
- \btxperiod
- }
- }
- } {
- \btxdoif {series} {
- \btxspace\btxflush{series}
- }
- \btxperiod
- }
- } {
- \btxdoifelse {crossref} {
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxperiod
- \btxdoif {volume} {
- Volume\nonbreakablespace\btxflush{volume}\btxspace of\nonbreakablespace
- }
- } {
- \btxdoif {volume} {
- \btxcomma volume\nonbreakablespace\btxflush{volume}
- \btxdoif {series} {
- \btxspace of\nonbreakablespace\bgroup\it\btxflush{series}\/\egroup
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}
- }
- \btxsetup{btx:apa:common:pages:pages}
- }
- \btxperiod
- }
- }
- }
- \btxsetup{btx:apa:common:edition:sentence}
- \btxsetup{btx:apa:common:publisher:sentence}
- \btxsetup{btx:apa:common:pages:p}% twice?
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:inbook
- \btxsetup{btx:apa:common:author-editors-crossref-year}
- \btxdoifelse {title} {
- \bgroup\it\btxflush{title}\/\egroup
- } {
- \doifmodeelse {btx:apa:edited-book} {
- \btxdoifelse {volume} {
- \btxspace number\nonbreakablespace\btxflush{volume}
- \btxdoifelse {series} {
- \btxspace in\nonbreakablespace\btxflush{series}\btxperiod
- } {
- \btxdoifelse {crossref} {
- \btxspace in\btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
- } {
- \btxperiod
- }
- }
- } {
- \btxdoif {series} {
- \btxspace\btxflush{series}\btxperiod
- }
- }
- } {
- \btxdoifelse {crossref} {
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxdoif {volume} {
- Volume\nonbreakablespace\btxflush{volume}\btxspace of\nonbreakablespace
- }
- \btxdoif {crossref} {
- \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
- }
- } {
- \btxdoif {volume} {
- \btxcomma volume\nonbreakablespace\btxflush{volume}
- \btxdoif {series} {
- \btxspace of\nonbreakablespace\bgroup\it\btxflush{series}\/\egroup
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxperiod
- }
- }
- }
- }
- \btxspace
- \btxsetup{btx:apa:common:edition:sentence}
- \btxsetup{btx:apa:common:publisher}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:booklet
- \btxsetup{btx:apa:common:author-or-key-and-year}
- \btxsetup{btx:apa:common:title-it-and-series}
- \btxsetup{btx:apa:common:edition:sentence}
- \btxsetup{btx:apa:common:publisher:sentence}
- \btxsetup{btx:apa:common:pages:p}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:manual
- \btxsetup{btx:apa:common:author-or-key-and-year}
- \btxsetup{btx:apa:common:title-it-and-series}
- \btxsetup{btx:apa:common:edition:sentence}
- \btxsetup{btx:apa:common:organization:sentence}
- \btxsetup{btx:apa:common:pages:p}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:incollection
- \btxsetup{btx:apa:common:author-and-year}
- \btxdoif {arttitle} {
- \btxflush{arttitle}\btxperiod
- }
- In\btxspace
- \btxdoifelse {title} {
- \btxflushauthor{editor}\btxcomma
- \bgroup\it\btxflush{title}\/\egroup
- \btxdoif {series} {
- \btxdoif {volume} {
- \btxcomma number\btxspace\btxflush{volume}\btxspace in
- }
- \btxspace\btxflush{series}
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}\btxspace
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxdoif {edition} {
- \btxspace\btxflush{edition}\btxspace edition
- }
- \btxsetup{btx:apa:common:publisher:sentence}
- } {
- \btxdoif {crossref} {
- \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}
- }
- \btxspace
- \btxsetup{btx:apa:common:pages:pages}
- }
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:inproceedings
- \btxsetup{btx:apa:common:author-and-year}
- \btxdoif {arttitle} {
- \btxflush{arttitle}\btxperiod
- }
- In\btxspace
- \btxdoifelse {title} {
- \btxdoif {editor} {
- \btxflushauthor{editor}
- \btxcomma\btxsingularplural{editor}{editor}{editors}\btxcomma
- }
- \bgroup\it\btxflush{title}\/\egroup
- \btxdoif {series} {
- \btxdoif {volume} {
- \btxcomma number~\btxflush{volume} in
- }
- \btxspace
- \btxflush{series}
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}\btxspace
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxperiod
- \btxsetup{btx:apa:common:organization:sentence}
- } {
- \btxdoif {crossref} {
- \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}\btxspace
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxperiod
- }
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:proceedings
- \btxsetup{btx:apa:common:editor-or-key-and-year}
- \btxdoif {title} {
- \bgroup\it\btxflush{title}\/\egroup
- \btxdoif {volume} {
- \btxcomma number\btxspace\btxflush{volume}\btxspace in\btxspace
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}\btxspace
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxperiod
- \btxsetup{btx:apa:common:organization:sentence}
- }
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:common:thesis
- \btxsetup{btx:apa:common:author-and-year}
- \btxsetup{btx:apa:common:title-it-and-series}
- \btxdoifelse {type} {
- \btxflush{type}
- } {
- \getvariable{btx:temp}{label}
- }
- \btxsetup{btx:apa:common:school:subsentence}
- \btxsetup{btx:apa:common:pages:p}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:mastersthesis
- \setvariables[btx:temp][label=Master's thesis]
- \btxsetup{btx:apa:common:thesis}
-\stopsetups
-
-\startsetups btx:apa:phdthesis
- \setvariables[btx:temp][label=PhD thesis]
- \btxsetup{btx:apa:common:thesis}
-\stopsetups
-
-\startsetups btx:apa:techreport
- \btxsetup{btx:apa:common:author-and-year}
- \btxsetup{btx:apa:common:title-and-series}
- \btxdoifelse {type} {
- \btxflush{type}
- \btxdoif {volume} {
- \btxspace\btxflush{volume}
- }
- } {
- \btxspace Technical Report
- }
- \btxsetup{btx:apa:common:institution:subsentence}
- \btxsetup{btx:apa:common:pages:p}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:misc
- \btxsetup{btx:apa:common:author-and-year}
- \btxsetup{btx:apa:common:title-and-series}
- \btxsetup{btx:apa:common:publisher:sentence}
- \btxsetup{btx:apa:common:pages:p}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:unpublished
- \btxsetup{btx:apa:common:author-and-year}
- \btxsetup{btx:apa:common:title-and-series}
- \btxsetup{btx:apa:common:pages:p}
- \btxdoif {type} {
- \btxlparent\btxflush{type}\btxrparent
- }
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\endinput
diff --git a/tex/context/base/publ-imp-cite.mkiv b/tex/context/base/publ-imp-cite.mkiv
deleted file mode 100644
index d64c2132c..000000000
--- a/tex/context/base/publ-imp-cite.mkiv
+++ /dev/null
@@ -1,74 +0,0 @@
-%D \module
-%D [ file=publ-imp-cite,
-%D version=2013.12.24,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=XML,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\startsetups btx:cite:author
- \btxcitevariant{author}
-\stopsetups
-
-\startsetups btx:cite:authoryear
- \btxcitevariant{authoryear}
-\stopsetups
-
-\startsetups btx:cite:authoryears
- \btxcitevariant{authoryears}
-\stopsetups
-
-% \startsetups btx:cite:authornum
-% \btxcitevariant{author}
-% \btxcitevariantparameter\c!inbetween
-% \btxcitevariant{num}
-% \stopsetups
-
-\startsetups btx:cite:authornum
- \btxcitevariant{authornum}
-\stopsetups
-
-\startsetups btx:cite:year
- \btxcitevariant{year}
-\stopsetups
-
-\startsetups btx:cite:short
- \btxcitevariant{short}
-\stopsetups
-
-\startsetups btx:cite:serial
- \btxcitevariant{serial}
-\stopsetups
-
-\startsetups btx:cite:key
- \currentbtxtag % \btxcitevariant{tag}
-\stopsetups
-
-\startsetups btx:cite:doi
- todo: \btxcitevariant{doi}
-\stopsetups
-
-\startsetups btx:cite:url
- todo: \btxcitevariant{url}
-\stopsetups
-
-\startsetups btx:cite:type
- \btxcitevariant{category}
-\stopsetups
-
-\startsetups btx:cite:page
- \btxcitevariant{page}
-\stopsetups
-
-\startsetups btx:cite:none
- % dummy
-\stopsetups
-
-\startsetups btx:cite:num
- \btxcitevariant{num}
-\stopsetups
diff --git a/tex/context/base/publ-imp-commands.mkiv b/tex/context/base/publ-imp-commands.mkiv
deleted file mode 100644
index 14e2dbae1..000000000
--- a/tex/context/base/publ-imp-commands.mkiv
+++ /dev/null
@@ -1,15 +0,0 @@
-\unprotect
-
-% for tugboat
-
-\definebtxcommand\hbox {\hbox}
-\definebtxcommand\vbox {\vbox}
-\definebtxcommand\llap {\llap}
-\definebtxcommand\rlap {\rlap}
-\definebtxcommand\url #1{\hyphenatedurl{#1}}
-\definebtxcommand\acro #1{\dontleavehmode{\smallcaps#1}}
-
-\let\<<
-\let\<>
-
-\protect \endinput
diff --git a/tex/context/base/publ-imp-definitions.mkiv b/tex/context/base/publ-imp-definitions.mkiv
deleted file mode 100644
index 2cf2e3e8e..000000000
--- a/tex/context/base/publ-imp-definitions.mkiv
+++ /dev/null
@@ -1,68 +0,0 @@
-%D \module
-%D [ file=publ-imp-def,
-%D version=2013.12.24,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=Definitions,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D Here we collect some helper setups. We assume that checking of a field
-%D happens in the calling setup, if only because that is the place where
-%D fences are also dealt with.
-
-\unprotect
-
-\startxmlsetups btx:format:crossref
- \cite[\btxfield{crossref}]
-\stopxmlsetups
-
-\startxmlsetups btx:format:key
- \btxfield{short}
-\stopxmlsetups
-
-\startxmlsetups btx:format:doi
- \edef\currentbtxfielddoi{\btxfield{doi}}
- \ifx\currentbtxfielddoi\empty
- {\tttf no-doi}
- \else\ifconditional\btxinteractive
- \goto{\hyphenatedurl{\currentbtxfielddoi}}[url(http://dx.doi.org/\currentbtxfielddoi)]
- \else
- \hyphenatedurl{\currentbtxfielddoi}
- \fi\fi
-\stopxmlsetups
-
-\startxmlsetups btx:format:url
- \edef\currentbtxfieldurl{\btxfield{url}}
- \ifx\currentbtxfieldurl\empty
- {\tttf no-url}
- \else\ifconditional\btxinteractive
- \goto{\hyphenatedurl{\currentbtxfieldurl}}[url(\currentbtxfieldurl)]
- \else
- \hyphenatedurl{\currentbtxfieldurl}
- \fi\fi
-\stopxmlsetups
-
-\startxmlsetups btx:format:month
- \edef\currentbtxfieldmonth{\btxfield{month}}
- \ifx\currentbtxfieldmonth\empty
- {\tttf no-month}
- \else
- \edef\p_monthconversion{\btxlistvariantparameter\c!monthconversion}
- \ifx\p_monthconversion\empty % month month:mnem
- \currentbtxfieldmonth
- \else
- \doifnumberelse \currentbtxfieldmonth {
- \convertnumber\p_monthconversion\currentbtxfieldmonth
- } {
- \currentbtxfieldmonth
- }
- \fi
- \fi
-\stopxmlsetups
-
-\protect
diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua
deleted file mode 100644
index 6bf6714da..000000000
--- a/tex/context/base/publ-ini.lua
+++ /dev/null
@@ -1,1425 +0,0 @@
-if not modules then modules = { } end modules ['publ-ini'] = {
- version = 1.001,
- comment = "this module part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- for the moment here
-
-local lpegmatch = lpeg.match
-local P, C, Ct, Cs = lpeg.P, lpeg.C, lpeg.Ct, lpeg.Cs
-
-local lpegmatch = lpeg.match
-local pattern = Cs((1 - P(1) * P(-1))^0 * (P(".")/"" + P(1)))
-
-local manipulators = {
- stripperiod = function(str) return lpegmatch(pattern,str) end,
- uppercase = characters.upper,
- lowercase = characters.lower,
-}
-
-local manipulation = C((1-P("->"))^1) * P("->") * C(P(1)^0)
-
-local pattern = manipulation / function(operation,str)
- local manipulator = manipulators[operation]
- return manipulator and manipulator(str) or str
-end
-
-local function manipulated(str)
- return lpegmatch(pattern,str) or str
-end
-
-utilities.parsers.manipulation = manipulation
-utilities.parsers.manipulators = manipulators
-utilities.parsers.manipulated = manipulated
-
-function commands.manipulated(str)
- context(manipulated(str))
-end
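
-- A quick sketch of the prefix syntax handled above (the values are made up):

local demo_upper = manipulated("uppercase->tugboat")     -- "TUGBOAT"
local demo_strip = manipulated("stripperiod->A. Title.") -- "A. Title"
local demo_plain = manipulated("no prefix at all")       -- returned as is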
-
--- use: for rest in gmatch(reference,"[^, ]+") do
-
-local next, rawget, type = next, rawget, type
-local match, gmatch, format, gsub = string.match, string.gmatch, string.format, string.gsub
-local concat, sort = table.concat, table.sort
-local utfsub = utf.sub
-local formatters = string.formatters
-local allocate = utilities.storage.allocate
-local settings_to_array, settings_to_set = utilities.parsers.settings_to_array, utilities.parsers.settings_to_set
-local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
-local lpegmatch = lpeg.match
-local P, C, Ct = lpeg.P, lpeg.C, lpeg.Ct
-
-local report = logs.reporter("publications")
-local trace = false trackers.register("publications", function(v) trace = v end)
-
-local datasets = publications.datasets
-
-local variables = interfaces.variables
-
-local v_local = variables["local"]
-local v_global = variables["global"]
-
-local v_force = variables.force
-local v_standard = variables.standard
-local v_start = variables.start
-local v_none = variables.none
-local v_left = variables.left
-local v_right = variables.right
-local v_middle = variables.middle
-local v_inbetween = variables.inbetween
-
-local v_short = variables.short
-local v_cite = variables.cite
-local v_default = variables.default
-local v_reference = variables.reference
-local v_dataset = variables.dataset
-local v_author = variables.author or "author"
-local v_editor = variables.editor or "editor"
-
-local numbertochar = converters.characters
-
-local logsnewline = logs.newline
-local logspushtarget = logs.pushtarget
-local logspoptarget = logs.poptarget
-local csname_id = token.csname_id
-
-local basicsorter = sorters.basicsorter -- (a,b)
-local sortcomparer = sorters.comparers.basic -- (a,b)
-local sortstripper = sorters.strip
-local sortsplitter = sorters.splitters.utf
-
-local context = context
-
-local ctx_btxlistparameter = context.btxlistparameter
-local ctx_btxcitevariantparameter = context.btxcitevariantparameter
-local ctx_btxlistvariantparameter = context.btxlistvariantparameter
-local ctx_btxdomarkcitation = context.btxdomarkcitation
-local ctx_setvalue = context.setvalue
-local ctx_firstoftwoarguments = context.firstoftwoarguments
-local ctx_secondoftwoarguments = context.secondoftwoarguments
-local ctx_firstofoneargument = context.firstofoneargument
-local ctx_gobbleoneargument = context.gobbleoneargument
-local ctx_btxdirectlink = context.btxdirectlink
-local ctx_btxhandlelistentry = context.btxhandlelistentry
-local ctx_btxchecklistentry = context.btxchecklistentry
-local ctx_dodirectfullreference = context.dodirectfullreference
-local ctx_directsetup = context.directsetup
-
-statistics.register("publications load time", function()
- local publicationsstats = publications.statistics
- local nofbytes = publicationsstats.nofbytes
- if nofbytes > 0 then
- return string.format("%s seconds, %s bytes, %s definitions, %s shortcuts",
- statistics.elapsedtime(publications),nofbytes,publicationsstats.nofdefinitions,publicationsstats.nofshortcuts)
- else
- return nil
- end
-end)
-
-luatex.registerstopactions(function()
- logspushtarget("logfile")
- logsnewline()
- report("start used btx commands")
- logsnewline()
- local undefined = csname_id("undefined*crap")
- for name, dataset in sortedhash(datasets) do
- for command, n in sortedhash(dataset.commands) do
- local c = csname_id(command)
- if c and c ~= undefined then
- report("%-20s %-20s % 5i %s",name,command,n,"known")
- else
- local u = csname_id(utf.upper(command))
- if u and u ~= undefined then
- report("%-20s %-20s % 5i %s",name,command,n,"KNOWN")
- else
- report("%-20s %-20s % 5i %s",name,command,n,"unknown")
- end
- end
- end
- end
- logsnewline()
- report("stop used btxcommands")
- logsnewline()
- logspoptarget()
-end)
-
--- multipass, we need to sort because hashing is random per run and no longer
--- fixed per version (not the best change in lua)
-
-local collected = allocate()
-local tobesaved = allocate()
-
--- we use a dedicated (and efficient, as it knows what it deals with) serializer,
--- also because we need to ignore the 'details' field
-
-local function serialize(t)
- local f_key_table = formatters[" [%q] = {"]
- local f_key_string = formatters[" %s = %q,"]
- local r = { "return {" }
- local m = 1
- for tag, entry in sortedhash(t) do
- m = m + 1
- r[m] = f_key_table(tag)
- local s = sortedkeys(entry)
- for i=1,#s do
- local k = s[i]
- -- if k ~= "details" then
- m = m + 1
- r[m] = f_key_string(k,entry[k])
- -- end
- end
- m = m + 1
- r[m] = " },"
- end
- r[m] = "}"
- return concat(r,"\n")
-end
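
-- A quick sketch of the output for a small userdata table (tag and fields are
-- made up); the sorted traversal is what keeps the checksum stable between
-- runs:
--
-- serialize { ["cite:1"] = { category = "cite", used = "yes" } }
--
-- return {
--  ["cite:1"] = {
--   category = "cite",
--   used = "yes",
--  },
-- }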
-
-local function finalizer()
- local prefix = tex.jobname -- or environment.jobname
- local setnames = sortedkeys(datasets)
- for i=1,#setnames do
- local name = setnames[i]
- local dataset = datasets[name]
- local userdata = dataset.userdata
- local checksum = nil
- local username = file.addsuffix(file.robustname(formatters["%s-btx-%s"](prefix,name)),"lua")
- if userdata and next(userdata) then
- if job.passes.first then
- local newdata = serialize(userdata)
- checksum = md5.HEX(newdata)
- io.savedata(username,newdata)
- end
- else
- os.remove(username)
- username = nil
- end
- local loaded = dataset.loaded
- local sources = dataset.sources
- local used = { }
- for i=1,#sources do
- local source = sources[i]
- if loaded[source.filename] ~= "previous" then -- or loaded[source.filename] == "current"
- used[#used+1] = source
- end
- end
- tobesaved[name] = {
- usersource = {
- filename = username,
- checksum = checksum,
- },
- datasources = used,
- }
- end
-end
-
-local function initializer()
- statistics.starttiming(publications)
-collected = publications.collected or collected -- for the moment as we load runtime
- for name, state in next, collected do
- local dataset = datasets[name]
- local datasources = state.datasources
- local usersource = state.usersource
- if datasources then
- for i=1,#datasources do
- local filename = datasources[i].filename
- publications.load(dataset,filename,"previous")
- end
- end
- if usersource then
- dataset.userdata = table.load(usersource.filename) or { }
- end
- end
- statistics.stoptiming(publications)
- function initializer() end -- will go, for now, runtime loaded
-end
-
-job.register('publications.collected',tobesaved,initializer,finalizer)
-
-if not publications.authors then
- initializer() -- for now, runtime loaded
-end
-
--- basic access
-
-local function getfield(dataset,tag,name)
- local d = datasets[dataset].luadata[tag]
- return d and d[name]
-end
-
-local function getdetail(dataset,tag,name)
- local d = datasets[dataset].details[tag]
- return d and d[name]
-end
-
-function commands.btxsingularorplural(dataset,tag,name) -- todo: make field dependent
- local d = datasets[dataset].details[tag]
- if d then
- d = d[name]
- end
- if d then
- d = #d <= 1
- end
- commands.doifelse(d)
-end
-
--- basic loading
-
-function commands.usebtxdataset(name,filename)
- publications.load(datasets[name],filename,"current")
-end
-
-function commands.convertbtxdatasettoxml(name,nice)
- publications.converttoxml(datasets[name],nice)
-end
-
--- enhancing
-
-local splitauthorstring = publications.authors.splitstring
-
-local pagessplitter = lpeg.splitat(P("-")^1)
-
--- maybe not redo when already done
-
-function publications.enhance(dataset) -- for the moment split runs (maybe publications.enhancers)
- statistics.starttiming(publications)
- if type(dataset) == "string" then
- dataset = datasets[dataset]
- end
- local luadata = dataset.luadata
- local details = dataset.details
- -- author, editor
- for tag, entry in next, luadata do
- local author = entry.author
- local editor = entry.editor
- details[tag] = {
- author = author and splitauthorstring(author),
- editor = editor and splitauthorstring(editor),
- }
- end
- -- short
- local shorts = { }
- for tag, entry in next, luadata do
- local author = details[tag].author
- if author then
- -- number depends on sort order
- local t = { }
- if #author == 0 then
- -- what
- else
- local n = #author == 1 and 3 or 1
- for i=1,#author do
- local surnames = author[i].surnames
- if not surnames or #surnames == 0 then
- -- error
- else
- t[#t+1] = utfsub(surnames[1],1,n)
- end
- end
- end
- local year = tonumber(entry.year) or 0
- local short = formatters["%t%02i"](t,math.mod(year,100))
- local s = shorts[short]
- if not s then
- shorts[short] = tag
- elseif type(s) == "string" then
- shorts[short] = { s, tag }
- else
- s[#s+1] = tag
- end
- else
- --
- end
- end
- for short, tags in next, shorts do
- if type(tags) == "table" then
- sort(tags)
- for i=1,#tags do
--- details[tags[i]].short = short .. numbertochar(i)
-local detail = details[tags[i]]
-detail.short = short
-detail.suffix = numbertochar(i)
- end
- else
- details[tags].short = short
- end
- end
- -- pages
- for tag, entry in next, luadata do
- local pages = entry.pages
- if pages then
- local first, last = lpegmatch(pagessplitter,pages)
- details[tag].pages = first and last and { first, last } or pages
- end
- end
- -- keywords
- for tag, entry in next, luadata do
- local keyword = entry.keyword
- if keyword then
- details[tag].keyword = settings_to_set(keyword)
- end
- end
- statistics.stoptiming(publications)
-end
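
-- A quick sketch of what ends up in the details table for one entry (the tag
-- and values are made up):

-- publications.enhance("demo")
-- local detail = datasets["demo"].details["hagen:2013"]
-- -- detail.author  : list of split name tables (surnames, firstnames, initials, ...)
-- -- detail.short   : "Hag13" (start of the surname plus two year digits)
-- -- detail.pages   : { "1", "10" } when the pages field was "1-10"
-- -- detail.keyword : { tex = true, context = true } from "tex,context"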
-
-function commands.addbtxentry(name,settings,content)
- local dataset = datasets[name]
- if dataset then
- publications.addtexentry(dataset,settings,content)
- end
-end
-
-function commands.setbtxdataset(name)
- local dataset = rawget(datasets,name)
- if dataset then
- context(name)
- else
- report("unknown dataset %a",name)
- end
-end
-
-function commands.setbtxentry(name,tag)
- local dataset = rawget(datasets,name)
- if dataset then
- if dataset.luadata[tag] then
- context(tag)
- else
- report("unknown tag %a in dataset %a",tag,name)
- end
- else
- report("unknown dataset %a",name)
- end
-end
-
--- rendering of fields (maybe multiple manipulators)
-
-local manipulation = utilities.parsers.manipulation
-local manipulators = utilities.parsers.manipulators
-
--- local function checked(field)
--- local m, f = lpegmatch(manipulation,field)
--- if m then
--- return manipulators[m], f or field
--- else
--- return nil, field
--- end
--- end
-
-local manipulation = Ct((C((1-P("->"))^1) * P("->"))^1) * C(P(1)^0)
-
-local function checked(field)
- local m, f = lpegmatch(manipulation,field)
- if m then
- return m, f or field
- else
- return nil, field
- end
-end
-
-local function manipulated(actions,str)
- for i=1,#actions do
- local action = manipulators[actions[i]]
- if action then
- str = action(str) or str
- end
- end
- return str
-end
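
-- A quick sketch of the chained form used by the flush commands below: the
-- trailing part is the field name, the listed manipulators are applied to its
-- value in the given order (the strings are made up):

local demo_actions, demo_field = checked("uppercase->stripperiod->title")
-- demo_actions : { "uppercase", "stripperiod" }
-- demo_field   : "title"
local demo_value = manipulated(demo_actions,"a title.") -- "A TITLE"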
-
-function commands.btxflush(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local fields = dataset.luadata[tag]
- if fields then
- local manipulator, field = checked(field)
- local value = fields[field]
- if type(value) == "string" then
- -- context(manipulator and manipulator(value) or value)
- context(manipulator and manipulated(manipulator,value) or value)
- return
- end
- local details = dataset.details[tag]
- if details then
- local value = details[field]
- if type(value) == "string" then
- -- context(manipulator and manipulator(value) or value)
- context(manipulator and manipulated(manipulator,value) or value)
- return
- end
- end
- report("unknown field %a of tag %a in dataset %a",field,tag,name)
- else
- report("unknown tag %a in dataset %a",tag,name)
- end
- else
- report("unknown dataset %a",name)
- end
-end
-
-function commands.btxdetail(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local details = dataset.details[tag]
- if details then
- local manipulator, field = checked(field)
- local value = details[field]
- if type(value) == "string" then
- -- context(manipulator and manipulator(value) or value)
- context(manipulator and manipulated(manipulator,value) or value)
- else
- report("unknown detail %a of tag %a in dataset %a",field,tag,name)
- end
- else
- report("unknown tag %a in dataset %a",tag,name)
- end
- else
- report("unknown dataset %a",name)
- end
-end
-
-function commands.btxfield(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local fields = dataset.luadata[tag]
- if fields then
- local manipulator, field = checked(field)
- local value = fields[field]
- if type(value) == "string" then
- -- context(manipulator and manipulator(value) or value)
- context(manipulator and manipulated(manipulator,value) or value)
- else
- report("unknown field %a of tag %a in dataset %a",field,tag,name)
- end
- else
- report("unknown tag %a in dataset %a",tag,name)
- end
- else
- report("unknown dataset %a",name)
- end
-end
-
--- testing: to be sped up with a testcase
-
-function commands.btxdoifelse(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local data = dataset.luadata[tag]
- local value = data and data[field]
- if value and value ~= "" then
- ctx_firstoftwoarguments()
- return
- end
- end
- ctx_secondoftwoarguments()
-end
-
-function commands.btxdoif(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local data = dataset.luadata[tag]
- local value = data and data[field]
- if value and value ~= "" then
- ctx_firstofoneargument()
- return
- end
- end
- ctx_gobbleoneargument()
-end
-
-function commands.btxdoifnot(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local data = dataset.luadata[tag]
- local value = data and data[field]
- if value and value ~= "" then
- ctx_gobbleoneargument()
- return
- end
- end
- ctx_firstofoneargument()
-end
-
--- -- alternative approach: keep data at the tex end
-
-function publications.listconcat(t)
- local n = #t
- if n > 0 then
- context(t[1])
- if n > 1 then
- if n > 2 then
- for i=2,n-1 do
- ctx_btxlistparameter("sep")
- context(t[i])
- end
- ctx_btxlistparameter("finalsep")
- else
- ctx_btxlistparameter("lastsep")
- end
- context(t[n])
- end
- end
-end
-
-function publications.citeconcat(t)
- local n = #t
- if n > 0 then
- context(t[1])
- if n > 1 then
- if n > 2 then
- for i=2,n-1 do
- ctx_btxcitevariantparameter("sep")
- context(t[i])
- end
- ctx_btxcitevariantparameter("finalsep")
- else
- ctx_btxcitevariantparameter("lastsep")
- end
- context(t[n])
- end
- end
-end
-
-function publications.singularorplural(singular,plural)
- if lastconcatsize and lastconcatsize > 1 then
- context(plural)
- else
- context(singular)
- end
-end
-
--- function commands.makebibauthorlist(settings) -- ?
--- if not settings then
--- return
--- end
--- local dataset = datasets[settings.dataset]
--- if not dataset or dataset == "" then
--- return
--- end
--- local tag = settings.tag
--- if not tag or tag == "" then
--- return
--- end
--- local asked = settings_to_array(tag)
--- if #asked == 0 then
--- return
--- end
--- local compress = settings.compress
---     local interaction  = settings.interaction == v_start
--- local limit = tonumber(settings.limit)
--- local found = { }
--- local hash = { }
--- local total = 0
--- local luadata = dataset.luadata
--- for i=1,#asked do
--- local tag = asked[i]
--- local data = luadata[tag]
--- if data then
--- local author = data.a or "Xxxxxxxxxx"
--- local year = data.y or "0000"
--- if not compress or not hash[author] then
--- local t = {
--- author = author,
--- name = name, -- first
--- year = { [year] = name },
--- }
--- total = total + 1
--- found[total] = t
--- hash[author] = t
--- else
--- hash[author].year[year] = name
--- end
--- end
--- end
--- for i=1,total do
--- local data = found[i]
--- local author = data.author
--- local year = table.keys(data.year)
--- table.sort(year)
--- if interaction then
--- for i=1,#year do
--- year[i] = formatters["\\bibmaybeinteractive{%s}{%s}"](data.year[year[i]],year[i])
--- end
--- end
--- ctx_setvalue("currentbibyear",concat(year,","))
--- if author == "" then
--- ctx_setvalue("currentbibauthor","")
--- else -- needs checking
--- local authors = settings_to_array(author) -- {{}{}},{{}{}}
--- local nofauthors = #authors
--- if nofauthors == 1 then
--- if interaction then
--- author = formatters["\\bibmaybeinteractive{%s}{%s}"](data.name,author)
--- end
--- ctx_setvalue("currentbibauthor",author)
--- else
--- limit = limit or nofauthors
--- if interaction then
--- for i=1,#authors do
--- authors[i] = formatters["\\bibmaybeinteractive{%s}{%s}"](data.name,authors[i])
--- end
--- end
--- if limit == 1 then
--- ctx_setvalue("currentbibauthor",authors[1] .. "\\bibalternative{otherstext}")
--- elseif limit == 2 and nofauthors == 2 then
--- ctx_setvalue("currentbibauthor",concat(authors,"\\bibalternative{andtext}"))
--- else
--- for i=1,limit-1 do
--- authors[i] = authors[i] .. "\\bibalternative{namesep}"
--- end
--- if limit < nofauthors then
--- authors[limit+1] = "\\bibalternative{otherstext}"
--- ctx_setvalue("currentbibauthor",concat(authors,"",1,limit+1))
--- else
--- authors[limit-1] = authors[limit-1] .. "\\bibalternative{andtext}"
--- ctx_setvalue("currentbibauthor",concat(authors))
--- end
--- end
--- end
--- end
--- -- the following use: currentbibauthor and currentbibyear
--- if i == 1 then
--- context.ixfirstcommand()
--- elseif i == total then
--- context.ixlastcommand()
--- else
--- context.ixsecondcommand()
--- end
--- end
--- end
-
-local patterns = { "publ-imp-%s.mkiv", "publ-imp-%s.tex" }
-
-local function failure(name)
- report("unknown library %a",name)
-end
-
-local function action(name,foundname)
- context.input(foundname)
-end
-
-function commands.loadbtxdefinitionfile(name) -- a more specific name
- commands.uselibrary {
- name = gsub(name,"^publ%-",""),
- patterns = patterns,
- action = action,
- failure = failure,
- onlyonce = false,
- }
-end
-
--- lists:
-
-publications.lists = publications.lists or { }
-local lists = publications.lists
-
-local context = context
-local structures = structures
-
-local references = structures.references
-local sections = structures.sections
-
--- per rendering
-
-local renderings = { } --- per dataset
-
-table.setmetatableindex(renderings,function(t,k)
- local v = {
- list = { },
- done = { },
- alldone = { },
- used = { },
- registered = { },
- ordered = { },
- shorts = { },
- method = v_none,
- currentindex = 0,
- }
- t[k] = v
- return v
-end)
-
--- why shorts vs tags: only for sorting
-
-function lists.register(dataset,tag,short) -- needs checking now that we split
- local r = renderings[dataset]
- if not short or short == "" then
- short = tag
- end
- if trace then
- report("registering publication entry %a with shortcut %a",tag,short)
- end
- local top = #r.registered + 1
- -- do we really need these
- r.registered[top] = tag
- r.ordered [tag] = top
- r.shorts [tag] = short
-end
-
-function lists.nofregistered(dataset)
- return #renderings[dataset].registered
-end
-
-function lists.setmethod(dataset,method)
- local r = renderings[dataset]
- r.method = method or v_none
- r.list = { }
- r.done = { }
-end
-
-local function validkeyword(dataset,tag,keyword)
- local ds = datasets[dataset]
- if not ds then
- report("unknown dataset %a",dataset)
- return
- end
- local dt = ds.details[tag]
- if not dt then
- report("no details for tag %a",tag)
- return
- end
- local kw = dt.keyword
- if kw then
--- inspect(keyword)
--- inspect(kw)
- for k in next, keyword do
- if kw[k] then
- return true
- end
- end
- end
-end
-
-function lists.collectentries(specification)
- local dataset = specification.btxdataset
- if not dataset then
- return
- end
- local rendering = renderings[dataset]
--- specification.names = "btx"
- local method = rendering.method
- if method == v_none then
- return
- end
--- method=v_local --------------------
- local result = structures.lists.filter(specification)
- --
- local keyword = specification.keyword
- if keyword and keyword ~= "" then
- keyword = settings_to_set(keyword)
- else
- keyword = nil
- end
- lists.result = result
- local section = sections.currentid()
- local list = rendering.list
- local done = rendering.done
- local alldone = rendering.alldone
- if method == v_local then
- for listindex=1,#result do
- local r = result[listindex]
- local u = r.userdata
- if u and u.btxset == dataset then
- local tag = u.btxref
- if tag and done[tag] ~= section then
- if not keyword or validkeyword(dataset,tag,keyword) then
- done[tag] = section
- alldone[tag] = true
- list[#list+1] = { tag, listindex }
- end
- end
- end
- end
- elseif method == v_global then
- for listindex=1,#result do
- local r = result[listindex]
- local u = r.userdata
- if u and u.btxset == dataset then
- local tag = u.btxref
- if tag and not alldone[tag] and done[tag] ~= section then
- if not keyword or validkeyword(dataset,tag,keyword) then
- done[tag] = section
- alldone[tag] = true
- list[#list+1] = { tag, listindex }
- end
- end
- end
- end
- elseif method == v_force then
- -- only for checking, can have duplicates, todo: collapse page numbers, although
-            -- we then also need deferred writes
- for listindex=1,#result do
- local r = result[listindex]
- local u = r.userdata
- if u and u.btxset == dataset then
- local tag = u.btxref
- if tag then
- if not keyword or validkeyword(dataset,tag,keyword) then
- list[#list+1] = { tag, listindex }
- end
- end
- end
- end
- elseif method == v_dataset then
- local luadata = datasets[dataset].luadata
- for tag, data in table.sortedhash(luadata) do
- if not keyword or validkeyword(dataset,tag,keyword) then
- list[#list+1] = { tag }
- end
- end
- end
-end
-
-lists.sorters = {
- [v_short] = function(dataset,rendering,list)
- local shorts = rendering.shorts
- local function compare(a,b)
- local aa, bb = a and a[1], b and b[1]
- if aa and bb then
- aa, bb = shorts[aa], shorts[bb]
- return aa and bb and aa < bb
- end
- return false
- end
- sort(list,compare)
- end,
- [v_reference] = function(dataset,rendering,list)
- local function compare(a,b)
- local aa, bb = a and a[1], b and b[1]
- if aa and bb then
- return aa and bb and aa < bb
- end
- return false
- end
- sort(list,compare)
- end,
- [v_dataset] = function(dataset,rendering,list)
- local function compare(a,b)
- local aa, bb = a and a[1], b and b[1]
- if aa and bb then
- aa, bb = list[aa].index or 0, list[bb].index or 0
- return aa and bb and aa < bb
- end
- return false
- end
- sort(list,compare)
- end,
- -- [v_default] = function(dataset,rendering,list) -- not really needed
- -- local ordered = rendering.ordered
- -- local function compare(a,b)
- -- local aa, bb = a and a[1], b and b[1]
- -- if aa and bb then
- -- aa, bb = ordered[aa], ordered[bb]
- -- return aa and bb and aa < bb
- -- end
- -- return false
- -- end
- -- sort(list,compare)
- -- end,
- [v_author] = function(dataset,rendering,list)
- local valid = publications.authors.preparedsort(dataset,list,v_author,v_editor)
- if #valid == 0 or #valid ~= #list then
- -- nothing to sort
- else
- -- if needed we can wrap compare and use the list directly but this is cleaner
- sorters.sort(valid,sortcomparer)
- for i=1,#valid do
- local v = valid[i]
- valid[i] = list[v.index]
- end
- return valid
- end
- end,
-}
-
-function lists.flushentries(dataset,sortvariant)
- local rendering = renderings[dataset]
- local list = rendering.list
- local sort = lists.sorters[sortvariant] or lists.sorters[v_default]
- if type(sort) == "function" then
- list = sort(dataset,rendering,list) or list
- end
- for i=1,#list do
- ctx_setvalue("currentbtxindex",i)
- ctx_btxhandlelistentry(list[i][1]) -- we can pass i here too ... more efficient to avoid the setvalue
- end
-end
-
-function lists.fetchentries(dataset)
- local list = renderings[dataset].list
- for i=1,#list do
- ctx_setvalue("currentbtxindex",i)
- ctx_btxchecklistentry(list[i][1])
- end
-end
-
-function lists.filterall(dataset)
- local r = renderings[dataset]
- local list = r.list
- local registered = r.registered
- for i=1,#registered do
- list[i] = { registered[i], i }
- end
-end
-
-function lists.registerplaced(dataset,tag)
- renderings[dataset].used[tag] = true
-end
-
-function lists.doifalreadyplaced(dataset,tag)
- commands.doifelse(renderings[dataset].used[tag])
-end
-
--- we ask for :tag but when we can't find it we go back
--- to look for previous definitions, and when not found again
--- we look forward
-
-local function compare(a,b)
- local aa, bb = a and a[3], b and b[3]
- return aa and bb and aa < bb
-end
-
--- maybe hash subsets
--- how efficient is this? old leftovers?
-
--- rendering ?
-
-local f_reference = formatters["r:%s:%s:%s"] -- dataset, instance (block), tag
-local f_destination = formatters["d:%s:%s:%s"] -- dataset, instance (block), tag
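--- e.g. f_destination("standard",2,"knuth1984") produces "d:standard:2:knuth1984" (the
--- tag is only an example); resolving below first tries the current block, then earlier
--- blocks, then the generic "*" block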
-
-function lists.resolve(dataset,reference) -- maybe already feed it split
- -- needs checking (the prefix in relation to components)
- local subsets = nil
- local block = tex.count.btxblock
- local collected = references.collected
- local prefix = nil -- todo: dataset ?
- if prefix and prefix ~= "" then
- subsets = { collected[prefix] or collected[""] }
- else
- local components = references.productdata.components
- local subset = collected[""]
- if subset then
- subsets = { subset }
- else
- subsets = { }
- end
- for i=1,#components do
- local subset = collected[components[i]]
- if subset then
- subsets[#subsets+1] = subset
- end
- end
- end
--- inspect(subsets)
- if #subsets > 0 then
- local result, nofresult, done = { }, 0, { }
- for i=1,#subsets do
- local subset = subsets[i]
- for rest in gmatch(reference,"[^, ]+") do
- local blk, tag, found = block, nil, nil
- if block then
- tag = f_destination(dataset,blk,rest)
- found = subset[tag]
- if not found then
- for i=block-1,1,-1 do
-                            tag = f_destination(dataset,i,rest)
--- tag = i .. ":" .. rest
- found = subset[tag]
- if found then
- blk = i
- break
- end
- end
- end
- end
- if not found then
- blk = "*"
- tag = f_destination(dataset,blk,rest)
- found = subset[tag]
- end
- if found then
- local current = tonumber(found.entries and found.entries.text) -- tonumber needed
- if current and not done[current] then
- nofresult = nofresult + 1
- result[nofresult] = { blk, rest, current }
- done[current] = true
- end
- end
- end
- end
- local first, last, firsti, lasti, firstr, lastr
- local collected, nofcollected = { }, 0
- for i=1,nofresult do
- local r = result[i]
- local current = r[3]
- if not first then
- first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r
- elseif current == last + 1 then
- last, lasti, lastr = current, i, r
- else
- if last > first + 1 then
- nofcollected = nofcollected + 1
- collected[nofcollected] = { firstr, lastr }
- else
- nofcollected = nofcollected + 1
- collected[nofcollected] = firstr
- if last > first then
- nofcollected = nofcollected + 1
- collected[nofcollected] = lastr
- end
- end
- first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r
- end
- end
- if first and last then
- if last > first + 1 then
- nofcollected = nofcollected + 1
- collected[nofcollected] = { firstr, lastr }
- else
- nofcollected = nofcollected + 1
- collected[nofcollected] = firstr
- if last > first then
- nofcollected = nofcollected + 1
- collected[nofcollected] = lastr
- end
- end
- end
- if nofcollected > 0 then
--- inspect(reference)
--- inspect(result)
--- inspect(collected)
- for i=1,nofcollected do
- local c = collected[i]
- if i == nofcollected then
- ctx_btxlistvariantparameter("lastpubsep")
- elseif i > 1 then
- ctx_btxlistvariantparameter("pubsep")
- end
-                    if #c == 3 then -- a single entry: { block, tag, number }
-                        ctx_btxdirectlink(f_reference(dataset,c[1],c[2]),c[3])
-                    else -- a range: { first, last }, each a { block, tag, number } triplet
-                        local f, l = c[1], c[2]
-                        ctx_btxdirectlink(f_reference(dataset,f[1],f[2]),f[3])
-                        context.endash() -- to do
-                        ctx_btxdirectlink(f_reference(dataset,l[1],l[2]),l[3])
- end
- end
- else
- context("[btx error 1]")
- end
- else
- context("[btx error 2]")
- end
-end
-
-local done = { }
-
-function commands.btxreference(dataset,block,tag,data)
- local ref = f_reference(dataset,block,tag)
- if not done[ref] then
- done[ref] = true
--- context("<%s>",data)
- ctx_dodirectfullreference(ref,data)
- end
-end
-
-local done = { }
-
-function commands.btxdestination(dataset,block,tag,data)
- local ref = f_destination(dataset,block,tag)
- if not done[ref] then
- done[ref] = true
--- context("<<%s>>",data)
- ctx_dodirectfullreference(ref,data)
- end
-end
-
-commands.btxsetlistmethod = lists.setmethod
-commands.btxresolvelistreference = lists.resolve
-commands.btxregisterlistentry = lists.registerplaced
-commands.btxaddtolist = lists.addentry
-commands.btxcollectlistentries = lists.collectentries
-commands.btxfetchlistentries = lists.fetchentries
-commands.btxflushlistentries = lists.flushentries
-commands.btxdoifelselistentryplaced = lists.doifalreadyplaced
-
-local citevariants = { }
-publications.citevariants = citevariants
-
--- helper
-
-local function sortedtags(dataset,list,sorttype)
- local luadata = datasets[dataset].luadata
- local valid = { }
- for i=1,#list do
- local tag = list[i]
- local entry = luadata[tag]
- if entry then
- local key = entry[sorttype]
- if key then
- valid[#valid+1] = {
- tag = tag,
- split = sortsplitter(sortstripper(key))
- }
- else
- end
- end
- end
- if #valid == 0 or #valid ~= #list then
- return list
- else
- sorters.sort(valid,basicsorter)
- for i=1,#valid do
- valid[i] = valid[i].tag
- end
- return valid
- end
-end
-
--- todo: standard : current
-
-local prefixsplitter = lpeg.splitat("::")
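--- a cite reference can be prefixed with a dataset name, e.g. "mybib::knuth1984" (both
--- names are only examples); without the "::" prefix the current dataset is used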
-
-function commands.btxhandlecite(dataset,tag,mark,variant,sorttype,setup) -- variant for tracing
- local prefix, rest = lpegmatch(prefixsplitter,tag)
- if rest then
- dataset = prefix
- else
- rest = tag
- end
- ctx_setvalue("currentbtxdataset",dataset)
- local tags = settings_to_array(rest)
- if #tags > 0 then
- if sorttype and sorttype ~= "" then
- tags = sortedtags(dataset,tags,sorttype)
- end
- ctx_btxcitevariantparameter(v_left)
- for i=1,#tags do
- local tag = tags[i]
- ctx_setvalue("currentbtxtag",tag)
- if i > 1 then
- ctx_btxcitevariantparameter(v_middle)
- end
- if mark ~= false then
- ctx_btxdomarkcitation(dataset,tag)
- end
- ctx_directsetup(setup) -- cite can become alternative
- end
- ctx_btxcitevariantparameter(v_right)
- else
- -- error
- end
-end
-
-function commands.btxhandlenocite(dataset,tag,mark)
- if mark ~= false then
- local prefix, rest = lpegmatch(prefixsplitter,tag)
- if rest then
- dataset = prefix
- else
- rest = tag
- end
- ctx_setvalue("currentbtxdataset",dataset)
- local tags = settings_to_array(rest)
- for i=1,#tags do
- ctx_btxdomarkcitation(dataset,tags[i])
- end
- end
-end
-
-function commands.btxcitevariant(dataset,block,tags,variant)
- local action = citevariants[variant] or citevariants.default
- if action then
- action(dataset,tags,variant)
- end
-end
-
-function citevariants.default(dataset,tags,variant)
- local content = getfield(dataset,tags,variant)
- if content then
- context(content)
- end
-end
-
--- todo : sort
--- todo : choose between publications or commands namespace
--- todo : use details.author
--- todo : sort details.author
-
-local function collectauthoryears(dataset,tags)
- local luadata = datasets[dataset].luadata
- local list = settings_to_array(tags)
- local found = { }
- local result = { }
- local order = { }
- for i=1,#list do
- local tag = list[i]
- local entry = luadata[tag]
- if entry then
- local year = entry.year
- local author = entry.author
- if author and year then
- local a = found[author]
- if not a then
- a = { }
- found[author] = a
- order[#order+1] = author
- end
- local y = a[year]
- if not y then
- y = { }
- a[year] = y
- end
- y[#y+1] = tag
- end
- end
- end
- -- found = { author = { year_1 = { e1, e2, e3 } } }
- for i=1,#order do
- local author = order[i]
- local years = found[author]
- local yrs = { }
- for year, entries in next, years do
- if subyears then
- -- -- add letters to all entries of an author and if so shouldn't
- -- -- we tag all years of an author as soon as we do this?
- -- if #entries > 1 then
- -- for i=1,#years do
- -- local entry = years[i]
- -- -- years[i] = year .. string.char(i + string.byte("0") - 1)
- -- end
- -- end
- else
- yrs[#yrs+1] = year
- end
- end
- result[i] = { author = author, years = yrs }
- end
- return result, order
-end
-
--- (name, name and name) .. how names? how sorted?
--- todo: we loop at the tex end .. why not here
--- \cite[{hh,afo},kvm]
-
--- maybe we will move this tex anyway
-
-function citevariants.author(dataset,tags)
- local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
- publications.citeconcat(order)
-end
-
-local function authorandyear(dataset,tags,formatter)
- local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
- for i=1,#result do
- local r = result[i]
- order[i] = formatter(r.author,r.years) -- reuse order
- end
- publications.citeconcat(order)
-end
-
-function citevariants.authoryear(dataset,tags)
- authorandyear(dataset,tags,formatters["%s (%, t)"])
-end
-
-function citevariants.authoryears(dataset,tags)
- authorandyear(dataset,tags,formatters["%s, %, t"])
-end
-
-function citevariants.authornum(dataset,tags)
- local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
- publications.citeconcat(order)
- ctx_btxcitevariantparameter(v_inbetween)
- lists.resolve(dataset,tags) -- left/right ?
-end
-
--- function citevariants.short(dataset,tags)
--- local short = getdetail(dataset,tags,"short")
--- if short then
--- context(short)
--- end
--- end
-
-function citevariants.short(dataset,tags)
- local short = getdetail(dataset,tags,"short")
- local suffix = getdetail(dataset,tags,"suffix")
-    if short and suffix then
- context(short .. suffix)
- elseif short then
- context(short)
- end
-end
-
-function citevariants.page(dataset,tags)
- local pages = getdetail(dataset,tags,"pages")
- if not pages then
- -- nothing
- elseif type(pages) == "table" then
- context(pages[1])
- ctx_btxcitevariantparameter(v_inbetween)
- context(pages[2])
- else
- context(pages)
- end
-end
-
-function citevariants.num(dataset,tags)
--- ctx_btxdirectlink(f_destination(dataset,block,tags),listindex) -- not okay yet
- lists.resolve(dataset,tags)
-end
-
-function citevariants.serial(dataset,tags) -- the traditional fieldname is "serial" and not "index"
- local index = getfield(dataset,tags,"index")
- if index then
- context(index)
- end
-end
-
--- List variants
-
-local listvariants = { }
-publications.listvariants = listvariants
-
--- function commands.btxhandlelist(dataset,block,tag,variant,setup)
--- if sorttype and sorttype ~= "" then
--- tags = sortedtags(dataset,tags,sorttype)
--- end
--- ctx_setvalue("currentbtxtag",tag)
--- ctx_btxlistvariantparameter(v_left)
--- ctx_directsetup(setup)
--- ctx_btxlistvariantparameter(v_right)
--- end
-
-function commands.btxlistvariant(dataset,block,tags,variant,listindex)
- local action = listvariants[variant] or listvariants.default
- if action then
- action(dataset,block,tags,variant,tonumber(listindex) or 0)
- end
-end
-
-function listvariants.default(dataset,block,tags,variant)
- context("?")
-end
-
-function listvariants.num(dataset,block,tags,variant,listindex)
- ctx_btxdirectlink(f_destination(dataset,block,tags),listindex) -- not okay yet
-end
-
--- function listvariants.short(dataset,block,tags,variant,listindex)
--- local short = getdetail(dataset,tags,variant,variant)
--- if short then
--- context(short)
--- end
--- end
-
-function listvariants.short(dataset,block,tags,variant,listindex)
- local short = getdetail(dataset,tags,"short","short")
- local suffix = getdetail(dataset,tags,"suffix","suffix")
-    if short and suffix then
- context(short .. suffix)
- elseif short then
- context(short)
- end
-end
diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv
deleted file mode 100644
index 42226695c..000000000
--- a/tex/context/base/publ-ini.mkiv
+++ /dev/null
@@ -1,963 +0,0 @@
-%D \module
-%D [ file=publ-ini,
-%D version=2013.05.12,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=Initialization,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-% todo: we cannot use 'default' as this wipes metadata names (maybe no longer do that)
-% todo: \v!cite => \s!cite
-% todo: interface with (ml)bibtex (export -> call -> import)
-% todo: check if 'all' etc are ok ... either use list or use other criterium
-
-% \definecolor[btx:field] [darkred]
-% \definecolor[btx:crossref][darkblue]
-% \definecolor[btx:key] [darkgreen]
-% \definecolor[btx:todo] [darkyellow]
-
-%D We operate on several axes:
-%D
-%D \startitemize[packed]
-%D \startitem we can have several databases (or combinations) \stopitem
-%D \startitem we can add entries to them if needed (coded in tex) \stopitem
-%D \startitem we can have several lists each using one of the databases \stopitem
-%D \startitem we can render each list or citation independently \stopitem
-%D \stopitemize
-%D
-%D We assume that the rendering of a list entry is consistent in a document,
-%D although one can redefine properties if needed. Adding more granularity would
-%D complicate the user interface beyond comprehension.
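-%D
-%D A minimal usage sketch (the bibliography file and the tag are only examples):
-%D
-%D \starttyping
-%D \usebtxdataset[standard][mybibs.bib]
-%D
-%D \starttext
-%D     As shown by \cite[authoryear][standard::knuth1984], ...
-%D     \completebtxrendering[standard]
-%D \stoptext
-%D \stoptyping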
-
-\writestatus{loading}{ConTeXt Publication Support / Initialization}
-
-\registerctxluafile{publ-dat}{1.001}
-\registerctxluafile{publ-aut}{1.001}
-\registerctxluafile{publ-usr}{1.001}
-\registerctxluafile{publ-ini}{1.001}
-\registerctxluafile{publ-oth}{1.001} % this could become an option
-
-\unprotect
-
-\def\s!btx{btx}
-\def\v!btxlist{btxlist}
-
-% a dedicated construction mechanism
-
-\installcorenamespace {btxlist}
-
-\installcommandhandler \??btxlist {btxlist} \??btxlist
-
-\unexpanded\setvalue{\??constructioninitializer\v!btxlist}%
- {\let\currentbtxlist \currentconstruction
- \let\constructionparameter \btxlistparameter
- \let\detokenizedconstructionparameter\detokenizedbtxlistparameter
- \let\letconstructionparameter \letbtxlistparameter
- \let\useconstructionstyleandcolor \usebtxliststyleandcolor
- \let\setupcurrentconstruction \setupcurrentbtxlist}
-
-\expandafter\let\csname\??constructionmainhandler \v!btxlist\expandafter\endcsname\csname\??constructionmainhandler \v!construction\endcsname
-\expandafter\let\csname\??constructioncommandhandler\v!btxlist\expandafter\endcsname\csname\??constructioncommandhandler\v!construction\endcsname
-\expandafter\let\csname\??constructiontexthandler \v!btxlist\expandafter\endcsname\csname\??constructiontexthandler \v!construction\endcsname
-
-\unexpanded\setvalue{\??constructioncommandhandler\v!btxlist}%
- {\csname\??constructionstarthandler\v!construction\endcsname
- \csname\??constructionstophandler \v!construction\endcsname
- \endgroup}
-
-\unexpanded\setvalue{\??constructionstarthandler\v!btxlist}%
- {\csname\??constructionstarthandler\v!construction\endcsname}
-
-\unexpanded\setvalue{\??constructionstophandler\v!btxlist}%
- {\csname\??constructionstophandler\v!construction\endcsname
- \endgroup}
-
-\unexpanded\def\startbtxlistentry#1%
- {\begingroup
- \strc_constructions_initialize{#1}%
- \csname\??constructionstarthandler\currentconstructionhandler\endcsname}
-
-\unexpanded\def\stopbtxlistentry
- {\csname\??constructionstophandler\currentconstructionhandler\endcsname}
-
-\unexpanded\setvalue{\??constructiontexthandler\v!btxlist}%
- {\begingroup
- \useconstructionstyleandcolor\c!headstyle\c!headcolor % move to \currentconstructiontext
- \the\everyconstruction
- \constructionparameter\c!headcommand
- {\strut
- \constructionparameter\c!text
- \btx_reference_inject}%
- \endgroup}
-
-\unexpanded\def\strc_constructions_initialize#1% class instance
- {\edef\currentconstruction{#1}%
- \let\currentconstructionlistentry\!!zerocount
- \expandafter\let\expandafter\currentconstructionmain \csname\??constructionmain \currentconstruction\endcsname
- \expandafter\let\expandafter\currentconstructionlevel \csname\??constructionlevel\currentconstruction\endcsname
- \expandafter\let\expandafter\currentconstructionhandler\csname\??constructionclass\currentconstruction\endcsname
- \csname\??constructioninitializer\currentconstructionhandler\endcsname}
-
-\appendtoks
- % \ifx\currentbtxlistparent\empty
- % \defineconstruction[\currentbtxlist][\currentbtxlistparent][\s!handler=\v!btxlist,\c!level=1]%
- % \else
- % \defineconstruction[\currentbtxlist][\s!handler=\v!btxlist,\c!level=1]%
- % \fi
- \ifx\currentbtxlistparent\empty
- \letvalue{\??constructionmain\currentbtxlist}\currentbtxlist
- \else
- \letvalue{\??constructionmain\currentbtxlist}\currentbtxlistparent
- \fi
- \setevalue{\??constructionlevel\currentbtxlist}{\number\btxlistparameter\c!level}%
- \setevalue{\??constructionclass\currentbtxlist}{\btxlistparameter\s!handler}%
-\to \everydefinebtxlist
-
-\setupbtxlist
- [\s!handler=\v!btxlist,
- \c!level=1]
-
-\setupbtxlist
- [\c!alternative=\v!left,
- \c!headstyle=,
- \c!titlestyle=,
- %\c!style=,
- %\c!color=,
- %\c!headcolor=,
- %\c!titlecolor=,
- \c!width=4\emwidth,
- \c!distance=\emwidth,
- %\c!titledistance=.5\emwidth,
- %\c!hang=,
- %\c!sample=,
- %\c!align=,
- %\c!headalign=,
- \c!margin=\v!no,
- \c!before=\blank,
- \c!inbetween=\blank,
- \c!after=\blank,
- %\c!indentnext=\v!yes,
- %\c!indenting=\v!never,
- %\c!titleleft=(,
- %\c!titleright=),
- %\c!closesymbol=,
- %\c!closecommand=\wordright,
- \c!display=\v!yes,
- \c!command=,
- %\c!titlecommand=,
- %\c!expansion=\v!no,
- %\c!xmlsetup=,
- %\s!catcodes=,
- %\c!title=\v!yes,
- %\c!text=,
- ]
-
-% here starts the bib stuff
-
-\installcorenamespace {btxdataset}
-\installcorenamespace {btxlistvariant}
-\installcorenamespace {btxcitevariant}
-\installcorenamespace {btxrendering}
-\installcorenamespace {btxcommand}
-\installcorenamespace {btxnumbering}
-
-\installcommandhandler \??btxdataset {btxdataset} \??btxdataset
-\installcommandhandler \??btxlistvariant {btxlistvariant} \??btxlistvariant
-\installcommandhandler \??btxcitevariant {btxcitevariant} \??btxcitevariant
-\installcommandhandler \??btxrendering {btxrendering} \??btxrendering
-
-\unexpanded\def\usebtxdataset
- {\dodoubleargument\publ_use_dataset}
-
-\def\publ_use_dataset[#1][#2]%
- {\ifsecondargument
- \ctxcommand{usebtxdataset("#1","#2")}%
- \else
- \ctxcommand{usebtxdataset("\v!standard","#1")}%
- \fi}
-
-\definebtxdataset
- [\v!standard]
-
-% \usebtxdataset
-% [standard]
-% [mybibs.bib]
-
-\unexpanded\def\startpublication
- {\dodoubleempty\publ_set_publication}
-
-\let\stoppublication\relax
-
-\def\publ_set_publication[#1][#2]%
- {\begingroup
- \catcode\commentasciicode\othercatcode
- \ifsecondargument
- \expandafter\publ_set_publication_indeed
- \else\iffirstargument
- \doubleexpandafter\publ_set_publication_checked
- \else
- \doubleexpandafter\publ_set_publication_default
- \fi\fi{#1}{#2}}
-
-\def\publ_set_publication_default#1#2%
- {\publ_set_publication_indeed\v!standard{#1}}
-
-\def\publ_set_publication_checked#1#2%
- {\doifassignmentelse{#1}
- {\publ_set_publication_indeed\v!standard{#1}}
- {\publ_set_publication_indeed{#1}{}}}
-
-\def\publ_set_publication_indeed#1#2#3\stoppublication
- {\ctxcommand{addbtxentry("#1",\!!bs#2\!!es,\!!bs\detokenize{#3}\!!es)}%
- \endgroup
- \ignorespaces}
-
-% commands
-
-\unexpanded\def\btxcommand#1%
- {\ifcsname\??btxcommand#1\endcsname
- \expandafter\publ_command_yes
- \else
- \expandafter\publ_command_nop
- \fi{#1}}
-
-\let\btxcmd\btxcommand
-
-\def\publ_command_yes#1%
- {\csname\??btxcommand#1\endcsname}
-
-\def\publ_command_nop#1%
- {\ifcsname#1\endcsname
- \writestatus\m!publications{unknown command: #1, using built-in context variant #1}%
- %\setuxvalue{\??btxcommand#1}{\expandafter\noexpand\csname#1\endcsname}%
- \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname#1\endcsname
- \else\ifcsname\utfupper{#1}\endcsname
- \writestatus\m!publications{unknown command: #1, using built-in context variant \utfupper{#1}}%
- %\setuxvalue{\??btxcommand#1}{\expandafter\noexpand\csname\utfupper{#1}\endcsname}%
- \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname\utfupper{#1}\endcsname
- \else
- \writestatus\m!publications{unknown command: #1}%
- \setugvalue{\??btxcommand#1}{\underbar{\tttf#1}}%
- \fi\fi
- \publ_command_yes{#1}}
-
-\unexpanded\def\definebtxcommand#1% {body} #1..#n{body}
- {\setuvalue{\??btxcommand\strippedcsname#1}}%
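-
-% A hypothetical example (\MySC is not predefined): let \btxcommand{MySC}, as used in
-% a database entry, typeset its argument in small caps:
-%
-% \definebtxcommand\MySC#1{{\sc #1}}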
-
-% access
-
-\let\currentbtxtag \empty
-\let\currentbtxdataset\v!standard
-
-\unexpanded\def\setbtxdataset[#1]%
- {\edef\currentbtxdataset{\ctxcommand{setbtxdataset("#1")}}}
-
-\unexpanded\def\setbtxentry[#1]%
- {\edef\currentbtxtag{\ctxcommand{setbtxentry("\currentbtxdataset","#1")}}}
-
-% \let\btxsetdataset\setbtxdataset
-% \let\btxsetentry \setbtxentry
-
-\def\btxfield #1{\ctxcommand{btxfield("\currentbtxdataset","\currentbtxtag","#1")}}
-\def\btxdetail #1{\ctxcommand{btxdetail("\currentbtxdataset","\currentbtxtag","#1")}}
-\def\btxflush #1{\ctxcommand{btxflush("\currentbtxdataset","\currentbtxtag","#1")}}
-%def\btxrendering#1{\ctxcommand{btxrendering("\currentbtxdataset","\currentbtxtag","#1","\btxrenderingparameter\c!interaction")}}
-\def\btxdoifelse #1{\ctxcommand{btxdoifelse("\currentbtxdataset","\currentbtxtag","#1")}}
-\def\btxdoif #1{\ctxcommand{btxdoif("\currentbtxdataset","\currentbtxtag","#1")}}
-\def\btxdoifnot #1{\ctxcommand{btxdoifnot("\currentbtxdataset","\currentbtxtag","#1")}}
-
-\let\btxsetup \directsetup
-
-%D How complex will we go? Can we assume that e.g. an apa style will not be mixed
-%D with another one? I think this assumption is okay. For manuals we might want to
-%D mix but we can work around it.
-
-%D Rendering.
-
-\unexpanded\def\btxspace {\removeunwantedspaces\space}
-\unexpanded\def\btxperiod {\removeunwantedspaces.\space}
-\unexpanded\def\btxcomma {\removeunwantedspaces,\space}
-\unexpanded\def\btxlparent {\removeunwantedspaces\space(}
-\unexpanded\def\btxrparent {\removeunwantedspaces)\space}
-\unexpanded\def\btxlbracket{\removeunwantedspaces\space[}
-\unexpanded\def\btxrbracket{\removeunwantedspaces]\space}
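-
-% e.g. "foo \btxcomma bar" renders as "foo, bar": the unwanted space before the comma
-% is removed and a single space is added after it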
-
-%D Rendering lists and citations.
-
-\newconditional\c_btx_trace
-
-\installtextracker
- {btxrendering}
- {\settrue \c_btx_trace}
- {\setfalse\c_btx_trace}
-
-\unexpanded\def\startbtxrendering
- {\begingroup
- \dosingleempty\btx_start_rendering}
-
-\def\btx_start_rendering[#1]%
- {\edef\currentbtxrendering{#1}}
-
-\unexpanded\def\stopbtxrendering
- {\endgroup}
-
-\unexpanded\def\btxtodo#1%
- {[#1]}
-
-%D Specific rendering definitions (like apa):
-
-\unexpanded\def\loadbtxdefinitionfile[#1]%
- {\ctxcommand{loadbtxdefinitionfile("#1")}}
-
-%D Lists:
-
-\newdimen\d_publ_number_width
-%newdimen\d_publ_number_distance
-
-\ifdefined\btxblock \else \newcount\btxblock \fi \btxblock\plusone
-\ifdefined\btxcounter \else \newcount\btxcounter \fi
-
-\newtoks \everysetupbtxlistplacement % name will change
-\newtoks \everysetupbtxciteplacement % name will change
-
-% \def\publ_list_processor % bibref -> btx (old method, keep as reference)
-% {\ctxcommand{btxaddtolist("\currentbtxrendering",\currentlistindex,"btxref")}}
-
-\definelist % only used for selecting
- [btx]
-
-\setuplist
- [btx]%
- [\c!state=\v!start]%
-
-\appendtoks
- \ifx\currentbtxrenderingparent\empty
- \definebtxlist
- [\currentbtxrendering]%
- \else
- \definebtxlist
- [\currentbtxrendering]%
- [\currentbtxrenderingparent]%
- \fi
-\to \everydefinebtxrendering
-
-\unexpanded\def\btx_entry_inject
- {\begingroup
- \edef\currentbtxcategory{\btxfield{category}}%
- \ignorespaces
- \directsetup{\s!btx:\currentbtxalternative:\currentbtxcategory}%
- \removeunwantedspaces
- \endgroup}
-
-\unexpanded\def\completebtxrendering{\dodoubleempty\publ_place_list_complete}
-\unexpanded\def\placebtxrendering {\dodoubleempty\publ_place_list_standard}
-
-\let\completelistofpublications\completebtxrendering
-\let\placelistofpublications \placebtxrendering
-
-\def\publ_place_list_check_criterium
- {\edef\currentbtxcriterium{\btxrenderingparameter\c!criterium}% \v!cite will become \s!cite
- \ifx\currentbtxcriterium\empty
- \let\currentbtxcriterium\v!previous
- \else\ifx\currentbtxcriterium\v!cite
- \let\currentbtxcriterium\v!here
- \fi\fi}
-
-\def\publ_place_list_complete[#1][#2]% title might become obsolete, just headtext
- {\begingroup
- \edef\currentbtxrendering{#1}%
- \setupcurrentbtxrendering[#2]%
- \let\currentlist\s!btx
- \let\currentbtxlist\currentbtxrendering
- \publ_place_list_check_criterium
- \edef\currentbtxrenderingtitle{\btxrenderingparameter\c!title}%
- \ifx\currentbtxrenderingtitle\empty
- \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\headtext{\currentbtxrendering}}]}%
- \else
- \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\currentbtxrenderingtitle}]}%
- \fi
- \publ_place_list_indeed
- \stopnamedsection
- \endgroup}
-
-\def\publ_place_list_standard[#1][#2]%
- {\begingroup
- \edef\currentbtxrendering{#1}%
- \setupcurrentbtxrendering[#2]%
- \let\currentlist\s!btx
- \let\currentbtxlist\currentbtxrendering
- \publ_place_list_check_criterium
- \publ_place_list_indeed
- \endgroup}
-
-\newconditional\c_publ_place_all
-\newconditional\c_publ_place_register % to be interfaced
-\newconditional\c_publ_place_check % to be interfaced
-
-\appendtoks
- \ifx\currentbtxcriterium\v!all % move this check to lua ... easier to test there anyway
- \settrue\c_publ_place_all
- \else
- \setfalse\c_publ_place_all
- \fi
-\to \everysetupbtxlistplacement
-
-\def\publ_place_list_indeed
- {\startbtxrendering[\currentbtxrendering]%
- \directsetup{\btxrenderingparameter\c!setups}%
- % \determinelistcharacteristics[\currentbtxrendering]%
- \edef\currentbtxalternative{\btxrenderingparameter\c!alternative}%
- \edef\currentbtxdataset{\btxrenderingparameter\c!dataset}%
- \let\currentlist\s!btx
- \let\currentbtxlist\currentbtxrendering
- \the\everysetupbtxlistplacement
- \forgetall
- \ctxcommand{btxsetlistmethod("\currentbtxdataset","\btxrenderingparameter\c!method")}%
- \startpacked[\v!blank]%
- % here we just collect items
- \ctxcommand{btxcollectlistentries {
- names = "btx",
- criterium = "\currentbtxcriterium",
- number = "\btxrenderingparameter\c!number",
- btxdataset = "\currentbtxdataset",
- keyword = "\btxrenderingparameter\c!keyword",
- }}%
- % next we analyze the width
- \ifx\btx_reference_inject_indeed\relax \else
- \edef\p_width{\btxrenderingparameter\c!width}%
- \ifx\p_width\v!auto
- \scratchcounter\btxcounter
- \setbox\scratchbox\vbox{\settrialtypesetting\ctxcommand{btxfetchlistentries("\currentbtxdataset")}}%
- \d_publ_number_width\wd\scratchbox
- \global\btxcounter\scratchcounter
- \letbtxlistparameter\c!width\d_publ_number_width
- \fi
- \fi
- % this actually typesets them
- \ctxcommand{btxflushlistentries("\currentbtxdataset","\btxrenderingparameter\c!sorttype")}%
- \stoppacked
- \stopbtxrendering
- \global\advance\btxblock\plusone}
-
-\def\currentbtxblock{\number\btxblock}
-
-\def\publ_place_list_entry_checked
- {\ctxcommand{btxdoifelselistentryplaced("\currentbtxdataset","\currentbtxtag")}\donothing\publ_place_list_entry}
-
-\def\publ_place_list_entry_register
- {\ctxcommand{btxregisterlistentry("\currentbtxdataset","\currentbtxtag")}}
-
-\unexpanded\def\btxhandlelistentry#1% called at the lua end
- {\begingroup
- \edef\currentbtxtag{#1}%
- \ifconditional\c_publ_place_all
- \publ_place_list_entry
- \else\ifconditional\c_publ_place_check
- \publ_place_list_entry_checked
- \else
- \publ_place_list_entry
- \fi\fi
- \endgroup}
-
-\unexpanded\def\publ_place_list_entry
- {\global\advance\btxcounter\plusone
- \ifconditional\c_publ_place_register
- \publ_place_list_entry_register
- \fi
- \let\currentlist\s!btx
- \startbtxlistentry\currentbtxrendering
- \btx_entry_inject
- \stopbtxlistentry}
-
-\unexpanded\def\btxchecklistentry#1% called at the lua end
- {\begingroup
- \edef\currentbtxtag{#1}%
- \ifx\currentbtxcriterium\v!all % move this check to lua ... easier to test there anyway
- \publ_check_list_entry
- \else
- \ctxcommand{btxdoifelselistentryplaced("\currentbtxdataset","\currentbtxtag")}\donothing\publ_check_list_entry
- \fi
- \endgroup}
-
-\unexpanded\def\publ_check_list_entry
- {\global\advance\btxcounter\plusone
- % todo, switch to font
- \hbox{\btx_reference_checked}%
- \par}
-
-\unexpanded\def\btx_reference_inject % we can use a faster \reference
- {\dontleavehmode\begingroup % no box
- \iftrialtypesetting\else
- \ctxcommand{btxdestination("\currentbtxdataset","\currentbtxblock","\currentbtxtag","\number\btxcounter")}%
- \fi
- \btx_reference_inject_indeed
- \endgroup}
-
-\unexpanded\def\btx_reference_checked
- {\dontleavehmode\hbox\bgroup
- \btx_reference_inject_indeed
- \egroup}
-
-\setuvalue{\??btxnumbering\v!short }{\btxlistvariant{short}} % these will be setups
-\setuvalue{\??btxnumbering\v!bib }{\btxlistvariant{num}} % these will be setups
-\setuvalue{\??btxnumbering\s!unknown}{\btxlistvariant{num}} % these will be setups
-\setuvalue{\??btxnumbering\v!yes }{\btxlistvariant{num}} % these will be setups
-
-\appendtoks
- \edef\p_btx_numbering{\btxrenderingparameter\c!numbering}%
- \letlistparameter\c!numbercommand\firstofoneargument % for the moment, no doubling needed
- \ifx\p_btx_numbering\v!no
- \letlistparameter\c!textcommand\outdented % needed? we can use titlealign
- \letlistparameter\c!symbol \v!none
- \letlistparameter\c!aligntitle \v!yes
- \let\btx_reference_inject_indeed\relax
- \else
- \ifcsname\??btxnumbering\p_btx_numbering\endcsname \else
- \let\p_btx_numbering\s!unknown
- \fi
- \letlistparameter\c!headnumber\v!always
- \expandafter\let\expandafter\btx_reference_inject_indeed\csname\??btxnumbering\p_btx_numbering\endcsname
- \fi
-\to \everysetupbtxlistplacement
-
-% \appendtoks
-% \edef\currentbtxcriterium{\btxrenderingparameter\c!criterium}%
-% \to \everysetupbtxlistplacement
-
-\unexpanded\def\btxflushauthor
- {\doifnextoptionalelse\btx_flush_author_yes\btx_flush_author_nop}
-
-\def\btx_flush_author_yes[#1]{\btx_flush_author{#1}}
-\def\btx_flush_author_nop {\btx_flush_author{\btxlistvariantparameter\c!author}}
-
-\unexpanded\def\btx_flush_author#1#2%
- {\edef\currentbtxfield{#2}%
- \let\currentbtxlistvariant\currentbtxfield
- \ctxcommand{btxauthor("\currentbtxdataset","\currentbtxtag","\currentbtxfield",{
- combiner = "#1",
- etallimit = \number\btxlistvariantparameter\c!etallimit,
- etaldisplay = \number\btxlistvariantparameter\c!etaldisplay,
- })}}
-
-\unexpanded\def\btxflushauthornormal {\btx_flush_author{normal}} % #1
-\unexpanded\def\btxflushauthornormalshort {\btx_flush_author{normalshort}} % #1
-\unexpanded\def\btxflushauthorinverted {\btx_flush_author{inverted}} % #1
-\unexpanded\def\btxflushauthorinvertedshort{\btx_flush_author{invertedshort}} % #1
-
-% \btxflushauthor{author}
-% \btxflushauthor{artauthor}
-% \btxflushauthor{editor}
-%
-% \btxflushauthor[normal]{author}
-% \btxflushauthor[normalshort]{author}
-% \btxflushauthor[inverted]{author}
-% \btxflushauthor[invertedshort]{author}
-
-% Interaction
-
-\newconditional\btxinteractive
-
-\unexpanded\def\btxdoifelseinteraction
- {\iflocation
- \edef\p_interaction{\btxcitevariantparameter\c!interaction}%
- \ifx\p_interaction\v!stop
- \doubleexpandafter\secondoftwoarguments
- \else
- \doubleexpandafter\firstoftwoarguments
- \fi
- \else
- \expandafter\secondoftwoarguments
- \fi}
-
-\appendtoks
- \iflocation
- \edef\p_interaction{\btxlistvariantparameter\c!interaction}%
- \ifx\p_interaction\v!stop
- \let\doifelsebtxinteractionelse\secondoftwoarguments
- \setfalse\btxinteractive
- \else
- \let\doifelsebtxinteractionelse\firstoftwoarguments
- \settrue\btxinteractive
- \fi
- \else
- \let\doifelsebtxinteractionelse\secondoftwoarguments
- \setfalse\btxinteractive
- \fi
-\to \everysetupbtxlistplacement
-
-% bib -> btx
-
-\unexpanded\def\btxgotolink#1[#2]{\doifreferencefoundelse{\bibrefprefix#2}{\goto{#1}[\bibrefprefix#2]}{#1}}
-\unexpanded\def\btxatlink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\at [\bibrefprefix#1]}{#1}}
-\unexpanded\def\btxinlink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\expanded{\goto{\currentreferencetext}}[\bibrefprefix#1]}{#1}}
-
-%unexpanded\def\btxdirectlink#1#2{\goto{#2 {\tttf[#1]}}[#1]} % tracing variant, overruled by the next definition
-\unexpanded\def\btxdirectlink#1#2{\goto{#2}[#1]}
-
-\let\gotobiblink\btxgotolink
-\let\atbiblink \btxatlink
-\let\inbiblink \btxinlink
-
-\unexpanded\def\btxnumberedreference[#1]% \bibtexnumref (replaced by \cite[num])
- {\dontleavehmode
- \begingroup
- \btxcitevariantparameter\v!left
- \penalty\plustenthousand % todo
- \ctxcommand{btxresolvelistreference("\currentbtxdataset","#1")}% todo: split dataset from #1, so another call
- \btxcitevariantparameter\v!right
- \endgroup}
-
-% \def\btxnumberedplaceholder[#1]% \nobibtexnumref
-% {[#1]}
-
-\appendtoks
-    % for old times' sake, for a while at least
- \let\maybeyear\gobbleoneargument
- \let\noopsort \gobbleoneargument
-\to \everysetupbtxlistplacement
-
-\appendtoks
-    % for old times' sake, for a while at least
- \let\maybeyear\gobbleoneargument
- \let\noopsort \gobbleoneargument
-\to \everysetupbtxciteplacement
-
-\appendtoks
- \doifnot{\btxrenderingparameter\c!continue}\v!yes
- {\global\btxcounter\zerocount}%
-\to \everysetupbtxlistplacement
-
-%D When a publication is cited, we need to signal that somehow. This is done with the
-%D following (non-user) command. We could tag without injecting a node but this way
-%D we also store the location, which makes it possible to ask for local lists.
-
-\newconditional\c_publ_cite_write
-
-% for reference, but split now done at the lua end
-%
-% \def\publ_cite_write#1% not used
-% {\splitstring#1\at::\to\askedbtxrendering\and\askedbtxtag
-% \ifx\askedbtxtag\empty
-% \let\currentbtxtag \askedbtxrendering
-% \else
-% \let\currentbtxtag \askedbtxtag
-% \let\currentbtxrendering\askedbtxrendering
-% \fi
-% \iftrialtypesetting \else
-% \processcommacommand[\currentbtxtag]{\publ_cite_indeed\currentbtxrendering}%
-% \fi}
-
-\def\publ_cite_indeed#1#2%
- {\expanded{\writedatatolist[btx][btxset=#1,btxref=#2]}}
-
-\def\btxdomarkcitation#1#2% called from lua end
- {\iftrialtypesetting \else
- \writedatatolist[btx][btxset=#1,btxref=#2]% \c!location=\v!here
- \fi}
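-
-% e.g. \btxdomarkcitation{standard}{knuth1984} (the tag is only an example) registers
-% the entry in the btx list so that renderings can filter on cited entries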
-
-%D \macros{cite,nocite,citation,nocitation,usecitation}
-%D
-%D The inline \type {\cite} command creates an (often) short reference to a publication
-%D and for historical reasons uses a strict test for brackets. This means, at least
-%D in the default case, that spaces are ignored in the argument scanner. The \type
-%D {\citation} command is more liberal but also gobbles following spaces. Both
-%D commands insert a reference as well as a visual clue.
-%D
-%D The \type {no} commands all do the same (they are synonyms): they make sure that
-%D a reference is injected but show nothing. However, they do create a node, so it is
-%D best to attach them to some text in order to avoid spacing interference. A slightly
-%D less efficient alternative is \type {\cite[none][tag]}.
-
-% [tags]
-% [settings|variant][tags]
-% [base::tags]
-% [settings|variant][base::tags]
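-%
-% for instance (the tag is only an example):
-%
-% \cite[knuth1984]
-% \cite[authoryear][knuth1984]
-% \cite[alternative=authoryear][knuth1984]
-% \cite[standard::knuth1984]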
-
-\unexpanded\def\btxcite
- {\dontleavehmode
- \begingroup
- \strictdoifnextoptionalelse\publ_cite_tags_options\publ_cite_tags_indeed}
-
-\unexpanded\def\publ_cite_tags_indeed#1%
- {\letinteractionparameter\c!style\empty
- \edef\currentbtxcitevariant{\btxcitevariantparameter\c!alternative}%
- \edef\currentbtxcitetag{#1}%
- \publ_cite_variant
- \endgroup}
-
-\let\publ_citation_tags_indeed\publ_cite_tags_indeed
-
-\unexpanded\def\publ_cite_tags_options[#1]%
- {\strictdoifnextoptionalelse{\publ_cite_tags_options_indeed{#1}}{\publ_cite_tags_indeed{#1}}}
-
-\unexpanded\def\publ_cite_tags_options_indeed#1[#2]%
- {\edef\currentbtxcitetag{#2}%
- \doifassignmentelse{#1}
- {\publ_cite_tags_settings_indeed{#1}}
- {\publ_cite_tags_variants_indeed{#1}}}
-
-\def\publ_cite_tags_settings_indeed#1%
- {\letinteractionparameter\c!style\empty
- %\letinteractionparameter\c!color\empty
- \getdummyparameters[\c!alternative=,\c!extras=,#1]%
- \edef\p_alternative{\dummyparameter\c!alternative}%
- \ifx\p_alternative\empty \else
- \let\currentbtxcitevariant\p_alternative
- \fi
- \setupcurrentbtxcitevariantparameters[#1]%
- \edef\p_extras{\dummyparameter\c!extras}%
- \ifx\p_extras\empty \else
- \edef\p_right{\btxcitevariantparameter\c!right}%
- \ifx\p_right\empty \else
- \setexpandedbtxcitevariantparameter\p_right{\p_extras\p_right}%
- \fi
- \fi
- \publ_cite_variant
- \endgroup}
-
-\def\publ_cite_tags_variants_indeed#1%
- {\letinteractionparameter\c!style\empty
- \edef\currentbtxcitevariant{#1}%
- \publ_cite_variant
- \endgroup}
-
-\newconditional\btxcitecompress
-
-\def\publ_cite_variant
- {\edef\p_compress{\btxcitevariantparameter\c!compress}%
- % \ifx\p_compress\v!no
- % \setfalse\btxcitecompress
- % \else
- % \settrue\btxcitecompress
- % \fi
- \begingroup
- \settrue\c_publ_cite_write
- \publ_cite_handle_variant_indeed[\currentbtxcitetag]}
-
-\unexpanded\def\publ_cite_handle_variant#1%
- {\begingroup
- \the\everysetupbtxciteplacement
- \edef\currentbtxcitevariant{#1}%
- \dosingleargument\publ_cite_handle_variant_indeed}
-
-\def\publ_cite_handle_variant_indeed[#1]%
- {\usebtxcitevariantstyleandcolor\c!style\c!color
- \letbtxcitevariantparameter\c!alternative\currentbtxcitevariant
- \ctxcommand{btxhandlecite(%
- "\currentbtxdataset",%
- "#1",%
- \iftrialtypesetting false\else true\fi,%
- "\currentbtxcitevariant",%
- "\btxcitevariantparameter\c!sorttype",%
- "\btxcitevariantparameter\c!setups"%
- )}%
- \endgroup}
-
-\unexpanded\def\btxcitation
- {\dontleavehmode
- \begingroup
- \dodoubleempty\publ_citation}
-
-\def\publ_citation[#1][#2]% could be made more efficient but not now
- {\ifsecondargument
- \publ_cite_tags_options_indeed{#1}[#2]%
- \else
- \publ_cite_tags_indeed{#1}%
- \fi}
-
-\unexpanded\def\btxnocite
- {\dosingleempty\publ_cite_no}
-
-\unexpanded\def\publ_cite_no[#1]%
- {\iftrialtypesetting \else
- \ctxcommand{btxhandlenocite("\currentbtxdataset","#1",true)}%
- \fi}
-
-%D Compatibility:
-
-\let\cite \btxcite
-\let\citation \btxcitation
-\let\nocite \btxnocite
-\let\nocitation \btxnocite
-\let\usepublication\btxnocite
-
-%D Cite helpers:
-
-\unexpanded\def\btxcitevariant#1%
- {\ctxcommand{btxcitevariant("\currentbtxdataset","\currentbtxblock","\currentbtxtag","#1")}}
-
-%D List helpers:
-
-\def\currentbtxindex{0}
-
-\unexpanded\def\btxlistvariant#1% was \currentbtxindex
- {\begingroup
- \edef\currentbtxlistvariant{#1}%
- \btxlistvariantparameter\c!left
- \ctxcommand{btxlistvariant("\currentbtxdataset","\currentbtxblock","\currentbtxtag","#1","\number\btxcounter")}% some can go
- \btxlistvariantparameter\c!right
- \endgroup}
-
-%D Whatever helpers:
-
-\unexpanded\def\btxsingularplural#1%
- {\ctxcommand{btxsingularorplural("\currentbtxdataset","\currentbtxtag","#1")}}
-
-\let\btxsingularorplural\btxsingularplural
-
-%D Loading variants:
-
-\appendtoks
- \loadbtxdefinitionfile[\btxrenderingparameter\c!alternative]
-\to \everysetupbtxrendering
-
-%D Defaults:
-
-\setupbtxrendering
- [\c!dataset=\v!standard,
- \c!method=\v!global,
- \c!setups=btx:rendering:\btxrenderingparameter\c!alternative,
- \c!alternative=apa,
- \c!sorttype=,
- \c!criterium=,
- \c!refcommand=authoryears, % todo
- \c!numbering=\v!yes,
-% \c!autohang=\v!no,
- \c!width=\v!auto,
- \c!distance=1.5\emwidth]
-
-\definebtxrendering
- [\v!standard]
-
-\setupbtxcitevariant
- [\c!interaction=\v!start,
- \c!setups=btx:cite:\btxcitevariantparameter\c!alternative,
- \c!alternative=num,
- \c!andtext={ and },
- \c!otherstext={ et al.},
- \c!pubsep={, },
- \c!lastpubsep={ and },
- \c!compress=\v!no,
- \c!inbetween={ },
- \c!left=,
- \c!right=]
-
-\definebtxcitevariant
- [author]
- [%c!sorttype=,
- \c!left={(},
- \c!middle={, },
- \c!right={)}]
-
-\definebtxcitevariant
- [authoryear]
- [\c!compress=\v!yes,
- \c!inbetween={, },
- \c!left={(},
- \c!middle={, },
- \c!right={)}]
-
-\definebtxcitevariant
- [authoryears]
- [authoryear]
-
-\definebtxcitevariant
- [authornum]
- [author]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [year]
- [\c!left={(},
- \c!right={)}]
-
-\definebtxcitevariant
- [key]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [serial]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [page]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [short]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [type]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [doi]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [url]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [page]
- [\c!left=,
- \c!right=,
- \c!inbetween=\endash]
-
-\definebtxcitevariant
- [num]
- [\c!compress=\v!yes,
- \c!inbetween={--},
- \c!left={[},
- \c!right={]}]
-
-\setupbtxlistvariant
- [\c!namesep={, },
- \c!lastnamesep={ and },
- \c!finalnamesep={ and },
- \c!firstnamesep={ },
- \c!juniorsep={ },
- \c!vonsep={ },
- \c!surnamesep={, },
- \c!surnameinitialsep={, },
- \c!surnamefirstnamesep={, },
- \c!etallimit=5,
- \c!etaldisplay=5,
- \c!etaltext={ et al.},
- \c!monthconversion=\v!number,
- \c!authorconversion=\v!normal]
-
-\definebtxlistvariant
- [author]
- [author=invertedshort] % we could also do this in the apa style itself
-
-\definebtxlistvariant
- [editor]
- [author]
-
-\definebtxlistvariant
- [artauthor]
- [author]
-
-% Do we want these in the format? Loading them delayed is somewhat messy.
-
-\loadbtxdefinitionfile[apa]
-\loadbtxdefinitionfile[cite]
-\loadbtxdefinitionfile[commands]
-\loadbtxdefinitionfile[definitions]
-
-\protect
diff --git a/tex/context/base/publ-old.mkiv b/tex/context/base/publ-old.mkiv
deleted file mode 100644
index f616428e6..000000000
--- a/tex/context/base/publ-old.mkiv
+++ /dev/null
@@ -1,22 +0,0 @@
-%D \module
-%D [ file=publ-old,
-%D version=2013.12.24,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=Old Fashioned \BIBTEX,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\unprotect
-
-% we could use runtime commands instead
-
-\unexpanded\def\setupbibtex {\usemodule[oldbibtex]\setupbibtex}
-\unexpanded\def\setuppublications {\usemodule[oldbibtex]\setuppublications}
-\unexpanded\def\setuppublicationlist{\usemodule[oldbibtex]\setuppublicationlist}
-
-\protect
diff --git a/tex/context/base/publ-oth.lua b/tex/context/base/publ-oth.lua
deleted file mode 100644
index 14da19f9c..000000000
--- a/tex/context/base/publ-oth.lua
+++ /dev/null
@@ -1,146 +0,0 @@
-if not modules then modules = { } end modules ['publ-oth'] = {
- version = 1.001,
-    comment   = "this module is part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local P, S, C, Ct, Cf, Cg, Cmt, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.Cmt, lpeg.Carg
-local lpegmatch = lpeg.match
-
-local p_endofline = lpeg.patterns.newline
-
-local loaders = publications.loaders
-local getindex = publications.getindex
-
-local function addfield(t,k,v,fields)
- k = fields[k]
- if k then
- local tk = t[k]
- if tk then
- t[k] = tk .. " and " .. v
- else
- t[k] = v
- end
- end
- return t
-end
-
-local function checkfield(_,_,t,categories,all)
- local tag = t.tag
- if tag then
- local category = t.category
- t.tag = nil
- t.category = categories[category] or category
- all[tag] = t
- end
- return true
-end
-
--- endnotes --
-
-local fields = {
- ["@"] = "tag",
- ["0"] = "category",
- ["A"] = "author",
- ["E"] = "editor",
- ["T"] = "title",
- ["D"] = "year",
- ["I"] = "publisher",
-}
-
-local categories = {
- ["Journal Article"] = "article",
-}
-
-local entry = P("%") * Cg(C(1) * (S(" \t")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
-local record = Cf(Ct("") * (entry^1), addfield)
-local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
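--- an endnote record is a sequence of lines like "%A Nielson, Gregory M"; see the
--- commented test data at the end of this file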
-
-function publications.endnotes_to_btx(data)
- local all = { }
- lpegmatch(records,data,1,fields,categories,all)
- return all
-end
-
-function loaders.endnote(dataset,filename)
-    -- we could combine the next into checkfield but let's not make the code too messy
- loaders.lua(dataset,publications.endnotes_to_btx(io.loaddata(filename) or ""))
-end
-
--- refman --
-
-local entry = Cg(C((1-lpeg.S(" \t")-p_endofline)^1) * (S(" \t-")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
-local record = Cf(Ct("") * (entry^1), addfield)
-local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
-
-local fields = {
- ["SN"] = "tag",
- ["TY"] = "category",
- ["A1"] = "author",
- ["E1"] = "editor",
- ["T1"] = "title",
- ["Y1"] = "year",
- ["PB"] = "publisher",
-}
-
-local categories = {
- ["JOUR"] = "article",
-}
-
-function publications.refman_to_btx(data)
- local all = { }
- lpegmatch(records,data,1,fields,categories,all)
- return all
-end
-
-function loaders.refman(dataset,filename)
-    -- we could combine the next into checkfield but let's not make the code too messy
- loaders.lua(dataset,publications.refman_to_btx(io.loaddata(filename) or ""))
-end
-
--- test --
-
--- local endnote = [[
--- %0 Journal Article
--- %T Scientific Visualization, Overviews, Methodologies, and Techniques
--- %A Nielson, Gregory M
--- %A Hagen, Hans
--- %A Müller, Heinrich
--- %@ 0818677776
--- %D 1994
--- %I IEEE Computer Society
---
--- %0 Journal Article
--- %T Scientific Visualization, Overviews, Methodologies, and Techniques
--- %A Nielson, Gregory M
--- %A Hagen, Hans
--- %A Müller, Heinrich
--- %@ 0818677775
--- %D 1994
--- %I IEEE Computer Society
--- ]]
---
--- local refman = [[
--- TY - JOUR
--- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
--- A1 - Nielson, Gregory M
--- A1 - Hagen, Hans
--- A1 - Müller, Heinrich
--- SN - 0818677776
--- Y1 - 1994
--- PB - IEEE Computer Society
---
--- TY - JOUR
--- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
--- A1 - Nielson, Gregory M
--- A1 - Hagen, Hans
--- A1 - Müller, Heinrich
--- SN - 0818677775
--- Y1 - 1994
--- PB - IEEE Computer Society
--- ]]
---
--- inspect(publications.endnotes_to_btx(endnote))
--- inspect(publications.refman_to_btx(refman))
diff --git a/tex/context/base/publ-tra.lua b/tex/context/base/publ-tra.lua
deleted file mode 100644
index 98c81d800..000000000
--- a/tex/context/base/publ-tra.lua
+++ /dev/null
@@ -1,296 +0,0 @@
-if not modules then modules = { } end modules ['publ-tra'] = {
- version = 1.001,
-    comment   = "this module is part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local sortedhash = table.sortedhash
-
-local tracers = { }
-publications.tracers = tracers
-local datasets = publications.datasets
-
-local context = context
-local NC, NR = context.NC, context.NR
-local bold = context.bold
-local darkgreen, darkred, darkblue = context.darkgreen, context.darkred, context.darkblue
-
-local fields = table.sorted {
- "abstract",
- "address",
- "annotate",
- "author",
- "booktitle",
- "chapter",
- "comment",
- "country",
- "doi",
- "edition",
- "editor",
- "eprint",
- "howpublished",
- "institution",
- "isbn",
- "issn",
- "journal",
- "key",
- "keyword",
- "keywords",
- "language",
- "lastchecked",
- "month",
- "names",
- "note",
- "notes",
- "number",
- "organization",
- "pages",
- "publisher",
- "school",
- "series",
- "size",
- "title",
- "type",
- "url",
- "volume",
- "year",
- "nationality",
- "assignee",
- "bibnumber",
- "day",
- "dayfiled",
- "monthfiled",
- "yearfiled",
- "revision",
-}
-
-local citevariants = table.sorted {
- "author",
- "authoryear",
- "authoryears",
- "authornum",
- "year",
- "short",
- "serial",
- "key",
- "doi",
- "url",
- "type",
- "page",
- "none",
- "num",
-}
-
-local listvariants = table.sorted {
- "author",
- "editor",
- "artauthor",
-}
-
--- local categories = table.sorted {
--- "article",
--- "book",
--- "booklet",
--- "conference",
--- "inbook",
--- "incollection",
--- "inproceedings",
--- "manual",
--- "mastersthesis",
--- "misc",
--- "phdthesis",
--- "proceedings",
--- "techreport",
--- "unpublished",
--- }
-
-local categories = {
- article = {
- required = { "author", "title", "journal", "year" },
- optional = { "volume", "number", "pages", "month", "note" },
- },
- book = {
- required = { { "author", "editor" }, "title", "publisher", "year" },
- optional = { { "volume", "number" }, "series", "address", "edition", "month","note" },
- },
- booklet = {
- required = { "title" },
- optional = { "author", "howpublished", "address", "month", "year", "note" },
- },
- inbook = {
- required = { { "author", "editor" }, "title", { "chapter", "pages" }, "publisher","year" },
- optional = { { "volume", "number" }, "series", "type", "address", "edition", "month", "note" },
- },
- incollection = {
- required = { "author", "title", "booktitle", "publisher", "year" },
- optional = { "editor", { "volume", "number" }, "series", "type", "chapter", "pages", "address", "edition", "month", "note" },
- },
- inproceedings = {
- required = { "author", "title", "booktitle", "year" },
- optional = { "editor", { "volume", "number" }, "series", "pages", "address", "month","organization", "publisher", "note" },
- },
- manual = {
- required = { "title" },
- optional = { "author", "organization", "address", "edition", "month", "year", "note" },
- },
- mastersthesis = {
- required = { "author", "title", "school", "year" },
- optional = { "type", "address", "month", "note" },
- },
- misc = {
- required = { "author", "title", "howpublished", "month", "year", "note" },
- optional = { "author", "title", "howpublished", "month", "year", "note" },
- },
- phdthesis = {
- required = { "author", "title", "school", "year" },
- optional = { "type", "address", "month", "note" },
- },
- proceedings = {
- required = { "title", "year" },
- optional = { "editor", { "volume", "number" }, "series", "address", "month", "organization", "publisher", "note" },
- },
- techreport = {
- required = { "author", "title", "institution", "year" },
- optional = { "type", "number", "address", "month", "note" },
- },
- unpublished = {
- required = { "author", "title", "note" },
- optional = { "month", "year" },
- },
-}
-
-
-publications.tracers.fields = fields
-publications.tracers.categories = categories
-publications.tracers.citevariants = citevariants
-publications.tracers.listvariants = listvariants
--- -- --
-
-function tracers.showdatasetfields(dataset)
- local luadata = datasets[dataset].luadata
- if next(luadata) then
- context.starttabulate { "|lT|lT|pT|" }
- NC() bold("tag")
- NC() bold("category")
- NC() bold("fields")
- NC() NR() context.FL() -- HL()
- for k, v in sortedhash(luadata) do
- NC() context(k)
- NC() context(v.category)
- NC()
- for k, v in sortedhash(v) do
- if k ~= "details" and k ~= "tag" and k ~= "category" then
- context("%s ",k)
- end
- end
- NC() NR()
- end
- context.stoptabulate()
- end
-end
-
-function tracers.showdatasetcompleteness(dataset)
-
- dataset = datasets[dataset]
-
- local preamble = { "|lBTw(10em)|p|" }
-
- local function required(key,value,indirect)
- NC() darkgreen(key)
- NC() if indirect then
- darkblue(value)
- elseif value then
- context(value)
- else
- darkred("\\tttf [missing]")
- end
- NC() NR()
- end
-
- local function optional(key,value,indirect)
- NC() context(key)
- NC() if indirect then
- darkblue(value)
- elseif value then
- context(value)
- end
- NC() NR()
- end
-
- local function identified(tag,crossref)
- NC() context("tag")
- NC() if crossref then
- context("\\tttf %s\\hfill\\darkblue => %s",tag,crossref)
- else
- context("\\tttf %s",tag)
- end
- NC() NR()
- end
-
- local luadata = datasets[dataset].luadata
-
- if next(luadata) then
- for tag, entry in table.sortedhash(luadata) do
- local category = entry.category
- local fields = categories[category]
- if fields then
- context.starttabulate(preamble)
- identified(tag,entry.crossref)
- context.HL()
- local requiredfields = fields.required
- local optionalfields = fields.optional
- for i=1,#requiredfields do
- local r = requiredfields[i]
- if type(r) == "table" then
- local okay = true
- for i=1,#r do
- local ri = r[i]
- if rawget(entry,ri) then
- required(ri,entry[ri])
- okay = true
- elseif entry[ri] then
- required(ri,entry[ri],true)
- okay = true
- end
- end
- if not okay then
- required(table.concat(r,"\\letterbar "))
- end
- elseif rawget(entry,r) then
- required(r,entry[r])
- elseif entry[r] then
- required(r,entry[r],true)
- else
- required(r)
- end
- end
- for i=1,#optionalfields do
- local o = optionalfields[i]
- if type(o) == "table" then
- for i=1,#o do
- local oi = o[i]
- if rawget(entry,oi) then
- optional(oi,entry[oi])
- elseif entry[oi] then
- optional(oi,entry[oi],true)
- end
- end
- elseif rawget(entry,o) then
- optional(o,entry[o])
- elseif entry[o] then
- optional(o,entry[o],true)
- end
- end
- context.stoptabulate()
- else
- -- error
- end
- end
- end
-
-end
-
-commands.showbtxdatasetfields = tracers.showdatasetfields
-commands.showbtxdatasetcompleteness = tracers.showdatasetcompleteness
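
Note (editorial, not part of this patch): in the deleted tracer above, an inner table inside a "required" list means "any one of these", for example a book needs either an author or an editor. A minimal check along those lines; the function name and return convention are made up for the illustration.

-- illustrative sketch only, not part of the patch
local function missingfields(entry,specification)
    local required, missing = specification.required, { }
    for i=1,#required do
        local r = required[i]
        if type(r) == "table" then
            local okay = false
            for j=1,#r do
                if entry[r[j]] then
                    okay = true
                    break
                end
            end
            if not okay then
                missing[#missing+1] = table.concat(r,"|")
            end
        elseif not entry[r] then
            missing[#missing+1] = r
        end
    end
    return missing
end

-- missingfields({ title = "x", year = "2000" }, categories.book)
-- would report "author|editor" and "publisher"
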
diff --git a/tex/context/base/publ-tra.mkiv b/tex/context/base/publ-tra.mkiv
deleted file mode 100644
index 49fb6d962..000000000
--- a/tex/context/base/publ-tra.mkiv
+++ /dev/null
@@ -1,35 +0,0 @@
-%D \module
-%D [ file=publ-tra,
-%D version=2013.12.24,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=Tracing,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-% todo: make this a runtime module
-% todo: use the module interface
-
-\writestatus{loading}{ConTeXt Publication Support / Tracing}
-
-\registerctxluafile{publ-tra}{1.001}
-
-\unprotect
-
-\unexpanded\def\showbtxdatasetfields
- {\dosingleempty\publ_dataset_show_fields}
-
-\def\publ_dataset_show_fields[#1]%
- {\ctxcommand{showbtxdatasetfields("\iffirstargument#1\else\currentbtxdataset\fi")}}
-
-\unexpanded\def\showbtxdatasetcompleteness
- {\dosingleempty\publ_dataset_show_completeness}
-
-\def\publ_dataset_show_completeness[#1]%
- {\ctxcommand{showbtxdatasetcompleteness("\iffirstargument#1\else\currentbtxdataset\fi")}}
-
-\protect \endinput
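
Note (editorial, not part of this patch): the two macros above only forward their optional argument to Lua; \ctxcommand{showbtxdatasetfields("...")} ends up calling the function of that name registered in the commands table by publ-tra.lua. Reduced to the bare mechanism, with a placeholder body instead of the real tracer.

-- illustrative sketch only, not part of the patch; placeholder body
commands = commands or { }

function commands.showbtxdatasetfields(dataset)
    -- the deleted module points this name at tracers.showdatasetfields
    print("show fields of dataset " .. tostring(dataset))
end

-- \ctxcommand{showbtxdatasetfields("default")} then runs, on the Lua side:
commands.showbtxdatasetfields("default")
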
diff --git a/tex/context/base/publ-usr.lua b/tex/context/base/publ-usr.lua
deleted file mode 100644
index 6bb93ebee..000000000
--- a/tex/context/base/publ-usr.lua
+++ /dev/null
@@ -1,91 +0,0 @@
-if not modules then modules = { } end modules ['publ-usr'] = {
- version = 1.001,
- comment = "this module part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- local chardata = characters.data
-
--- local str = [[
--- \startpublication[k=Berdnikov:TB21-2-129,t=article,a={{Berdnikov},{}},y=2000,n=2257,s=BHHJ00]
--- \artauthor[]{Alexander}[A.]{}{Berdnikov}
--- \artauthor[]{Hans}[H.]{}{Hagen}
--- \artauthor[]{Taco}[T.]{}{Hoekwater}
--- \artauthor[]{Bogus{\l}aw}[B.]{}{Jackowski}
--- \pubyear{2000}
--- \arttitle{{Even more MetaFun with \MP: A request for permission}}
--- \journal{TUGboat}
--- \issn{0896-3207}
--- \volume{21}
--- \issue{2}
--- \pages{129--130}
--- \month{6}
--- \stoppublication
--- ]]
-
-local remapped = {
- artauthor = "author",
- arttitle = "title",
-}
-
-local P, Cs, R, Cc, Carg = lpeg.P, lpeg.Cs, lpeg.R, lpeg.Cc, lpeg.Carg
-
-local function register(target,key,a,b,c,d,e)
- key = remapped[key] or key
- if b and d and e then
- local s = nil
- if b ~= "" and b then
- s = s and s .. " " .. b or b
- end
- if d ~= "" and d then
- s = s and s .. " " .. d or d
- end
- if e ~= "" and e then
- s = s and s .. " " .. e or e
- end
- if a ~= "" and a then
- s = s and s .. " " .. a or a
- end
- local value = target[key]
- if s then
- if value then
- target[key] = value .. " and " .. s
- else
- target[key] = s
- end
- else
- if not value then
- target[key] = s
- end
- end
- else
- target[key] = b
- end
-end
-
-local leftbrace = P("{")
-local rightbrace = P("}")
-local leftbracket = P("[")
-local rightbracket = P("]")
-
-local key = P("\\") * Cs(R("az","AZ")^1) * lpeg.patterns.space^0
-local mandate = leftbrace * Cs(lpeg.patterns.balanced) * rightbrace + Cc(false)
-local optional = leftbracket * Cs((1-rightbracket)^0) * rightbracket + Cc(false)
-local value = optional^-1 * mandate^-1 * optional^-1 * mandate^-2
-
-local pattern = ((Carg(1) * key * value) / register + P(1))^0
-
-function publications.addtexentry(dataset,settings,content)
- settings = utilities.parsers.settings_to_hash(settings)
- local data = {
- tag = settings.tag or settings.k or "no tag",
- category = settings.category or settings.t or "article",
- }
- lpeg.match(pattern,content,1,data) -- can set tag too
- dataset.userdata[data.tag] = data
- dataset.luadata[data.tag] = data
- publications.markasupdated(dataset)
- return data
-end
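
Note (editorial, not part of this patch): the register helper above turns a call like \artauthor[]{Alexander}[A.]{}{Berdnikov} into the string "Alexander Berdnikov" and accumulates repeated authors with "and", while single-argument commands such as \pubyear{2000} simply set the (possibly remapped) field. The name handling, reduced to a standalone sketch; addname is a made-up name.

-- illustrative sketch only, not part of the patch
local function addname(target,key,firstnames,vons,surname)
    local parts = { }
    for _, part in ipairs { firstnames, vons, surname } do
        if part and part ~= "" then
            parts[#parts+1] = part
        end
    end
    local name  = table.concat(parts," ")
    local value = target[key]
    target[key] = value and (value .. " and " .. name) or name
end

local entry = { tag = "Berdnikov:TB21-2-129", category = "article" }
addname(entry,"author","Alexander","","Berdnikov")
addname(entry,"author","Hans","","Hagen")
-- entry.author is now "Alexander Berdnikov and Hans Hagen"
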
diff --git a/tex/context/base/publ-usr.mkiv b/tex/context/base/publ-usr.mkiv
deleted file mode 100644
index cb078f424..000000000
--- a/tex/context/base/publ-usr.mkiv
+++ /dev/null
@@ -1,2 +0,0 @@
-% todo
-
diff --git a/tex/context/base/publ-xml.mkiv b/tex/context/base/publ-xml.mkiv
deleted file mode 100644
index 007f9bb27..000000000
--- a/tex/context/base/publ-xml.mkiv
+++ /dev/null
@@ -1,114 +0,0 @@
-%D \module
-%D [ file=publ-xml,
-%D version=2013.12.24,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=XML,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Publication Support / XML}
-
-\unprotect
-
-\unexpanded\def\convertbtxdatasettoxml
- {\dosingleempty\publ_convert_to_xml}
-
-\def\publ_convert_to_xml[#1]%
- {\ctxcommand{convertbtxdatasettoxml("\iffirstargument#1\else\v!standard\fi",true)}} % or current when not empty
-
-% \startxmlsetups btx:initialize
-% \xmlregistereddocumentsetups{#1}{}
-% \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
-% \xmlmain{#1}
-% \stopxmlsetups
-
-\startxmlsetups btx:initialize
- \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
- \xmlmain{#1}
-\stopxmlsetups
-
-% \startxmlsetups btx:entry
-% \xmlflush{#1}
-% \stopxmlsetups
-
-\startxmlsetups btx:field
- \xmlflushcontext{#1}
-\stopxmlsetups
-
-\protect \endinput
-
-% \startxmlsetups bibtex:entry:getkeys
-% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='author']/text()}}
-% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='year' ]/text()}}
-% \xmladdsortentry{bibtex}{#1}{\xmlatt{#1}{tag}}
-% \stopxmlsetups
-
-% \startbuffer
-% \startxmlsetups xml:bibtex:sorter
-% \xmlresetsorter{bibtex}
-% % \xmlfilter{#1}{entry/command(bibtex:entry:getkeys)}
-% \xmlfilter{#1}{
-% bibtex
-% /entry[@category='article']
-% /field[@name='author' and find(text(),'Knuth')]
-% /../command(bibtex:entry:getkeys)}
-% \xmlsortentries{bibtex}
-% \xmlflushsorter{bibtex}{bibtex:entry:flush}
-% \stopxmlsetups
-% \stopbuffer
-
-% \bgroup
-% \setups[bibtex-commands]
-% \getbuffer
-% \egroup
-
-% \startxmlsetups bibtex:entry:flush
-% \xmlfilter{#1}{/field[@name='author']/context()} / %
-% \xmlfilter{#1}{/field[@name='year' ]/context()} / %
-% \xmlatt{#1}{tag}\par
-% \stopxmlsetups
-
-% \startpacked
-% \getbuffer
-% \stoppacked
-
-
-% \unexpanded\def\btx_xml_list_handle_entry
-% {\begingroup
-% \ignorespaces
-% \xmlfilter{btx:\currentbtxrendering}{/bibtex/entry[@tag='\currentbtxtag']/command(btx:format)}%
-% \removeunwantedspaces
-% \endgroup}
-
-% \startxmlsetups btx:format
-% \btxlistparameter\c!before\relax % prevents lookahead
-% \edef\currentbibxmlnode {#1}
-% \edef\currentbibxmltag {\xmlatt{#1}{tag}}
-% \edef\currentbtxcategory{\xmlatt{#1}{category}}
-% \ignorespaces
-% \xmlcommand{#1}{.}{btx:\currentbtxformat:\currentbibxmlcategory}
-% \removeunwantedspaces
-% \btxlistparameter\c!after\relax % prevents lookahead
-% \stopxmlsetups
-
-% \startxmlsetups btx:list
-% \xmlfilter{#1}{/bibtex/entry/command(bibtex:format)}
-% \stopxmlsetups
-
-% \startxmlsetups btx:btx
-% \xmlfilter{#1}{/entry/command(btx:format)}
-% \stopxmlsetups
-
-% \unexpanded\def\btx_xml_doifelse#1{\xmldoifelse\currentbibxmlnode{/field[@name='#1']}}
-% \unexpanded\def\btx_xml_doif #1{\xmldoif \currentbibxmlnode{/field[@name='#1']}}
-% \unexpanded\def\btx_xml_doifnot #1{\xmldoifnot \currentbibxmlnode{/field[@name='#1']}}
-% \def\btx_xml_flush #1{\xmlcontext \currentbibxmlnode{/field[@name='#1']}}
-% \def\btx_xml_setup {\xmlsetup \currentbibxmlnode} % {#1}
-% \unexpanded\def\btx_xml_todo #1{[#1]}
-
-% \xmlfilter{#1}{/field[@name='\currentbtxfield']/btxconcat('\currentbtxfield')}
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index e9ea6393b..7d8064b29 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -34,7 +34,6 @@
%logo [FGA] {fga}
%logo [FGBBS] {fgbbs}
\logo [ACROBAT] {Acro\-bat}
-\logo [APA] {apa}
\logo [AFM] {afm}
\logo [API] {api}
\logo [ALEPH] {Aleph} % {\mathematics{\aleph}}
@@ -48,7 +47,6 @@
\logo [ASCIITEX] {ascii\TeX}
\logo [BACHOTEX] {Bacho\TeX}
\logo [BIBTEX] {bib\TeX}
-\logo [MLBIBTEX] {MLbib\TeX}
\logo [BLUESKY] {BlueSky}
\logo [BMP] {bmp}
\logo [BSD] {bsd}
diff --git a/tex/context/base/s-inf-03.mkiv b/tex/context/base/s-inf-03.mkiv
index fc654fef5..822173d00 100644
--- a/tex/context/base/s-inf-03.mkiv
+++ b/tex/context/base/s-inf-03.mkiv
@@ -16,7 +16,7 @@
\definefont
[TitlePageFont]
- [MonoBold at 15pt]
+ [MonoBold at 16pt]
\setupbodyfont
[tt,8pt]
@@ -25,7 +25,7 @@
\definefont
[TitlePageFont]
- [MonoBold at 18pt]
+ [MonoBold at 20pt]
\setupbodyfont
[tt]
@@ -352,7 +352,6 @@ for k, v in table.sortedpairs(_G) do
end
end
-
\stopluacode
\stoptext
diff --git a/tex/context/base/s-languages-hyphenation.lua b/tex/context/base/s-languages-hyphenation.lua
index c16c5bd2d..660392f80 100644
--- a/tex/context/base/s-languages-hyphenation.lua
+++ b/tex/context/base/s-languages-hyphenation.lua
@@ -24,7 +24,7 @@ local newglue = nodepool.glue
local insert_node_after = node.insert_after
local traverse_by_id = node.traverse_id
local hyphenate = lang.hyphenate
-local find_tail = node.tail
+local find_tail = node.slide
local remove_node = nodes.remove
local tracers = nodes.tracers
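
Note (editorial, not part of this patch): the one-line change above swaps node.tail for node.slide as find_tail. Both return the last node of a list, but node.slide also sets the prev pointers while walking, which matters when the list is appended to afterwards.

-- illustrative sketch only, not part of the patch (runs inside LuaTeX)
local head = node.new("glue")     -- any small list will do
head.next  = node.new("glyph")    -- prev link of the new node not set yet
local tail = node.slide(head)     -- returns the glyph and repairs the prev link
-- node.tail(head) would also return the glyph but leave the links as they are
node.flush_list(head)
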
diff --git a/tex/context/base/s-math-coverage.lua b/tex/context/base/s-math-coverage.lua
index 5f1c7cc5a..a74e24450 100644
--- a/tex/context/base/s-math-coverage.lua
+++ b/tex/context/base/s-math-coverage.lua
@@ -123,7 +123,7 @@ function moduledata.math.coverage.showalphabets()
end
function moduledata.math.coverage.showcharacters()
- context.startmixedcolumns()
+ context.startcolumns()
context.setupalign { "nothyphenated" }
context.starttabulate { "|T|i2|Tpl|" }
for u, d in table.sortedpairs(chardata) do
@@ -150,7 +150,7 @@ function moduledata.math.coverage.showcharacters()
end
end
context.stoptabulate()
- context.stopmixedcolumns()
+ context.stopcolumns()
end
-- This is a somewhat tricky table as we need to bypass the math machinery.
diff --git a/tex/context/base/scrp-cjk.lua b/tex/context/base/scrp-cjk.lua
index 9050da6be..681fc4c43 100644
--- a/tex/context/base/scrp-cjk.lua
+++ b/tex/context/base/scrp-cjk.lua
@@ -14,29 +14,15 @@ if not modules then modules = { } end modules ['scrp-cjk'] = {
-- sense either because otherwise a wanted space at the end of a
-- line would have to be a hard coded ones.
-local utfchar = utf.getchar
-
-local nuts = nodes.nuts
-local tonut = nodes.tonut
-local tonode = nodes.tonode
-
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local copy_node = nuts.copy
-local remove_node = nuts.remove
-local traverse_id = nuts.traverse_id
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getsubtype = nuts.getsubtype
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-
-local nodepool = nuts.pool
+local utfchar = utf.char
+
+local insert_node_after = nodes.insert_after
+local insert_node_before = nodes.insert_before
+local remove_node = nodes.remove
+local copy_node = nodes.copy
+local traverse_id = nodes.traverse_id
+
+local nodepool = nodes.pool
local new_glue = nodepool.glue
local new_kern = nodepool.kern
local new_penalty = nodepool.penalty
@@ -102,20 +88,20 @@ end
-- at font definition time and/or just assume a correct font
local function trace_detail(current,what)
- local prev = getprev(current)
- local c_id = getid(current)
- local p_id = prev and getid(prev)
+ local prev = current.prev
+ local c_id = current.id
+ local p_id = prev and prev.id
if c_id == glyph_code then
- local c_ch = getchar(current)
+ local c_ch = current.char
if p_id == glyph_code then
- local p_ch = p_id and getchar(prev)
+ local p_ch = p_id and prev.char
report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,c_ch,hash[c_ch])
else
report_details("[%s] [%C %a]",what,c_ch,hash[c_ch])
end
else
if p_id == glyph_code then
- local p_ch = p_id and getchar(prev)
+ local p_ch = p_id and prev.char
report_details("[%C %a] [%s]",p_ch,hash[p_ch],what)
else
report_details("[%s]",what)
@@ -124,8 +110,8 @@ local function trace_detail(current,what)
end
local function trace_detail_between(p,n,what)
- local p_ch = getchar(p)
- local n_ch = getchar(n)
+ local p_ch = p.char
+ local n_ch = n.char
report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,n_ch,hash[n_ch])
end
@@ -441,29 +427,29 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = getnext(first), getid(first)
+ local upcoming, id = first.next, first.id
if id == glyph_code then
- local a = getattr(first,a_scriptstatus)
+ local a = first[a_scriptstatus]
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = getfont(first)
+ local font = first.font
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
+ set_parameters(font,numbertodataset[first[a_scriptinjection]])
end
action(head,first)
end
end
previous = current
else -- glue
- local p, n = getprev(first), upcoming
+ local p, n = first.prev, upcoming
if p and n then
- local pid, nid = getid(p), getid(n)
+ local pid, nid = p.id, n.id
if pid == glyph_code and nid == glyph_code then
- local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
+ local pa, na = p[a_scriptstatus], n[a_scriptstatus]
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -509,24 +495,23 @@ scripts.installmethod {
}
function scripts.decomposehangul(head)
- local head = tonut(head)
local done = false
for current in traverse_id(glyph_code,head) do
- local lead_consonant, medial_vowel, tail_consonant = decomposed(getchar(current))
+ local lead_consonant, medial_vowel, tail_consonant = decomposed(current.char)
if lead_consonant then
- setfield(current,"char",lead_consonant)
+ current.char = lead_consonant
local m = copy_node(current)
- setfield(m,"char",medial_vowel)
+ m.char = medial_vowel
head, current = insert_node_after(head,current,m)
if tail_consonant then
local t = copy_node(current)
- setfield(t,"char",tail_consonant)
+ t.char = tail_consonant
head, current = insert_node_after(head,current,t)
end
done = true
end
end
- return tonode(head), done
+ return head, done
end
-- nodes.tasks.prependaction("processors","normalizers","scripts.decomposehangul")
@@ -697,29 +682,29 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = getnext(first), getid(first)
+ local upcoming, id = first.next, first.id
if id == glyph_code then
- local a = getattr(first,a_scriptstatus)
+ local a = first[a_scriptstatus]
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = getfont(first)
+ local font = first.font
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
+ set_parameters(font,numbertodataset[first[a_scriptinjection]])
end
action(head,first)
end
end
previous = current
else -- glue
- local p, n = getprev(first), upcoming
+ local p, n = first.prev, upcoming
if p and n then
- local pid, nid = getid(p), getid(n)
+ local pid, nid = p.id, n.id
if pid == glyph_code and nid == glyph_code then
- local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
+ local pa, na = p[a_scriptstatus], n[a_scriptstatus]
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -919,32 +904,34 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = getnext(first), getid(first)
+ local upcoming, id = first.next, first.id
if id == glyph_code then
- local a = getattr(first,a_scriptstatus)
+ local a = first[a_scriptstatus]
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = getfont(first)
+ local font = first.font
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
+ set_parameters(font,numbertodataset[first[a_scriptinjection]])
end
action(head,first)
end
end
previous = current
- -- elseif id == math_code then
- -- upcoming = getnext(end_of_math(current))
- -- previous = "start"
+
+-- elseif id == math_code then
+-- upcoming = end_of_math(current).next
+-- previous = "start"
+
else -- glue
- local p, n = getprev(first), upcoming -- we should remember prev
+ local p, n = first.prev, upcoming -- we should remember prev
if p and n then
- local pid, nid = getid(p), getid(n)
+ local pid, nid = p.id, n.id
if pid == glyph_code and nid == glyph_code then
- local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
+ local pa, na = p[a_scriptstatus], n[a_scriptstatus]
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -953,17 +940,17 @@ local function process(head,first,last)
or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean
previous = "start"
else -- if head ~= first then
- if id == glue_code and getsubtype(first) == userskip_code then -- also scriptstatus check?
- -- for the moment no distinction possible between space and userskip
- local w = getfield(getfield(first,"spec"),"width")
- local s = spacedata[getfont(p)]
- if w == s then -- could be option
- if trace_details then
- trace_detail_between(p,n,"space removed")
- end
- remove_node(head,first,true)
- end
- end
+if id == glue_code and first.subtype == userskip_code then -- also scriptstatus check?
+ -- for the moment no distinction possible between space and userskip
+ local w = first.spec.width
+ local s = spacedata[p.font]
+ if w == s then -- could be option
+ if trace_details then
+ trace_detail_between(p,n,"space removed")
+ end
+ remove_node(head,first,true)
+ end
+end
previous = pcjk
-- else
-- previous = pcjk
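
Note (editorial, not part of this patch): all of the hunks above follow one mechanical pattern; accessor calls on the indirect "nuts" objects are replaced by direct field and attribute access on node userdata, and the tonut/tonode conversions at the edges disappear. For a glyph node n the correspondence is:

-- illustrative sketch only, not part of the patch: the two access styles side by side
local function sample(n,a_scriptstatus)
    local nxt = n.next             -- was getnext(n)
    local chr = n.char             -- was getchar(n)
    local fnt = n.font             -- was getfont(n)
    local sts = n[a_scriptstatus]  -- was getattr(n,a_scriptstatus)
    n.char = chr                   -- was setfield(n,"char",chr)
    return nxt, chr, fnt, sts
end
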
diff --git a/tex/context/base/scrp-eth.lua b/tex/context/base/scrp-eth.lua
index 8ecbce522..597afa1b5 100644
--- a/tex/context/base/scrp-eth.lua
+++ b/tex/context/base/scrp-eth.lua
@@ -9,17 +9,9 @@ if not modules then modules = { } end modules ['scrp-eth'] = {
-- at some point I will review the script code but for the moment we
-- do it this way; so space settings like with cjk yet
-local nuts = nodes.nuts
+local insert_node_before = node.insert_before
-local getnext = nuts.getnext
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getattr = nuts.getattr
-
-local insert_node_before = nuts.insert_before
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_glue = nodepool.glue
local new_penalty = nodepool.penalty
@@ -45,13 +37,13 @@ local inter_character_stretch_factor = 1
local inter_character_shrink_factor = 1
local function space_glue(current)
- local data = numbertodataset[getattr(current,a_scriptinjection)]
+ local data = numbertodataset[current[a_scriptinjection]]
if data then
inter_character_space_factor = data.inter_character_space_factor or 1
inter_character_stretch_factor = data.inter_character_stretch_factor or 1
inter_character_shrink_factor = data.inter_character_shrink_factor or 1
end
- local font = getfont(current)
+ local font = current.font
if lastfont ~= font then
local pf = parameters[font]
space = pf.space
@@ -112,9 +104,9 @@ local function process(head,first,last)
local injector = false
local current = first
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local scriptstatus = getattr(current,a_scriptstatus)
+ local scriptstatus = current[a_scriptstatus]
local category = numbertocategory[scriptstatus]
if injector then
local action = injector[category]
@@ -129,7 +121,7 @@ local function process(head,first,last)
if current == last then
break
else
- current = getnext(current)
+ current = current.next
end
end
end
diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua
index a6bfe4cf9..56422e622 100644
--- a/tex/context/base/scrp-ini.lua
+++ b/tex/context/base/scrp-ini.lua
@@ -14,7 +14,7 @@ local attributes, nodes, node = attributes, nodes, node
local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
local trace_splitting = false trackers.register("scripts.splitting", function(v) trace_splitting = v end)
-local trace_splitdetail = false trackers.register("scripts.splitting.detail", function(v) trace_splitdetail = v end)
+local trace_splitdetail = false trackers.register("scripts.splitring.detail", function(v) trace_splitdetail = v end)
local report_preprocessing = logs.reporter("scripts","preprocessing")
local report_splitting = logs.reporter("scripts","splitting")
@@ -22,6 +22,9 @@ local report_splitting = logs.reporter("scripts","splitting")
local utfbyte, utfsplit = utf.byte, utf.split
local gmatch = string.gmatch
+local first_glyph = node.first_glyph or node.first_character
+local traverse_id = node.traverse_id
+
local texsetattribute = tex.setattribute
local nodecodes = nodes.nodecodes
@@ -45,23 +48,9 @@ local setmetatableindex = table.setmetatableindex
local enableaction = nodes.tasks.enableaction
local disableaction = nodes.tasks.disableaction
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getchar = nuts.getchar
-local getfont = nuts.getfont
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-
-local insert_node_after = nuts.insert_after
-local first_glyph = nuts.first_glyph
-local traverse_id = nuts.traverse_id
-
-local nodepool = nuts.pool
+local insert_node_after = node.insert_after
+local nodepool = nodes.pool
local new_glue = nodepool.glue
local new_rule = nodepool.rule
local new_penalty = nodepool.penalty
@@ -411,7 +400,7 @@ scripts.numbertocategory = numbertocategory
local function colorize(start,stop)
for n in traverse_id(glyph_code,start) do
- local kind = numbertocategory[getattr(n,a_scriptstatus)]
+ local kind = numbertocategory[n[a_scriptstatus]]
if kind then
local ac = scriptcolors[kind]
if ac then
@@ -443,17 +432,16 @@ end
-- we can have a fonts.hashes.originals
function scripts.injectors.handler(head)
- head = tonut(head)
local start = first_glyph(head) -- we already have glyphs here (subtype 1)
if not start then
- return tonode(head), false
+ return head, false
else
local last_a, normal_process, lastfont, originals = nil, nil, nil, nil
local done, first, last, ok = false, nil, nil, false
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- local a = getattr(start,a_scriptinjection)
+ local a = start[a_scriptinjection]
if a then
if a ~= last_a then
if first then
@@ -475,7 +463,7 @@ function scripts.injectors.handler(head)
normal_process = handler.injector
end
if normal_process then
- local f = getfont(start)
+ local f = start.font
if f ~= lastfont then
originals = fontdata[f].resources
if resources then
@@ -485,13 +473,13 @@ function scripts.injectors.handler(head)
end
lastfont = f
end
- local c = getchar(start)
+ local c = start.char
if originals then
c = originals[c] or c
end
local h = hash[c]
if h then
- setattr(start,a_scriptstatus,categorytonumber[h])
+ start[a_scriptstatus] = categorytonumber[h]
if not first then
first, last = start, start
else
@@ -552,7 +540,7 @@ function scripts.injectors.handler(head)
first, last = nil, nil
end
end
- start = getnext(start)
+ start = start.next
end
if ok then
if trace_analyzing then
@@ -565,7 +553,7 @@ function scripts.injectors.handler(head)
end
done = true
end
- return tonode(head), done
+ return head, done
end
end
@@ -695,11 +683,11 @@ end)
local categories = characters.categories or { }
local function hit(root,head)
- local current = getnext(head)
+ local current = head.next
local lastrun = false
local lastfinal = false
- while current and getid(current) == glyph_code do
- local char = getchar(current)
+ while current and current.id == glyph_code do
+ local char = current.char
local newroot = root[char]
if newroot then
local final = newroot.final
@@ -713,7 +701,7 @@ local function hit(root,head)
else
return lastrun, lastfinal
end
- current = getnext(current)
+ current = current.next
end
if lastrun then
return lastrun, lastfinal
@@ -722,13 +710,12 @@ end
local tree, attr, proc
-function splitters.handler(head) -- todo: also first_glyph test
- head = tonut(head)
+function splitters.handler(head)
local current = head
local done = false
while current do
- if getid(current) == glyph_code then
- local a = getattr(current,a_scriptsplitting)
+ if current.id == glyph_code then
+ local a = current[a_scriptsplitting]
if a then
if a ~= attr then
local handler = numbertohandler[a]
@@ -737,14 +724,14 @@ function splitters.handler(head) -- todo: also first_glyph test
proc = handler.splitter
end
if proc then
- local root = tree[getchar(current)]
+ local root = tree[current.char]
if root then
-- we don't check for attributes in the hitter (yet)
local last, final = hit(root,current)
if last then
- local next = getnext(last)
- if next and getid(next) == glyph_code then
- local nextchar = getchar(next)
+ local next = last.next
+ if next and next.id == glyph_code then
+ local nextchar = next.char
if tree[nextchar] then
if trace_splitdetail then
if type(final) == "string" then
@@ -773,9 +760,9 @@ function splitters.handler(head) -- todo: also first_glyph test
end
end
end
- current = getnext(current)
+ current = current.next
end
- return tonode(head), done
+ return head, done
end
local function marker(head,current,font,color) -- could become: nodes.tracers.marker
@@ -805,8 +792,8 @@ end
local last_a, last_f, last_s, last_q
function splitters.insertafter(handler,head,first,last,detail)
- local a = getattr(first,a_scriptsplitting)
- local f = getfont(first)
+ local a = first[a_scriptsplitting]
+ local f = first.font
if a ~= last_a or f ~= last_f then
last_s = emwidths[f] * numbertodataset[a].inter_word_stretch_factor
last_a = a
@@ -883,15 +870,15 @@ setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end)
-- playing nice
function autofontfeature.handler(head)
- for n in traverse_id(glyph_code,tonut(head)) do
- -- if getattr(n,a_scriptinjection) then
+ for n in traverse_id(glyph_code,head) do
+ -- if n[a_scriptinjection] then
-- -- already tagged by script feature, maybe some day adapt
-- else
- local char = getchar(n)
+ local char = n.char
local script = otfscripts[char]
if script then
- local dynamic = getattr(n,0) or 0
- local font = getfont(n)
+ local dynamic = n[0] or 0
+ local font = n.font
if dynamic > 0 then
local slot = cache_yes[font]
local attr = slot[script]
@@ -917,7 +904,7 @@ function autofontfeature.handler(head)
end
end
if attr ~= 0 then
- setattr(n,0,attr)
+ n[0] = attr
-- maybe set scriptinjection when associated
end
end
diff --git a/tex/context/base/sort-ini.lua b/tex/context/base/sort-ini.lua
index d279f1253..479d1c489 100644
--- a/tex/context/base/sort-ini.lua
+++ b/tex/context/base/sort-ini.lua
@@ -82,7 +82,7 @@ local v_first = variables.first
local v_last = variables.last
local validmethods = table.tohash {
- "ch", -- raw character (for tracing)
+ -- "ch", -- raw character
"mm", -- minus mapping
"zm", -- zero mapping
"pm", -- plus mapping
@@ -120,7 +120,7 @@ local sorters = sorters
local constants = sorters.constants
local data, language, method, digits
-local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence, usedinsequence
+local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence
local thefirstofsplit
local mte = { -- todo: assign to t
@@ -334,9 +334,6 @@ local function setlanguage(l,m,d,u)
end
end
data.sequence = sequence
- usedinsequence = table.tohash(sequence)
- data.usedinsequence = usedinsequence
--- usedinsequence.ch = true -- better just store the string
if trace_tests then
report_sorters("using sort sequence: % t",sequence)
end
@@ -375,9 +372,7 @@ local function basicsort(sort_a,sort_b)
return 0
end
--- todo: compile compare function
-
-local function basic(a,b) -- trace ea and eb
+function comparers.basic(a,b) -- trace ea and eb
local ea, eb = a.split, b.split
local na, nb = #ea, #eb
if na == 0 and nb == 0 then
@@ -437,12 +432,6 @@ local function basic(a,b) -- trace ea and eb
end
end
-comparers.basic = basic
-
-function sorters.basicsorter(a,b)
- return basic(a,b) == -1
-end
-
local function numify(s)
s = digitsoffset + tonumber(s) -- alternatively we can create range
if s > digitsmaximum then
@@ -488,7 +477,7 @@ sorters.firstofsplit = firstofsplit
-- for the moment we use an inefficient bunch of tables but once
-- we know what combinations make sense we can optimize this
-function splitters.utf(str,checked) -- we could append m and u but this is cleaner, s is for tracing
+function splitters.utf(str) -- we could append m and u but this is cleaner, s is for tracing
if #replacements > 0 then
-- todo make an lpeg for this
for k=1,#replacements do
@@ -591,31 +580,18 @@ function splitters.utf(str,checked) -- we could append m and u but this is clean
-- p_mapping = { p_mappings[fs][1] }
-- end
-- end
+ local t = {
+ ch = char,
+ uc = byte,
+ mc = m_case,
+ zc = z_case,
+ pc = p_case,
+ mm = m_mapping,
+ zm = z_mapping,
+ pm = p_mapping,
+ }
- if checked then
- return {
- ch = trace_tests and char or nil, -- not in sequence
- uc = usedinsequence.uc and byte or nil,
- mc = usedinsequence.mc and m_case or nil,
- zc = usedinsequence.zc and z_case or nil,
- pc = usedinsequence.pc and p_case or nil,
- mm = usedinsequence.mm and m_mapping or nil,
- zm = usedinsequence.zm and z_mapping or nil,
- pm = usedinsequence.pm and p_mapping or nil,
- }
- else
- return {
- ch = char,
- uc = byte,
- mc = m_case,
- zc = z_case,
- pc = p_case,
- mm = m_mapping,
- zm = z_mapping,
- pm = p_mapping,
- }
- end
-
+ return t
end
local function packch(entry)
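
Note (editorial, not part of this patch): with the change above splitters.utf always returns the full set of per-method arrays (ch, uc, mm, zm, pm, ...); the comparer then walks the configured sort sequence and compares the matching arrays entry by entry. Simplified, with made-up data and a shortened sequence:

-- illustrative sketch only, not part of the patch
local split_a  = { zc = { 1 }, zm = { 3, 5 } }
local split_b  = { zc = { 1 }, zm = { 3, 7 } }
local sequence = { "zc", "zm" }

local function compare(a,b)
    for s=1,#sequence do
        local m = sequence[s]
        local sa, sb = a[m], b[m]
        for i=1,math.min(#sa,#sb) do
            if sa[i] ~= sb[i] then
                return sa[i] < sb[i] and -1 or 1
            end
        end
        if #sa ~= #sb then
            return #sa < #sb and -1 or 1
        end
    end
    return 0
end

-- compare(split_a,split_b) == -1, because 5 < 7 in the "zm" mapping
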
diff --git a/tex/context/base/sort-lan.lua b/tex/context/base/sort-lan.lua
index 6b0cc5007..6d16c0d80 100644
--- a/tex/context/base/sort-lan.lua
+++ b/tex/context/base/sort-lan.lua
@@ -310,7 +310,7 @@ local ch, CH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11)
definitions["cz"] = {
replacements = {
- { "ch", ch }, { "Ch", ch }, { "CH", ch }
+ { "ch", ch }, { "CH", CH }
},
entries = {
["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["č"] = "č",
diff --git a/tex/context/base/spac-ali.lua b/tex/context/base/spac-ali.lua
index 08e33c5b8..25cc6cd66 100644
--- a/tex/context/base/spac-ali.lua
+++ b/tex/context/base/spac-ali.lua
@@ -10,26 +10,13 @@ local div = math.div
local format = string.format
local tasks = nodes.tasks
+local appendaction = tasks.appendaction
+local prependaction = tasks.prependaction
+local disableaction = tasks.disableaction
local enableaction = tasks.enableaction
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getsubtype = nuts.getsubtype
-
-local hpack_nodes = nuts.hpack -- nodes.fasthpack not really faster here
-local linked_nodes = nuts.linked
+local slide_nodes = node.slide
+local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
local unsetvalue = attributes.unsetvalue
@@ -40,6 +27,8 @@ local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local line_code = listcodes.line
+local nodepool = nodes.pool
+
local new_stretch = nodepool.stretch
local a_realign = attributes.private("realign")
@@ -67,10 +56,10 @@ local function handler(head,leftpage,realpageno)
local current = head
local done = false
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code then
- if getsubtype(current) == line_code then
- local a = getattr(current,a_realign)
+ if current.subtype == line_code then
+ local a = current[a_realign]
if not a or a == 0 then
-- skip
else
@@ -86,12 +75,12 @@ local function handler(head,leftpage,realpageno)
action = leftpage and 2 or 1
end
if action == 1 then
- setfield(current,"list",hpack_nodes(linked_nodes(getlist(current),new_stretch(3)),getfield(current,"width"),"exactly"))
+ current.list = hpack_nodes(current.list .. new_stretch(3),current.width,"exactly")
if trace_realign then
report_realign("flushing left, align %a, page %a, realpage %a",align,pageno,realpageno)
end
elseif action == 2 then
- setfield(current,"list",hpack_nodes(linked_nodes(new_stretch(3),getlist(current)),getfield(current,"width"),"exactly"))
+ current.list = hpack_nodes(new_stretch(3) .. current.list,current.width,"exactly")
if trace_realign then
report_realign("flushing right. align %a, page %a, realpage %a",align,pageno,realpageno)
end
@@ -101,14 +90,14 @@ local function handler(head,leftpage,realpageno)
done = true
nofrealigned = nofrealigned + 1
end
- setattr(current,a_realign,unsetvalue)
+ current[a_realign] = unsetvalue
end
end
- handler(getlist(current),leftpage,realpageno)
+ handler(current.list,leftpage,realpageno)
elseif id == vlist_code then
- handler(getlist(current),leftpage,realpageno)
+ handler(current.list,leftpage,realpageno)
end
- current = getnext(current)
+ current = current.next
end
return head, done
end
@@ -116,8 +105,7 @@ end
function alignments.handler(head)
local leftpage = isleftpage(true,false)
local realpageno = texgetcount("realpageno")
- local head, done = handler(tonut(head),leftpage,realpageno)
- return tonode(head), done
+ return handler(head,leftpage,realpageno)
end
local enabled = false
diff --git a/tex/context/base/spac-ali.mkiv b/tex/context/base/spac-ali.mkiv
index cf95064a2..9c7e81379 100644
--- a/tex/context/base/spac-ali.mkiv
+++ b/tex/context/base/spac-ali.mkiv
@@ -585,36 +585,13 @@
\unexpanded\def\spac_align_use_now#1%
{\csname\??alignmentnormalcache#1\endcsname}
-% Maybe we need something different in columns.
+% The keywords:
\unexpanded\def\installalign#1#2% beware: commands must be unexpandable!
{\ifcsname\??aligncommand#1\endcsname \else
\setvalue{\??aligncommand#1}{\t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
\fi}
-% beware, toks stuff and states are set at a differt time, so installalign is
-% only for special options
-%
-% \setvalue{\??aligncommand whatever}%
-% {\c_spac_align_state_horizontal\plushundred
-% \t_spac_align_collected\expandafter{\the\t_spac_align_collected .....}}
-%
-% this one could deal with both
-%
-% \unexpanded\def\installalignoption#1#2%
-% {\ifcsname\??aligncommand#1\endcsname \else
-% \setvalue{\??aligncommand#1}%
-% {\spac_align_set_horizontal_none
-% \c_spac_align_state_horizontal\plushundred % don't set
-% \t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
-% \fi}
-%
-% \installalignoption
-% {whatever}
-% {}
-
-% The keywords:
-
\letvalue{\??aligncommand\empty }\empty
\setvalue{\??aligncommand\v!broad }{\c_spac_align_state_broad \plusone }
\setvalue{\??aligncommand\v!wide }{\c_spac_align_state_broad \plustwo }
diff --git a/tex/context/base/spac-chr.lua b/tex/context/base/spac-chr.lua
index 4122a64b6..db98b42a6 100644
--- a/tex/context/base/spac-chr.lua
+++ b/tex/context/base/spac-chr.lua
@@ -22,29 +22,14 @@ report_characters = logs.reporter("typesetting","characters")
local nodes, node = nodes, node
-local nuts = nodes.nuts
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-local copy_node_list = nuts.copy_list
-local traverse_id = nuts.traverse_id
+local insert_node_after = nodes.insert_after
+local remove_node = nodes.remove
+local copy_node_list = nodes.copy_list
+local traverse_id = nodes.traverse_id
local tasks = nodes.tasks
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_penalty = nodepool.penalty
local new_glue = nodepool.glue
@@ -78,47 +63,48 @@ local c_zero = byte('0')
local c_period = byte('.')
local function inject_quad_space(unicode,head,current,fraction)
- local attr = getfield(current,"attr")
+ local attr = current.attr
if fraction ~= 0 then
- fraction = fraction * fontquads[getfont(current)]
+ fraction = fraction * fontquads[current.font]
end
local glue = new_glue(fraction)
- setfield(glue,"attr",attr)
- setfield(current,"attr",nil)
- setattr(glue,a_character,unicode)
+-- glue.attr = copy_node_list(attr)
+ glue.attr = attr
+ current.attr = nil
+ glue[a_character] = unicode
head, current = insert_node_after(head,current,glue)
return head, current
end
local function inject_char_space(unicode,head,current,parent)
- local attr = getfield(current,"attr")
- local font = getfont(current)
+ local attr = current.attr
+ local font = current.font
local char = fontcharacters[font][parent]
local glue = new_glue(char and char.width or fontparameters[font].space)
- setfield(glue,"attr",attr)
- setfield(current,"attr",nil)
- setattr(glue,a_character,unicode)
+ glue.attr = current.attr
+ current.attr = nil
+ glue[a_character] = unicode
head, current = insert_node_after(head,current,glue)
return head, current
end
local function inject_nobreak_space(unicode,head,current,space,spacestretch,spaceshrink)
- local attr = getfield(current,"attr")
+ local attr = current.attr
local glue = new_glue(space,spacestretch,spaceshrink)
local penalty = new_penalty(10000)
- setfield(glue,"attr",attr)
- setfield(current,"attr",nil)
- setattr(glue,a_character,unicode)
+ glue.attr = attr
+ current.attr = nil
+ glue[a_character] = unicode
head, current = insert_node_after(head,current,penalty)
head, current = insert_node_after(head,current,glue)
return head, current
end
local function nbsp(head,current)
- local para = fontparameters[getfont(current)]
- if getattr(current,a_alignstate) == 1 then -- flushright
+ local para = fontparameters[current.font]
+ if current[a_alignstate] == 1 then -- flushright
head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0)
- setfield(current,"subtype",space_skip_code)
+ current.subtype = space_skip_code
else
head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink)
end
@@ -135,7 +121,7 @@ end
function characters.replacenbspaces(head)
for current in traverse_id(glyph_code,head) do
- if getchar(current) == 0x00A0 then
+ if current.char == 0x00A0 then
local h = nbsp(head,current)
if h then
head = remove_node(h,current,true)
@@ -161,21 +147,21 @@ local methods = {
-- don't have the 'local' value.
[0x00A0] = function(head,current) -- nbsp
- local next = getnext(current)
- if next and getid(next) == glyph_code then
- local char = getchar(next)
+ local next = current.next
+ if next and next.id == glyph_code then
+ local char = next.char
if char == 0x200C or char == 0x200D then -- nzwj zwj
- next = getnext(next)
- if next and nbsphash[getchar(next)] then
+ next = next.next
+ if next and nbsphash[next.char] then
return false
end
elseif nbsphash[char] then
return false
end
end
- local prev = getprev(current)
- if prev and getid(prev) == glyph_code and nbsphash[getchar(prev)] then
- return false
+ local prev = current.prev
+ if prev and prev.id == glyph_code and nbsphash[prev.char] then
+ return false -- kannada
end
return nbsp(head,current)
end,
@@ -229,11 +215,11 @@ local methods = {
end,
[0x202F] = function(head,current) -- narrownobreakspace
- return inject_nobreak_space(0x202F,head,current,fontquads[getfont(current)]/8)
+ return inject_nobreak_space(0x202F,head,current,fontquads[current.font]/8)
end,
[0x205F] = function(head,current) -- math thinspace
- return inject_nobreak_space(0x205F,head,current,fontparameters[getfont(current)].space/8)
+ return inject_nobreak_space(0x205F,head,current,fontparameters[current.font].space/8)
end,
-- [0xFEFF] = function(head,current) -- zerowidthnobreakspace
@@ -242,15 +228,14 @@ local methods = {
}
-function characters.handler(head) -- todo: use traverse_id
- head = tonut(head)
+function characters.handler(head)
local current = head
local done = false
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local next = getnext(current)
- local char = getchar(current)
+ local next = current.next
+ local char = current.char
local method = methods[char]
if method then
if trace_characters then
@@ -264,8 +249,8 @@ function characters.handler(head) -- todo: use traverse_id
end
current = next
else
- current = getnext(current)
+ current = current.next
end
end
- return tonode(head), done
+ return head, done
end
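
Note (editorial, not part of this patch): characters.handler above walks the node list, looks each glyph's code point up in the methods table and lets that method rewrite the list around the glyph, typically injecting glue and a penalty. The dispatch reduced to its core; handle is a made-up name and error handling is omitted.

-- illustrative sketch only, not part of the patch
local glyph_code = node.id("glyph")

local function handle(head,methods)
    local current, done = head, false
    while current do
        local upcoming = current.next   -- remembered, the method may remove current
        if current.id == glyph_code then
            local method = methods[current.char]
            if method and method(head,current) then
                done = true
            end
        end
        current = upcoming
    end
    return head, done
end
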
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index 7d78d6c12..0035c4119 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -37,6 +37,7 @@ local nodes, node, trackers, attributes, context, commands, tex = nodes, node,
local texlists = tex.lists
local texgetdimen = tex.getdimen
local texnest = tex.nest
+local texgetbox = tex.getbox
local variables = interfaces.variables
@@ -62,41 +63,23 @@ local a_skiporder = attributes.private('skiporder')
local a_snapmethod = attributes.private('snapmethod')
local a_snapvbox = attributes.private('snapvbox')
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-local ntostring = nuts.tostring
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getsubtype = nuts.getsubtype
-local getbox = nuts.getbox
-
-local find_node_tail = nuts.tail
-local free_node = nuts.free
-local free_node_list = nuts.flush_list
-local copy_node = nuts.copy
-local traverse_nodes = nuts.traverse
-local traverse_nodes_id = nuts.traverse_id
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-local count_nodes = nuts.count
-local hpack_node = nuts.hpack
-local vpack_node = nuts.vpack
-local writable_spec = nuts.writable_spec
-local nodereference = nuts.reference
-
-local listtoutf = nodes.listtoutf
+local find_node_tail = node.tail
+local free_node = node.free
+local free_node_list = node.flush_list
+local copy_node = node.copy
+local traverse_nodes = node.traverse
+local traverse_nodes_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove
+local count_nodes = nodes.count
local nodeidstostring = nodes.idstostring
+local hpack_node = node.hpack
+local vpack_node = node.vpack
+local writable_spec = nodes.writable_spec
+local listtoutf = nodes.listtoutf
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_penalty = nodepool.penalty
local new_kern = nodepool.kern
@@ -196,26 +179,28 @@ end
-- local rule_id = nodecodes.rule
-- local vlist_id = nodecodes.vlist
-- function nodes.makevtop(n)
--- if getid(n) == vlist_id then
--- local list = getlist(n)
--- local height = (list and getid(list) <= rule_id and getfield(list,"height")) or 0
--- setfield(n,"depth",getfield(n,"depth") - height + getfield(n,"height")
--- setfield(n,"height",height
+-- if n.id == vlist_id then
+-- local list = n.list
+-- local height = (list and list.id <= rule_id and list.height) or 0
+-- n.depth = n.depth - height + n.height
+-- n.height = height
-- end
-- end
+local reference = nodes.reference
+
local function validvbox(parentid,list)
if parentid == hlist_code then
- local id = getid(list)
+ local id = list.id
if id == whatsit_code then -- check for initial par subtype
- list = getnext(list)
+ list = list.next
if not next then
return nil
end
end
local done = nil
for n in traverse_nodes(list) do
- local id = getid(n)
+ local id = n.id
if id == vlist_code or id == hlist_code then
if done then
return nil
@@ -229,9 +214,9 @@ local function validvbox(parentid,list)
end
end
if done then
- local id = getid(done)
+ local id = done.id
if id == hlist_code then
- return validvbox(id,getlist(done))
+ return validvbox(id,done.list)
end
end
return done -- only one vbox
@@ -241,19 +226,19 @@ end
local function already_done(parentid,list,a_snapmethod) -- todo: done when only boxes and all snapped
-- problem: any snapped vbox ends up in a line
if list and parentid == hlist_code then
- local id = getid(list)
+ local id = list.id
if id == whatsit_code then -- check for initial par subtype
- list = getnext(list)
+ list = list.next
if not next then
return false
end
end
--~ local i = 0
for n in traverse_nodes(list) do
- local id = getid(n)
---~ i = i + 1 print(i,nodecodes[id],getattr(n,a_snapmethod))
+ local id = n.id
+--~ i = i + 1 print(i,nodecodes[id],n[a_snapmethod])
if id == hlist_code or id == vlist_code then
- local a = getattr(n,a_snapmethod)
+ local a = n[a_snapmethod]
if not a then
-- return true -- not snapped at all
elseif a == 0 then
@@ -291,11 +276,11 @@ end
-- check variables.none etc
local function snap_hlist(where,current,method,height,depth) -- method.strut is default
- local list = getlist(current)
+ local list = current.list
local t = trace_vsnapping and { }
if t then
t[#t+1] = formatters["list content: %s"](listtoutf(list))
- t[#t+1] = formatters["parent id: %s"](nodereference(current))
+ t[#t+1] = formatters["parent id: %s"](reference(current))
t[#t+1] = formatters["snap method: %s"](method.name)
t[#t+1] = formatters["specification: %s"](method.specification)
end
@@ -327,8 +312,7 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
t[#t+1] = formatters["auto: snapht %p snapdp %p"](snapht,snapdp)
end
end
- local h = height or getfield(current,"height")
- local d = depth or getfield(current,"depth")
+ local h, d = height or current.height, depth or current.depth
local hr, dr, ch, cd = method.hfraction or 1, method.dfraction or 1, h, d
local tlines, blines = method.tlines or 1, method.blines or 1
local done, plusht, plusdp = false, snapht, snapdp
@@ -355,22 +339,22 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if method.first then
local thebox = current
- local id = getid(thebox)
+ local id = thebox.id
if id == hlist_code then
- thebox = validvbox(id,getlist(thebox))
- id = thebox and getid(thebox)
+ thebox = validvbox(id,thebox.list)
+ id = thebox and thebox.id
end
if thebox and id == vlist_code then
- local list = getlist(thebox)
+ local list = thebox.list
local lh, ld
for n in traverse_nodes_id(hlist_code,list) do
- lh = getfield(n,"height")
- ld = getfield(n,"depth")
+ lh = n.height
+ ld = n.depth
break
end
if lh then
- local ht = getfield(thebox,"height")
- local dp = getfield(thebox,"depth")
+ local ht = thebox.height
+ local dp = thebox.depth
if t then
t[#t+1] = formatters["first line: height %p depth %p"](lh,ld)
t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
@@ -378,9 +362,9 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local delta = h - lh
ch, cd = lh, delta + d
h, d = ch, cd
- local shifted = hpack_node(getlist(current))
- setfield(shifted,"shift",delta)
- setfield(current,"list",shifted)
+ local shifted = hpack_node(current.list)
+ shifted.shift = delta
+ current.list = shifted
done = true
if t then
t[#t+1] = formatters["first: height %p depth %p shift %p"](ch,cd,delta)
@@ -393,21 +377,20 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
end
elseif method.last then
local thebox = current
- local id = getid(thebox)
+ local id = thebox.id
if id == hlist_code then
- thebox = validvbox(id,getlist(thebox))
- id = thebox and getid(thebox)
+ thebox = validvbox(id,thebox.list)
+ id = thebox and thebox.id
end
if thebox and id == vlist_code then
- local list = getlist(thebox)
- local lh, ld
+ local list, lh, ld = thebox.list
for n in traverse_nodes_id(hlist_code,list) do
- lh = getfield(n,"height")
- ld = getfield(n,"depth")
+ lh = n.height
+ ld = n.depth
end
if lh then
- local ht = getfield(thebox,"height")
- local dp = getfield(thebox,"depth")
+ local ht = thebox.height
+ local dp = thebox.depth
if t then
t[#t+1] = formatters["last line: height %p depth %p" ](lh,ld)
t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
@@ -415,9 +398,9 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local delta = d - ld
cd, ch = ld, delta + h
h, d = ch, cd
- local shifted = hpack_node(getlist(current))
- setfield(shifted,"shift",delta)
- setfield(current,"list",shifted)
+ local shifted = hpack_node(current.list)
+ shifted.shift = delta
+ current.list = shifted
done = true
if t then
t[#t+1] = formatters["last: height %p depth %p shift %p"](ch,cd,delta)
@@ -478,25 +461,25 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if offset then
-- we need to set the attr
if t then
- t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,getfield(current,"width"),getfield(current,"height"),getfield(current,"depth"))
+ t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
end
- local shifted = hpack_node(getlist(current))
- setfield(shifted,"shift",offset)
- setfield(current,"list",shifted)
+ local shifted = hpack_node(current.list)
+ shifted.shift = offset
+ current.list = shifted
if t then
- t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,getfield(current,"width"),getfield(current,"height"),getfield(current,"depth"))
+ t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
end
- setattr(shifted,a_snapmethod,0)
- setattr(current,a_snapmethod,0)
+ shifted[a_snapmethod] = 0
+ current[a_snapmethod] = 0
end
if not height then
- setfield(current,"height",ch)
+ current.height = ch
if t then
t[#t+1] = formatters["forced height: %p"](ch)
end
end
if not depth then
- setfield(current,"depth",cd)
+ current.depth = cd
if t then
t[#t+1] = formatters["forced depth: %p"](cd)
end
@@ -510,17 +493,17 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
t[#t+1] = formatters["final depth: %p -> %p"](d,cd)
end
if t then
- report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[getid(current)],t)
+ report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[current.id],t)
end
return h, d, ch, cd, lines
end
local function snap_topskip(current,method)
- local spec = getfield(current,"spec")
- local w = getfield(spec,"width")
+ local spec = current.spec
+ local w = spec.width
local wd = w
- if getfield(spec,"writable") then
- setfield(spec,"width",0)
+ if spec.writable then
+ spec.width = 0
wd = 0
end
return w, wd
@@ -681,18 +664,18 @@ local trace_list, tracing_info, before, after = { }, false, "", ""
local function nodes_to_string(head)
local current, t = head, { }
while current do
- local id = getid(current)
+ local id = current.id
local ty = nodecodes[id]
if id == penalty_code then
- t[#t+1] = formatters["%s:%s"](ty,getfield(current,"penalty"))
+ t[#t+1] = formatters["%s:%s"](ty,current.penalty)
elseif id == glue_code then -- or id == kern_code then -- to be tested
t[#t+1] = formatters["%s:%p"](ty,current)
elseif id == kern_code then
- t[#t+1] = formatters["%s:%p"](ty,getfield(current,"kern"))
+ t[#t+1] = formatters["%s:%p"](ty,current.kern)
else
t[#t+1] = ty
end
- current = getnext(current)
+ current = current.next
end
return concat(t," + ")
end
@@ -716,7 +699,7 @@ local function trace_info(message, where, what)
end
local function trace_node(what)
- local nt = nodecodes[getid(what)]
+ local nt = nodecodes[what.id]
local tl = trace_list[#trace_list]
if tl and tl[1] == "node" then
trace_list[#trace_list] = { "node", formatters["%s + %s"](tl[2],nt) }
@@ -726,8 +709,8 @@ local function trace_node(what)
end
local function trace_done(str,data)
- if getid(data) == penalty_code then
- trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,getfield(data,"penalty")) }
+ if data.id == penalty_code then
+ trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,data.penalty) }
else
trace_list[#trace_list+1] = { "glue", formatters["%s | %p"](str,data) }
end
@@ -765,31 +748,22 @@ local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
local topskip_code = skipcodes.topskip
local splittopskip_code = skipcodes.splittopskip
--- local function free_glue_node(n)
--- free_node(n)
--- local s = getfield(n,"spec")
--- if s then
--- free_node(s)
--- end
--- end
-
local free_glue_node = free_node
-local free_glue_spec = function() end -- free_node
function vspacing.snapbox(n,how)
local sv = snapmethods[how]
if sv then
- local box = getbox(n)
- local list = getlist(box)
+ local box = texgetbox(n)
+ local list = box.list
if list then
- local s = getattr(list,a_snapmethod)
+ local s = list[a_snapmethod]
if s == 0 then
if trace_vsnapping then
-- report_snapper("box list not snapped, already done")
end
else
- local ht = getfield(box,"height")
- local dp = getfield(box,"depth")
+ local ht = box.height
+ local dp = box.depth
if false then -- todo: already_done
-- assume that the box is already snapped
if trace_vsnapping then
@@ -798,14 +772,14 @@ function vspacing.snapbox(n,how)
end
else
local h, d, ch, cd, lines = snap_hlist("box",box,sv,ht,dp)
- setfield(box,"height",ch)
- setfield(box,"depth",cd)
+             box.height = ch
+ box.depth = cd
if trace_vsnapping then
report_snapper("box list snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s",
h,d,ch,cd,sv.name,sv.specification,"direct",lines,listtoutf(list))
end
- setattr(box,a_snapmethod,0) --
- setattr(list,a_snapmethod,0) -- yes or no
+ box[a_snapmethod] = 0 --
+ list[a_snapmethod] = 0 -- yes or no
end
end
end
@@ -827,10 +801,8 @@ local w, h, d = 0, 0, 0
----- w, h, d = 100*65536, 65536, 65536
local function forced_skip(head,current,width,where,trace)
- if head == current then
- if getsubtype(head) == baselineskip_code then
- width = width - getfield(getfield(head,"spec"),"width")
- end
+ if head == current and head.subtype == baselineskip_code then
+ width = width - head.spec.width
end
if width == 0 then
-- do nothing
@@ -862,25 +834,25 @@ local special_penalty_max = 35000
local function specialpenalty(start,penalty)
-- nodes.showsimplelist(texlists.page_head,1)
- local current = find_node_tail(tonut(texlists.page_head)) -- no texlists.page_tail yet
+ local current = find_node_tail(texlists.page_head)
while current do
- local id = getid(current)
+ local id = current.id
if id == glue_code then
- current = getprev(current)
+ current = current.prev
elseif id == penalty_code then
- local p = getfield(current,"penalty")
+ local p = current.penalty
if p == penalty then
if trace_vspacing then
report_vspacing("overloading penalty %a",p)
end
return current
elseif p >= 10000 then
- current = getprev(current)
+ current = current.prev
else
break
end
else
- current = getprev(current)
+ current = current.prev
end
end
end
@@ -903,12 +875,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
head = insert_node_before(head,current,p)
end
if glue_data then
- local spec = getfield(glue_data,"spec")
+ local spec = glue_data.spec
if force_glue then
if trace then trace_done("flushed due to " .. why,glue_data) end
- head = forced_skip(head,current,getfield(spec,"width"),"before",trace)
+ head = forced_skip(head,current,spec.width,"before",trace)
free_glue_node(glue_data)
- elseif getfield(spec,"writable") then
+ elseif spec.writable then
if trace then trace_done("flushed due to " .. why,glue_data) end
head = insert_node_before(head,current,glue_data)
else
@@ -928,12 +900,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
if trace then trace_info("start analyzing",where,what) end
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code or id == vlist_code then
-- needs checking, why so many calls
if snap then
- local list = getlist(current)
- local s = getattr(current,a_snapmethod)
+ local list = current.list
+ local s = current[a_snapmethod]
if not s then
-- if trace_vsnapping then
-- report_snapper("mvl list not snapped")
@@ -947,8 +919,8 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if sv then
-- check if already snapped
if list and already_done(id,list,a_snapmethod) then
- local ht = getfield(current,"height")
- local dp = getfield(current,"depth")
+ local ht = current.height
+ local dp = current.depth
-- assume that the box is already snapped
if trace_vsnapping then
report_snapper("mvl list already snapped at (%p,%p): %s",ht,dp,listtoutf(list))
@@ -963,39 +935,40 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif trace_vsnapping then
report_snapper("mvl %a not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list))
end
- setattr(current,a_snapmethod,0)
+ current[a_snapmethod] = 0
end
else
--
end
-- tex.prevdepth = 0
flush("list")
- current = getnext(current)
+ current = current.next
elseif id == penalty_code then
- -- natural_penalty = getfield(current,"penalty")
+ -- natural_penalty = current.penalty
-- if trace then trace_done("removed penalty",current) end
-- head, current = remove_node(head, current, true)
- current = getnext(current)
+ current = current.next
elseif id == kern_code then
- if snap and trace_vsnapping and getfield(current,"kern") ~= 0 then
- report_snapper("kern of %p kept",getfield(current,"kern"))
+ if snap and trace_vsnapping and current.kern ~= 0 then
+ report_snapper("kern of %p kept",current.kern)
end
flush("kern")
- current = getnext(current)
+ current = current.next
elseif id == glue_code then
- local subtype = getsubtype(current)
+ local subtype = current.subtype
if subtype == userskip_code then
- local sc = getattr(current,a_skipcategory) -- has no default, no unset (yet)
- local so = getattr(current,a_skiporder) or 1 -- has 1 default, no unset (yet)
- local sp = getattr(current,a_skippenalty) -- has no default, no unset (yet)
+ local sc = current[a_skipcategory] -- has no default, no unset (yet)
+ local so = current[a_skiporder] or 1 -- has 1 default, no unset (yet)
+ local sp = current[a_skippenalty] -- has no default, no unset (yet)
if sp and sc == penalty then
- if where == "page" and sp >= special_penalty_min and sp <= special_penalty_max then
- local previousspecial = specialpenalty(current,sp)
- if previousspecial then
- setfield(previousspecial,"penalty",0)
- sp = 0
- end
- end
+
+if where == "page" and sp >= special_penalty_min and sp <= special_penalty_max then
+ local previousspecial = specialpenalty(current,sp)
+ if previousspecial then
+ previousspecial.penalty = 0
+ sp = 0
+ end
+end
if not penalty_data then
penalty_data = sp
elseif penalty_order < so then
@@ -1010,38 +983,37 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_done("flush",glue_data) end
head = insert_node_before(head,current,glue_data)
if trace then trace_natural("natural",current) end
- current = getnext(current)
+ current = current.next
else
-- not look back across head
-- todo: prev can be whatsit (latelua)
- local previous = getprev(current)
- if previous and getid(previous) == glue_code and getsubtype(previous) == userskip_code then
- local ps = getfield(previous,"spec")
- if getfield(ps,"writable") then
- local cs = getfield(current,"spec")
- if getfield(cs,"writable") and getfield(ps,"stretch_order") == 0 and getfield(ps,"shrink_order") == 0 and getfield(cs,"stretch_order") == 0 and getfield(cs,"shrink_order") == 0 then
- local pw, pp, pm = getfield(ps,"width"), getfield(ps,"stretch"), getfield(ps,"shrink")
- local cw, cp, cm = getfield(cs,"width"), getfield(cs,"stretch"), getfield(cs,"shrink")
+ local previous = current.prev
+ if previous and previous.id == glue_code and previous.subtype == userskip_code then
+ local ps = previous.spec
+ if ps.writable then
+ local cs = current.spec
+ if cs.writable and ps.stretch_order == 0 and ps.shrink_order == 0 and cs.stretch_order == 0 and cs.shrink_order == 0 then
+ local pw, pp, pm = ps.width, ps.stretch, ps.shrink
+ local cw, cp, cm = cs.width, cs.stretch, cs.shrink
-- ps = writable_spec(previous) -- no writable needed here
-- ps.width, ps.stretch, ps.shrink = pw + cw, pp + cp, pm + cm
- free_glue_spec(ps)
- setfield(previous,"spec",new_gluespec(pw + cw, pp + cp, pm + cm)) -- else topskip can disappear
+ previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disappear
if trace then trace_natural("removed",current) end
head, current = remove_node(head, current, true)
-- current = previous
if trace then trace_natural("collapsed",previous) end
- -- current = getnext(current)
+ -- current = current.next
else
if trace then trace_natural("filler",current) end
- current = getnext(current)
+ current = current.next
end
else
if trace then trace_natural("natural (no prev spec)",current) end
- current = getnext(current)
+ current = current.next
end
else
if trace then trace_natural("natural (no prev)",current) end
- current = getnext(current)
+ current = current.next
end
end
glue_order, glue_data = 0, nil
@@ -1074,12 +1046,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif glue_order == so then
-- is now exclusive, maybe support goback as combi, else why a set
if sc == largest then
- local cs, gs = getfield(current,"spec"), getfield(glue_data,"spec")
- local cw, gw = getfield(cs,"width"), getfield(gs,"width")
+ local cs, gs = current.spec, glue_data.spec
+ local cw, gw = cs.width, gs.width
if cw > gw then
if trace then trace_skip("largest",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head,current)
+ head, current, glue_data = remove_node(head, current)
else
if trace then trace_skip("remove smallest",sc,so,sp,current) end
head, current = remove_node(head, current, true)
@@ -1087,7 +1059,7 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif sc == goback then
if trace then trace_skip("goback",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head,current)
+ head, current, glue_data = remove_node(head, current)
elseif sc == force then
-- last one counts, some day we can provide an accumulator and largest etc
-- but not now
@@ -1101,11 +1073,11 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
head, current = remove_node(head, current, true)
elseif sc == add then
if trace then trace_skip("add",sc,so,sp,current) end
- -- local old, new = glue_data.spec, getfield(current,"spec")
- local old, new = writable_spec(glue_data), getfield(current,"spec")
- setfield(old,"width",getfield(old,"width") + getfield(new,"width"))
- setfield(old,"stretch",getfield(old,"stretch") + getfield(new,"stretch"))
- setfield(old,"shrink",getfield(old,"shrink") + getfield(new,"shrink"))
+ -- local old, new = glue_data.spec, current.spec
+ local old, new = writable_spec(glue_data), current.spec
+ old.width = old.width + new.width
+ old.stretch = old.stretch + new.stretch
+ old.shrink = old.shrink + new.shrink
-- todo: order
head, current = remove_node(head, current, true)
else
@@ -1121,13 +1093,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
elseif subtype == lineskip_code then
if snap then
- local s = getattr(current,a_snapmethod)
+ local s = current[a_snapmethod]
if s and s ~= 0 then
- setattr(current,a_snapmethod,0)
- local spec = getfield(current,"spec")
- if getfield(spec,"writable") then
+ current[a_snapmethod] = 0
+ if current.spec.writable then
local spec = writable_spec(current)
- setfield(spec,"width",0)
+ spec.width = 0
if trace_vsnapping then
report_snapper("lineskip set to zero")
end
@@ -1140,16 +1111,15 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_skip("lineskip",sc,so,sp,current) end
flush("lineskip")
end
- current = getnext(current)
+ current = current.next
elseif subtype == baselineskip_code then
if snap then
- local s = getattr(current,a_snapmethod)
+ local s = current[a_snapmethod]
if s and s ~= 0 then
- setattr(current,a_snapmethod,0)
- local spec = getfield(current,"spec")
- if getfield(spec,"writable") then
+ current[a_snapmethod] = 0
+ if current.spec.writable then
local spec = writable_spec(current)
- setfield(spec,"width",0)
+ spec.width = 0
if trace_vsnapping then
report_snapper("baselineskip set to zero")
end
@@ -1162,17 +1132,17 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_skip("baselineskip",sc,so,sp,current) end
flush("baselineskip")
end
- current = getnext(current)
+ current = current.next
elseif subtype == parskip_code then
-- parskip always comes later
if ignore_whitespace then
if trace then trace_natural("ignored parskip",current) end
head, current = remove_node(head, current, true)
elseif glue_data then
- local ps = getfield(current,"spec")
- local gs = getfield(glue_data,"spec")
- if getfield(ps,"writable") and getfield(gs,"writable") and getfield(ps,"width") > getfield(gs,"width") then
- setfield(glue_data,"spec",copy_node(ps))
+ local ps = current.spec
+ local gs = glue_data.spec
+ if ps.writable and gs.writable and ps.width > gs.width then
+ glue_data.spec = copy_node(ps)
if trace then trace_natural("taking parskip",current) end
else
if trace then trace_natural("removed parskip",current) end
@@ -1184,9 +1154,9 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
elseif subtype == topskip_code or subtype == splittopskip_code then
if snap then
- local s = getattr(current,a_snapmethod)
+ local s = current[a_snapmethod]
if s and s ~= 0 then
- setattr(current,a_snapmethod,0)
+ current[a_snapmethod] = 0
local sv = snapmethods[s]
local w, cw = snap_topskip(current,sv)
if trace_vsnapping then
@@ -1200,46 +1170,46 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_skip("topskip",sc,so,sp,current) end
flush("topskip")
end
- current = getnext(current)
+ current = current.next
elseif subtype == abovedisplayskip_code then
--
if trace then trace_skip("above display skip (normal)",sc,so,sp,current) end
flush("above display skip (normal)")
- current = getnext(current)
+ current = current.next
--
elseif subtype == belowdisplayskip_code then
--
if trace then trace_skip("below display skip (normal)",sc,so,sp,current) end
flush("below display skip (normal)")
- current = getnext(current)
- --
+ current = current.next
+ --
elseif subtype == abovedisplayshortskip_code then
--
if trace then trace_skip("above display skip (short)",sc,so,sp,current) end
flush("above display skip (short)")
- current = getnext(current)
+ current = current.next
--
elseif subtype == belowdisplayshortskip_code then
--
if trace then trace_skip("below display skip (short)",sc,so,sp,current) end
flush("below display skip (short)")
- current = getnext(current)
+ current = current.next
--
else -- other glue
if snap and trace_vsnapping then
- local spec = getfield(current,"spec")
- if getfield(spec,"writable") and getfield(spec,"width") ~= 0 then
- report_snapper("glue %p of type %a kept",getfield(spec,"width"),skipcodes[subtype])
- -- setfield(spec,"width",0)
+ local spec = current.spec
+ if spec.writable and spec.width ~= 0 then
+ report_snapper("glue %p of type %a kept",current.spec.width,skipcodes[subtype])
+ -- spec.width = 0
end
end
- if trace then trace_skip(formatters["glue of type %a"](subtype),sc,so,sp,current) end
+           if trace then trace_skip(formatters["glue of type %a"](subtype),sc,so,sp,current) end
flush("some glue")
- current = getnext(current)
+ current = current.next
end
else
- flush(formatters["node with id %a"](id))
- current = getnext(current)
+ flush("something else")
+ current = current.next
end
end
if trace then trace_info("stop analyzing",where,what) end
@@ -1260,8 +1230,7 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if not tail then tail = find_node_tail(head) end
if trace then trace_done("result",glue_data) end
if force_glue then
- local spec = getfield(glue_data,"spec")
- head, tail = forced_skip(head,tail,getfield(spec,"width"),"after",trace)
+ head, tail = forced_skip(head,tail,glue_data.spec.width,"after",trace)
free_glue_node(glue_data)
else
head, tail = insert_node_after(head,tail,glue_data)
@@ -1274,7 +1243,7 @@ texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevd
end
show_tracing(head)
if oldhead ~= head then
- trace_info("head has been changed from %a to %a",nodecodes[getid(oldhead)],nodecodes[getid(head)])
+ trace_info("head has been changed from %a to %a",nodecodes[oldhead.id],nodecodes[head.id])
end
end
return head, true
@@ -1302,17 +1271,16 @@ end
function vspacing.pagehandler(newhead,where)
-- local newhead = texlists.contrib_head
if newhead then
- newhead = tonut(newhead)
local newtail = find_node_tail(newhead) -- best pass that tail, known anyway
local flush = false
stackhack = true -- todo: only when grid snapping once enabled
-- todo: fast check if head = tail
for n in traverse_nodes(newhead) do -- we could just look for glue nodes
- local id = getid(n)
+ local id = n.id
if id ~= glue_code then
flush = true
- elseif getsubtype(n) == userskip_code then
- if getattr(n,a_skipcategory) then
+ elseif n.subtype == userskip_code then
+ if n[a_skipcategory] then
stackhack = true
else
flush = true
@@ -1324,36 +1292,35 @@ function vspacing.pagehandler(newhead,where)
if flush then
if stackhead then
if trace_collect_vspacing then report("appending %s nodes to stack (final): %s",newhead) end
- setfield(stacktail,"next",newhead)
- setfield(newhead,"prev",stacktail)
+ stacktail.next = newhead
+ newhead.prev = stacktail
newhead = stackhead
stackhead, stacktail = nil, nil
end
if stackhack then
stackhack = false
if trace_collect_vspacing then report("processing %s nodes: %s",newhead) end
- -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
- newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
else
if trace_collect_vspacing then report("flushing %s nodes: %s",newhead) end
-- texlists.contrib_head = newhead
end
- return tonode(newhead)
else
if stackhead then
if trace_collect_vspacing then report("appending %s nodes to stack (intermediate): %s",newhead) end
- setfield(stacktail,"next",newhead)
- setfield(newhead,"prev",stacktail)
+ stacktail.next = newhead
+ newhead.prev = stacktail
else
if trace_collect_vspacing then report("storing %s nodes in stack (initial): %s",newhead) end
stackhead = newhead
end
stacktail = newtail
-- texlists.contrib_head = nil
- -- newhead = nil
+ newhead = nil
end
end
- return nil
+ return newhead
end
local ignore = table.tohash {
@@ -1363,23 +1330,18 @@ local ignore = table.tohash {
}
function vspacing.vboxhandler(head,where)
- if head and not ignore[where] then
- local h = tonut(head)
- if getnext(h) then
- h = collapser(h,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper
- return tonode(h)
- end
+ if head and not ignore[where] and head.next then
+ head = collapser(head,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper
end
return head
end
function vspacing.collapsevbox(n) -- for boxes but using global a_snapmethod
- local box = getbox(n)
+ local box = texgetbox(n)
if box then
- local list = getlist(box)
+ local list = box.list
if list then
- list = collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod)
- setfield(box,"list",vpack_node(list))
+ box.list = vpack_node(collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod))
end
end
end
@@ -1390,9 +1352,7 @@ end
local outer = texnest[0]
function vspacing.resetprevdepth()
- if texlists.hold_head then
- outer.prevdepth = 0
- end
+ outer.prevdepth = 0
end
-- interface
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index ae09bb5ae..a74501e41 100644
Binary files a/tex/context/base/status-files.pdf and b/tex/context/base/status-files.pdf differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 8f1f3e5c8..a591afb75 100644
Binary files a/tex/context/base/status-lua.pdf and b/tex/context/base/status-lua.pdf differ
diff --git a/tex/context/base/status-mkiv.lua b/tex/context/base/status-mkiv.lua
index 339bc24f6..caa7dc16c 100644
--- a/tex/context/base/status-mkiv.lua
+++ b/tex/context/base/status-mkiv.lua
@@ -2014,13 +2014,13 @@ return {
{
category = "mkiv",
filename = "bibl-bib",
- loading = "on demand",
+ loading = "always",
status = "pending",
},
{
category = "mkiv",
filename = "bibl-tra",
- loading = "on demand",
+ loading = "always",
status = "pending",
},
{
@@ -2534,60 +2534,6 @@ return {
loading = "on demand",
status = "okay",
},
- {
- category = "mkiv",
- filename = "publ-ini",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-old",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-tra",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-usr",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-xml",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-imp-apa",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-imp-cite",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-imp-definitions",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-imp-commands",
- loading = "always",
- status = "pending",
- },
},
lua = {
{
@@ -2660,12 +2606,12 @@ return {
{
category = "lua",
filename = "bibl-bib",
- loading = "on demand",
+ status = "todo",
},
{
category = "lua",
filename = "bibl-tra",
- loading = "on demand",
+ status = "todo",
},
{
category = "lua",
@@ -5051,42 +4997,6 @@ return {
filename = "x-mathml",
status = "todo",
},
- {
- category = "lua",
- filename = "publ-ini",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
- {
- category = "lua",
- filename = "publ-aut",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
- {
- category = "lua",
- filename = "publ-dat",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
- {
- category = "lua",
- filename = "publ-oth",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
- {
- category = "lua",
- filename = "publ-tra",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
- {
- category = "lua",
- filename = "publ-usr",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
},
metafun = {
{
diff --git a/tex/context/base/strc-lst.mkvi b/tex/context/base/strc-lst.mkvi
index f78881221..63c3e030a 100644
--- a/tex/context/base/strc-lst.mkvi
+++ b/tex/context/base/strc-lst.mkvi
@@ -889,7 +889,6 @@
\startsetups[\??listrenderings:abc]
\endgraf % are we grouped?
-% \advance % yes or no ... \rightskip is also honored
\leftskip\listparameter\c!margin % after \endgraf !
\listparameter\c!before
\endgraf
diff --git a/tex/context/base/strc-mar.lua b/tex/context/base/strc-mar.lua
index 258787d0a..b3a6e8f35 100644
--- a/tex/context/base/strc-mar.lua
+++ b/tex/context/base/strc-mar.lua
@@ -19,27 +19,14 @@ local commands = commands
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getbox = nuts.getbox
-
-local traversenodes = nuts.traverse
-
+local traversenodes = nodes.traverse
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local texsetattribute = tex.setattribute
+local texgetbox = tex.getbox
local a_marks = attributes.private("structure","marks")
@@ -119,9 +106,9 @@ end
local function sweep(head,first,last)
for n in traversenodes(head) do
- local id = getid(n)
+ local id = n.id
if id == glyph_code then
- local a = getattr(n,a_marks)
+ local a = n[a_marks]
if not a then
-- next
elseif first == 0 then
@@ -131,7 +118,7 @@ local function sweep(head,first,last)
end
elseif id == hlist_code or id == vlist_code then
if boxes_too then
- local a = getattr(n,a_marks)
+ local a = n[a_marks]
if not a then
-- next
elseif first == 0 then
@@ -140,7 +127,7 @@ local function sweep(head,first,last)
last = a
end
end
- local list = getlist(n)
+ local list = n.list
if list then
first, last = sweep(list,first,last)
end
@@ -156,9 +143,9 @@ setmetatableindex(classes, function(t,k) local s = settings_to_array(k) t[k] = s
local lasts = { }
function marks.synchronize(class,n,option)
- local box = getbox(n)
+ local box = texgetbox(n)
if box then
- local first, last = sweep(getlist(box),0,0)
+ local first, last = sweep(box.list,0,0)
if option == v_keep and first == 0 and last == 0 then
if trace_marks_get or trace_marks_set then
report_marks("action %a, class %a, box %a","retain at synchronize",class,n)
diff --git a/tex/context/base/strc-mat.mkiv b/tex/context/base/strc-mat.mkiv
index 18cb005cb..b9263cdb0 100644
--- a/tex/context/base/strc-mat.mkiv
+++ b/tex/context/base/strc-mat.mkiv
@@ -630,13 +630,13 @@
% \prevdepth-\maxdimen % texbook pagina 79-80
% \fi
% \noindent % else funny hlist with funny baselineskip
-% $$% \Ucheckedstartdisplaymath
+% $$% \Ustartdisplaymath
% \setdisplaydimensions
% \startinnermath}
%
% \unexpanded\def\stopdisplaymath
% {\stopinnermath
-% $$% \Ucheckedstopdisplaymath
+% $$% \Ustopdisplaymath
% \par
% \afterdisplayspace
% \par
@@ -663,13 +663,13 @@
\fi
\fi
\noindent % else funny hlist with funny baselineskip
- \Ucheckedstartdisplaymath
+ $$% \Ustartdisplaymath
\setdisplaydimensions
\startinnermath}
\unexpanded\def\stopdisplaymath
{\stopinnermath
- \Ucheckedstopdisplaymath
+ $$% \Ustopdisplaymath
\par
\ifvmode
\ifcase\c_strc_formulas_space_model
diff --git a/tex/context/base/strc-pag.lua b/tex/context/base/strc-pag.lua
index fd0a367aa..02ed5610f 100644
--- a/tex/context/base/strc-pag.lua
+++ b/tex/context/base/strc-pag.lua
@@ -40,19 +40,17 @@ local collected, tobesaved = allocate(), allocate()
pages.collected = collected
pages.tobesaved = tobesaved
-pages.nofpages = 0
local function initializer()
collected = pages.collected
tobesaved = pages.tobesaved
- pages.nofpages = #collected
end
job.register('structures.pages.collected', tobesaved, initializer)
local specification = { } -- to be checked
-function pages.save(prefixdata,numberdata,extradata)
+function pages.save(prefixdata,numberdata)
local realpage = texgetcount("realpageno")
local userpage = texgetcount("userpageno")
if realpage > 0 then
@@ -60,12 +58,10 @@ function pages.save(prefixdata,numberdata,extradata)
report_pages("saving page %s.%s",realpage,userpage)
end
local data = {
- number = userpage,
- viewerprefix = extradata.viewerprefix,
- state = extradata.state,
- block = sections.currentblock(),
- prefixdata = prefixdata and helpers.simplify(prefixdata),
- numberdata = numberdata and helpers.simplify(numberdata),
+ number = userpage,
+ block = sections.currentblock(),
+ prefixdata = prefixdata and helpers.simplify(prefixdata),
+ numberdata = numberdata and helpers.simplify(numberdata),
}
tobesaved[realpage] = data
if not collected[realpage] then
diff --git a/tex/context/base/strc-pag.mkiv b/tex/context/base/strc-pag.mkiv
index c4e9819ba..85cfeb40f 100644
--- a/tex/context/base/strc-pag.mkiv
+++ b/tex/context/base/strc-pag.mkiv
@@ -106,8 +106,6 @@
\let\setuppagenumber\setupuserpagenumber
\let\resetpagenumber\resetuserpagenumber
-% invisible =
-
\def\strc_pagenumbers_page_state_save % \normalexpanded?
{\ctxlua{structures.pages.save({
prefix = "\namedcounterparameter\s!userpage\c!prefix",
@@ -122,9 +120,6 @@
conversionset = "\namedcounterparameter\s!userpage\c!numberconversionset",
starter = \!!bs\namedcounterparameter\s!userpage\c!numberstarter\!!es,
stopper = \!!bs\namedcounterparameter\s!userpage\c!numberstopper\!!es,
- },{
- viewerprefix = \!!bs\namedcounterparameter\s!userpage\c!viewerprefix\!!es,
- state = \!!bs\namedcounterparameter\s!userpage\c!state\!!es,
}
)}}
diff --git a/tex/context/base/supp-box.lua b/tex/context/base/supp-box.lua
index 3c5a3383d..27078f46f 100644
--- a/tex/context/base/supp-box.lua
+++ b/tex/context/base/supp-box.lua
@@ -26,118 +26,101 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local glyph_code = nodecodes.glyph
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
+local new_penalty = nodes.pool.penalty
+local new_hlist = nodes.pool.hlist
+local new_glue = nodes.pool.glue
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattribute = nuts.getattribute
-local getbox = nuts.getbox
-
-local setfield = nuts.setfield
-local setbox = nuts.setbox
-
-local free_node = nuts.free
-local copy_list = nuts.copy_list
-local copy_node = nuts.copy
-local find_tail = nuts.tail
-
-local listtoutf = nodes.listtoutf
-
-local nodepool = nuts.pool
-local new_penalty = nodepool.penalty
-local new_hlist = nodepool.hlist
-local new_glue = nodepool.glue
+local free_node = nodes.free
+local copy_list = nodes.copy_list
+local copy_node = nodes.copy
+local find_tail = nodes.tail
+local texsetbox = tex.setbox
+local texgetbox = tex.getbox
local texget = tex.get
-local function hyphenatedlist(head)
- local current = head and tonut(head)
- while current do
- local id = getid(current)
- local next = getnext(current)
- local prev = getprev(current)
+local function hyphenatedlist(list)
+ while list do
+ local id, next, prev = list.id, list.next, list.prev
if id == disc_code then
- local hyphen = getfield(current,"pre")
+ local hyphen = list.pre
if hyphen then
local penalty = new_penalty(-500)
- -- insert_after etc
- setfield(hyphen,"next",penalty)
- setfield(penalty,"prev",hyphen)
- setfield(prev,"next",hyphen)
- setfield(next,"prev", penalty)
- setfield(penalty,"next",next)
- setfield(hyphen,"prev",prev)
- setfield(current,"pre",nil)
- free_node(current)
+ hyphen.next, penalty.prev = penalty, hyphen
+ prev.next, next.prev = hyphen, penalty
+ penalty.next, hyphen.prev = next, prev
+ list.pre = nil
+ free_node(list)
end
elseif id == vlist_code or id == hlist_code then
- hyphenatedlist(getlist(current))
+ hyphenatedlist(list.list)
end
- current = next
+ list = next
end
end
commands.hyphenatedlist = hyphenatedlist
function commands.showhyphenatedinlist(list)
- report_hyphenation("show: %s",listtoutf(tonut(list),false,true))
+ report_hyphenation("show: %s",nodes.listtoutf(list,false,true))
end
local function checkedlist(list)
if type(list) == "number" then
- return getlist(getbox(tonut(list)))
+ return texgetbox(list).list
else
- return tonut(list)
+ return list
end
end
-local function applytochars(current,doaction,noaction,nested)
+local function applytochars(list,what,nested)
+ local doaction = context[what or "ruledhbox"]
+ local noaction = context
+ local current = checkedlist(list)
while current do
- local id = getid(current)
+ local id = current.id
if nested and (id == hlist_code or id == vlist_code) then
context.beginhbox()
- applytochars(getlist(current),what,nested)
+ applytochars(current.list,what,nested)
context.endhbox()
elseif id ~= glyph_code then
- noaction(tonode(copy_node(current)))
+ noaction(copy_node(current))
else
- doaction(tonode(copy_node(current)))
+ doaction(copy_node(current))
end
- current = getnext(current)
+ current = current.next
end
end
-local function applytowords(current,doaction,noaction,nested)
+local function applytowords(list,what,nested)
+ local doaction = context[what or "ruledhbox"]
+ local noaction = context
+ local current = checkedlist(list)
local start
while current do
- local id = getid(current)
+ local id = current.id
if id == glue_code then
if start then
- doaction(tonode(copy_list(start,current)))
+ doaction(copy_list(start,current))
start = nil
end
- noaction(tonode(copy_node(current)))
+ noaction(copy_node(current))
elseif nested and (id == hlist_code or id == vlist_code) then
context.beginhbox()
- applytowords(getlist(current),what,nested)
+ applytowords(current.list,what,nested)
context.egroup()
elseif not start then
start = current
end
- current = getnext(current)
+ current = current.next
end
if start then
- doaction(tonode(copy_list(start)))
+ doaction(copy_list(start))
end
end
-commands.applytochars = function(list,what,nested) applytochars(checkedlist(list),context[what or "ruledhbox"],context,nested) end
-commands.applytowords = function(list,what,nested) applytowords(checkedlist(list),context[what or "ruledhbox"],context,nested) end
+commands.applytochars = applytochars
+commands.applytowords = applytowords
local split_char = lpeg.Ct(lpeg.C(1)^0)
local split_word = lpeg.tsplitat(lpeg.patterns.space)
@@ -193,36 +176,36 @@ end
local a_vboxtohboxseparator = attributes.private("vboxtohboxseparator")
function commands.vboxlisttohbox(original,target,inbetween)
- local current = getlist(getbox(original))
+ local current = texgetbox(original).list
local head = nil
local tail = nil
while current do
- local id = getid(current)
- local next = getnext(current)
+ local id = current.id
+ local next = current.next
if id == hlist_code then
- local list = getlist(current)
+ local list = current.list
if head then
if inbetween > 0 then
local n = new_glue(0,0,inbetween)
- setfield(tail,"next",n)
- setfield(n,"prev",tail)
+ tail.next = n
+ n.prev = tail
tail = n
end
- setfield(tail,"next",list)
- setfield(list,"prev",tail)
+ tail.next = list
+ list.prev = tail
else
head = list
end
tail = find_tail(list)
-- remove last separator
- if getid(tail) == hlist_code and getattribute(tail,a_vboxtohboxseparator) == 1 then
+ if tail.id == hlist_code and tail[a_vboxtohboxseparator] == 1 then
local temp = tail
- local prev = getprev(tail)
+ local prev = tail.prev
if next then
- local list = getlist(tail)
- setfield(prev,"next",list)
- setfield(list,"prev",prev)
- setfield(tail,"list",nil)
+ local list = tail.list
+ prev.next = list
+ list.prev = prev
+ tail.list = nil
tail = find_tail(list)
else
tail = prev
@@ -230,21 +213,21 @@ function commands.vboxlisttohbox(original,target,inbetween)
free_node(temp)
end
-- done
- setfield(tail,"next",nil)
- setfield(current,"list",nil)
+ tail.next = nil
+ current.list = nil
end
current = next
end
local result = new_hlist()
- setfield(result,"list",head)
- setbox(target,result)
+ result.list = head
+ texsetbox(target,result)
end
function commands.hboxtovbox(original)
- local b = getbox(original)
+ local b = texgetbox(original)
local factor = texget("baselineskip").width / texget("hsize")
- setfield(b,"depth",0)
- setfield(b,"height",getfield(b,"width") * factor)
+ b.depth = 0
+ b.height = b.width * factor
end
function commands.boxtostring(n)
diff --git a/tex/context/base/supp-mat.mkiv b/tex/context/base/supp-mat.mkiv
index 925f25cc4..f77ee3454 100644
--- a/tex/context/base/supp-mat.mkiv
+++ b/tex/context/base/supp-mat.mkiv
@@ -53,36 +53,6 @@
\let\normalstartdmath \Ustartdisplaymath
\let\normalstopdmath \Ustopdisplaymath
-% \unexpanded\def\Ustartdisplaymath
-% {\ifinner
-% \ifhmode
-% \normalUstartmath
-% \let\Ustopdisplaymath\normalUstopmath
-% \else
-% \normalUstartdisplaymath
-% \let\Ustopdisplaymath\normalUstopdisplaymath
-% \fi
-% \else
-% \normalUstartdisplaymath
-% \let\Ustopdisplaymath\normalUstopdisplaymath
-% \fi}
-
-\unexpanded\def\Ucheckedstartdisplaymath
- {\ifinner
- \ifhmode
- \normalUstartmath
- \let\Ucheckedstopdisplaymath\normalUstopmath
- \else
- \normalUstartdisplaymath
- \let\Ucheckedstopdisplaymath\normalUstopdisplaymath
- \fi
- \else
- \normalUstartdisplaymath
- \let\Ucheckedstopdisplaymath\normalUstopdisplaymath
- \fi}
-
-\let\Ucheckedstopdisplaymath\relax
-
\def\normalmathaligntab{&} % \let\normalmathaligntab\aligntab does not work well in a let to & (a def works ok)
\let\normalsuper \Usuperscript % obsolete
@@ -90,8 +60,8 @@
\let\startimath \Ustartmath
\let\stopimath \Ustopmath
-\let\startdmath \Ustartdisplaymath % \Ucheckedstartdisplaymath
-\let\stopdmath \Ustopdisplaymath % \Ucheckedstopdisplaymath
+\let\startdmath \Ustartdisplaymath
+\let\stopdmath  \Ustopdisplaymath
\unexpanded\def\mathematics#1{\relax \ifmmode#1\else\normalstartimath#1\normalstopimath\fi}
\unexpanded\def\displaymath#1{\noindent \ifmmode#1\else\normalstartdmath#1\normalstopdmath\fi}
diff --git a/tex/context/base/syst-ini.mkiv b/tex/context/base/syst-ini.mkiv
index 38c34556a..ab1c53131 100644
--- a/tex/context/base/syst-ini.mkiv
+++ b/tex/context/base/syst-ini.mkiv
@@ -246,10 +246,9 @@
% Watch out, for the moment we disable the check for already being defined
% later we will revert this but first all chardefs must be replaced.
-\normalprotected\def\newconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1}
-\normalprotected\def\setnewconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1#1} % just a number
-\normalprotected\def\setconstant {} % dummy, no checking, so it warns
-\normalprotected\def\setconstantvalue#1#2{\csname#1\endcsname\numexpr#2\relax}
+\normalprotected\def\newconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1}
+\normalprotected\def\setnewconstant#1{\ifdefined#1\let#1\undefined\fi\newcount#1#1} % just a number
+\normalprotected\def\setconstant {} % dummy, no checking, so it warns
% maybe setconstant with check
@@ -745,9 +744,6 @@
\normalprotected\def\settrue #1{\let#1\conditionaltrue }
\normalprotected\def\setfalse#1{\let#1\conditionalfalse}
-\normalprotected\def\settruevalue #1{\expandafter\let\csname#1\endcsname\conditionaltrue }
-\normalprotected\def\setfalsevalue#1{\expandafter\let\csname#1\endcsname\conditionalfalse}
-
\let\newconditional\setfalse
\let\ifconditional \ifcase
diff --git a/tex/context/base/tabl-ntb.mkiv b/tex/context/base/tabl-ntb.mkiv
index 42c61f16c..a1ae94712 100644
--- a/tex/context/base/tabl-ntb.mkiv
+++ b/tex/context/base/tabl-ntb.mkiv
@@ -1667,7 +1667,7 @@
\setupTABLE
[\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
- % \c!framecolor=\s!black,
+ \c!framecolor=\s!black,
\c!width=\v!fit,
\c!height=\v!fit,
\c!autowidth=\v!yes,
@@ -1719,7 +1719,7 @@
\setupTABLE
[\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
- % \c!framecolor=\s!black,
+ \c!framecolor=\s!black,
\c!width=\v!fit,
\c!height=\v!fit,
\c!autowidth=\v!yes,
diff --git a/tex/context/base/tabl-tbl.mkiv b/tex/context/base/tabl-tbl.mkiv
index 82d1be893..cd5efa7f7 100644
--- a/tex/context/base/tabl-tbl.mkiv
+++ b/tex/context/base/tabl-tbl.mkiv
@@ -429,8 +429,7 @@
\aligntab
\tabl_tabulate_column_vrule_inject
\tabl_tabulate_color_side_left
-% \tabl_tabulate_inject_pre_skip{\the\s_tabl_tabulate_pre}%
- \tabl_tabulate_inject_pre_skip{\the\dimexpr\s_tabl_tabulate_pre}% get rid of plus
+ \tabl_tabulate_inject_pre_skip{\the\s_tabl_tabulate_pre}%
\alignmark\alignmark
\aligntab
\tabl_tabulate_color_side_both
@@ -497,7 +496,7 @@
\egroup
\aligntab
\noexpand\dostoptagged
- \tabl_tabulate_inject_post_skip{\the\dimexpr\s_tabl_tabulate_post}% get rid of plus
+ \tabl_tabulate_inject_post_skip{\the\s_tabl_tabulate_post}%
\alignmark\alignmark
}}%
\t_tabl_tabulate_dummy\expandafter{\the\t_tabl_tabulate_dummy\NC}%
diff --git a/tex/context/base/tabl-xtb.lua b/tex/context/base/tabl-xtb.lua
index 653eb6e08..488ef5b78 100644
--- a/tex/context/base/tabl-xtb.lua
+++ b/tex/context/base/tabl-xtb.lua
@@ -25,21 +25,18 @@ this mechanism will be improved so that it can replace its older cousin.
-- todo: use linked list instead of r/c array
-local tonumber = tonumber
+local commands, context, tex, node = commands, context, tex, node
-local commands = commands
-local context = context
-local tex = tex
-
-local texgetcount = tex.getcount
-local texsetcount = tex.setcount
-local texgetdimen = tex.getdimen
-local texsetdimen = tex.setdimen
-local texget = tex.get
+local texgetcount = tex.getcount
+local texsetcount = tex.setcount
+local texgetbox = tex.getbox
+local texgetdimen = tex.getdimen
+local texsetdimen = tex.setdimen
+local texget = tex.get
-local format = string.format
-local concat = table.concat
-local points = number.points
+local format = string.format
+local concat = table.concat
+local points = number.points
local context = context
local context_beginvbox = context.beginvbox
@@ -52,23 +49,13 @@ local variables = interfaces.variables
local setmetatableindex = table.setmetatableindex
local settings_to_hash = utilities.parsers.settings_to_hash
-local nuts = nodes.nuts -- here nuts gain hardly nothing
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getlist = nuts.getlist
-local getfield = nuts.getfield
-local getbox = nuts.getbox
-
-local setfield = nuts.setfield
+local copy_node_list = node.copy_list
+local hpack_node_list = node.hpack
+local vpack_node_list = node.vpack
+local slide_node_list = node.slide
+local flush_node_list = node.flush_list
-local copy_node_list = nuts.copy_list
-local hpack_node_list = nuts.hpack
-local flush_node_list = nuts.flush_list
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_glue = nodepool.glue
local new_kern = nodepool.kern
@@ -228,20 +215,20 @@ function xtables.set_reflow_width()
while row[c].span do -- can also be previous row ones
c = c + 1
end
- local tb = getbox("b_tabl_x")
+ local tb = texgetbox("b_tabl_x")
local drc = row[c]
--
drc.list = true -- we don't need to keep the content around as we're in trial mode (no: copy_node_list(tb))
--
- local widths, width = data.widths, getfield(tb,"width")
+ local widths, width = data.widths, tb.width
if width > widths[c] then
widths[c] = width
end
- local heights, height = data.heights, getfield(tb,"height")
+ local heights, height = data.heights, tb.height
if height > heights[r] then
heights[r] = height
end
- local depths, depth = data.depths, getfield(tb,"depth")
+ local depths, depth = data.depths, tb.depth
if depth > depths[r] then
depths[r] = depth
end
@@ -332,14 +319,14 @@ function xtables.set_reflow_height()
-- while row[c].span do -- we could adapt drc.nx instead
-- c = c + 1
-- end
- local tb = getbox("b_tabl_x")
+ local tb = texgetbox("b_tabl_x")
local drc = row[c]
if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2
- local heights, height = data.heights, getfield(tb,"height")
+ local heights, height = data.heights, tb.height
if height > heights[r] then
heights[r] = height
end
- local depths, depth = data.depths, getfield(tb,"depth")
+ local depths, depth = data.depths, tb.depth
if depth > depths[r] then
depths[r] = depth
end
@@ -386,7 +373,7 @@ function xtables.set_construct()
-- end
local drc = row[c]
-- this will change as soon as in luatex we can reset a box list without freeing
- drc.list = copy_node_list(getbox("b_tabl_x"))
+ drc.list = copy_node_list(texgetbox("b_tabl_x"))
-- c = c + drc.nx - 1
-- data.currentcolumn = c
end
@@ -659,23 +646,23 @@ function xtables.construct()
end
local list = drc.list
if list then
- setfield(list,"shift",getfield(list,"height") + getfield(list,"depth"))
+ list.shift = list.height + list.depth
-- list = hpack_node_list(list) -- is somehow needed
- -- setfield(list,"width",0)
- -- setfield(list,"height",0)
- -- setfield(list,"depth",0)
+ -- list.width = 0
+ -- list.height = 0
+ -- list.depth = 0
-- faster:
local h = new_hlist()
- setfield(h,"list",list)
+ h.list = list
list = h
--
if start then
- setfield(stop,"next",list)
- setfield(list,"prev",stop)
+ stop.next = list
+ list.prev = stop
else
start = list
end
- stop = list
+ stop = list -- one node anyway, so not needed: slide_node_list(list)
end
local step = widths[c]
if c < nofcolumns then
@@ -683,8 +670,8 @@ function xtables.construct()
end
local kern = new_kern(step)
if stop then
- setfield(stop,"next",kern)
- setfield(kern,"prev",stop)
+ stop.next = kern
+ kern.prev = stop
else -- can be first spanning next row (ny=...)
start = kern
end
@@ -693,8 +680,8 @@ function xtables.construct()
if start then
if rightmargindistance > 0 then
local kern = new_kern(rightmargindistance)
- setfield(stop,"next",kern)
- setfield(kern,"prev",stop)
+ stop.next = kern
+ kern.prev = stop
-- stop = kern
end
return start, heights[r] + depths[r], hasspan
@@ -734,7 +721,7 @@ function xtables.construct()
texsetdimen("global","d_tabl_x_final_width",0)
else
texsetcount("global","c_tabl_x_state",1)
- texsetdimen("global","d_tabl_x_final_width",getfield(body[1][1],"width"))
+ texsetdimen("global","d_tabl_x_final_width",body[1][1].width)
end
end
@@ -747,8 +734,8 @@ local function inject(row,copy,package)
end
if package then
context_beginvbox()
- context(tonode(list))
- context(tonode(new_kern(row[2])))
+ context(list)
+ context(new_kern(row[2]))
context_endvbox()
context_nointerlineskip() -- figure out a better way
if row[4] then
@@ -756,13 +743,13 @@ local function inject(row,copy,package)
elseif row[3] then
context_blank(row[3] .. "sp") -- why blank ?
else
- context(tonode(new_glue(0)))
+ context(new_glue(0))
end
else
- context(tonode(list))
- context(tonode(new_kern(row[2])))
+ context(list)
+ context(new_kern(row[2]))
if row[3] then
- context(tonode(new_glue(row[3])))
+ context(new_glue(row[3]))
end
end
end
@@ -835,7 +822,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],repeatheader)
end
if rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
if not repeatheader then
results[head_mode] = { }
@@ -848,7 +835,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(more[i],true)
end
if rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
end
elseif headsize > 0 and repeatheader then -- following chunk gets head
@@ -858,7 +845,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],true)
end
if rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
end
else -- following chunk gets nothing
@@ -885,7 +872,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
-- all is flushed and footer fits
if footsize > 0 then
if rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
for i=1,#foot do
inject(foot[i])
@@ -899,7 +886,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
-- todo: try to flush a few more lines
if repeatfooter and footsize > 0 then
if rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
for i=1,#foot do
inject(foot[i],true)
@@ -951,13 +938,13 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i])
end
if #head > 0 and rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
for i=1,#body do
inject(body[i])
end
if #foot > 0 and rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
for i=1,#foot do
inject(foot[i])
@@ -977,24 +964,6 @@ function xtables.cleanup()
flush_node_list(r[1])
end
end
-
- -- local rows = data.rows
- -- for i=1,#rows do
- -- local row = rows[i]
- -- for i=1,#row do
- -- local cell = row[i]
- -- local list = cell.list
- -- if list then
- -- cell.width = getfield(list,"width")
- -- cell.height = getfield(list,"height")
- -- cell.depth = getfield(list,"depth")
- -- cell.list = true
- -- end
- -- end
- -- end
- -- data.result = nil
- -- inspect(data)
-
data = table.remove(stack)
end
diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua
index fa9b0cf10..3447214bd 100644
--- a/tex/context/base/task-ini.lua
+++ b/tex/context/base/task-ini.lua
@@ -35,7 +35,6 @@ appendaction("processors", "characters", "typesetters.cases.handler")
appendaction("processors", "characters", "typesetters.breakpoints.handler") -- disabled
appendaction("processors", "characters", "scripts.injectors.handler") -- disabled
-appendaction("processors", "words", "languages.replacements.handler") -- disabled
appendaction("processors", "words", "builders.kernel.hyphenation") -- always on
appendaction("processors", "words", "languages.words.check") -- disabled -- might move up, no disc check needed then
@@ -58,7 +57,6 @@ appendaction("processors", "lists", "typesetters.digits.handler")
appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling)
------------("processors", "lists", "typesetters.initials.handler") -- disabled
-appendaction("shipouts", "normalizers", "builders.paragraphs.expansion.trace") -- disabled
appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled
appendaction("shipouts", "normalizers", "typesetters.alignments.handler")
appendaction("shipouts", "normalizers", "nodes.references.handler") -- disabled
@@ -117,12 +115,11 @@ appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler")
-- experimental too
-appendaction("mvlbuilders", "normalizers", "typesetters.checkers.handler")
-appendaction("vboxbuilders", "normalizers", "typesetters.checkers.handler")
+appendaction("mvlbuilders", "normalizers","typesetters.checkers.handler")
+appendaction("vboxbuilders","normalizers","typesetters.checkers.handler")
-- speedup: only kick in when used
-disableaction("processors", "languages.replacements.handler")
disableaction("processors", "typesetters.characteralign.handler")
disableaction("processors", "scripts.autofontfeature.handler")
disableaction("processors", "scripts.splitters.handler")
@@ -143,7 +140,6 @@ disableaction("processors", "typesetters.kerns.handler")
disableaction("processors", "typesetters.italics.handler")
disableaction("processors", "nodes.handlers.stripping")
-disableaction("shipouts", "builders.paragraphs.expansion.trace")
disableaction("shipouts", "typesetters.alignments.handler")
disableaction("shipouts", "nodes.rules.handler")
disableaction("shipouts", "nodes.shifts.handler")
diff --git a/tex/context/base/trac-inf.lua b/tex/context/base/trac-inf.lua
index 067cff27c..802f2e667 100644
--- a/tex/context/base/trac-inf.lua
+++ b/tex/context/base/trac-inf.lua
@@ -123,10 +123,7 @@ function statistics.show()
-- this code will move
local register = statistics.register
register("used platform", function()
- local mask = lua.mask or "ascii"
- return format("%s, type: %s, binary subtree: %s, symbol mask: %s (%s)",
- os.platform or "unknown",os.type or "unknown", environment.texos or "unknown",
- mask,mask == "utf" and "τεχ" or "tex")
+ return format("%s, type: %s, binary subtree: %s",os.platform or "unknown",os.type or "unknown", environment.texos or "unknown")
end)
register("luatex banner", function()
return lower(status.banner)
diff --git a/tex/context/base/trac-jus.lua b/tex/context/base/trac-jus.lua
index 00c871159..38220a752 100644
--- a/tex/context/base/trac-jus.lua
+++ b/tex/context/base/trac-jus.lua
@@ -14,30 +14,14 @@ typesetters.checkers = checkers
local a_alignstate = attributes.private("alignstate")
local a_justification = attributes.private("justification")
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local setlist = nuts.setlist
-
-local traverse_id = nuts.traverse_id
-local get_list_dimensions = nuts.dimensions
-local linked_nodes = nuts.linked
-local copy_node = nuts.copy
-
-local tracedrule = nodes.tracers.pool.nuts.rule
-
-local nodepool = nuts.pool
-
-local new_rule = nodepool.rule
-local new_hlist = nodepool.hlist
-local new_glue = nodepool.glue
-local new_kern = nodepool.kern
-
+local tracers = nodes.tracers
+local tracedrule = tracers.rule
+
+local new_rule = nodes.pool.rule
+local new_hlist = nodes.pool.hlist
+local new_glue = nodes.pool.glue
+local new_kern = nodes.pool.kern
+local get_list_dimensions = node.dimensions
local hlist_code = nodes.nodecodes.hlist
local texsetattribute = tex.setattribute
@@ -75,35 +59,34 @@ trackers.register("visualizers.justification", function(v)
end)
function checkers.handler(head)
- for current in traverse_id(hlist_code,tonut(head)) do
- if getattr(current,a_justification) == 1 then
- setattr(current,a_justification,0)
- local width = getfield(current,"width")
+ for current in node.traverse_id(hlist_code,head) do
+ if current[a_justification] == 1 then
+ current[a_justification] = 0
+ local width = current.width
if width > 0 then
- local list = getlist(current)
+ local list = current.list
if list then
local naturalwidth, naturalheight, naturaldepth = get_list_dimensions(list)
local delta = naturalwidth - width
if naturalwidth == 0 or delta == 0 then
-- special box
elseif delta >= max_threshold then
- local rule = tracedrule(delta,naturalheight,naturaldepth,getfield(list,"glue_set") == 1 and "trace:dr" or "trace:db")
- setfield(current,"list",linked_nodes(list,new_hlist(rule)))
+                        local rule = tracedrule(delta,naturalheight,naturaldepth,list.glue_set == 1 and "trace:dr" or "trace:db")
+ current.list = list .. new_hlist(rule)
elseif delta <= min_threshold then
- local alignstate = getattr(list,a_alignstate)
+ local alignstate = list[a_alignstate]
if alignstate == 1 then
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dc")
- setfield(current,"list",linked_nodes(new_hlist(rule),list))
+ current.list = new_hlist(rule) .. list
elseif alignstate == 2 then
- local lrule = tracedrule(-delta/2,naturalheight,naturaldepth,"trace:dy")
- local rrule = copy_node(lrule)
- setfield(current,"list",linked_nodes(new_hlist(lrule),list,new_kern(delta/2),new_hlist(rrule)))
+ local rule = tracedrule(-delta/2,naturalheight,naturaldepth,"trace:dy")
+ current.list = new_hlist(rule^1) .. list .. new_kern(delta/2) .. new_hlist(rule)
elseif alignstate == 3 then
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dm")
- setfield(current,"list",linked_nodes(list,new_kern(delta),new_hlist(rule)))
+ current.list = list .. new_kern(delta) .. new_hlist(rule)
else
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dg")
- setfield(current,"list",linked_nodes(list,new_kern(delta),new_hlist(rule)))
+ current.list = list .. new_kern(delta) .. new_hlist(rule)
end
end
end
diff --git a/tex/context/base/trac-par.lua b/tex/context/base/trac-par.lua
index aab57ce5c..262a9cc33 100644
--- a/tex/context/base/trac-par.lua
+++ b/tex/context/base/trac-par.lua
@@ -1,25 +1,8 @@
-if not modules then modules = { } end modules ['trac-par'] = {
- version = 1.001,
- comment = "companion to node-par.mkiv",
- author = "Hans Hagen",
- copyright = "ConTeXt Development Team",
- license = "see context related readme files",
-    comment   = "a translation of the built-in parbuilder, initial conversion by Taco Hoekwater",
-}
+-- for the moment here:
local utfchar = utf.char
local concat = table.concat
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getid = nuts.getid
-local getnext = nuts.getnext
-local getlist = nuts.getlist
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
@@ -59,14 +42,14 @@ local function colorize(n)
-- tricky: the built-in method creates dummy fonts and the last line normally has the
-- original font and that one then has ex.auto set
while n do
- local id = getid(n)
+ local id = n.id
if id == glyph_code then
- local ne = getfield(n,"expansion_factor")
+ local ne = n.expansion_factor
if ne == 0 then
if length > 0 then flush() end
setnodecolor(n,"hz:zero")
else
- local f = getfont(n)
+ local f = n.font
if f ~= font then
if length > 0 then
flush()
@@ -96,8 +79,8 @@ local function colorize(n)
end
if trace_verbose then
length = length + 1
- list[length] = utfchar(getchar(n))
- width = width + getfield(n,"width") -- no kerning yet
+ list[length] = utfchar(n.char)
+ width = width + n.width -- no kerning yet
end
end
end
@@ -105,13 +88,13 @@ local function colorize(n)
if length > 0 then
flush()
end
- colorize(getlist(n),flush)
+ colorize(n.list,flush)
else -- nothing to show on kerns
if length > 0 then
flush()
end
end
- n = getnext(n)
+ n = n.next
end
if length > 0 then
flush()
@@ -121,14 +104,14 @@ end
builders.paragraphs.expansion = builders.paragraphs.expansion or { }
function builders.paragraphs.expansion.trace(head)
- colorize(tonut(head),true)
+ colorize(head,true)
return head
end
local tasks = nodes.tasks
--- tasks.prependaction("shipouts","normalizers","builders.paragraphs.expansion.trace")
--- tasks.disableaction("shipouts","builders.paragraphs.expansion.trace")
+tasks.prependaction("shipouts","normalizers","builders.paragraphs.expansion.trace")
+tasks.disableaction("shipouts","builders.paragraphs.expansion.trace")
local function set(v)
if v then
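The two tasks lines above hook the expansion tracer into the shipout pass. As a hedged sketch of that registration pattern (the handler and tracker names below are invented for illustration), an action is prepended once, kept disabled, and toggled from a tracker:

    local function mytrace(head)
        -- inspect or decorate the node list here
        return head, false                      -- head, done
    end

    builders.paragraphs.mytrace = mytrace

    nodes.tasks.prependaction("shipouts","normalizers","builders.paragraphs.mytrace")
    nodes.tasks.disableaction("shipouts","builders.paragraphs.mytrace")

    trackers.register("builders.mytrace", function(v)
        if v then
            nodes.tasks.enableaction("shipouts","builders.paragraphs.mytrace")
        else
            nodes.tasks.disableaction("shipouts","builders.paragraphs.mytrace")
        end
    end)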
diff --git a/tex/context/base/trac-pro.lua b/tex/context/base/trac-pro.lua
index 897b6a15c..d6e0d0339 100644
--- a/tex/context/base/trac-pro.lua
+++ b/tex/context/base/trac-pro.lua
@@ -26,8 +26,7 @@ local registered = { }
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
else
report_system("reference to %a in protected namespace %a",k,name)
end
@@ -35,8 +34,7 @@ end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
else
report_system("assignment to %a in protected namespace %a",k,name)
end
diff --git a/tex/context/base/trac-tim.lua b/tex/context/base/trac-tim.lua
index b4744291c..15ac9bf1b 100644
--- a/tex/context/base/trac-tim.lua
+++ b/tex/context/base/trac-tim.lua
@@ -88,7 +88,7 @@ local function convert(name)
delta = factor/delta
end
for k=1,#s do
- s[k] = format("(%.3f,%.3f)",k,(s[k]-b)*delta)
+ s[k] = format("(%s,%s)",k,(s[k]-b)*delta)
end
paths[tagname] = concat(s,"--")
end
diff --git a/tex/context/base/trac-vis.lua b/tex/context/base/trac-vis.lua
index 420e9a00d..dc8bcc5e7 100644
--- a/tex/context/base/trac-vis.lua
+++ b/tex/context/base/trac-vis.lua
@@ -34,7 +34,6 @@ local formatters = string.formatters
-- todo: inline concat (more efficient)
local nodecodes = nodes.nodecodes
-local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
@@ -59,41 +58,21 @@ local rightskip_code = gluecodes.rightskip
local whatsitcodes = nodes.whatsitcodes
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getbox = nuts.getbox
-local getlist = nuts.getlist
-local getleader = nuts.getleader
-
-local hpack_nodes = nuts.hpack
-local vpack_nodes = nuts.vpack
-local copy_node = nuts.copy
-local copy_list = nuts.copy_list
-local free_node = nuts.free
-local free_node_list = nuts.flush_list
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local traverse_nodes = nuts.traverse
-local linked_nodes = nuts.linked
-
-local fast_hpack = nuts.fasthpack
-local fast_hpack_string = nuts.typesetters.fast_hpack
+local hpack_nodes = node.hpack
+local vpack_nodes = node.vpack
+local fast_hpack_string = nodes.typesetters.fast_hpack
+local copy_node = node.copy
+local copy_list = node.copy_list
+local free_node = node.free
+local free_node_list = node.flush_list
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local fast_hpack = nodes.fasthpack
+local traverse_nodes = node.traverse
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
-
+local texgetbox = tex.getbox
local unsetvalue = attributes.unsetvalue
local current_font = font.current
@@ -102,7 +81,7 @@ local exheights = fonts.hashes.exheights
local emwidths = fonts.hashes.emwidths
local pt_factor = number.dimenfactors.pt
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_rule = nodepool.rule
local new_kern = nodepool.kern
local new_glue = nodepool.glue
@@ -314,39 +293,39 @@ local c_white_d = "trace:dw"
local function sometext(str,layer,color,textcolor) -- we can just paste verbatim together .. no typesetting needed
local text = fast_hpack_string(str,usedfont)
- local size = getfield(text,"width")
+ local size = text.width
local rule = new_rule(size,2*exheight,exheight/2)
local kern = new_kern(-size)
if color then
setcolor(rule,color)
end
if textcolor then
- setlistcolor(getlist(text),textcolor)
+ setlistcolor(text.list,textcolor)
end
- local info = linked_nodes(rule,kern,text)
+ local info = rule .. kern .. text
setlisttransparency(info,c_zero)
info = fast_hpack(info)
if layer then
- setattr(info,a_layer,layer)
+ info[a_layer] = layer
end
- local width = getfield(info,"width")
- setfield(info,"width",0)
- setfield(info,"height",0)
- setfield(info,"depth",0)
+ local width = info.width
+ info.width = 0
+ info.height = 0
+ info.depth = 0
return info, width
end
local f_cache = { }
local function fontkern(head,current)
- local kern = getfield(current,"kern")
+ local kern = current.kern
local info = f_cache[kern]
if info then
-- print("hit fontkern")
else
local text = fast_hpack_string(formatters[" %0.3f"](kern*pt_factor),usedfont)
local rule = new_rule(emwidth/10,6*exheight,2*exheight)
- local list = getlist(text)
+ local list = text.list
if kern > 0 then
setlistcolor(list,c_positive_d)
elseif kern < 0 then
@@ -356,12 +335,13 @@ local function fontkern(head,current)
end
setlisttransparency(list,c_text_d)
settransparency(rule,c_text_d)
- setfield(text,"shift",-5 * exheight)
- info = fast_hpack(linked_nodes(rule,text))
- setattr(info,a_layer,l_fontkern)
- setfield(info,"width",0)
- setfield(info,"height",0)
- setfield(info,"depth",0)
+ text.shift = -5 * exheight
+ info = rule .. text
+ info = fast_hpack(info)
+ info[a_layer] = l_fontkern
+ info.width = 0
+ info.height = 0
+ info.depth = 0
f_cache[kern] = info
end
head = insert_node_before(head,current,copy_list(info))
@@ -402,7 +382,7 @@ local tags = {
}
local function whatsit(head,current)
- local what = getsubtype(current)
+ local what = current.subtype
local info = w_cache[what]
if info then
-- print("hit whatsit")
@@ -410,7 +390,7 @@ local function whatsit(head,current)
local tag = whatsitcodes[what]
-- maybe different text colors per tag
info = sometext(formatters["W:%s"](tag and tags[tag] or what),usedfont,nil,c_white)
- setattr(info,a_layer,l_whatsit)
+ info[a_layer] = l_whatsit
w_cache[what] = info
end
head, current = insert_node_after(head,current,copy_list(info))
@@ -418,13 +398,13 @@ local function whatsit(head,current)
end
local function user(head,current)
- local what = getsubtype(current)
+ local what = current.subtype
local info = w_cache[what]
if info then
-- print("hit user")
else
info = sometext(formatters["U:%s"](what),usedfont)
- setattr(info,a_layer,l_user)
+ info[a_layer] = l_user
w_cache[what] = info
end
head, current = insert_node_after(head,current,copy_list(info))
@@ -434,14 +414,14 @@ end
local b_cache = { }
local function ruledbox(head,current,vertical,layer,what,simple,previous)
- local wd = getfield(current,"width")
+ local wd = current.width
if wd ~= 0 then
- local ht = getfield(current,"height")
- local dp = getfield(current,"depth")
- local next = getnext(current)
- local prev = previous -- getprev(current) ... prev can be wrong in math mode
- setfield(current,"next",nil)
- setfield(current,"prev",nil)
+ local ht = current.height
+ local dp = current.depth
+ local next = current.next
+ local prev = previous -- current.prev ... prev can be wrong in math mode
+ current.next = nil
+ current.prev = nil
local linewidth = emwidth/10
local baseline, baseskip
if dp ~= 0 and ht ~= 0 then
@@ -450,16 +430,16 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
if not baseline then
-- due to an optimized leader color/transparency we need to set the glue node in order
-- to trigger this mechanism
- local leader = linked_nodes(new_glue(2*linewidth),new_rule(6*linewidth,linewidth,0),new_glue(2*linewidth))
+ local leader = new_glue(2*linewidth) .. new_rule(6*linewidth,linewidth,0) .. new_glue(2*linewidth)
-- setlisttransparency(leader,c_text)
leader = fast_hpack(leader)
-- setlisttransparency(leader,c_text)
baseline = new_glue(0)
- setfield(baseline,"leader",leader)
- setfield(baseline,"subtype",cleaders_code)
- local spec = getfield(baseline,"spec")
- setfield(spec,"stretch",65536)
- setfield(spec,"stretch_order",2)
+ baseline.leader = leader
+ baseline.subtype = cleaders_code
+ local spec = baseline.spec
+ spec.stretch = 65536
+ spec.stretch_order = 2
setlisttransparency(baseline,c_text)
b_cache.baseline = baseline
end
@@ -481,49 +461,47 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
this = b_cache[what]
if not this then
local text = fast_hpack_string(what,usedfont)
- this = linked_nodes(new_kern(-getfield(text,"width")),text)
+ this = new_kern(-text.width) .. text
setlisttransparency(this,c_text)
this = fast_hpack(this)
- setfield(this,"width",0)
- setfield(this,"height",0)
- setfield(this,"depth",0)
+ this.width = 0
+ this.height = 0
+ this.depth = 0
b_cache[what] = this
end
end
-- we need to trigger the right mode (else sometimes no whatits)
- local info = linked_nodes(
- this and copy_list(this) or nil,
- new_rule(linewidth,ht,dp),
- new_rule(wd-2*linewidth,-dp+linewidth,dp),
- new_rule(linewidth,ht,dp),
- new_kern(-wd+linewidth),
+ local info =
+ (this and copy_list(this) or nil) ..
+ new_rule(linewidth,ht,dp) ..
+ new_rule(wd-2*linewidth,-dp+linewidth,dp) ..
+ new_rule(linewidth,ht,dp) ..
+ new_kern(-wd+linewidth) ..
new_rule(wd-2*linewidth,ht,-ht+linewidth)
- )
if baseskip then
- info = linked_nodes(info,baseskip,baseline)
+ info = info .. baseskip .. baseline
end
setlisttransparency(info,c_text)
info = fast_hpack(info)
- setfield(info,"width",0)
- setfield(info,"height",0)
- setfield(info,"depth",0)
- setattr(info,a_layer,layer)
- local info = linked_nodes(current,new_kern(-wd),info)
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ info[a_layer] = layer
+ local info = current .. new_kern(-wd) .. info
info = fast_hpack(info,wd)
if vertical then
info = vpack_nodes(info)
end
if next then
- setfield(info,"next",next)
- setfield(next,"prev",info)
+ info.next = next
+ next.prev = info
end
if prev then
- if getid(prev) == gluespec_code then
- report_visualize("ignoring invalid prev")
- -- weird, how can this happen, an inline glue-spec, probably math
+ if prev.id == gluespec_code then
+ -- weird, how can this happen, an inline glue-spec
else
- setfield(info,"prev",prev)
- setfield(prev,"next",info)
+ info.prev = prev
+ prev.next = info
end
end
if head == current then
@@ -537,14 +515,14 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
end
local function ruledglyph(head,current,previous)
- local wd = getfield(current,"width")
+ local wd = current.width
if wd ~= 0 then
- local ht = getfield(current,"height")
- local dp = getfield(current,"depth")
- local next = getnext(current)
+ local ht = current.height
+ local dp = current.depth
+ local next = current.next
local prev = previous
- setfield(current,"next",nil)
- setfield(current,"prev",nil)
+ current.next = nil
+ current.prev = nil
local linewidth = emwidth/20
local baseline
if dp ~= 0 and ht ~= 0 then
@@ -552,32 +530,31 @@ local function ruledglyph(head,current,previous)
end
local doublelinewidth = 2*linewidth
-- could be a pdf rule
- local info = linked_nodes(
- new_rule(linewidth,ht,dp),
- new_rule(wd-doublelinewidth,-dp+linewidth,dp),
- new_rule(linewidth,ht,dp),
- new_kern(-wd+linewidth),
- new_rule(wd-doublelinewidth,ht,-ht+linewidth),
- new_kern(-wd+doublelinewidth),
+ local info =
+ new_rule(linewidth,ht,dp) ..
+ new_rule(wd-doublelinewidth,-dp+linewidth,dp) ..
+ new_rule(linewidth,ht,dp) ..
+ new_kern(-wd+linewidth) ..
+ new_rule(wd-doublelinewidth,ht,-ht+linewidth) ..
+ new_kern(-wd+doublelinewidth) ..
baseline
- )
setlistcolor(info,c_glyph)
setlisttransparency(info,c_glyph_d)
info = fast_hpack(info)
- setfield(info,"width",0)
- setfield(info,"height",0)
- setfield(info,"depth",0)
- setattr(info,a_layer,l_glyph)
- local info = linked_nodes(current,new_kern(-wd),info)
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ info[a_layer] = l_glyph
+ local info = current .. new_kern(-wd) .. info
info = fast_hpack(info)
- setfield(info,"width",wd)
+ info.width = wd
if next then
- setfield(info,"next",next)
- setfield(next,"prev",info)
+ info.next = next
+ next.prev = info
end
if prev then
- setfield(info,"prev",prev)
- setfield(prev,"next",info)
+ info.prev = prev
+ prev.next = info
end
if head == current then
return info, info
@@ -622,9 +599,9 @@ local tags = {
-- we sometimes pass previous as we can have issues in math (not watertight for all)
local function ruledglue(head,current,vertical)
- local spec = getfield(current,"spec")
- local width = getfield(spec,"width")
- local subtype = getsubtype(current)
+ local spec = current.spec
+ local width = spec.width
+ local subtype = current.subtype
local amount = formatters["%s:%0.3f"](tags[subtype] or (vertical and "VS") or "HS",width*pt_factor)
local info = g_cache[amount]
if info then
@@ -652,13 +629,13 @@ local function ruledglue(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, getnext(current)
+ return head, current.next
end
local k_cache = { }
local function ruledkern(head,current,vertical)
- local kern = getfield(current,"kern")
+ local kern = current.kern
local info = k_cache[kern]
if info then
-- print("kern hit")
@@ -678,13 +655,13 @@ local function ruledkern(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, getnext(current)
+ return head, current.next
end
local p_cache = { }
local function ruledpenalty(head,current,vertical)
- local penalty = getfield(current,"penalty")
+ local penalty = current.penalty
local info = p_cache[penalty]
if info then
-- print("penalty hit")
@@ -704,7 +681,7 @@ local function ruledpenalty(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, getnext(current)
+ return head, current.next
end
local function visualize(head,vertical)
@@ -725,8 +702,8 @@ local function visualize(head,vertical)
local attr = unsetvalue
local prev_trace_fontkern = nil
while current do
- local id = getid(current)
- local a = getattr(current,a_visual) or unsetvalue
+ local id = current.id
+ local a = current[a_visual] or unsetvalue
if a ~= attr then
prev_trace_fontkern = trace_fontkern
if a == unsetvalue then
@@ -759,30 +736,30 @@ local function visualize(head,vertical)
attr = a
end
if trace_strut then
- setattr(current,a_layer,l_strut)
+ current[a_layer] = l_strut
elseif id == glyph_code then
if trace_glyph then
head, current = ruledglyph(head,current,previous)
end
elseif id == disc_code then
if trace_glyph then
- local pre = getfield(current,"pre")
+ local pre = current.pre
if pre then
- setfield(current,"pre",ruledglyph(pre,pre))
+ current.pre = ruledglyph(pre,pre)
end
- local post = getfield(current,"post")
+ local post = current.post
if post then
- setfield(current,"post",ruledglyph(post,post))
+ current.post = ruledglyph(post,post)
end
- local replace = getfield(current,"replace")
+ local replace = current.replace
if replace then
- setfield(current,"replace",ruledglyph(replace,replace))
+ current.replace = ruledglyph(replace,replace)
end
end
elseif id == kern_code then
- local subtype = getsubtype(current)
+ local subtype = current.subtype
-- tricky ... we don't copy the trace attribute in node-inj (yet)
- if subtype == font_kern_code or getattr(current,a_fontkern) then
+ if subtype == font_kern_code or current[a_fontkern] then
if trace_fontkern or prev_trace_fontkern then
head, current = fontkern(head,current)
end
@@ -792,9 +769,9 @@ local function visualize(head,vertical)
end
end
elseif id == glue_code then
- local content = getleader(current)
+ local content = current.leader
if content then
- setfield(current,"leader",visualize(content,false))
+ current.leader = visualize(content,false)
elseif trace_glue then
head, current = ruledglue(head,current,vertical)
end
@@ -803,21 +780,21 @@ local function visualize(head,vertical)
head, current = ruledpenalty(head,current,vertical)
end
elseif id == disc_code then
- setfield(current,"pre",visualize(getfield(current,"pre")))
-            setfield(current,"post",visualize(getfield(current,"post")))
- setfield(current,"replace",visualize(getfield(current,"replace")))
+ current.pre = visualize(current.pre)
+ current.post = visualize(current.post)
+ current.replace = visualize(current.replace)
elseif id == hlist_code then
- local content = getlist(current)
+ local content = current.list
if content then
- setfield(current,"list",visualize(content,false))
+ current.list = visualize(content,false)
end
if trace_hbox then
head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple,previous)
end
elseif id == vlist_code then
- local content = getlist(current)
+ local content = current.list
if content then
- setfield(current,"list",visualize(content,true))
+ current.list = visualize(content,true)
end
if trace_vtop then
head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple,previous)
@@ -834,7 +811,7 @@ local function visualize(head,vertical)
end
end
previous = current
- current = getnext(current)
+ current = current.next
end
return head
end
@@ -863,36 +840,25 @@ local function cleanup()
-- report_visualize("cache: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb)
end
-local function handler(head)
+function visualizers.handler(head)
if usedfont then
starttiming(visualizers)
-- local l = texgetattribute(a_layer)
-- local v = texgetattribute(a_visual)
-- texsetattribute(a_layer,unsetvalue)
-- texsetattribute(a_visual,unsetvalue)
- head = visualize(tonut(head))
+ head = visualize(head)
-- texsetattribute(a_layer,l)
-- texsetattribute(a_visual,v)
-- -- cleanup()
stoptiming(visualizers)
- return tonode(head), true
- else
- return head, false
end
+ return head, false
end
-visualizers.handler = handler
-
function visualizers.box(n)
- if usedfont then
- starttiming(visualizers)
- local box = getbox(n)
- setfield(box,"list",visualize(getlist(box)))
- stoptiming(visualizers)
- return head, true
- else
- return head, false
- end
+ local box = texgetbox(n)
+ box.list = visualizers.handler(box.list)
end
local last = nil
@@ -906,9 +872,9 @@ local mark = {
local function markfonts(list)
for n in traverse_nodes(list) do
- local id = getid(n)
+ local id = n.id
if id == glyph_code then
- local font = getfont(n)
+ local font = n.font
local okay = used[font]
if not okay then
last = last + 1
@@ -917,14 +883,14 @@ local function markfonts(list)
end
setcolor(n,okay)
elseif id == hlist_code or id == vlist_code then
- markfonts(getlist(n))
+ markfonts(n.list)
end
end
end
function visualizers.markfonts(list)
last, used = 0, { }
- markfonts(type(n) == "number" and getlist(getbox(n)) or n)
+ markfonts(type(n) == "number" and texgetbox(n).list or n)
end
function commands.markfonts(n)
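Most changes in this file repeat one pattern: build a small hbox carrying a rule and a label, zero its dimensions so the packer ignores it, tag it with a layer attribute, and splice it next to the node being visualized. A condensed sketch of that pattern (the attribute name and the helper are assumptions, not the module's own):

    local a_layer = attributes.private("viewerlayer")   -- assumed attribute name

    local function addmarker(head,current,layer)
        local rule = nodes.pool.rule(65536,4*65536,0)   -- 1pt wide, 4pt high (illustrative)
        local info = node.hpack(rule)
        info.width, info.height, info.depth = 0, 0, 0   -- invisible to the packer
        info[a_layer] = layer                           -- attributes via numeric index
        return node.insert_before(head,current,info)    -- returns head, inserted node
    end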
diff --git a/tex/context/base/type-imp-buy.mkiv b/tex/context/base/type-imp-buy.mkiv
index dbfffe57c..9815cc44b 100644
--- a/tex/context/base/type-imp-buy.mkiv
+++ b/tex/context/base/type-imp-buy.mkiv
@@ -11,125 +11,27 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% monotype sabon
-
\starttypescriptcollection[sabon]
- \starttypescript [serif] [sabon] [name]
- %
- \definefontsynonym [Serif] [SabonMT]
- \definefontsynonym [SerifItalic] [SabonMT-Italic]
- \definefontsynonym [SerifSlanted] [SabonMT-Italic]
- \definefontsynonym [SerifBold] [SabonMT-SemiBold]
- \definefontsynonym [SerifBoldItalic] [SabonMT-SemiBoldItalic]
- \definefontsynonym [SerifBoldSlanted][SabonMT-SemiBoldItalic]
- \definefontsynonym [SerifCaps] [SabonMT-RegularSC]
- %
- \definefontsynonym[SabonMT] [sab_____]
- \definefontsynonym[SabonMT-Italic] [sabi____]
- \definefontsynonym[SabonMT-ItalicOsF] [saboi___]
- \definefontsynonym[SabonMT-SemiBoldOsF] [sabos___]
- \definefontsynonym[SabonMT-SemiBold] [sabs____]
- \definefontsynonym[SabonMT-RegularSC] [sabsc___]
- \definefontsynonym[SabonMT-SemiBoldItalic] [sabsi___]
- \definefontsynonym[SabonMT-SemiBoldItalicOsF][sasio___]
- %
- \stoptypescript
+\starttypescript [serif] [sabon] [name]
+
+ \definefontsynonym [Serif] [SabonMT]
+ \definefontsynonym [SerifItalic] [SabonMT-Italic]
+ \definefontsynonym [SerifSlanted] [SabonMT-Italic]
+ \definefontsynonym [SerifBold] [SabonMT-SemiBold]
+ \definefontsynonym [SerifBoldItalic] [SabonMT-SemiBoldItalic]
+ \definefontsynonym [SerifBoldSlanted] [SabonMT-SemiBoldItalic]
+ \definefontsynonym [SerifCaps] [SabonMT-RegularSC]
+
+ \definefontsynonym[SabonMT] [sab_____]
+ \definefontsynonym[SabonMT-Italic] [sabi____]
+ \definefontsynonym[SabonMT-ItalicOsF] [saboi___]
+ \definefontsynonym[SabonMT-SemiBoldOsF] [sabos___]
+ \definefontsynonym[SabonMT-SemiBold] [sabs____]
+ \definefontsynonym[SabonMT-RegularSC] [sabsc___]
+ \definefontsynonym[SabonMT-SemiBoldItalic] [sabsi___]
+ \definefontsynonym[SabonMT-SemiBoldItalicOsF][sasio___]
+\stoptypescript
\stoptypescriptcollection
-% itc stone
-
-\starttypescriptcollection[stone]
-
- \starttypescript [sans] [stone] [name]
- %
- \definefontsynonym [Sans] [StoneSansITC-Medium]
- \definefontsynonym [SansItalic] [StoneSansITC-MediumItalic]
- \definefontsynonym [SansSlanted] [StoneSansITC-MediumItalic]
- \definefontsynonym [SansBold] [StoneSansITC-Bold]
- \definefontsynonym [SansBoldItalic] [StoneSansITC-BoldItalic]
- \definefontsynonym [SansBoldSlanted][StoneSansITC-BoldItalic]
- \definefontsynonym [SansCaps] [StoneSansSCITC-Medium]
- %
- \definefontsynonym[StoneSansITC-Bold] [stosnb]
- \definefontsynonym[StoneSansITC-BoldItalic] [stosnbi]
- \definefontsynonym[StoneSansITC-Medium] [stosnm]
- \definefontsynonym[StoneSansITC-MediumItalic][stosnmi]
- \definefontsynonym[StoneSansSemITC-Semi] [stosns]
- \definefontsynonym[StoneSansSemITC-SemiIta] [stosnsi]
- \definefontsynonym[StoneSansSCITC-Medium] [stosnscm]
- \definefontsynonym[StoneSansSemSCITC-Semi] [stosnscs]
- %
- \stoptypescript
-
- \starttypescript [serif] [stone] [name]
- %
- \definefontsynonym [Serif] [StoneSerifITC-Medium]
- \definefontsynonym [SerifItalic] [StoneSerifITC-MediumItalic]
- \definefontsynonym [SerifSlanted] [StoneSerifITC-MediumItalic]
- \definefontsynonym [SerifBold] [StoneSerifITC-Bold]
- \definefontsynonym [SerifBoldItalic] [StoneSerifITC-BoldItalic]
- \definefontsynonym [SerifBoldSlanted][StoneSerifITC-BoldItalic]
- \definefontsynonym [SerifCaps] [StoneSerifSCITC-Medium]
- %
- \definefontsynonym[StoneSerifITC-Bold] [stosfb]
- \definefontsynonym[StoneSerifITC-BoldItalic] [stosfbi]
- \definefontsynonym[StoneSerifITC-Medium] [stosfm]
- \definefontsynonym[StoneSerifITC-MediumItalic][stosfmi]
- \definefontsynonym[StoneSerifSemITC-Semi] [stosfs]
- \definefontsynonym[StoneSerifSemITC-SemiIta] [stosfsi]
- \definefontsynonym[StoneSerifSCITC-Medium] [stosfscm]
- \definefontsynonym[StoneSerifSemSCITC-Semi] [stosfscs]
- %
- \stoptypescript
-
- \starttypescript [sans] [stone-oldstyle] [name]
- %
- \definefontsynonym [Sans] [StoneSansOSITC-Medium]
- \definefontsynonym [SansItalic] [StoneSansOSITC-MediumItalic]
- \definefontsynonym [SansSlanted] [StoneSansOSITC-MediumItalic]
- \definefontsynonym [SansBold] [StoneSansOSITC-Bold]
- \definefontsynonym [SansBoldItalic] [StoneSansOSITC-BoldItalic]
- \definefontsynonym [SansBoldSlanted][StoneSansOSITC-BoldItalic]
- \definefontsynonym [SansCaps] [StoneSansSCITC-Medium]
- %
- \definefontsynonym[StoneSansOSITC-Bold] [stosnob]
- \definefontsynonym[StoneSansOSITC-BoldItalic] [stosnobi]
- \definefontsynonym[StoneSansOSITC-Medium] [stosnom]
- \definefontsynonym[StoneSansOSITC-MediumItalic][stosnomi]
- \definefontsynonym[StoneSansSemOSITC-Semi] [stosnos]
- \definefontsynonym[StoneSansSemOSITC-SemiIta] [stosnosi]
- %
- \stoptypescript
-
- \starttypescript [serif] [stone-oldstyle] [name]
- %
- \definefontsynonym [Serif] [StoneSerifOSITC-Medium]
- \definefontsynonym [SerifItalic] [StoneSerifOSITC-MediumItalic]
- \definefontsynonym [SerifSlanted] [StoneSerifOSITC-MediumItalic]
- \definefontsynonym [SerifBold] [StoneSerifOSITC-Bold]
- \definefontsynonym [SerifBoldItalic] [StoneSerifOSITC-BoldItalic]
- \definefontsynonym [SerifBoldSlanted] [StoneSerifOSITC-BoldItalic]
- \definefontsynonym [SerifCaps] [StoneSerifSCITC-Medium]
- %
- \definefontsynonym[StoneSerifOSITC-Bold] [stosfob]
- \definefontsynonym[StoneSerifOSITC-BoldItalic] [stosfobi]
- \definefontsynonym[StoneSerifOSITC-Medium] [stosfom]
- \definefontsynonym[StoneSerifOSITC-MediumItalic][stosfomi]
- \definefontsynonym[StoneSerifSemOSITC-Semi] [stosfos]
- \definefontsynonym[StoneSerifSemOSITC-SemiIta] [stosfosi]
- %
- \stoptypescript
-
-\stoptypescriptcollection
-
-% linotype industria
-
-\starttypescriptcollection[industria]
-
- \starttypescript [sans] [industria] [name]
- \definefontsynonym[Industria-Solid][lt_50545]
- \stoptypescript
-
-\stoptypescriptcollection
diff --git a/tex/context/base/type-ini.lua b/tex/context/base/type-ini.lua
index 4f53fbf40..9ee97acae 100644
--- a/tex/context/base/type-ini.lua
+++ b/tex/context/base/type-ini.lua
@@ -35,9 +35,7 @@ end
local function failure_one(name)
name_two = gsub(name,"%-.*$","")
- if name == "loc" then
- -- ignore
- elseif name_two == name then
+ if name_two == name then
report_typescripts("unknown library %a",name_one)
else
commands.uselibrary {
diff --git a/tex/context/base/typo-bld.lua b/tex/context/base/typo-bld.lua
index ad37c36f4..bc9f66ee4 100644
--- a/tex/context/base/typo-bld.lua
+++ b/tex/context/base/typo-bld.lua
@@ -6,12 +6,9 @@ if not modules then modules = { } end modules ['typo-bld'] = { -- was node-par
license = "see context related readme files"
}
--- no need for nuts in the one-line demo (that might move anyway)
-
local insert, remove = table.insert, table.remove
-builders = builders or { }
-local builders = builders
+local builders, nodes, node = builders, nodes, node
builders.paragraphs = builders.paragraphs or { }
local parbuilders = builders.paragraphs
@@ -36,12 +33,11 @@ local texsetattribute = tex.setattribute
local texnest = tex.nest
local texlists = tex.lists
-local nodes = nodes
local nodepool = nodes.pool
local new_baselineskip = nodepool.baselineskip
local new_lineskip = nodepool.lineskip
-local insert_node_before = nodes.insert_before
-local hpack_node = nodes.hpack
+local insert_node_before = node.insert_before
+local hpack_node = node.hpack
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
@@ -173,6 +169,7 @@ function constructors.disable()
enabled = false
end
+
callbacks.register('linebreak_filter', processor, "breaking paragraphs into lines")
statistics.register("linebreak processing time", function()
@@ -229,16 +226,7 @@ local function report(groupcode,head)
report_page_builder(" list : %s",head and nodeidstostring(head) or "")
end
--- use tex.[sg]etlist
-
function builders.buildpage_filter(groupcode)
- -- -- this needs checking .. gets called too often
- -- if group_code ~= "after_output" then
- -- if trace_page_builder then
- -- report(groupcode)
- -- end
- -- return nil, false
- -- end
local head, done = texlists.contrib_head, false
if head then
starttiming(builders)
@@ -249,16 +237,14 @@ function builders.buildpage_filter(groupcode)
stoptiming(builders)
-- -- doesn't work here (not passed on?)
-- tex.pagegoal = tex.vsize - tex.dimen.d_page_floats_inserted_top - tex.dimen.d_page_floats_inserted_bottom
- texlists.contrib_head = head or nil -- needs checking
--- tex.setlist("contrib_head",head,head and nodes.tail(head))
- return done and head or true -- no return value needed
+ texlists.contrib_head = head
+ return done and head or true
else
if trace_page_builder then
report(groupcode)
end
- return nil, false -- no return value needed
+ return nil, false
end
-
end
callbacks.register('vpack_filter', builders.vpack_filter, "vertical spacing etc")
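The buildpage filter above works directly on tex.lists.contrib_head. A trimmed-down sketch of that contract (the body is a placeholder, not the real builder): returning true hands the untouched list back to the regular page builder.

    local function buildpage(groupcode)
        local head = tex.lists.contrib_head
        if head then
            -- walk or rebuild the contribution list here
            tex.lists.contrib_head = head
        end
        return true
    end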
diff --git a/tex/context/base/typo-brk.lua b/tex/context/base/typo-brk.lua
index be11da9c3..3558efa8e 100644
--- a/tex/context/base/typo-brk.lua
+++ b/tex/context/base/typo-brk.lua
@@ -20,36 +20,19 @@ local report_breakpoints = logs.reporter("typesetting","breakpoints")
local nodes, node = nodes, node
local settings_to_array = utilities.parsers.settings_to_array
+local copy_node = node.copy
+local copy_nodelist = node.copy_list
+local free_node = node.free
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove -- ! nodes
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getfont = nuts.getfont
-local getid = nuts.getid
-local getfield = nuts.getfield
-local getattr = nuts.getattr
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local copy_node = nuts.copy
-local copy_nodelist = nuts.copy_list
-local free_node = nuts.free
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-
-local tonodes = nuts.tonodes
+local tonodes = nodes.tonodes
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local tasks = nodes.tasks
local v_reset = interfaces.variables.reset
@@ -97,82 +80,74 @@ local function insert_break(head,start,before,after)
end
methods[1] = function(head,start)
- if getprev(start) and getnext(start) then
+ if start.prev and start.next then
insert_break(head,start,10000,0)
end
return head, start
end
methods[2] = function(head,start) -- ( => (-
- if getprev(start) and getnext(start) then
+ if start.prev and start.next then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr")))
- setfield(start,"replace",tmp)
- local tmp = copy_node(tmp)
- local hyphen = copy_node(tmp)
- setfield(hyphen,"char",languages.prehyphenchar(getfield(tmp,"lang")))
- setfield(tmp,"next",hyphen)
- setfield(hyphen,"prev",tmp)
- setfield(start,"post",tmp)
+ start.attr = copy_nodelist(tmp.attr) -- todo: critical only
+ start.replace = tmp
+ local tmp, hyphen = copy_node(tmp), copy_node(tmp)
+ hyphen.char = languages.prehyphenchar(tmp.lang)
+ tmp.next, hyphen.prev = hyphen, tmp
+ start.post = tmp
insert_break(head,start,10000,10000)
end
return head, start
end
methods[3] = function(head,start) -- ) => -)
- if getprev(start) and getnext(start) then
+ if start.prev and start.next then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr")))
- setfield(start,"replace",tmp)
- local tmp = copy_node(tmp)
- local hyphen = copy_node(tmp)
- setfield(hyphen,"char",languages.prehyphenchar(getfield(tmp,"lang")))
- setfield(tmp,"prev",hyphen)
- setfield(hyphen,"next",tmp)
- setfield(start,"pre",hyphen)
+ start.attr = copy_nodelist(tmp.attr) -- todo: critical only
+ start.replace = tmp
+ local tmp, hyphen = copy_node(tmp), copy_node(tmp)
+ hyphen.char = languages.prehyphenchar(tmp.lang)
+ tmp.prev, hyphen.next = hyphen, tmp
+ start.pre = hyphen
insert_break(head,start,10000,10000)
end
return head, start
end
methods[4] = function(head,start) -- - => - - -
- if getprev(start) and getnext(start) then
+ if start.prev and start.next then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr")))
- setfield(start,"pre",copy_node(tmp))
- setfield(start,"post",copy_node(tmp))
- setfield(start,"replace",tmp)
+ start.attr = copy_nodelist(tmp.attr) -- todo: critical only
+ start.pre, start.post, start.replace = copy_node(tmp), copy_node(tmp), tmp
insert_break(head,start,10000,10000)
end
return head, start
end
methods[5] = function(head,start,settings) -- x => p q r
- if getprev(start) and getnext(start) then
+ if start.prev and start.next then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- local attr = getfield(tmp,"attr")
- local font = getfont(tmp)
- local left = settings.left
- local right = settings.right
- local middle = settings.middle
+ local attr = tmp.attr
+ local font = tmp.font
+ start.attr = copy_nodelist(attr) -- todo: critical only
+ local left, right, middle = settings.left, settings.right, settings.middle
if left then
- setfield(start,"pre",(tonodes(tostring(left),font,attr))) -- was right
+ start.pre = tonodes(tostring(left),font,attr) -- was right
end
if right then
- setfield(start,"post",(tonodes(tostring(right),font,attr))) -- was left
+ start.post = tonodes(tostring(right),font,attr) -- was left
end
if middle then
- setfield(start,"replace",(tonodes(tostring(middle),font,attr)))
+ start.replace = tonodes(tostring(middle),font,attr)
end
- setfield(start,"attr",copy_nodelist(attr)) -- todo: critical only
free_node(tmp)
insert_break(head,start,10000,10000)
end
@@ -180,32 +155,31 @@ methods[5] = function(head,start,settings) -- x => p q r
end
function breakpoints.handler(head)
- head = tonut(head)
local done, numbers = false, languages.numbers
local start, n = head, 0
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- local attr = getattr(start,a_breakpoints)
+ local attr = start[a_breakpoints]
if attr and attr > 0 then
- setattr(start,a_breakpoints,unsetvalue) -- maybe test for subtype > 256 (faster)
+ start[a_breakpoints] = unsetvalue -- maybe test for subtype > 256 (faster)
-- look ahead and back n chars
local data = mapping[attr]
if data then
local map = data.characters
- local cmap = map[getchar(start)]
+ local cmap = map[start.char]
if cmap then
- local lang = getfield(start,"lang")
+ local lang = start.lang
-- we do a sanity check for language
local smap = lang and lang >= 0 and lang < 0x7FFF and (cmap[numbers[lang]] or cmap[""])
if smap then
if n >= smap.nleft then
local m = smap.nright
- local next = getnext(start)
+ local next = start.next
while next do -- gamble on same attribute (not that important actually)
- local id = getid(next)
+ local id = next.id
if id == glyph_code then -- gamble on same attribute (not that important actually)
- if map[getchar(next)] then
+ if map[next.char] then
break
elseif m == 1 then
local method = methods[smap.type]
@@ -216,10 +190,10 @@ function breakpoints.handler(head)
break
else
m = m - 1
- next = getnext(next)
+ next = next.next
end
- elseif id == kern_code and getsubtype(next) == kerning_code then
- next = getnext(next)
+ elseif id == kern_code and next.subtype == kerning_code then
+ next = next.next
                                        -- ignore intercharacter kerning, will go away
else
-- we can do clever and set n and jump ahead but ... not now
@@ -240,14 +214,14 @@ function breakpoints.handler(head)
else
-- n = n + 1 -- if we want single char handling (|-|) then we will use grouping and then we need this
end
- elseif id == kern_code and getsubtype(start) == kerning_code then
+ elseif id == kern_code and start.subtype == kerning_code then
            -- ignore intercharacter kerning, will go away
else
n = 0
end
- start = getnext(start)
+ start = start.next
end
- return tonode(head), done
+ return head, done
end
local enabled = false
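All of the methods above build a discretionary from the original glyph: the glyph itself becomes the no-break (replace) text, and a copy plus a hyphen character forms the break text. A self-contained sketch of that construction with the plain node library (the prehyphenchar lookup is the one used in the hunk, the rest is illustrative); the caller replaces the glyph in the list with the returned disc:

    local function makebreak(g)                          -- g: a detached glyph node
        local disc   = node.new("disc")
        disc.attr    = node.copy_list(g.attr)
        disc.replace = node.copy(g)                      -- shown when not broken
        local glyph  = node.copy(g)
        local hyphen = node.copy(g)
        hyphen.char  = languages.prehyphenchar(g.lang)   -- usually a hyphen
        glyph.next, hyphen.prev = hyphen, glyph
        disc.pre     = glyph                             -- shown before the break: "g-"
        return disc
    end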
diff --git a/tex/context/base/typo-cap.lua b/tex/context/base/typo-cap.lua
index 78ed8700a..0fc1a3093 100644
--- a/tex/context/base/typo-cap.lua
+++ b/tex/context/base/typo-cap.lua
@@ -16,23 +16,9 @@ local report_casing = logs.reporter("typesetting","casing")
local nodes, node = nodes, node
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local copy_node = nuts.copy
-local end_of_math = nuts.end_of_math
+local copy_node = nodes.copy
+local end_of_math = nodes.end_of_math
+
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
@@ -110,14 +96,14 @@ local lccodes = characters.lccodes
-- true false true == mixed
local function helper(start,attr,lastfont,n,codes,special,once,keepother)
- local char = getchar(start)
+ local char = start.char
local dc = codes[char]
if dc then
- local fnt = getfont(start)
+ local fnt = start.font
if keepother and dc == char then
local lfa = lastfont[n]
if lfa then
- setfield(start,"font",lfa)
+ start.font = lfa
return start, true
else
return start, false
@@ -126,10 +112,10 @@ local function helper(start,attr,lastfont,n,codes,special,once,keepother)
if special then
local lfa = lastfont[n]
if lfa then
- local previd = getid(getprev(start))
+ local previd = start.prev.id
if previd ~= glyph_code and previd ~= disc_code then
fnt = lfa
- setfield(start,"font",lfa)
+ start.font = lfa
end
end
end
@@ -151,18 +137,18 @@ local function helper(start,attr,lastfont,n,codes,special,once,keepother)
local chr = dc[i]
prev = start
if i == 1 then
- setfield(start,"char",chr)
+ start.char = chr
else
local g = copy_node(original)
- setfield(g,"char",chr)
- local next = getnext(start)
- setfield(g,"prev",start)
+ g.char = chr
+ local next = start.next
+ g.prev = start
if next then
- setfield(g,"next",next)
- setfield(start,"next",g)
- setfield(next,"prev",g)
+ g.next = next
+ start.next = g
+ next.prev = g
end
- start = g
+ start = g
end
end
if once then
@@ -175,7 +161,7 @@ local function helper(start,attr,lastfont,n,codes,special,once,keepother)
end
return start, false
elseif ifc[dc] then
- setfield(start,"char",dc)
+ start.char = dc
if once then
lastfont[n] = false
end
@@ -217,29 +203,29 @@ local function word(start,attr,lastfont,n)
end
local function blockrest(start)
- local n = getnext(start)
+ local n = start.next
while n do
- local id = getid(n)
- if id == glyph_code or id == disc_node and getattr(n,a_cases) == attr then
- setattr(n,a_cases,unsetvalue)
+ local id = n.id
+ if id == glyph_code or id == disc_node and n[a_cases] == attr then
+ n[a_cases] = unsetvalue
else
-- break -- we can have nested mess
end
- n = getnext(n)
+ n = n.next
end
end
local function Word(start,attr,lastfont,n) -- looks quite complex
lastfont[n] = false
- local prev = getprev(start)
- if prev and getid(prev) == kern_code and getsubtype(prev) == kerning_code then
- prev = getprev(prev)
+ local prev = start.prev
+ if prev and prev.id == kern_code and prev.subtype == kerning_code then
+ prev = prev.prev
end
if not prev then
blockrest(start)
return helper(start,attr,lastfont,n,uccodes)
end
- local previd = getid(prev)
+ local previd = prev.id
if previd ~= glyph_code and previd ~= disc_code then
-- only the first character is treated
blockrest(start)
@@ -253,14 +239,14 @@ end
local function Words(start,attr,lastfont,n)
lastfont[n] = false
- local prev = getprev(start)
- if prev and getid(prev) == kern_code and getsubtype(prev) == kerning_code then
- prev = getprev(prev)
+ local prev = start.prev
+ if prev and prev.id == kern_code and prev.subtype == kerning_code then
+ prev = prev.prev
end
if not prev then
return helper(start,attr,lastfont,n,uccodes)
end
- local previd = getid(prev)
+ local previd = prev.id
if previd ~= glyph_code and previd ~= disc_code then
return helper(start,attr,lastfont,n,uccodes)
else
@@ -286,15 +272,15 @@ end
local function random(start,attr,lastfont,n)
lastfont[n] = false
- local ch = getchar(start)
- local tfm = fontchar[getfont(start)]
+ local ch = start.char
+ local tfm = fontchar[start.font]
if lccodes[ch] then
while true do
local d = chardata[randomnumber(1,0xFFFF)]
if d then
local uc = uccodes[d]
if uc and tfm[uc] then -- this also intercepts tables
- setfield(start,"char",uc)
+ start.char = uc
return start, true
end
end
@@ -305,7 +291,7 @@ local function random(start,attr,lastfont,n)
if d then
local lc = lccodes[d]
if lc and tfm[lc] then -- this also intercepts tables
- setfield(start,"char",lc)
+ start.char = lc
return start, true
end
end
@@ -328,20 +314,19 @@ register(variables.cap, variables.capital) -- clone
register(variables.Cap, variables.Capital) -- clone
function cases.handler(head) -- not real fast but also not used on much data
- head = tonut(head)
local lastfont = { }
local lastattr = nil
local done = false
local start = head
while start do -- while because start can jump ahead
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- local attr = getattr(start,a_cases)
+ local attr = start[a_cases]
if attr and attr > 0 then
if attr ~= lastattr then
lastattr = attr
end
- setattr(start,a_cases,unsetvalue)
+ start[a_cases] = unsetvalue
local n, id, m = get(attr)
if lastfont[n] == nil then
lastfont[n] = id
@@ -360,27 +345,27 @@ function cases.handler(head) -- not real fast but also not used on much data
end
end
elseif id == disc_code then
- local attr = getattr(start,a_cases)
+ local attr = start[a_cases]
if attr and attr > 0 then
if attr ~= lastattr then
lastattr = attr
end
- setattr(start,a_cases,unsetvalue)
+ start[a_cases] = unsetvalue
local n, id, m = get(attr)
if lastfont[n] == nil then
lastfont[n] = id
end
local action = actions[n] -- map back to low number
if action then
- local replace = getfield(start,"replace")
+ local replace = start.replace
if replace then
action(replace,attr,lastfont,n)
end
- local pre = getfield(start,"pre")
+ local pre = start.pre
if pre then
action(pre,attr,lastfont,n)
end
- local post = getfield(start,"post")
+ local post = start.post
if post then
action(post,attr,lastfont,n)
end
@@ -390,10 +375,10 @@ function cases.handler(head) -- not real fast but also not used on much data
start = end_of_math(start)
end
if start then -- why test
- start = getnext(start)
+ start = start.next
end
end
- return tonode(head), done
+ return head, done
end
local enabled = false
diff --git a/tex/context/base/typo-cln.lua b/tex/context/base/typo-cln.lua
index b7e337662..2aa05b6d1 100644
--- a/tex/context/base/typo-cln.lua
+++ b/tex/context/base/typo-cln.lua
@@ -28,14 +28,7 @@ local tasks = nodes.tasks
local texsetattribute = tex.setattribute
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local setfield = nuts.setfield
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-
-local traverse_id = nuts.traverse_id
+local traverse_id = node.traverse_id
local unsetvalue = attributes.unsetvalue
@@ -55,18 +48,18 @@ local resetter = { -- this will become an entry in char-def
function cleaners.handler(head)
local inline, done = false, false
- for n in traverse_id(glyph_code,tonut(head)) do
- local char = getchar(n)
+ for n in traverse_id(glyph_code,head) do
+ local char = n.char
if resetter[char] then
inline = false
elseif not inline then
- local a = getattr(n,a_cleaner)
+ local a = n[a_cleaner]
if a == 1 then -- currently only one cleaner so no need to be fancy
local upper = uccodes[char]
if type(upper) == "table" then
                    -- some day, not much chance that \SS ends up here
else
- setfield(n,"char",upper)
+ n.char = upper
done = true
if trace_autocase then
report_autocase("")
diff --git a/tex/context/base/typo-dha.lua b/tex/context/base/typo-dha.lua
index 15e345ff8..d5ad66e7e 100644
--- a/tex/context/base/typo-dha.lua
+++ b/tex/context/base/typo-dha.lua
@@ -49,30 +49,13 @@ local trace_directions = false trackers.register("typesetters.directions.defa
local report_directions = logs.reporter("typesetting","text directions")
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-local nutstring = nuts.tostring
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-local end_of_math = nuts.end_of_math
-
-
-local nodepool = nuts.pool
+
+local insert_node_before = nodes.insert_before
+local insert_node_after = nodes.insert_after
+local remove_node = nodes.remove
+local end_of_math = nodes.end_of_math
+
+local nodepool = nodes.pool
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -125,7 +108,7 @@ end
local function process(start)
- local head = tonut(start) -- we have a global head
+ local head = start
local current = head
local inserted = nil
@@ -197,31 +180,31 @@ local function process(start)
end
local function nextisright(current)
- current = getnext(current)
- local id = getid(current)
+ current = current.next
+ local id = current.id
if id == glyph_code then
- local character = getchar(current)
+ local character = current.char
local direction = chardirections[character]
return direction == "r" or direction == "al" or direction == "an"
end
end
local function previsright(current)
- current = getprev(current)
- local id = getid(current)
+ current = current.prev
+ local id = current.id
if id == glyph_code then
- local character = getchar(current)
+        local character = current.char
local direction = chardirections[character]
return direction == "r" or direction == "al" or direction == "an"
end
end
while current do
- local id = getid(current)
+ local id = current.id
if id == math_code then
- current = getnext(end_of_math(getnext(current)))
+ current = end_of_math(current.next).next
else
- local attr = getattr(current,a_directions)
+ local attr = current[a_directions]
if attr and attr > 0 and attr ~= prevattr then
if not getglobal(a) then
lro, rlo = false, false
@@ -230,7 +213,7 @@ local function process(start)
end
if id == glyph_code then
if attr and attr > 0 then
- local character = getchar(current)
+ local character = current.char
local direction = chardirections[character]
local reversed = false
if rlo or override > 0 then
@@ -240,24 +223,24 @@ local function process(start)
end
elseif lro or override < 0 then
if direction == "r" or direction == "al" then
- setattr(current,a_state,s_isol)
+ current[a_state] = s_isol
direction = "l"
reversed = true
end
end
if direction == "on" then
local mirror = charmirrors[character]
- if mirror and fontchar[getfont(current)][mirror] then
+ if mirror and fontchar[current.font][mirror] then
local class = charclasses[character]
if class == "open" then
if nextisright(current) then
if autodir >= 0 then
force_auto_right_before(direction)
end
- setfield(current,"char",mirror)
+ current.char = mirror
done = true
elseif autodir < 0 then
- setfield(current,"char",mirror)
+ current.char = mirror
done = true
else
mirror = false
@@ -268,14 +251,14 @@ local function process(start)
local fencedir = fences[#fences]
fences[#fences] = nil
if fencedir < 0 then
- setfield(current,"char",mirror)
+ current.char = mirror
done = true
force_auto_right_before(direction)
else
mirror = false
end
elseif autodir < 0 then
- setfield(current,"char",mirror)
+ current.char = mirror
done = true
else
mirror = false
@@ -353,9 +336,9 @@ local function process(start)
-- we do nothing
end
elseif id == whatsit_code then
- local subtype = getsubtype(current)
+ local subtype = current.subtype
if subtype == localpar_code then
- local dir = getfield(current,"dir")
+ local dir = current.dir
if dir == 'TRT' then
autodir = -1
elseif dir == 'TLT' then
@@ -368,7 +351,7 @@ local function process(start)
if finish then
finish_auto_before()
end
- local dir = getfield(current,"dir")
+ local dir = current.dir
if dir == "+TRT" then
finish, autodir = "TRT", -1
elseif dir == "-TRT" then
@@ -387,7 +370,7 @@ local function process(start)
elseif finish then
finish_auto_before()
end
- local cn = getnext(current)
+ local cn = current.next
if cn then
-- we're okay
elseif finish then
@@ -407,7 +390,7 @@ local function process(start)
end
end
- return tonode(head), done
+ return head, done
end
diff --git a/tex/context/base/typo-dig.lua b/tex/context/base/typo-dig.lua
index 67849c6d4..ef05e62da 100644
--- a/tex/context/base/typo-dig.lua
+++ b/tex/context/base/typo-dig.lua
@@ -19,24 +19,10 @@ local report_digits = logs.reporter("typesetting","digits")
local nodes, node = nodes, node
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-
-local hpack_node = nuts.hpack
-local traverse_id = nuts.traverse_id
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
+local hpack_node = node.hpack
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
@@ -44,7 +30,7 @@ local unsetvalue = attributes.unsetvalue
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local tasks = nodes.tasks
local new_glue = nodepool.glue
@@ -80,20 +66,16 @@ function nodes.aligned(head,start,stop,width,how)
if how == "flushleft" or how == "middle" then
head, stop = insert_node_after(head,stop,new_glue(0,65536,65536))
end
- local prv = getprev(start)
- local nxt = getnext(stop)
- setfield(start,"prev",nil)
- setfield(stop,"next",nil)
+ local prv, nxt = start.prev, stop.next
+ start.prev, stop.next = nil, nil
local packed = hpack_node(start,width,"exactly") -- no directional mess here, just lr
if prv then
- setfield(prv,"next",packed)
- setfield(packed,"prev",prv)
+ prv.next, packed.prev = packed, prv
end
if nxt then
- setfield(nxt,"prev",packed)
- setfield(packed,"next",nxt)
+ nxt.prev, packed.next = packed, nxt
end
- if getprev(packed) then
+ if packed.prev then
return head, packed
else
return packed, packed
@@ -101,13 +83,12 @@ function nodes.aligned(head,start,stop,width,how)
end
actions[1] = function(head,start,attr)
- local font = getfont(start)
- local char = getchar(start)
+ local font = start.font
+ local char = start.char
local unic = chardata[font][char].tounicode
local what = unic and tonumber(unic,16) or char
if charbase[what].category == "nd" then
- local oldwidth = getfield(start,"width")
- local newwidth = getdigitwidth(font)
+ local oldwidth, newwidth = start.width, getdigitwidth(font)
if newwidth ~= oldwidth then
if trace_digits then
report_digits("digit trigger %a, instance %a, char %C, unicode %U, delta %s",
@@ -121,13 +102,12 @@ actions[1] = function(head,start,attr)
end
function digits.handler(head)
- head = tonut(head)
local done, current, ok = false, head, false
while current do
- if getid(current) == glyph_code then
- local attr = getattr(current,a_digits)
+ if current.id == glyph_code then
+ local attr = current[a_digits]
if attr and attr > 0 then
- setattr(current,a_digits,unsetvalue)
+ current[a_digits] = unsetvalue
local action = actions[attr%100] -- map back to low number
if action then
head, current, ok = action(head,current,attr)
@@ -137,11 +117,9 @@ function digits.handler(head)
end
end
end
- if current then
- current = getnext(current)
- end
+ current = current and current.next
end
- return tonode(head), done
+ return head, done
end
local m, enabled = 0, false -- a trick to make neighbouring ranges work
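nodes.aligned above repacks a sublist to an exact width, adding stretchable glue when the content must be flushed or centered; that is how the digit handler equalizes figure widths. A reduced sketch of the same repack for a single glyph (the digit-width lookup is omitted and the glue amounts are illustrative):

    local function equalwidth(head,glyph,width)
        local prv, nxt = glyph.prev, glyph.next
        local left  = nodes.pool.glue(0,65536,65536)     -- stretchable filler
        local right = nodes.pool.glue(0,65536,65536)
        left.next,  glyph.prev = glyph, left
        glyph.next, right.prev = right, glyph
        local boxed = node.hpack(left,width,"exactly")   -- left, glyph, right packed to width
        if prv then prv.next, boxed.prev = boxed, prv end
        if nxt then nxt.prev, boxed.next = boxed, nxt end
        return prv and head or boxed                     -- new head when the glyph was first
    end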
diff --git a/tex/context/base/typo-dir.lua b/tex/context/base/typo-dir.lua
index fbca0f024..a04028452 100644
--- a/tex/context/base/typo-dir.lua
+++ b/tex/context/base/typo-dir.lua
@@ -40,35 +40,21 @@ local trace_directions = false trackers.register("typesetters.directions",
local report_textdirections = logs.reporter("typesetting","text directions")
local report_mathdirections = logs.reporter("typesetting","math directions")
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-local nutstring = nuts.tostring
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local hasbit = number.hasbit
-local traverse_id = nuts.traverse_id
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-local end_of_math = nuts.end_of_math
+
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove
+local end_of_math = nodes.end_of_math
local texsetattribute = tex.setattribute
local texsetcount = tex.setcount
local unsetvalue = attributes.unsetvalue
+local hasbit = number.hasbit
+
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
local mathcodes = nodes.mathcodes
@@ -90,7 +76,7 @@ local vlist_code = nodecodes.vlist
local localpar_code = whatcodes.localpar
local dir_code = whatcodes.dir
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_textdir = nodepool.textdir
diff --git a/tex/context/base/typo-drp.lua b/tex/context/base/typo-drp.lua
index 3a87d94b3..903140dae 100644
--- a/tex/context/base/typo-drp.lua
+++ b/tex/context/base/typo-drp.lua
@@ -11,7 +11,9 @@ if not modules then modules = { } end modules ['typo-drp'] = {
local tonumber, type, next = tonumber, type, next
local ceil = math.ceil
-local settings_to_hash = utilities.parsers.settings_to_hash
+
+local utfbyte = utf.byte
+local utfchar = utf.char
local trace_initials = false trackers.register("typesetters.initials", function(v) trace_initials = v end)
local report_initials = logs.reporter("nodes","initials")
@@ -22,42 +24,19 @@ typesetters.initials = initials or { }
local nodes = nodes
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getfield = nuts.getfield
-local getattr = nuts.getattr
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local hpack_nodes = nuts.hpack
-
+local hpack_nodes = nodes.hpack
local nodecodes = nodes.nodecodes
local whatsitcodes = nodes.whatsitcodes
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_kern = nodepool.kern
-local insert_before = nuts.insert_before
-local insert_after = nuts.insert_after
-local remove_node = nuts.remove
-local traverse_id = nuts.traverse_id
-local traverse = nuts.traverse
-local free_node = nuts.free
+local insert_before = nodes.insert_before
+local insert_after = nodes.insert_after
local variables = interfaces.variables
local v_default = variables.default
local v_margin = variables.margin
-local v_auto = variables.auto
-local v_first = variables.first
-local v_last = variables.last
local texget = tex.get
local texsetattribute = tex.setattribute
@@ -65,8 +44,7 @@ local unsetvalue = attributes.unsetvalue
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
-local glue_code = nodecodes.glue
-local kern_code = nodecodes.kern
+local kern_node = nodecodes.kern
local whatsit_code = nodecodes.whatsit
local localpar_code = whatsitcodes.localpar
@@ -78,8 +56,6 @@ local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
-local category = characters.category
-
local settings = nil
function initials.set(specification)
@@ -108,288 +84,74 @@ commands.setinitial = initials.set
-- todo: prevent linebreak .. but normally a initial ends up at the top of
-- a page so this has a low priority
--- actions[v_default] = function(head,setting)
--- local done = false
--- if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
--- -- begin of par
--- local first = getnext(head)
--- -- parbox .. needs to be set at 0
--- if first and getid(first) == hlist_code then
--- first = getnext(first)
--- end
--- -- we need to skip over kerns and glues (signals)
--- while first and getid(first) ~= glyph_code do
--- first = getnext(first)
--- end
--- if first and getid(first) == glyph_code then
--- local char = getchar(first)
--- local prev = getprev(first)
--- local next = getnext(first)
--- -- if getid(prev) == hlist_code then
--- -- -- set the width to 0
--- -- end
--- if next and getid(next) == kern_code then
--- setfield(next,"kern",0)
--- end
--- if setting.font then
--- setfield(first,"font",setting.font)
--- end
--- if setting.dynamic > 0 then
--- setattr(first,0,setting.dynamic)
--- end
--- -- can be a helper
--- local ma = setting.ma or 0
--- local ca = setting.ca
--- local ta = setting.ta
--- if ca and ca > 0 then
--- setattr(first,a_colorspace,ma == 0 and 1 or ma)
--- setattr(first,a_color,ca)
--- end
--- if ta and ta > 0 then
--- setattr(first,a_transparency,ta)
--- end
--- --
--- local width = getfield(first,"width")
--- local height = getfield(first,"height")
--- local depth = getfield(first,"depth")
--- local distance = setting.distance or 0
--- local voffset = setting.voffset or 0
--- local hoffset = setting.hoffset or 0
--- local parindent = tex.parindent
--- local baseline = texget("baselineskip").width
--- local lines = tonumber(setting.n) or 0
--- --
--- setfield(first,"xoffset",- width - hoffset - distance - parindent)
--- setfield(first,"yoffset",- voffset) -- no longer - height here
--- -- We pack so that successive handling cannot touch the dropped cap. Packaging
--- -- in a hlist is also needed because we cannot locally adapt e.g. parindent (not
--- -- yet stored in with localpar).
--- setfield(first,"prev",nil)
--- setfield(first,"next",nil)
--- local h = hpack_nodes(first)
--- setfield(h,"width",0)
--- setfield(h,"height",0)
--- setfield(h,"depth",0)
--- setfield(prev,"next",h)
--- setfield(next,"prev",h)
--- setfield(h,"next",next)
--- setfield(h,"prev",prev)
--- first = h
--- -- end of packaging
--- if setting.location == v_margin then
--- -- okay
--- else
--- if lines == 0 then -- safeguard, not too precise
--- lines = ceil((height+voffset) / baseline)
--- end
--- -- We cannot set parshape yet ... when we can I'll add a slope
--- -- option (positive and negative, in emwidth).
--- local hangafter = - lines
--- local hangindent = width + distance + parindent
--- if trace_initials then
--- report_initials("setting hangafter to %i and hangindent to %p",hangafter,hangindent)
--- end
--- tex.hangafter = hangafter
--- tex.hangindent = hangindent
--- if parindent ~= 0 then
--- insert_after(first,first,new_kern(-parindent))
--- end
--- end
--- done = true
--- end
--- end
--- return head, done
--- end
-
actions[v_default] = function(head,setting)
local done = false
- if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
+ if head.id == whatsit_code and head.subtype == localpar_code then
-- begin of par
- local first = getnext(head)
- local indent = false
+ local first = head.next
-- parbox .. needs to be set at 0
- if first and getid(first) == hlist_code then
- first = getnext(first)
- indent = true
+ if first and first.id == hlist_code then
+ first = first.next
end
-- we need to skip over kerns and glues (signals)
- while first and getid(first) ~= glyph_code do
- first = getnext(first)
+ while first and first.id ~= glyph_code do
+ first = first.next
end
- if first and getid(first) == glyph_code then
- local ma = setting.ma or 0
- local ca = setting.ca
- local ta = setting.ta
- local last = first
+ if first and first.id == glyph_code then
+ local char = first.char
+ local prev = first.prev
+ local next = first.next
+ -- if prev.id == hlist_code then
+ -- -- set the width to 0
+ -- end
+ if next and next.id == kern_node then
+ next.kern = 0
+ end
+ if setting.font then
+ first.font = setting.font
+ end
+ if setting.dynamic > 0 then
+ first[0] = setting.dynamic
+ end
+ -- can be a helper
+ local ma = setting.ma or 0
+ local ca = setting.ca
+ local ta = setting.ta
+ if ca and ca > 0 then
+ first[a_colorspace] = ma == 0 and 1 or ma
+ first[a_color] = ca
+ end
+ if ta and ta > 0 then
+ first[a_transparency] = ta
+ end
+ --
+ local width = first.width
+ local height = first.height
+ local depth = first.depth
local distance = setting.distance or 0
local voffset = setting.voffset or 0
local hoffset = setting.hoffset or 0
local parindent = tex.parindent
local baseline = texget("baselineskip").width
local lines = tonumber(setting.n) or 0
- local dynamic = setting.dynamic
- local font = setting.font
- local method = settings_to_hash(setting.method)
- local length = tonumber(setting.m) or 1
- --
- -- 1 char | n chars | skip first quote | ignore punct | keep punct
--
- if getattr(first,a_initial) then
- for current in traverse(getnext(first)) do
- if getattr(current,a_initial) then
- last = current
- else
- break
- end
- end
- elseif method[v_auto] then
- local char = getchar(first)
- local kind = category(char)
- if kind == "po" or kind == "pi" then
- if method[v_first] then
- -- remove quote etc before initial
- local next = getnext(first)
- if not next then
- -- don't start with a quote or so
- return head, false
- end
- last = nil
- for current in traverse_id(glyph_code,next) do
- head, first = remove_node(head,first,true)
- first = current
- last = first
- break
- end
- if not last then
- -- no following glyph or so
- return head, false
- end
- else
- -- keep quote etc with initial
- local next = getnext(first)
- if not next then
- -- don't start with a quote or so
- return head, false
- end
- for current in traverse_id(glyph_code,next) do
- last = current
- break
- end
- if last == first then
- return head, false
- end
- end
- elseif kind == "pf" then
- -- error: final quote
- else
- -- okay
- end
- -- maybe also: get all A. B. etc
- local next = getnext(first)
- if next then
- for current in traverse_id(glyph_code,next) do
- local char = getchar(current)
- local kind = category(char)
- if kind == "po" then
- if method[v_last] then
- -- remove period etc after initial
- remove_node(head,current,true)
- else
- -- keep period etc with initial
- last = current
- end
- end
- break
- end
- end
- else
- for current in traverse_id(glyph_code,first) do
- last = current
- if length <= 1 then
- break
- else
- length = length - 1
- end
- end
- end
- local current = first
- while true do
- local id = getid(current)
- if id == kern_code then
- setfield(current,"kern",0)
- elseif id == glyph_code then
- local next = getnext(current)
- if font then
- setfield(current,"font",font)
- end
- if dynamic > 0 then
- setattr(current,0,dynamic)
- end
- -- can be a helper
- if ca and ca > 0 then
- setattr(current,a_colorspace,ma == 0 and 1 or ma)
- setattr(current,a_color,ca)
- end
- if ta and ta > 0 then
- setattr(current,a_transparency,ta)
- end
- --
- end
- if current == last then
- break
- else
- current = getnext(current)
- end
- end
+ first.xoffset = - width - hoffset - distance - parindent
+ first.yoffset = - voffset -- no longer - height here
-- We pack so that successive handling cannot touch the dropped cap. Packaging
-- in a hlist is also needed because we cannot locally adapt e.g. parindent (not
-- yet stored in with localpar).
- local prev = getprev(first)
- local next = getnext(last)
- --
- setfield(first,"prev",nil)
- setfield(last,"next",nil)
- local dropper = hpack_nodes(first)
- local width = getfield(dropper,"width")
- local height = getfield(dropper,"height")
- local depth = getfield(dropper,"depth")
- setfield(dropper,"width",0)
- setfield(dropper,"height",0)
- setfield(dropper,"depth",0)
- --
- setfield(prev,"next",dropper)
- if next then
- setfield(next,"prev",dropper)
- end
- setfield(dropper,"next",next)
- setfield(dropper,"prev",prev)
- --
- if next then
- local current = next
- while current do
- local id = getid(current)
- if id == glue_code or id == kern_code then
- local next = getnext(current)
- -- remove_node(current,current,true) -- created an invalid next link and dangling remains
- remove_node(head,current,true)
- current = next
- else
- break
- end
- end
- end
- --
- local hoffset = width + hoffset + distance + (indent and parindent or 0)
- for current in traverse_id(glyph_code,first) do
- setfield(current,"xoffset",- hoffset )
- setfield(current,"yoffset",- voffset) -- no longer - height here
- if current == last then
- break
- end
- end
- --
- first = dropper
- --
+ first.prev = nil
+ first.next = nil
+ local h = hpack_nodes(first)
+ h.width = 0
+ h.height = 0
+ h.depth = 0
+ prev.next = h
+ next.prev = h
+ h.next = next
+ h.prev = prev
+
+ -- end of packaging
if setting.location == v_margin then
-- okay
else
@@ -399,15 +161,15 @@ actions[v_default] = function(head,setting)
-- We cannot set parshape yet ... when we can I'll add a slope
-- option (positive and negative, in emwidth).
local hangafter = - lines
- local hangindent = width + distance
+ local hangindent = width + distance + parindent
if trace_initials then
report_initials("setting hangafter to %i and hangindent to %p",hangafter,hangindent)
end
tex.hangafter = hangafter
tex.hangindent = hangindent
- end
- if indent then
- insert_after(first,first,new_kern(-parindent))
+ if parindent ~= 0 then
+ insert_after(first,first,new_kern(-parindent))
+ end
end
done = true
end
@@ -416,17 +178,16 @@ actions[v_default] = function(head,setting)
end
function initials.handler(head)
- head = tonut(head)
local start = head
local attr = nil
while start do
- attr = getattr(start,a_initial)
+ attr = start[a_initial]
if attr then
break
- elseif getid(start) == glyph then
+ elseif start.id == glyph then
break
else
- start = getnext(start)
+ start = start.next
end
end
if attr then
@@ -440,8 +201,8 @@ function initials.handler(head)
report_initials("processing initials, alternative %a",alternative)
end
local head, done = action(head,settings)
- return tonode(head), done
+ return head, done
end
end
- return tonode(head), false
+ return head, false
end
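The restored default action zeroes the kern after the initial, hpacks the initial into a zero-dimension box, and then lets TeX's hanging indentation make room for it. The line and indent arithmetic from that hunk, pulled out as a small standalone sketch (the helper name is assumed, not taken from the module):

    local ceil = math.ceil

    -- hangafter/hangindent as computed above; lines == 0 means "derive the
    -- number of hanging lines from the height of the initial"
    local function initial_hang(width, height, voffset, baseline, distance, parindent, lines)
        if lines == 0 then -- safeguard, not too precise
            lines = ceil((height + voffset) / baseline)
        end
        return -lines, width + distance + parindent -- hangafter, hangindent
    end

    -- e.g. a 28pt high initial on a 12pt baseline hangs for ceil(28/12) = 3 lines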
diff --git a/tex/context/base/typo-drp.mkiv b/tex/context/base/typo-drp.mkiv
index 3ac47466f..78f6df0a2 100644
--- a/tex/context/base/typo-drp.mkiv
+++ b/tex/context/base/typo-drp.mkiv
@@ -57,8 +57,6 @@
\setupinitial
[\c!location=\v!text,
\c!n=3,
- \c!m=1,
- \c!method=\v!none,
% \s!font=Bold sa 4,
% \s!font=Bold ht \measure{initial:n},
\s!font=Bold cp \measure{initial:n},
@@ -69,25 +67,24 @@
\c!color=,
\c!before=\blank]
-\unexpanded\def\placeinitial % we cannot group so no settings
+\unexpanded\def\placeinitial
{\dosingleempty\typo_initials_place}
\def\typo_initials_place[#1]% old command
{\par
\namedinitialparameter{#1}\c!before
- \setinitial[#1]\relax}
+ \setinitial[#1]}
\unexpanded\def\setinitial
- {\dodoubleempty\typo_initials_set}
+ {\dosingleempty\typo_initials_set}
-\unexpanded\def\typo_initials_set[#1][#2]%
- {\edef\typo_initial_handle{\typo_initial_handle_indeed{#1}{#2}}}
+\unexpanded\def\typo_initials_set[#1]%
+ {\edef\typo_initial_handle{\typo_initial_handle_indeed{#1}}}
-\unexpanded\def\typo_initial_handle_indeed#1#2%
+\unexpanded\def\typo_initial_handle_indeed#1%
{\dontleavehmode
\begingroup
\edef\currentinitial{#1}%
- \setupcurrentinitial[#2]%
\scratchcounter \initialparameter\c!n\relax
\scratchdistance\initialparameter\c!distance\relax
\scratchhoffset \initialparameter\c!hoffset \relax
@@ -98,43 +95,24 @@
{\definedfont[\initialparameter\s!font]}
{\useinitialstyleparameter\c!style}%
\useinitialcolorparameter\c!color
- \edef\p_text{\initialparameter\c!text}% optional
\ctxcommand{setinitial{
- location = "\initialparameter\c!location",
- enabled = true,
- n = \number\scratchcounter,
- m = \number\initialparameter\c!m,
- method = "\initialparameter\c!method",
- distance = \number\scratchdistance,
- hoffset = \number\scratchhoffset,
- voffset = \number\scratchvoffset,
- ma = \the\attribute\colormodelattribute,
- ca = \the\attribute\colorattribute,
- ta = \the\attribute\transparencyattribute,
- font = \fontid\font,
- dynamic = \number\attribute\zerocount, % it's a bit over the top to support this here
+ location = "\initialparameter\c!location",
+ enabled = true,
+ n = \number\scratchcounter,
+ distance = \number\scratchdistance,
+ hoffset = \number\scratchhoffset,
+ voffset = \number\scratchvoffset,
+ ma = \the\attribute\colormodelattribute ,
+ ca = \the\attribute\colorattribute ,
+ ta = \the\attribute\transparencyattribute,
+ font = \fontid\font,
+ dynamic = \number\attribute\zerocount, % it's a bit over the top to support this here
}}%
\stopluacode
\kern\zeropoint % we need a node
- \p_text
\endgroup
\globallet\typo_initial_handle\relax}
\let\typo_initial_handle\relax
-% \setupbodyfont[dejavu,9pt]
-%
-% \startbuffer
-% \setinitial[two] D. E. Knuth \ignorespaces\input knuth \par
-% \setinitial[two] Knuth \ignorespaces\input knuth \par
-% \setinitial[two] \quotation{D. E. Knuth} \ignorespaces\input knuth \par
-% \setinitial[two] \quotation {Knuth} \ignorespaces\input knuth \par
-% \setinitial[two] [text={D.E. Knuth}] \ignorespaces\input knuth \par
-% \setinitial[two] [m=4] D. E. Knuth \ignorespaces\input knuth \par
-% \stopbuffer
-%
-% \type{m=2} \start \defineinitial[two][m=2,method=none] \getbuffer \page \stop
-% \type{m=1,method=auto} \start \defineinitial[two][m=1,method=auto] \getbuffer \page \stop
-% \type{m=1,method={auto,first,last}} \start \defineinitial[two][m=1,method={first,auto,last}] \getbuffer \page \stop
-
\protect \endinput
diff --git a/tex/context/base/typo-dua.lua b/tex/context/base/typo-dua.lua
index 91a27a30e..ec85a3d9f 100644
--- a/tex/context/base/typo-dua.lua
+++ b/tex/context/base/typo-dua.lua
@@ -66,24 +66,11 @@ local formatters = string.formatters
local directiondata = characters.directions
local mirrordata = characters.mirrors
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-local nutstring = nuts.tostring
-
-local getnext = nuts.getnext
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-
-local remove_node = nuts.remove
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-
-local nodepool = nuts.pool
+local remove_node = nodes.remove
+local insert_node_after = nodes.insert_after
+local insert_node_before = nodes.insert_before
+
+local nodepool = nodes.pool
local new_textdir = nodepool.textdir
local nodecodes = nodes.nodecodes
@@ -202,17 +189,17 @@ local function build_list(head) -- todo: store node pointer ... saves loop
local size = 0
while current do
size = size + 1
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local chr = getchar(current)
+ local chr = current.char
local dir = directiondata[chr]
list[size] = { char = chr, direction = dir, original = dir, level = 0 }
- current = getnext(current)
+ current = current.next
elseif id == glue_code then
list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 }
- current = getnext(current)
- elseif id == whatsit_code and getsubtype(current) == dir_code then
- local dir = getfield(current,"dir")
+ current = current.next
+ elseif id == whatsit_code and current.subtype == dir_code then
+ local dir = current.dir
if dir == "+TLT" then
list[size] = { char = 0x202A, direction = "lre", original = "lre", level = 0 }
elseif dir == "+TRT" then
@@ -222,27 +209,27 @@ local function build_list(head) -- todo: store node pointer ... saves loop
else
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id } -- object replacement character
end
- current = getnext(current)
+ current = current.next
elseif id == math_code then
local skip = 0
- current = getnext(current)
- while getid(current) ~= math_code do
+ current = current.next
+ while current.id ~= math_code do
skip = skip + 1
- current = getnext(current)
+ current = current.next
end
- skip = skip + 1
- current = getnext(current)
+ skip = skip + 1
+ current = current.next
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id }
else
local skip = 0
local last = id
- current = getnext(current)
+ current = current.next
while n do
- local id = getid(current)
- if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and getsubtype(current) == dir_code) then
+ local id = current.id
+ if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and current.subtype == dir_code) then
skip = skip + 1
last = id
- current = getnext(current)
+ current = current.next
else
break
end
@@ -302,8 +289,8 @@ local function find_run_limit_b_s_ws_on(list,start,limit)
end
local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for local_par)
- if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
- if getfield(head,"dir") == "TRT" then
+ if head.id == whatsit_code and head.subtype == localpar_code then
+ if head.dir == "TRT" then
return 1, "TRT", true
else
return 0, "TLT", true
@@ -690,30 +677,30 @@ local function apply_to_list(list,size,head,pardir)
report_directions("fatal error, size mismatch")
break
end
- local id = getid(current)
+ local id = current.id
local entry = list[index]
local begindir = entry.begindir
local enddir = entry.enddir
if id == glyph_code then
local mirror = entry.mirror
if mirror then
- setfield(current,"char",mirror)
+ current.char = mirror
end
if trace_directions then
local direction = entry.direction
setcolor(current,direction,direction ~= entry.original,mirror)
end
elseif id == hlist_code or id == vlist_code then
- setfield(current,"dir",pardir) -- is this really needed?
+ current.dir = pardir -- is this really needed?
elseif id == glue_code then
- if enddir and getsubtype(current) == parfillskip_code then
+ if enddir and current.subtype == parfillskip_code then
-- insert the last enddir before \parfillskip glue
head = insert_node_before(head,current,new_textdir(enddir))
enddir = false
done = true
end
elseif id == whatsit_code then
- if begindir and getsubtype(current) == localpar_code then
+ if begindir and current.subtype == localpar_code then
-- local_par should always be the 1st node
head, current = insert_node_after(head,current,new_textdir(begindir))
begindir = nil
@@ -727,7 +714,7 @@ local function apply_to_list(list,size,head,pardir)
local skip = entry.skip
if skip and skip > 0 then
for i=1,skip do
- current = getnext(current)
+ current = current.next
end
end
if enddir then
@@ -735,13 +722,13 @@ local function apply_to_list(list,size,head,pardir)
done = true
end
if not entry.remove then
- current = getnext(current)
+ current = current.next
elseif remove_controls then
-- X9
head, current = remove_node(head,current,true)
done = true
else
- current = getnext(current)
+ current = current.next
end
index = index + 1
end
@@ -749,7 +736,6 @@ local function apply_to_list(list,size,head,pardir)
end
local function process(head)
- head = tonut(head)
local list, size = build_list(head)
local baselevel, pardir, dirfound = get_baselevel(head,list,size) -- we always have an inline dir node in context
if not dirfound and trace_details then
@@ -766,7 +752,7 @@ local function process(head)
report_directions("result : %s",show_done(list,size))
end
head, done = apply_to_list(list,size,head,pardir)
- return tonode(head), done
+ return head, done
end
directions.installhandler(interfaces.variables.one,process)
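build_list, shown above, flattens the node list into an array that the bidi pass can work on; every node that is not a glyph gets a stand-in character. A sketch of that mapping as a plain function (the "+TRT" and pop branches are not visible in the hunk, so the 0x202B and 0x202C values are assumptions):

    -- not module code: stand-in characters for non-glyph nodes
    local function pseudo_char(id, dir, glue_code)
        if id == glue_code then
            return 0x0020, "ws"   -- treat glue as a space
        elseif dir == "+TLT" then
            return 0x202A, "lre"  -- left-to-right embedding
        elseif dir == "+TRT" then
            return 0x202B, "rle"  -- assumption: right-to-left embedding
        elseif dir == "-TLT" or dir == "-TRT" then
            return 0x202C, "pdf"  -- assumption: pop directional formatting
        else
            return 0xFFFC, "on"   -- object replacement character
        end
    end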
diff --git a/tex/context/base/typo-dub.lua b/tex/context/base/typo-dub.lua
index 4dc0f21fb..3ecfce364 100644
--- a/tex/context/base/typo-dub.lua
+++ b/tex/context/base/typo-dub.lua
@@ -54,25 +54,11 @@ local directiondata = characters.directions
local mirrordata = characters.mirrors
local textclassdata = characters.textclasses
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-local nutstring = nuts.tostring
-
-local getnext = nuts.getnext
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-
-local remove_node = nuts.remove
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-
-local nodepool = nuts.pool
+local remove_node = nodes.remove
+local insert_node_after = nodes.insert_after
+local insert_node_before = nodes.insert_before
+
+local nodepool = nodes.pool
local new_textdir = nodepool.textdir
local nodecodes = nodes.nodecodes
@@ -256,17 +242,17 @@ local function build_list(head) -- todo: store node pointer ... saves loop
local size = 0
while current do
size = size + 1
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local chr = getchar(current)
+ local chr = current.char
local dir = directiondata[chr]
list[size] = { char = chr, direction = dir, original = dir, level = 0 }
- current = getnext(current)
+ current = current.next
elseif id == glue_code then
list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 }
- current = getnext(current)
- elseif id == whatsit_code and getsubtype(current) == dir_code then
- local dir = getfield(current,"dir")
+ current = current.next
+ elseif id == whatsit_code and current.subtype == dir_code then
+ local dir = current.dir
if dir == "+TLT" then
list[size] = { char = 0x202A, direction = "lre", original = "lre", level = 0 }
elseif dir == "+TRT" then
@@ -276,27 +262,27 @@ local function build_list(head) -- todo: store node pointer ... saves loop
else
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id } -- object replacement character
end
- current = getnext(current)
+ current = current.next
elseif id == math_code then
local skip = 0
- current = getnext(current)
- while getid(current) ~= math_code do
+ current = current.next
+ while current.id ~= math_code do
skip = skip + 1
- current = getnext(current)
+ current = current.next
end
skip = skip + 1
- current = getnext(current)
+ current = current.next
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id }
else
local skip = 0
local last = id
- current = getnext(current)
+ current = current.next
while n do
- local id = getid(current)
- if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and getsubtype(current) == dir_code) then
+ local id = current.id
+ if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and current.subtype == dir_code) then
skip = skip + 1
last = id
- current = getnext(current)
+ current = current.next
else
break
end
@@ -379,8 +365,8 @@ end
-- the action
local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for local_par)
- if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
- if getfield(head,"dir") == "TRT" then
+ if head.id == whatsit_code and head.subtype == localpar_code then
+ if head.dir == "TRT" then
return 1, "TRT", true
else
return 0, "TLT", true
@@ -799,30 +785,30 @@ local function apply_to_list(list,size,head,pardir)
report_directions("fatal error, size mismatch")
break
end
- local id = getid(current)
+ local id = current.id
local entry = list[index]
local begindir = entry.begindir
local enddir = entry.enddir
if id == glyph_code then
local mirror = entry.mirror
if mirror then
- setfield(current,"char",mirror)
+ current.char = mirror
end
if trace_directions then
local direction = entry.direction
setcolor(current,direction,direction ~= entry.original,mirror)
end
elseif id == hlist_code or id == vlist_code then
- setfield(current,"dir",pardir) -- is this really needed?
+ current.dir = pardir -- is this really needed?
elseif id == glue_code then
- if enddir and getsubtype(current) == parfillskip_code then
+ if enddir and current.subtype == parfillskip_code then
-- insert the last enddir before \parfillskip glue
head = insert_node_before(head,current,new_textdir(enddir))
enddir = false
done = true
end
elseif id == whatsit_code then
- if begindir and getsubtype(current) == localpar_code then
+ if begindir and current.subtype == localpar_code then
-- local_par should always be the 1st node
head, current = insert_node_after(head,current,new_textdir(begindir))
begindir = nil
@@ -836,7 +822,7 @@ local function apply_to_list(list,size,head,pardir)
local skip = entry.skip
if skip and skip > 0 then
for i=1,skip do
- current = getnext(current)
+ current = current.next
end
end
if enddir then
@@ -844,13 +830,13 @@ local function apply_to_list(list,size,head,pardir)
done = true
end
if not entry.remove then
- current = getnext(current)
+ current = current.next
elseif remove_controls then
-- X9
head, current = remove_node(head,current,true)
done = true
else
- current = getnext(current)
+ current = current.next
end
index = index + 1
end
@@ -858,9 +844,8 @@ local function apply_to_list(list,size,head,pardir)
end
local function process(head)
- head = tonut(head)
-- for the moment a whole paragraph property
- local attr = getattr(head,a_directions)
+ local attr = head[a_directions]
local analyze_fences = getfences(attr)
--
local list, size = build_list(head)
@@ -879,7 +864,7 @@ local function process(head)
report_directions("result : %s",show_done(list,size))
end
head, done = apply_to_list(list,size,head,pardir)
- return tonode(head), done
+ return head, done
end
directions.installhandler(interfaces.variables.two,process)
diff --git a/tex/context/base/typo-fln.lua b/tex/context/base/typo-fln.lua
index 7ce41cd81..4c97af450 100644
--- a/tex/context/base/typo-fln.lua
+++ b/tex/context/base/typo-fln.lua
@@ -23,38 +23,25 @@ local firstlines = typesetters.firstlines
local nodes = nodes
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getfield = nuts.getfield
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local getbox = nuts.getbox
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
+local getbox = nodes.getbox
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
-local traverse_id = nuts.traverse_id
-local free_node_list = nuts.flush_list
-local free_node = nuts.flush_node
-local copy_node_list = nuts.copy_list
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local hpack_node_list = nuts.hpack
-local remove_node = nuts.remove
+local traverse_id = nodes.traverse_id
+local free_node_list = nodes.flush_list
+local free_node = nodes.flush_node
+local copy_node_list = nodes.copy_list
+local insert_node_after = nodes.insert_after
+local insert_node_before = nodes.insert_before
+local hpack_node_list = nodes.hpack
+local remove_node = nodes.remove
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local newpenalty = nodepool.penalty
local newkern = nodepool.kern
-local tracerrule = nodes.tracers.pool.nuts.rule
+local tracerrule = nodes.tracers.pool.nodes.rule
local actions = { }
firstlines.actions = actions
@@ -105,9 +92,9 @@ actions[v_line] = function(head,setting)
local linebreaks = { }
for g in traverse_id(glyph_code,temp) do
if dynamic > 0 then
- setattr(g,0,dynamic)
+ g[0] = dynamic
end
- setfield(g,"font",font)
+ g.font = font
end
local start = temp
local list = temp
@@ -121,7 +108,7 @@ actions[v_line] = function(head,setting)
hsize = hsize - hangindent
end
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
n = n + 1
elseif id == disc_code then
@@ -130,7 +117,7 @@ actions[v_line] = function(head,setting)
-- this could be an option
elseif n > 0 then
local pack = hpack_node_list(copy_node_list(list,start))
- if getfield(pack,"width") > hsize then
+ if pack.width > hsize then
free_node_list(pack)
list = prev
break
@@ -141,7 +128,7 @@ actions[v_line] = function(head,setting)
nofchars = n
end
end
- start = getnext(start)
+ start = start.next
end
if not linebreaks[i] then
linebreaks[i] = n
@@ -152,18 +139,18 @@ actions[v_line] = function(head,setting)
for i=1,noflines do
local linebreak = linebreaks[i]
while start and n < nofchars do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then -- or id == disc_code then
if dynamic > 0 then
- setattr(start,0,dynamic)
+ start[0] = dynamic
end
- setfield(start,"font",font)
+ start.font = font
if ca and ca > 0 then
- setattr(start,a_colorspace,ma == 0 and 1 or ma)
- setattr(start,a_color,ca)
+ start[a_colorspace] = ma == 0 and 1 or ma
+ start[a_color] = ca
end
if ta and ta > 0 then
- setattr(start,a_transparency,ta)
+ start[a_transparency] = ta
end
n = n + 1
end
@@ -176,7 +163,7 @@ actions[v_line] = function(head,setting)
head, start = insert_node_after(head,start,newpenalty(-10000)) -- break
break
end
- start = getnext(start)
+ start = start.next
end
end
free_node_list(temp)
@@ -195,7 +182,7 @@ actions[v_word] = function(head,setting)
local ca = setting.ca
local ta = setting.ta
while start do
- local id = getid(start)
+ local id = start.id
-- todo: delete disc nodes
if id == glyph_code then
if not ok then
@@ -203,16 +190,16 @@ actions[v_word] = function(head,setting)
ok = true
end
if ca and ca > 0 then
- setattr(start,a_colorspace,ma == 0 and 1 or ma)
- setattr(start,a_color,ca)
+ start[a_colorspace] = ma == 0 and 1 or ma
+ start[a_color] = ca
end
if ta and ta > 0 then
- setattr(start,a_transparency,ta)
+ start[a_transparency] = ta
end
if dynamic > 0 then
- setattr(start,0,dynamic)
+ start[0] = dynamic
end
- setfield(start,"font",font)
+ start.font = font
elseif id == disc_code then
-- continue
elseif id == kern_code then -- todo: fontkern
@@ -223,7 +210,7 @@ actions[v_word] = function(head,setting)
break
end
end
- start = getnext(start)
+ start = start.next
end
return head, true
end
@@ -231,17 +218,16 @@ end
actions[v_default] = actions[v_line]
function firstlines.handler(head)
- head = tonut(head)
local start = head
local attr = nil
while start do
- attr = getattr(start,a_firstline)
+ attr = start[a_firstline]
if attr then
break
- elseif getid(start) == glyph_code then
+ elseif start.id == glyph then
break
else
- start = getnext(start)
+ start = start.next
end
end
if attr then
@@ -254,18 +240,17 @@ function firstlines.handler(head)
if trace_firstlines then
report_firstlines("processing firstlines, alternative %a",alternative)
end
- local head, done = action(head,settings)
- return tonode(head), done
+ return action(head,settings)
end
end
- return tonode(head), false
+ return head, false
end
-- goodie
function commands.applytofirstcharacter(box,what)
local tbox = getbox(box) -- assumes hlist
- local list = getlist(tbox)
+ local list = tbox.list
local done = nil
for n in traverse_id(glyph_code,list) do
list = remove_node(list,n)
@@ -273,10 +258,10 @@ function commands.applytofirstcharacter(box,what)
break
end
if done then
- setfield(tbox,"list",list)
+ tbox.list = list
local kind = type(what)
if kind == "string" then
- context[what](tonode(done))
+ context[what](done)
elseif kind == "function" then
what(done)
else
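The v_line action above finds its break point by repeatedly packing a copy of the material scanned so far and comparing the natural width against the available hsize (already reduced by any hanging indentation). The test itself, as a small sketch with assumed parameter names:

    -- pack a copy of list up to (but not including) upto and check whether it
    -- still fits; the temporary pack is flushed again so nothing leaks
    local function still_fits(list, upto, hsize, hpack, copy_list, flush_list)
        local pack = hpack(copy_list(list, upto))
        local fits = pack.width <= hsize
        flush_list(pack)
        return fits
    end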
diff --git a/tex/context/base/typo-itc.lua b/tex/context/base/typo-itc.lua
index db94c5c54..452b623c8 100644
--- a/tex/context/base/typo-itc.lua
+++ b/tex/context/base/typo-itc.lua
@@ -9,9 +9,8 @@ if not modules then modules = { } end modules ['typo-itc'] = {
local utfchar = utf.char
local trace_italics = false trackers.register("typesetters.italics", function(v) trace_italics = v end)
-local report_italics = logs.reporter("nodes","italics")
-local threshold = 0.5 trackers.register("typesetters.threshold", function(v) threshold = v == true and 0.5 or tonumber(v) end)
+local report_italics = logs.reporter("nodes","italics")
typesetters.italics = typesetters.italics or { }
local italics = typesetters.italics
@@ -25,35 +24,21 @@ local math_code = nodecodes.math
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-
-local insert_node_after = nuts.insert_after
-local delete_node = nuts.delete
-local end_of_math = nuts.end_of_math
+local insert_node_after = node.insert_after
+local delete_node = nodes.delete
+local end_of_math = node.end_of_math
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
local a_italics = attributes.private("italics")
local unsetvalue = attributes.unsetvalue
-local new_correction_kern = nodepool.fontkern
-local new_correction_glue = nodepool.glue
+local new_correction_kern = nodes.pool.fontkern
+local new_correction_glue = nodes.pool.glue
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
local italicsdata = fonthashes.italics
-local exheights = fonthashes.exheights
local forcedvariant = false
@@ -98,7 +83,6 @@ end
-- todo: clear attribute
function italics.handler(head)
- head = tonut(head)
local done = false
local italic = 0
local lastfont = nil
@@ -108,10 +92,10 @@ function italics.handler(head)
local current = head
local inserted = nil
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local font = getfont(current)
- local char = getchar(current)
+ local font = current.font
+ local char = current.char
local data = italicsdata[font]
if font ~= lastfont then
if italic ~= 0 then
@@ -120,25 +104,11 @@ function italics.handler(head)
report_italics("ignoring %p between italic %C and italic %C",italic,prevchar,char)
end
else
- local okay = true
- if threshold then
- local ht = getfield(current,"height")
- local ex = exheights[font]
- local th = threshold * ex
- if ht <= th then
- if trace_italics then
- report_italics("ignoring correction between italic %C and regular %C, height %p less than threshold %p",prevchar,char,ht,th)
- end
- okay = false
- end
- end
- if okay then
- if trace_italics then
- report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char)
- end
- insert_node_after(head,previous,new_correction_kern(italic))
- done = true
+ if trace_italics then
+ report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char)
end
+ insert_node_after(head,previous,new_correction_kern(italic))
+ done = true
end
elseif inserted and data then
if trace_italics then
@@ -151,7 +121,7 @@ function italics.handler(head)
lastfont = font
end
if data then
- local attr = forcedvariant or getattr(current,a_italics)
+ local attr = forcedvariant or current[a_italics]
if attr and attr > 0 then
local cd = data[char]
if not cd then
@@ -203,7 +173,7 @@ function italics.handler(head)
italic = 0
done = true
end
- current = getnext(current)
+ current = current.next
end
if italic ~= 0 and lastattr > 1 then -- more control is needed here
if trace_italics then
@@ -212,7 +182,7 @@ function italics.handler(head)
insert_node_after(head,previous,new_correction_kern(italic))
done = true
end
- return tonode(head), done
+ return head, done
end
local enable
@@ -254,7 +224,6 @@ function commands.setupitaliccorrection(option) -- no grouping !
elseif options[variables.always] then
variant = 2
end
- -- maybe also keywords for threshold
if options[variables.global] then
forcedvariant = variant
texsetattribute(a_italics,unsetvalue)
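The removed lines above implement a height threshold for the correction: when the glyph following an italic shape is not taller than half the ex height of its font, the kern is considered pointless and skipped. A sketch of just that test (0.5 is the default registered by the typesetters.threshold tracker in the removed lines):

    -- skip the correction for low glyphs such as a period after an italic f
    local function wants_correction(glyphheight, exheight, threshold)
        threshold = threshold or 0.5
        return glyphheight > threshold * exheight
    end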
diff --git a/tex/context/base/typo-krn.lua b/tex/context/base/typo-krn.lua
index a8ffe557b..56f58bb73 100644
--- a/tex/context/base/typo-krn.lua
+++ b/tex/context/base/typo-krn.lua
@@ -13,36 +13,21 @@ local utfchar = utf.char
local nodes, node, fonts = nodes, node, fonts
-local tasks = nodes.tasks
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local find_node_tail = nuts.tail
-local free_node = nuts.free
-local free_nodelist = nuts.flush_list
-local copy_node = nuts.copy
-local copy_nodelist = nuts.copy_list
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local end_of_math = nuts.end_of_math
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
+local find_node_tail = node.tail or node.slide
+local free_node = node.free
+local free_nodelist = node.flush_list
+local copy_node = node.copy
+local copy_nodelist = node.copy_list
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local end_of_math = node.end_of_math
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
local new_gluespec = nodepool.gluespec
local new_kern = nodepool.kern
local new_glue = nodepool.glue
@@ -122,10 +107,10 @@ kerns.keeptogether = false -- just for fun (todo: control setting with key/value
-- blue : keep by goodie
function kerns.keepligature(n) -- might become default
- local f = getfont(n)
- local a = getattr(n,0) or 0
+ local f = n.font
+ local a = n[0] or 0
if trace_ligatures then
- local c = getchar(n)
+ local c = n.char
local d = fontdescriptions[f][c].name
if a > 0 and contextsetups[a].keepligatures == v_auto then
report("font %!font:name!, glyph %a, slot %X -> ligature %s, by %s feature %a",f,d,c,"kept","dynamic","keepligatures")
@@ -184,9 +169,9 @@ end
local function kern_injector(fillup,kern)
if fillup then
local g = new_glue(kern)
- local s = getfield(g,"spec")
- setfield(s,"stretch",kern)
- setfield(s,"stretch_order",1)
+ local s = g.spec
+ s.stretch = kern
+ s.stretch_order = 1
return g
else
return new_kern(kern)
@@ -196,7 +181,7 @@ end
local function spec_injector(fillup,width,stretch,shrink)
if fillup then
local s = new_gluespec(width,2*stretch,2*shrink)
- setfield(s,"stretch_order",1)
+ s.stretch_order = 1
return s
else
return new_gluespec(width,stretch,shrink)
@@ -212,9 +197,9 @@ local function do_process(head,force) -- todo: glue so that we can fully stretch
local fillup = false
while start do
-- faster to test for attr first
- local attr = force or getattr(start,a_kerns)
+ local attr = force or start[a_kerns]
if attr and attr > 0 then
- setattr(start,a_kerns,unsetvalue)
+ start[a_kerns] = unsetvalue
local krn = mapping[attr]
if krn == v_max then
krn = .25
@@ -223,10 +208,10 @@ local function do_process(head,force) -- todo: glue so that we can fully stretch
fillup = false
end
if krn and krn ~= 0 then
- local id = getid(start)
- if id == glyph_code then -- we could use the subtype ligature
- lastfont = getfont(start)
- local c = getfield(start,"components")
+ local id = start.id
+ if id == glyph_code then
+ lastfont = start.font
+ local c = start.components
if not c then
-- fine
elseif keepligature and keepligature(start) then
@@ -234,47 +219,47 @@ local function do_process(head,force) -- todo: glue so that we can fully stretch
else
c = do_process(c,attr)
local s = start
- local p, n = getprev(s), getnext(s)
+ local p, n = s.prev, s.next
local tail = find_node_tail(c)
if p then
- setfield(p,"next",c)
- setfield(c,"prev",p)
+ p.next = c
+ c.prev = p
else
head = c
end
if n then
- setfield(n,"prev",tail)
+ n.prev = tail
end
- setfield(tail,"next",n)
+ tail.next = n
start = c
- setfield(s,"components",nil)
+ s.components = nil
-- we now leak nodes !
- -- free_node(s)
+ -- free_node(s)
done = true
end
- local prev = getprev(start)
+ local prev = start.prev
if not prev then
-- skip
- elseif markdata[lastfont][getchar(start)] then
+ elseif markdata[lastfont][start.char] then
-- skip
else
- local pid = getid(prev)
+ local pid = prev.id
if not pid then
-- nothing
elseif pid == kern_code then
- if getsubtype(prev) == kerning_code or getattr(prev,a_fontkern) then
- if keeptogether and getid(getprev(prev)) == glyph_code and keeptogether(getprev(prev),start) then -- we could also pass start
+ if prev.subtype == kerning_code or prev[a_fontkern] then
+ if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then -- we could also pass start
-- keep 'm
else
-- not yet ok, as injected kerns can be overlays (from node-inj.lua)
- setfield(prev,"subtype",userkern_code)
- setfield(prev,"kern",getfield(prev,"kern") + quaddata[lastfont]*krn) -- here
+ prev.subtype = userkern_code
+ prev.kern = prev.kern + quaddata[lastfont]*krn -- here
done = true
end
end
elseif pid == glyph_code then
- if getfont(prev) == lastfont then
- local prevchar, lastchar = getchar(prev), getchar(start)
+ if prev.font == lastfont then
+ local prevchar, lastchar = prev.char, start.char
if keeptogether and keeptogether(prev,start) then
-- keep 'm
else
@@ -293,102 +278,102 @@ local function do_process(head,force) -- todo: glue so that we can fully stretch
-- a bit too complicated, we can best not copy and just calculate
-- but we could have multiple glyphs involved so ...
local disc = prev -- disc
- local prv, nxt = getprev(disc), getnext(disc)
- if getsubtype(disc) == discretionary_code then
+ local prv, nxt = disc.prev, disc.next
+ if disc.subtype == discretionary_code then
-- maybe we should forget about this variant as there is no glue
-- possible
- local pre, post, replace = getfield(disc,"pre"), getfield(disc,"post"), getfield(disc,"replace")
- if pre and prv then -- must pair with getprev(start)
+ local pre, post, replace = disc.pre, disc.post, disc.replace
+ if pre and prv then -- must pair with start.prev
+ -- this one happens in most cases
local before = copy_node(prv)
- setfield(pre,"prev",before)
- setfield(before,"next",pre)
- setfield(before,"prev",nil)
+ pre.prev = before
+ before.next = pre
+ before.prev = nil
pre = do_process(before,attr)
- pre = getnext(pre)
- setfield(pre,"prev",nil)
- setfield(disc,"pre",pre)
+ pre = pre.next
+ pre.prev = nil
+ disc.pre = pre
free_node(before)
end
if post and nxt then -- must pair with start
local after = copy_node(nxt)
local tail = find_node_tail(post)
- setfield(tail,"next",after)
- setfield(after,"prev",tail)
- setfield(after,"next",nil)
+ tail.next = after
+ after.prev = tail
+ after.next = nil
post = do_process(post,attr)
- setfield(tail,"next",nil)
- setfield(disc,"post",post)
+ tail.next = nil
+ disc.post = post
free_node(after)
end
if replace and prv and nxt then -- must pair with start and start.prev
local before = copy_node(prv)
local after = copy_node(nxt)
local tail = find_node_tail(replace)
- setfield(replace,"prev",before)
- setfield(before,"next",replace)
- setfield(before,"prev",nil)
- setfield(tail,"next",after)
- setfield(after,"prev",tail)
- setfield(after,"next",nil)
+ replace.prev = before
+ before.next = replace
+ before.prev = nil
+ tail.next = after
+ after.prev = tail
+ after.next = nil
replace = do_process(before,attr)
- replace = getnext(replace)
- setfield(replace,"prev",nil)
- setfield(getfield(after,"prev"),"next",nil)
- setfield(disc,"replace",replace)
+ replace = replace.next
+ replace.prev = nil
+ after.prev.next = nil
+ disc.replace = replace
free_node(after)
free_node(before)
- elseif prv and getid(prv) == glyph_code and getfont(prv) == lastfont then
- local prevchar, lastchar = getchar(prv), getchar(start)
+ elseif prv and prv.id == glyph_code and prv.font == lastfont then
+ local prevchar, lastchar = prv.char, start.char
local kerns = chardata[lastfont][prevchar].kerns
local kern = kerns and kerns[lastchar] or 0
krn = kern + quaddata[lastfont]*krn -- here
- setfield(disc,"replace",kern_injector(false,krn)) -- only kerns permitted, no glue
+ disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
else
krn = quaddata[lastfont]*krn -- here
- setfield(disc,"replace",kern_injector(false,krn)) -- only kerns permitted, no glue
+ disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
end
else
-- this one happens in most cases: automatic (-), explicit (\-), regular (patterns)
- if prv and getid(prv) == glyph_code and getfont(prv) == lastfont then
- -- the normal case
- local prevchar, lastchar = getchar(prv), getchar(start)
+ if prv and prv.id == glyph_code and prv.font == lastfont then
+ local prevchar, lastchar = prv.char, start.char
local kerns = chardata[lastfont][prevchar].kerns
local kern = kerns and kerns[lastchar] or 0
- krn = kern + quaddata[lastfont]*krn
+ krn = kern + quaddata[lastfont]*krn -- here
else
- krn = quaddata[lastfont]*krn
+ krn = quaddata[lastfont]*krn -- here
end
insert_node_before(head,start,kern_injector(fillup,krn))
end
end
end
elseif id == glue_code then
- local subtype = getsubtype(start)
+ local subtype = start.subtype
if subtype == userskip_code or subtype == xspaceskip_code or subtype == spaceskip_code then
- local s = getfield(start,"spec")
- local w = getfield(s,"width")
+ local s = start.spec
+ local w = s.width
if w > 0 then
- local width, stretch, shrink = w+gluefactor*w*krn, getfield(s,"stretch"), getfield(s,"shrink")
- setfield(start,"spec",spec_injector(fillup,width,stretch*width/w,shrink*width/w))
+ local width, stretch, shrink = w+gluefactor*w*krn, s.stretch, s.shrink
+ start.spec = spec_injector(fillup,width,stretch*width/w,shrink*width/w)
done = true
end
end
elseif id == kern_code then
- -- if getsubtype(start) == kerning_code then -- handle with glyphs
- -- local sk = getfield(start,"kern")
+ -- if start.subtype == kerning_code then -- handle with glyphs
+ -- local sk = start.kern
-- if sk > 0 then
- -- setfield(start,"kern",sk*krn)
+ -- start.kern = sk*krn
-- done = true
-- end
-- end
elseif lastfont and (id == hlist_code or id == vlist_code) then -- todo: lookahead
- local p = getprev(start)
- if p and getid(p) ~= glue_code then
+ local p = start.prev
+ if p and p.id ~= glue_code then
insert_node_before(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
done = true
end
- local n = getnext(start)
- if n and getid(n) ~= glue_code then
+ local n = start.next
+ if n and n.id ~= glue_code then
insert_node_after(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
done = true
end
@@ -398,7 +383,7 @@ local function do_process(head,force) -- todo: glue so that we can fully stretch
end
end
if start then
- start = getnext(start)
+ start = start.next
end
end
return head, done
@@ -429,8 +414,7 @@ function kerns.set(factor)
end
function kerns.handler(head)
- local head, done = do_process(tonut(head)) -- no direct map, because else fourth argument is tail == true
- return tonode(head), done
+ return do_process(head) -- no direct map, because else fourth argument is tail == true
end
-- interface
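Both versions of kern_injector above share the same idea: normally a plain kern is inserted, but for the "max" (fillup) variant a glue with first-order stretch is used so the extra space can grow. A sketch with the pool constructors passed in as assumed parameters rather than taken from nodepool:

    local function injector(fillup, amount, new_kern, new_glue)
        if fillup then
            local g = new_glue(amount)
            local s = g.spec
            s.stretch       = amount
            s.stretch_order = 1    -- make it fill-like
            return g
        else
            return new_kern(amount)
        end
    end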
diff --git a/tex/context/base/typo-mar.lua b/tex/context/base/typo-mar.lua
index 4bfc107ad..85d5c85a8 100644
--- a/tex/context/base/typo-mar.lua
+++ b/tex/context/base/typo-mar.lua
@@ -115,30 +115,13 @@ local v_first = variables.first
local v_text = variables.text
local v_column = variables.column
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local copy_node_list = nuts.copy_list
-local hpack_nodes = nuts.hpack -- nodes.fasthpack not really faster here
-local traverse_id = nuts.traverse_id
-local free_node_list = nuts.flush_list
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local linked_nodes = nuts.linked
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getsubtype = nuts.getsubtype
-local getbox = nuts.getbox
-local getlist = nuts.getlist
+local copy_node_list = node.copy_list
+local slide_nodes = node.slide
+local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
+local traverse_id = node.traverse_id
+local free_node_list = node.flush_list
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -161,7 +144,7 @@ local userdefined_code = whatsitcodes.userdefined
local dir_code = whatsitcodes.dir
local localpar_code = whatsitcodes.localpar
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_kern = nodepool.kern
local new_glue = nodepool.glue
@@ -172,12 +155,13 @@ local new_latelua = nodepool.latelua
local texgetcount = tex.getcount
local texgetdimen = tex.getdimen
+local texgetbox = tex.getbox
local texget = tex.get
local points = number.points
local isleftpage = layouts.status.isleftpage
-local registertogether = builders.paragraphs.registertogether -- tonode
+local registertogether = builders.paragraphs.registertogether
local jobpositions = job.positions
local getposition = jobpositions.position
@@ -186,7 +170,7 @@ local a_margindata = attributes.private("margindata")
local inline_mark = nodepool.userids["margins.inline"]
-local margins = { }
+local margins = { }
typesetters.margins = margins
local locations = { v_left, v_right, v_inner, v_outer } -- order might change
@@ -249,7 +233,7 @@ local function showstore(store,banner,location)
if next(store) then
for i, si in table.sortedpairs(store) do
local si =store[i]
- report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(getlist(si.box)))
+ report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(si.box.list))
end
else
report_margindata("%s: nothing stored in location %a",banner,location)
@@ -258,7 +242,7 @@ end
function margins.save(t)
setmetatable(t,defaults)
- local content = getbox(t.number)
+ local content = texgetbox(t.number)
local location = t.location
local category = t.category
local inline = t.inline
@@ -326,11 +310,11 @@ function margins.save(t)
-- nice is to make a special status table mechanism
local leftmargindistance = texgetdimen("naturalleftmargindistance")
local rightmargindistance = texgetdimen("naturalrightmargindistance")
- local strutbox = getbox("strutbox")
- t.strutdepth = getfield(strutbox,"depth")
- t.strutheight = getfield(strutbox,"height")
- t.leftskip = getfield(texget("leftskip"),"width") -- we're not in forgetall
- t.rightskip = getfield(texget("rightskip"),"width") -- we're not in forgetall
+ local strutbox = texgetbox("strutbox")
+ t.strutdepth = strutbox.depth
+ t.strutheight = strutbox.height
+ t.leftskip = texget("leftskip").width -- we're not in forgetall
+ t.rightskip = texget("rightskip").width -- we're not in forgetall
t.leftmargindistance = leftmargindistance -- todo:layoutstatus table
t.rightmargindistance = rightmargindistance
t.leftedgedistance = texgetdimen("naturalleftedgedistance")
@@ -343,7 +327,7 @@ function margins.save(t)
--
-- t.realpageno = texgetcount("realpageno")
if inline then
- context(tonode(new_usernumber(inline_mark,nofsaved))) -- or use a normal node
+ context(new_usernumber(inline_mark,nofsaved))
store[nofsaved] = t -- no insert
nofinlined = nofinlined + 1
else
@@ -420,7 +404,7 @@ local function realign(current,candidate)
-- we assume that list is a hbox, otherwise we had to take the whole current
-- in order to get it right
- setfield(current,"width",0)
+ current.width = 0
local anchornode, move_x
-- this mess is needed for alignments (combinations) so we use that
@@ -462,9 +446,9 @@ local function realign(current,candidate)
report_margindata("realigned %a, location %a, margin %a",candidate.n,location,margin)
end
end
- local list = hpack_nodes(linked_nodes(anchornode,new_kern(-delta),getlist(current),new_kern(delta)))
- setfield(current,"list",list)
- setfield(current,"width",0)
+
+ current.list = hpack_nodes(anchornode .. new_kern(-delta) .. current.list .. new_kern(delta))
+ current.width = 0
end
local function realigned(current,a)
@@ -506,8 +490,7 @@ local function markovershoot(current)
v_anchors = v_anchors + 1
cache[v_anchors] = stacked
local anchor = new_latelua(format("typesetters.margins.ha(%s)",v_anchors)) -- todo: alleen als offset > line
- local list = hpack_nodes(linked_nodes(anchor,getlist(current)))
- setfield(current,"list",list)
+ current.list = hpack_nodes(anchor .. current.list)
end
local function getovershoot(location)
@@ -529,10 +512,10 @@ end
local function inject(parent,head,candidate)
local box = candidate.box
- local width = getfield(box,"width")
- local height = getfield(box,"height")
- local depth = getfield(box,"depth")
- local shift = getfield(box,"shift")
+ local width = box.width
+ local height = box.height
+ local depth = box.depth
+ local shift = box.shift
local stack = candidate.stack
local location = candidate.location
local method = candidate.method
@@ -541,7 +524,7 @@ local function inject(parent,head,candidate)
local baseline = candidate.baseline
local strutheight = candidate.strutheight
local strutdepth = candidate.strutdepth
- local psubtype = getsubtype(parent)
+ local psubtype = parent.subtype
local offset = stacked[location]
local firstonstack = offset == false or offset == nil
nofstatus = nofstatus + 1
@@ -563,7 +546,7 @@ local function inject(parent,head,candidate)
end
end
candidate.width = width
- candidate.hsize = getfield(parent,"width") -- we can also pass textwidth
+ candidate.hsize = parent.width -- we can also pass textwidth
candidate.psubtype = psubtype
if trace_margindata then
report_margindata("processing, index %s, height %p, depth %p, parent %s",candidate.n,height,depth,listcodes[psubtype])
@@ -590,7 +573,7 @@ local function inject(parent,head,candidate)
-- experimental.
-- -- --
if method == v_top then
- local delta = height - getfield(parent,"height")
+ local delta = height - parent.height
if trace_margindata then
report_margindata("top aligned by %p",delta)
end
@@ -633,23 +616,22 @@ local function inject(parent,head,candidate)
shift = shift + delta
offset = offset + delta
end
- setfield(box,"shift",shift)
- setfield(box,"width",0)
+ box.shift = shift
+ box.width = 0
if not head then
head = box
- elseif getid(head) == whatsit_code and getsubtype(head) == localpar_code then
+ elseif head.id == whatsit_code and head.subtype == localpar_code then
-- experimental
- if getfield(head,"dir") == "TRT" then
- local list = hpack_nodes(linked_nodes(new_kern(candidate.hsize),getlist(box),new_kern(-candidate.hsize)))
- setfield(box,"list",list)
+ if head.dir == "TRT" then
+ box.list = hpack_nodes(new_kern(candidate.hsize) .. box.list .. new_kern(-candidate.hsize))
end
insert_node_after(head,head,box)
else
- setfield(head,"prev",box)
- setfield(box,"next",head)
+ head.prev = box
+ box.next = head
head = box
end
- setattr(box,a_margindata,nofstatus)
+ box[a_margindata] = nofstatus
if trace_margindata then
report_margindata("injected, location %a, shift %p",location,shift)
end
@@ -674,12 +656,12 @@ local function flushinline(parent,head)
local current = head
local done = false
local continue = false
- local room, don, con, list
+ local room, don, con
while current and nofinlined > 0 do
- local id = getid(current)
+ local id = current.id
if id == whatsit_code then
- if getsubtype(current) == userdefined_code and getfield(current,"user_id") == inline_mark then
- local n = getfield(current,"value")
+ if current.subtype == userdefined_code and current.user_id == inline_mark then
+ local n = current.value
local candidate = inlinestore[n]
if candidate then -- no vpack, as we want to realign
inlinestore[n] = nil
@@ -692,12 +674,11 @@ local function flushinline(parent,head)
end
elseif id == hlist_code or id == vlist_code then
-- optional (but sometimes needed)
- list, don, con = flushinline(current,getlist(current))
- setfield(current,"list",list)
+ current.list, don, con = flushinline(current,current.list)
continue = continue or con
done = done or don
end
- current = getnext(current)
+ current = current.next
end
return head, done, continue
end
@@ -705,7 +686,7 @@ end
local a_linenumber = attributes.private('linenumber')
local function flushed(scope,parent) -- current is hlist
- local head = getlist(parent)
+ local head = parent.list
local done = false
local continue = false
local room, con, don
@@ -721,7 +702,7 @@ local function flushed(scope,parent) -- current is hlist
done = true
continue = continue or con
nofstored = nofstored - 1
- registertogether(tonode(parent),room) -- !! tonode
+ registertogether(parent,room)
else
break
end
@@ -730,18 +711,17 @@ local function flushed(scope,parent) -- current is hlist
end
if nofinlined > 0 then
if done then
- setfield(parent,"list",head)
+ parent.list = head
end
head, don, con = flushinline(parent,head)
continue = continue or con
done = done or don
end
if done then
- local a = getattr(head,a_linenumber) -- hack .. we need a more decent critical attribute inheritance mechanism
- local l = hpack_nodes(head,getfield(parent,"width"),"exactly")
- setfield(parent,"list",l)
+ local a = head[a_linenumber] -- hack .. we need a more decent critical attribute inheritance mechanism
+ parent.list = hpack_nodes(head,parent.width,"exactly")
if a then
- setattr(l,a_linenumber,a)
+ parent.list[a_linenumber] = a
end
-- resetstacked()
end
@@ -756,15 +736,14 @@ local function handler(scope,head,group)
if trace_margindata then
report_margindata("flushing stage one, stored %s, scope %s, delayed %s, group %a",nofstored,scope,nofdelayed,group)
end
- head = tonut(head)
local current = head
local done = false
while current do
- local id = getid(current)
- if (id == vlist_code or id == hlist_code) and not getattr(current,a_margindata) then
+ local id = current.id
+ if (id == vlist_code or id == hlist_code) and not current[a_margindata] then
local don, continue = flushed(scope,current)
if don then
- setattr(current,a_margindata,0) -- signal to prevent duplicate processing
+ current[a_margindata] = 0 -- signal to prevent duplicate processing
if continue then
markovershoot(current)
end
@@ -774,12 +753,12 @@ local function handler(scope,head,group)
done = true
end
end
- current = getnext(current)
+ current = current.next
end
-- if done then
resetstacked() -- why doesn't done work ok here?
-- end
- return tonode(head), done
+ return head, done
else
return head, false
end
@@ -832,11 +811,11 @@ local function finalhandler(head)
local current = head
local done = false
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code then
- local a = getattr(current,a_margindata)
+ local a = current[a_margindata]
if not a or a == 0 then
- finalhandler(getlist(current))
+ finalhandler(current.list)
elseif realigned(current,a) then
done = true
if nofdelayed == 0 then
@@ -844,9 +823,9 @@ local function finalhandler(head)
end
end
elseif id == vlist_code then
- finalhandler(getlist(current))
+ finalhandler(current.list)
end
- current = getnext(current)
+ current = current.next
end
return head, done
else
@@ -859,10 +838,7 @@ function margins.finalhandler(head)
-- if trace_margindata then
-- report_margindata("flushing stage two, instore: %s, delayed: %s",nofstored,nofdelayed)
-- end
-head = tonut(head)
-local head, done = finalhandler(head)
-head = tonode(head)
- return head, done
+ return finalhandler(head)
else
return head, false
end
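
Note (not part of the patch): the hunks above, and most of the ones that follow, replace the nuts accessor calls (getfield, setfield, getid, getnext, tonut, tonode) with plain field indexing on the node userdata. A minimal sketch of the two equivalent styles, purely illustrative; count_hlists is a made-up name and the snippet assumes it runs inside LuaTeX:

    local hlist_code = node.id("hlist")

    -- direct field access, the style used on the plus side of these hunks
    local function count_hlists(head)
        local n, current = 0, head
        while current do
            if current.id == hlist_code then
                n = n + 1
            end
            current = current.next
        end
        return n
    end

    -- the nuts style on the minus side wraps the list first and uses accessors:
    --   local nuts    = nodes.nuts
    --   local current = nuts.tonut(head)
    --   ... nuts.getid(current) ... current = nuts.getnext(current) ...
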
diff --git a/tex/context/base/typo-pag.lua b/tex/context/base/typo-pag.lua
index 5b96e9c21..0dd75ddf9 100644
--- a/tex/context/base/typo-pag.lua
+++ b/tex/context/base/typo-pag.lua
@@ -6,14 +6,6 @@ if not modules then modules = { } end modules ['typo-pag'] = {
license = "see context related readme files"
}
-
-builders = builders or { }
-local builders = builders
-
-builders.paragraphs = builders.paragraphs or { }
-local parbuilders = builders.paragraphs
-
-local nodes = nodes
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
@@ -22,22 +14,12 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local penalty_code = nodecodes.penalty
-local unsetvalue = attributes.unsetvalue
-local a_keeptogether = attributes.private("keeptogether")
-
-local nuts = nodes.nuts
-local tonut = nuts.tonut
+local insert_node_after = node.insert_after
+local new_penalty = nodes.pool.penalty
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
+local unsetvalue = attributes.unsetvalue
-local insert_node_after = nuts.insert_after
-local new_penalty = nuts.pool.penalty
+local a_keeptogether = attributes.private("keeptogether")
local trace_keeptogether = false
local report_keeptogether = logs.reporter("parbuilders","keeptogether")
@@ -51,11 +33,11 @@ trackers.register("parbuilders.keeptogether", function(v) trace_keeptogether =
-- todo: also support lines = 3 etc (e.g. dropped caps) but how to set that
-- when no hlists are there ? ... maybe the local_par
-function parbuilders.registertogether(line,specification) -- might change
+function builders.paragraphs.registertogether(line,specification) -- might change
if not enabled then
nodes.tasks.enableaction("finalizers","builders.paragraphs.keeptogether")
end
- local a = getattr(line,a_keeptogether)
+ local a = line[a_keeptogether]
local c = a and cache[a]
if c then
local height = specification.height
@@ -82,7 +64,7 @@ function parbuilders.registertogether(line,specification) -- might change
if not specification.slack then
specification.slack = 0
end
- setattr(line,a_keeptogether,last)
+ line[a_keeptogether] = last
end
if trace_keeptogether then
local a = a or last
@@ -106,24 +88,24 @@ local function keeptogether(start,a)
if start then
local specification = cache[a]
if a then
- local current = getnext(start)
+ local current = start.next
local previous = start
- local total = getfield(previous,"depth")
+ local total = previous.depth
local slack = specification.slack
local threshold = specification.depth - slack
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p, slack %p","list",a,total,threshold,slack)
end
while current do
- local id = getid(current)
+ local id = current.id
if id == vlist_code or id == hlist_code then
- total = total + getfield(current,"height") + getfield(current,"depth")
+ total = total + current.height + current.depth
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p","list",a,total,threshold)
end
if total <= threshold then
- if getid(previous) == penalty_code then
- setfield(previous,"penalty",10000)
+ if previous.id == penalty_code then
+ previous.penalty = 10000
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -132,13 +114,13 @@ local function keeptogether(start,a)
end
elseif id == glue_code then
-- hm, breakpoint, maybe turn this into kern
- total = total + getfield(getfield(current,"spec"),"width")
+ total = total + current.spec.width
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p","glue",a,total,threshold)
end
if total <= threshold then
- if getid(previous) == penalty_code then
- setfield(previous,"penalty",10000)
+ if previous.id == penalty_code then
+ previous.penalty = 10000
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -146,13 +128,13 @@ local function keeptogether(start,a)
break
end
elseif id == kern_code then
- total = total + getfield(current,"kern")
+ total = total + current.kern
if trace_keeptogether then
report_keeptogether("%s, index %s, total %s, threshold %s","kern",a,total,threshold)
end
if total <= threshold then
- if getid(previous) == penalty_code then
- setfield(previous,"penalty",10000)
+ if previous.id == penalty_code then
+ previous.penalty = 10000
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -161,16 +143,16 @@ local function keeptogether(start,a)
end
elseif id == penalty_code then
if total <= threshold then
- if getid(previous) == penalty_code then
- setfield(previous,"penalty",10000)
+ if previous.id == penalty_code then
+ previous.penalty = 10000
end
- setfield(current,"penalty",10000)
+ current.penalty = 10000
else
break
end
end
previous = current
- current = getnext(current)
+ current = current.next
end
end
end
@@ -178,20 +160,20 @@ end
-- also look at first non glue/kern node e.g for a dropped caps
-function parbuilders.keeptogether(head)
+function builders.paragraphs.keeptogether(head)
local done = false
- local current = tonut(head)
+ local current = head
while current do
- if getid(current) == hlist_code then
- local a = getattr(current,a_keeptogether)
+ if current.id == hlist_code then
+ local a = current[a_keeptogether]
if a and a > 0 then
keeptogether(current,a)
- setattr(current,a_keeptogether,unsetvalue)
+ current[a_keeptogether] = unsetvalue
cache[a] = nil
done = true
end
end
- current = getnext(current)
+ current = current.next
end
return head, done
end
diff --git a/tex/context/base/typo-par.mkiv b/tex/context/base/typo-par.mkiv
deleted file mode 100644
index 8572f31b8..000000000
--- a/tex/context/base/typo-par.mkiv
+++ /dev/null
@@ -1,29 +0,0 @@
-%D \module
-%D [ file=typo-par,
-%D version=2008.09.30,
-%D title=\CONTEXT\ Typesetting Macros,
-%D subtitle=Paragraph Building,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Node Macros / Paragraph Building}
-
-%D This is very experimental, undocumented, subjected to changes, etc. just as
-%D the underlying interfaces. But at least it's cleaned as part of the status-mkiv
-%D cleanup.
-
-% \startparbuilder[basic]
-% \input tufte \par
-% \stopparbuilder
-
-\unprotect
-
-\registerctxluafile{node-ltp}{1.001}
-\registerctxluafile{trac-par}{1.001}
-
-\protect \endinput
diff --git a/tex/context/base/typo-rep.lua b/tex/context/base/typo-rep.lua
index 95b801e2e..01868f490 100644
--- a/tex/context/base/typo-rep.lua
+++ b/tex/context/base/typo-rep.lua
@@ -10,44 +10,31 @@ if not modules then modules = { } end modules ['typo-rep'] = {
-- endure it by listening to a couple cd's by The Scene and The Lau
-- on the squeezebox on my desk.
-local next, type, tonumber = next, type, tonumber
-
local trace_stripping = false trackers.register("nodes.stripping", function(v) trace_stripping = v end)
trackers.register("fonts.stripping", function(v) trace_stripping = v end)
local report_stripping = logs.reporter("fonts","stripping")
-local nodes = nodes
-local tasks = nodes.tasks
-
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getattr = nuts.getid
+local nodes, node = nodes, node
-local setattr = nuts.setattr
-
-local delete_node = nuts.delete
-local replace_node = nuts.replace
-local copy_node = nuts.copy
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
+local delete_node = nodes.delete
+local replace_node = nodes.replace
+local copy_node = node.copy
local chardata = characters.data
local collected = false
+local a_stripping = attributes.private("stripping")
local fontdata = fonts.hashes.identifiers
+local tasks = nodes.tasks
-local a_stripping = attributes.private("stripping")
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
local v_reset = interfaces.variables.reset
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+
-- todo: other namespace -> typesetters
nodes.stripping = nodes.stripping or { } local stripping = nodes.stripping
@@ -72,13 +59,13 @@ local function process(what,head,current,char)
head, current = delete_node(head,current)
elseif type(what) == "function" then
head, current = what(head,current)
- current = getnext(current)
+ current = current.next
if trace_stripping then
report_stripping("processing %C in text",char)
end
elseif what then -- assume node
head, current = replace_node(head,current,copy_node(what))
- current = getnext(current)
+ current = current.next
if trace_stripping then
report_stripping("replacing %C in text",char)
end
@@ -87,29 +74,28 @@ local function process(what,head,current,char)
end
function nodes.handlers.stripping(head)
- head = tonut(head)
local current, done = head, false
while current do
- if getid(current) == glyph_code then
+ if current.id == glyph_code then
-- it's more efficient to keep track of what needs to be kept
- local todo = getattr(current,a_stripping)
+ local todo = current[a_stripping]
if todo == 1 then
- local char = getchar(current)
+ local char = current.char
local what = glyphs[char]
if what then
head, current = process(what,head,current,char)
done = true
else -- handling of spacing etc has to be done elsewhere
- current = getnext(current)
+ current = current.next
end
else
- current = getnext(current)
+ current = current.next
end
else
- current = getnext(current)
+ current = current.next
end
end
- return tonode(head), done
+ return head, done
end
local enabled = false
diff --git a/tex/context/base/typo-spa.lua b/tex/context/base/typo-spa.lua
index 5cf9ab837..c3f50fe98 100644
--- a/tex/context/base/typo-spa.lua
+++ b/tex/context/base/typo-spa.lua
@@ -15,7 +15,10 @@ local report_spacing = logs.reporter("typesetting","spacing")
local nodes, fonts, node = nodes, fonts, node
-local tasks = nodes.tasks
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove
+local end_of_math = node.end_of_math
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -26,28 +29,6 @@ local unsetvalue = attributes.unsetvalue
local v_reset = interfaces.variables.reset
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getfont = nuts.getfont
-local getattr = nuts.getattr
-
-local setattr = nuts.setattr
-
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-local end_of_math = nuts.end_of_math
-
-local nodepool = nuts.pool
-local new_penalty = nodepool.penalty
-local new_glue = nodepool.glue
-
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local math_code = nodecodes.math
@@ -55,6 +36,12 @@ local math_code = nodecodes.math
local somespace = nodes.somespace
local somepenalty = nodes.somepenalty
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
+local new_penalty = nodepool.penalty
+local new_glue = nodepool.glue
+
typesetters = typesetters or { }
local typesetters = typesetters
@@ -65,6 +52,7 @@ spacings.mapping = spacings.mapping or { }
spacings.numbers = spacings.numbers or { }
local a_spacings = attributes.private("spacing")
+spacings.attribute = a_spacings
storage.register("typesetters/spacings/mapping", spacings.mapping, "typesetters.spacings.mapping")
@@ -79,30 +67,29 @@ end
-- todo cache lastattr
function spacings.handler(head)
- head = tonut(head)
local done = false
local start = head
-- head is always begin of par (whatsit), so we have at least two prev nodes
-- penalty followed by glue
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- local attr = getattr(start,a_spacings)
+ local attr = start[a_spacings]
if attr and attr > 0 then
local data = mapping[attr]
if data then
- local char = getchar(start)
+ local char = start.char
local map = data.characters[char]
- setattr(start,a_spacings,unsetvalue) -- needed?
+ start[a_spacings] = unsetvalue -- needed?
if map then
local left = map.left
local right = map.right
local alternative = map.alternative
- local quad = quaddata[getfont(start)]
- local prev = getprev(start)
+ local quad = quaddata[start.font]
+ local prev = start.prev
if left and left ~= 0 and prev then
local ok = false
- local prevprev = getprev(prev)
+ local prevprev = prev.prev
if alternative == 1 then
local somespace = somespace(prev,true)
if somespace then
@@ -133,10 +120,10 @@ function spacings.handler(head)
done = true
end
end
- local next = getnext(start)
+ local next = start.next
if right and right ~= 0 and next then
local ok = false
- local nextnext = getnext(next)
+ local nextnext = next.next
if alternative == 1 then
local somepenalty = somepenalty(next,10000)
if somepenalty then
@@ -177,10 +164,10 @@ function spacings.handler(head)
start = end_of_math(start) -- weird, can return nil .. no math end?
end
if start then
- start = getnext(start)
+ start = start.next
end
end
- return tonode(head), done
+ return head, done
end
local enabled = false
diff --git a/tex/context/base/typo-tal.lua b/tex/context/base/typo-tal.lua
index debcedfd3..63a66d037 100644
--- a/tex/context/base/typo-tal.lua
+++ b/tex/context/base/typo-tal.lua
@@ -20,34 +20,19 @@ local fontcharacters = fonts.hashes.characters
local unicodes = fonts.hashes.unicodes
local categories = characters.categories -- nd
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
+local insert_node_before = nodes.insert_before
+local insert_node_after = nodes.insert_after
+local traverse_list_by_id = nodes.traverse_id
+local dimensions_of_list = nodes.dimensions
+local first_glyph = nodes.first_glyph
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-local getfield = nuts.getfield
-
-local setattr = nuts.setattr
-local setfield = nuts.setfield
-
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local traverse_list_by_id = nuts.traverse_id
-local dimensions_of_list = nuts.dimensions
-local first_glyph = nuts.first_glyph
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_kern = nodepool.kern
local new_gluespec = nodepool.gluespec
local tracers = nodes.tracers
local setcolor = tracers.colors.set
-local tracedrule = tracers.pool.nuts.rule
+local tracedrule = tracers.pool.nodes.rule
local characteralign = { }
typesetters.characteralign = characteralign
@@ -84,11 +69,10 @@ local function traced_kern(w)
return tracedrule(w,nil,nil,"darkgray")
end
-function characteralign.handler(originalhead,where)
+function characteralign.handler(head,where)
if not datasets then
- return originalhead, false
+ return head, false
end
- local head = tonut(originalhead)
-- local first = first_glyph(head) -- we could do that once
local first
for n in traverse_list_by_id(glyph_code,head) do
@@ -96,11 +80,11 @@ function characteralign.handler(originalhead,where)
break
end
if not first then
- return originalhead, false
+ return head, false
end
- local a = getattr(first,a_characteralign)
+ local a = first[a_characteralign]
if not a or a == 0 then
- return originalhead, false
+ return head, false
end
local column = div(a,100)
local row = a % 100
@@ -116,10 +100,10 @@ function characteralign.handler(originalhead,where)
local sign = nil
-- we can think of constraints
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local char = getchar(current)
- local font = getfont(current)
+ local char = current.char
+ local font = current.font
local unicode = unicodes[font][char]
if not unicode then
-- no unicode so forget about it
@@ -142,13 +126,13 @@ function characteralign.handler(originalhead,where)
if not b_start then
if sign then
b_start = sign
- local new = validsigns[getchar(sign)]
- if char == new or not fontcharacters[getfont(sign)][new] then
+ local new = validsigns[sign.char]
+ if char == new or not fontcharacters[sign.font][new] then
if trace_split then
setcolor(sign,"darkyellow")
end
else
- setfield(sign,"char",new)
+ sign.char = new
if trace_split then
setcolor(sign,"darkmagenta")
end
@@ -174,14 +158,14 @@ function characteralign.handler(originalhead,where)
end
elseif (b_start or a_start) and id == glue_code then
-- somewhat inefficient
- local next = getnext(current)
- local prev = getprev(current)
- if next and prev and getid(next) == glyph_code and getid(prev) == glyph_code then -- too much checking
- local width = fontcharacters[getfont(b_start)][separator or period].width
- -- local spec = getfield(current,"spec")
- -- free_spec(spec)
- setfield(current,"spec",new_gluespec(width))
- setattr(current,a_character,punctuationspace)
+ local next = current.next
+ local prev = current.prev
+ if next and prev and next.id == glyph_code and prev.id == glyph_code then -- too much checking
+ local width = fontcharacters[b_start.font][separator or period].width
+ -- local spec = current.spec
+ -- nodes.free(spec) -- hm, we leak but not that many specs
+ current.spec = new_gluespec(width)
+ current[a_character] = punctuationspace
if a_start then
a_stop = current
elseif b_start then
@@ -189,7 +173,7 @@ function characteralign.handler(originalhead,where)
end
end
end
- current = getnext(current)
+ current = current.next
end
local entry = list[row]
if entry then
@@ -223,7 +207,7 @@ function characteralign.handler(originalhead,where)
if not c then
-- print("[before]")
if dataset.hasseparator then
- local width = fontcharacters[getfont(b_stop)][separator].width
+ local width = fontcharacters[b_stop.font][separator].width
insert_node_after(head,b_stop,new_kern(maxafter+width))
end
elseif a_start then
@@ -245,7 +229,7 @@ function characteralign.handler(originalhead,where)
end
else
-- print("[after]")
- local width = fontcharacters[getfont(b_stop)][separator].width
+ local width = fontcharacters[b_stop.font][separator].width
head = insert_node_before(head,a_start,new_kern(maxbefore+width))
end
if after < maxafter then
@@ -262,12 +246,12 @@ function characteralign.handler(originalhead,where)
end
else
entry = {
- before = b_start and dimensions_of_list(b_start,getnext(b_stop)) or 0,
- after = a_start and dimensions_of_list(a_start,getnext(a_stop)) or 0,
+ before = b_start and dimensions_of_list(b_start,b_stop.next) or 0,
+ after = a_start and dimensions_of_list(a_start,a_stop.next) or 0,
}
list[row] = entry
end
- return tonode(head), true
+ return head, true
end
function setcharacteralign(column,separator)
diff --git a/tex/context/base/util-deb.lua b/tex/context/base/util-deb.lua
index ee732b3b5..785373f86 100644
--- a/tex/context/base/util-deb.lua
+++ b/tex/context/base/util-deb.lua
@@ -92,41 +92,37 @@ end
function debugger.disable()
debug.sethook()
- -- counters[debug.getinfo(2,"f").func] = nil
+--~ counters[debug.getinfo(2,"f").func] = nil
end
--- debugger.enable()
---
--- print(math.sin(1*.5))
--- print(math.sin(1*.5))
--- print(math.sin(1*.5))
--- print(math.sin(1*.5))
--- print(math.sin(1*.5))
---
--- debugger.disable()
---
--- print("")
--- debugger.showstats()
--- print("")
--- debugger.showstats(print,3)
---
+--~ debugger.enable()
+
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+
+--~ debugger.disable()
+
+--~ print("")
+--~ debugger.showstats()
+--~ print("")
+--~ debugger.showstats(print,3)
+
-- from the lua book:
-local function showtraceback(rep) -- from lua site / adapted
- local level = 2 -- we don't want this function to be reported
- local reporter = rep or report
+function traceback()
+ local level = 1
while true do
- local info = getinfo(level, "Sl")
+ local info = debug.getinfo(level, "Sl")
if not info then
break
elseif info.what == "C" then
- reporter("%2i : %s",level-1,"C function")
+ print(format("%3i : C function",level))
else
- reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
end
level = level + 1
end
end
-
-debugger.showtraceback = showtraceback
--- debug.showtraceback = showtraceback
diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua
index 4ecaed7d3..af8b1651e 100644
--- a/tex/context/base/util-str.lua
+++ b/tex/context/base/util-str.lua
@@ -20,24 +20,8 @@ local utfchar, utfbyte = utf.char, utf.byte
----- loadstripped = utilities.lua.loadstripped
----- setmetatableindex = table.setmetatableindex
-local loadstripped = nil
-
-if _LUAVERSION < 5.2 then
-
- loadstripped = function(str,shortcuts)
- return load(str)
- end
-
-else
-
- loadstripped = function(str,shortcuts)
- if shortcuts then
- return load(dump(load(str),true),nil,nil,shortcuts)
- else
- return load(dump(load(str),true))
- end
- end
-
+local loadstripped = _LUAVERSION < 5.2 and load or function(str)
+ return load(dump(load(str),true)) -- it only makes sense in luajit and luatex where we have a stripped load
end
-- todo: make a special namespace for the formatter
@@ -307,67 +291,33 @@ function number.sparseexponent(f,n)
return tostring(n)
end
+local preamble = [[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+local formattednumber = number.formatted
+local sparseexponent = number.sparseexponent
+]]
+
local template = [[
%s
%s
return function(%s) return %s end
]]
-local preamble, environment = "", { }
-
-if _LUAVERSION < 5.2 then
-
- preamble = [[
-local lpeg=lpeg
-local type=type
-local tostring=tostring
-local tonumber=tonumber
-local format=string.format
-local concat=table.concat
-local signed=number.signed
-local points=number.points
-local basepoints= number.basepoints
-local utfchar=utf.char
-local utfbyte=utf.byte
-local lpegmatch=lpeg.match
-local nspaces=string.nspaces
-local tracedchar=string.tracedchar
-local autosingle=string.autosingle
-local autodouble=string.autodouble
-local sequenced=table.sequenced
-local formattednumber=number.formatted
-local sparseexponent=number.sparseexponent
- ]]
-
-else
-
- environment = {
- global = global or _G,
- lpeg = lpeg,
- type = type,
- tostring = tostring,
- tonumber = tonumber,
- format = string.format,
- concat = table.concat,
- signed = number.signed,
- points = number.points,
- basepoints = number.basepoints,
- utfchar = utf.char,
- utfbyte = utf.byte,
- lpegmatch = lpeg.match,
- nspaces = string.nspaces,
- tracedchar = string.tracedchar,
- autosingle = string.autosingle,
- autodouble = string.autodouble,
- sequenced = table.sequenced,
- formattednumber = number.formatted,
- sparseexponent = number.sparseexponent,
- }
-
-end
-
--- -- --
-
local arguments = { "a1" } -- faster than previously used (select(n,...))
setmetatable(arguments, { __index =
@@ -790,37 +740,28 @@ local builder = Cs { "start",
-- we can be clever and only alias what is needed
--- local direct = Cs (
--- P("%")/""
--- * Cc([[local format = string.format return function(str) return format("%]])
--- * (S("+- .") + R("09"))^0
--- * S("sqidfgGeExXo")
--- * Cc([[",str) end]])
--- * P(-1)
--- )
-
local direct = Cs (
- P("%")
- * (S("+- .") + R("09"))^0
- * S("sqidfgGeExXo")
- * P(-1) / [[local format = string.format return function(str) return format("%0",str) end]]
-)
+ P("%")/""
+ * Cc([[local format = string.format return function(str) return format("%]])
+ * (S("+- .") + R("09"))^0
+ * S("sqidfgGeExXo")
+ * Cc([[",str) end]])
+ * P(-1)
+ )
local function make(t,str)
local f
local p
local p = lpegmatch(direct,str)
if p then
- -- f = loadstripped(p)()
- -- print("builder 1 >",p)
f = loadstripped(p)()
else
n = 0
p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
if n > 0 then
p = format(template,preamble,t._preamble_,arguments[n],p)
- -- print("builder 2 >",p)
- f = loadstripped(p,t._environment_)() -- t._environment is not populated (was experiment)
+-- print("builder>",p)
+ f = loadstripped(p)()
else
f = function() return str end
end
@@ -875,26 +816,10 @@ strings.formatters = { }
-- table (metatable) in which case we could better keep a count and
-- clear that table when a threshold is reached
-if _LUAVERSION < 5.2 then
-
- function strings.formatters.new()
- local t = { _extensions_ = { }, _preamble_ = preamble, _environment_ = { }, _type_ = "formatter" }
- setmetatable(t, { __index = make, __call = use })
- return t
- end
-
-else
-
- function strings.formatters.new()
- local e = { } -- better make a copy as we can overload
- for k, v in next, environment do
- e[k] = v
- end
- local t = { _extensions_ = { }, _preamble_ = "", _environment_ = e, _type_ = "formatter" }
- setmetatable(t, { __index = make, __call = use })
- return t
- end
-
+function strings.formatters.new()
+ local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter" }
+ setmetatable(t, { __index = make, __call = use })
+ return t
end
-- function strings.formatters.new()
@@ -913,12 +838,8 @@ string.formatter = function(str,...) return formatters[str](...) end -- someti
local function add(t,name,template,preamble)
if type(t) == "table" and t._type_ == "formatter" then
t._extensions_[name] = template or "%s"
- if type(preamble) == "string" then
+ if preamble then
t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload !
- elseif type(preamble) == "table" then
- for k, v in next, preamble do
- t._environment_[k] = v
- end
end
end
end
@@ -935,23 +856,9 @@ patterns.luaquoted = Cs(Cc('"') * ((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"
-- escaping by lpeg is faster for strings without quotes, slower on a string with quotes, but
-- faster again when other q-escapables are found (the ones we don't need to escape)
--- add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
--- add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
--- add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
-
-if _LUAVERSION < 5.2 then
-
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
-
-else
-
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape = lpeg.patterns.xmlescape })
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape = lpeg.patterns.texescape })
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape = lpeg.patterns.luaescape })
-
-end
+add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
-- -- yes or no:
--
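
Note (not part of the patch): the util-str.lua hunk above keeps only the variant that prepends a preamble of local aliases to the generated formatter source, and drops the Lua 5.2 branch that handed an environment table to load. A tiny illustration of the two approaches; body, the aliases and the "%05i" format are made-up examples:

    local body = [[return function(n) return format("%05i",n) end]]

    -- preamble style (the one kept): aliases are compiled into the chunk
    local f1 = load("local format = string.format\n" .. body)()

    -- environment style (the one removed): aliases live in the chunk's _ENV,
    -- which load accepts as a fourth argument from Lua 5.2 onwards
    local f2 = load(body, nil, nil, { format = string.format })()

    print(f1(42), f2(42)) -- both produce 00042
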
diff --git a/tex/context/base/util-tab.lua b/tex/context/base/util-tab.lua
index d235520c4..ae44269bb 100644
--- a/tex/context/base/util-tab.lua
+++ b/tex/context/base/util-tab.lua
@@ -316,7 +316,7 @@ function table.fastserialize(t,prefix)
-- not sorted
-- only number and string indices (currently)
- local r = { type(prefix) == "string" and prefix or "return" }
+ local r = { prefix or "return" }
local m = 1
local function fastserialize(t,outer) -- no mixes
@@ -376,6 +376,7 @@ function table.fastserialize(t,prefix)
end
return r
end
+
return concat(fastserialize(t,true))
end
diff --git a/tex/context/base/x-mathml.lua b/tex/context/base/x-mathml.lua
index baf839ad8..cd60e756d 100644
--- a/tex/context/base/x-mathml.lua
+++ b/tex/context/base/x-mathml.lua
@@ -82,9 +82,8 @@ local o_replacements = { -- in main table
-- [utfchar(0xF103C)] = "\\mmlleftdelimiter<",
[utfchar(0xF1026)] = "\\mmlchar{38}",
- [utfchar(0x02061)] = "", -- function applicator sometimes shows up in font
-- [utfchar(0xF103E)] = "\\mmlleftdelimiter>",
- -- [utfchar(0x000AF)] = '\\mmlchar{"203E}', -- 0x203E
+
}
local simpleoperatorremapper = utf.remapper(o_replacements)
@@ -480,7 +479,7 @@ end
function mathml.mo(id)
local str = xmlcontent(getid(id)) or ""
local rep = gsub(str,"&.-;","") -- todo
- context(simpleoperatorremapper(rep) or rep)
+ context(simpleoperatorremapper(rep))
end
function mathml.mi(id)
@@ -492,18 +491,13 @@ function mathml.mi(id)
if n == 0 then
-- nothing to do
elseif n == 1 then
- local first = str[1]
- if type(first) == "string" then
- local str = gsub(first,"&.-;","") -- bah
- local rep = i_replacements[str]
- if not rep then
- rep = gsub(str,".",i_replacements)
- end
- context(rep)
- -- context.mi(rep)
- else
- context.xmlflush(id) -- xmlsprint or so
+ local str = gsub(str[1],"&.-;","") -- bah
+ local rep = i_replacements[str]
+ if not rep then
+ rep = gsub(str,".",i_replacements)
end
+ context(rep)
+ -- context.mi(rep)
else
context.xmlflush(id) -- xmlsprint or so
end
@@ -834,13 +828,3 @@ function mathml.cpolar_a(root)
end
context.right(false,")")
end
-
--- crap .. maybe in char-def a mathml overload
-
-local mathmleq = {
- [utfchar(0x00AF)] = utfchar(0x203E),
-}
-
-function mathml.extensible(chr)
- context(mathmleq[chr] or chr)
-end
diff --git a/tex/context/base/x-mathml.mkiv b/tex/context/base/x-mathml.mkiv
index 5520dbbe6..ec8fd74e4 100644
--- a/tex/context/base/x-mathml.mkiv
+++ b/tex/context/base/x-mathml.mkiv
@@ -2283,7 +2283,7 @@
\unexpanded\def\mmloverbs#1{\mmlexecuteifdefined\mmlbasecommand\relax{\mmlunexpandedsecond{#1}}\relax}
\startxmlsetups mml:mover
- \edef\mmlovertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
\doifelseutfmathabove\mmlovertoken {
\edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
\mmloverof{#1}
@@ -2295,7 +2295,7 @@
} {
\edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
\edef\mmlovercommand{\utfmathfiller\mmlovertoken}
- \mmlundertriplet{\mmloverbf{#1}}{\mmloveros{#1}}{}%\relax
+ \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax
}
}
% \limits % spoils spacing
@@ -2321,18 +2321,13 @@
% % \limits % spoils spacing
% \stopxmlsetups
-% do this in lua
-
-\def\mmlextensible#1{\ctxmodulemathml{extensible(\!!bs#1\!!es)}}
-
\unexpanded\def\mmlunderuf#1{\mmlexecuteifdefined\mmlundercommand\relax {\mmlunexpandedfirst {#1}}\relax}
\unexpanded\def\mmlunderus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
\unexpanded\def\mmlunderbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-%unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax {\mmlunexpandedsecond{#1}}\relax}
+\unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\mmlunexpandedsecond{#1}}\relax}
\startxmlsetups mml:munder
- \edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \edef\mmlundertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
\doifelseutfmathbelow\mmlundertoken {%
\edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
\mmlunderuf{#1}
@@ -2344,7 +2339,7 @@
} {
\edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
\edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \mmlundertriplet{\mmlunderbf{#1}}{}{\mmlunderus{#1}}%\relax
+ \mmlundertriplet{\mmlunderus{#1}}{\mmlunderbf{#1}}\relax
}
}
% \limits % spoils spacing
diff --git a/tex/context/base/x-set-11.mkiv b/tex/context/base/x-set-11.mkiv
index 12854dc92..d4b43a9ee 100644
--- a/tex/context/base/x-set-11.mkiv
+++ b/tex/context/base/x-set-11.mkiv
@@ -448,18 +448,8 @@
% \def\showsetupindeed#1%
% {\xmlfilterlist{\loadedsetups}{interface/command[@name='#1']/command(xml:setups:typeset)}}
-% \def\showsetupindeed#1%
-% {\xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
-
-% \setelementnature[setup][display]
-% \setelementnature[setup][mixed]
-
\def\showsetupindeed#1%
- {\startelement[setup][name=#1]%
- \startelement[noexport][comment={setup definition #1}]
- \xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}%
- \stopelement
- \stopelement}
+ {\xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
\unexpanded\def\placesetup {\placelistofsorts[texcommand][\c!criterium=\v!used]}
\unexpanded\def\placeallsetups{\placelistofsorts[texcommand][\c!criterium=\v!all ]}
@@ -648,16 +638,11 @@
\xmlflush{#1}
\doifmode{interface:setup:defaults} {
\ifx\currentSETUPhash\empty \else
- \begingroup
- % todo, make a one level expansion of parameter
- \let\emwidth \relax
- \let\exheight\relax
- \edef\currentSETUPvalue{\csname named\currentSETUPhash parameter\endcsname\empty{\xmlatt{#1}{name}}}
- \ifx\currentSETUPvalue\empty \else
- =\space
- \detokenize\expandafter{\currentSETUPvalue}
- \fi
- \endgroup
+ \edef\currentSETUPvalue{\csname named\currentSETUPhash parameter\endcsname\empty{\xmlatt{#1}{name}}}
+ \ifx\currentSETUPvalue\empty
+ \space=\space
+ \detokenize\expandafter{\currentSETUPvalue}
+ \fi
\fi
}
\stopsecondSETUPcolumn
@@ -833,6 +818,7 @@
\stoptabulate
\stopxmlsetups
+
\starttexdefinition showrootvalues [#1]
\edef\currentsetupparametercategory{#1}
\edef\currentsetupparametercommand{setup#1}
diff --git a/tex/context/interface/keys-cs.xml b/tex/context/interface/keys-cs.xml
index 0a0b9b9a6..d36f969f3 100644
--- a/tex/context/interface/keys-cs.xml
+++ b/tex/context/interface/keys-cs.xml
@@ -1051,8 +1051,6 @@
-
-
diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml
index 28b21b915..c5ba364e3 100644
--- a/tex/context/interface/keys-de.xml
+++ b/tex/context/interface/keys-de.xml
@@ -1051,8 +1051,6 @@
-
-
diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml
index da433cdee..be59542e7 100644
--- a/tex/context/interface/keys-en.xml
+++ b/tex/context/interface/keys-en.xml
@@ -1051,8 +1051,6 @@
-
-
diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml
index 6a8eaa9c5..43c47d578 100644
--- a/tex/context/interface/keys-fr.xml
+++ b/tex/context/interface/keys-fr.xml
@@ -1051,8 +1051,6 @@
-
-
diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml
index c7c996318..95c2d8aa5 100644
--- a/tex/context/interface/keys-it.xml
+++ b/tex/context/interface/keys-it.xml
@@ -1051,8 +1051,6 @@
-
-
diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml
index 21536214a..bc940ebc4 100644
--- a/tex/context/interface/keys-nl.xml
+++ b/tex/context/interface/keys-nl.xml
@@ -1051,8 +1051,6 @@
-
-
diff --git a/tex/context/interface/keys-pe.xml b/tex/context/interface/keys-pe.xml
index 8e4d412d5..75e3a17c2 100644
--- a/tex/context/interface/keys-pe.xml
+++ b/tex/context/interface/keys-pe.xml
@@ -1051,8 +1051,6 @@
-
-
diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml
index f9ef01b9f..e83d145d0 100644
--- a/tex/context/interface/keys-ro.xml
+++ b/tex/context/interface/keys-ro.xml
@@ -1051,8 +1051,6 @@
-
-
diff --git a/tex/generic/context/luatex/luatex-fonts-inj.lua b/tex/generic/context/luatex/luatex-fonts-inj.lua
deleted file mode 100644
index ae48150a6..000000000
--- a/tex/generic/context/luatex/luatex-fonts-inj.lua
+++ /dev/null
@@ -1,526 +0,0 @@
-if not modules then modules = { } end modules ['node-inj'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- This is very experimental (this will change when we have luatex > .50 and
--- a few pending thingies are available. Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help. Some optimizations can go away when we have faster machines.
-
--- todo: make a special one for context
-
-local next = next
-local utfchar = utf.char
-
-local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
-
-local report_injections = logs.reporter("nodes","injections")
-
-local attributes, nodes, node = attributes, nodes, node
-
-fonts = fonts
-local fontdata = fonts.hashes.identifiers
-
-nodes.injections = nodes.injections or { }
-local injections = nodes.injections
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local kern_code = nodecodes.kern
-local nodepool = nodes.pool
-local newkern = nodepool.kern
-
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local a_kernpair = attributes.private('kernpair')
-local a_ligacomp = attributes.private('ligacomp')
-local a_markbase = attributes.private('markbase')
-local a_markmark = attributes.private('markmark')
-local a_markdone = attributes.private('markdone')
-local a_cursbase = attributes.private('cursbase')
-local a_curscurs = attributes.private('curscurs')
-local a_cursdone = attributes.private('cursdone')
-
--- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
--- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
--- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
--- that this code is not 100% okay but examples are needed to figure things out.
-
-function injections.installnewkern(nk)
- newkern = nk or newkern
-end
-
-local cursives = { }
-local marks = { }
-local kerns = { }
-
--- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in
--- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
--- can share tables.
-
--- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
--- checking with husayni (volt and fontforge).
-
-function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
- local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
- local ws, wn = tfmstart.width, tfmnext.width
- local bound = #cursives + 1
- start[a_cursbase] = bound
- nxt[a_curscurs] = bound
- cursives[bound] = { rlmode, dx, dy, ws, wn }
- return dx, dy, bound
-end
-
-function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
- local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
- -- dy = y - h
- if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = current[a_kernpair]
- if bound then
- local kb = kerns[bound]
- -- inefficient but singles have less, but weird anyway, needs checking
- kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
- else
- bound = #kerns + 1
- current[a_kernpair] = bound
- kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
- end
- return x, y, w, h, bound
- end
- return x, y, w, h -- no bound
-end
-
-function injections.setkern(current,factor,rlmode,x,tfmchr)
- local dx = factor*x
- if dx ~= 0 then
- local bound = #kerns + 1
- current[a_kernpair] = bound
- kerns[bound] = { rlmode, dx }
- return dx, bound
- else
- return 0, 0
- end
-end
-
-function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = base[a_markbase] -- fails again we should pass it
- local index = 1
- if bound then
- local mb = marks[bound]
- if mb then
- -- if not index then index = #mb + 1 end
- index = #mb + 1
- mb[index] = { dx, dy, rlmode }
- start[a_markmark] = bound
- start[a_markdone] = index
- return dx, dy, bound
- else
- report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
- end
- end
--- index = index or 1
- index = index or 1
- bound = #marks + 1
- base[a_markbase] = bound
- start[a_markmark] = bound
- start[a_markdone] = index
- marks[bound] = { [index] = { dx, dy, rlmode, baseismark } }
- return dx, dy, bound
-end
-
-local function dir(n)
- return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
-end
-
-local function trace(head)
- report_injections("begin run")
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local kp = n[a_kernpair]
- local mb = n[a_markbase]
- local mm = n[a_markmark]
- local md = n[a_markdone]
- local cb = n[a_cursbase]
- local cc = n[a_curscurs]
- local char = n.char
- report_injections("font %s, char %U, glyph %c",n.font,char,char)
- if kp then
- local k = kerns[kp]
- if k[3] then
- report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
- else
- report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
- end
- end
- if mb then
- report_injections(" markbase: bound %a",mb)
- end
- if mm then
- local m = marks[mm]
- if mb then
- local m = m[mb]
- if m then
- report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
- else
- report_injections(" markmark: bound %a, missing index",mm)
- end
- else
- m = m[1]
- report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
- end
- end
- if cb then
- report_injections(" cursbase: bound %a",cb)
- end
- if cc then
- local c = cursives[cc]
- report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
- end
- end
- end
- report_injections("end run")
-end
-
--- todo: reuse tables (i.e. no collection), but will be extra fields anyway
--- todo: check for attribute
-
--- We can have a fast test on a font being processed, so we can check faster for marks etc
--- but I'll make a context variant anyway.
-
-local function show_result(head)
- local current = head
- local skipping = false
- while current do
- local id = current.id
- if id == glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
- skipping = false
- elseif id == kern_code then
- report_injections("kern: %p",current.kern)
- skipping = false
- elseif not skipping then
- report_injections()
- skipping = true
- end
- current = current.next
- end
-end
-
-function injections.handler(head,where,keep)
- local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
- if has_marks or has_cursives then
- if trace_injections then
- trace(head)
- end
- -- in the future variant we will not copy items but refs to tables
- local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
- if has_kerns then -- move outside loop
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- local k = n[a_kernpair]
- if k then
- local kk = kerns[k]
- if kk then
- local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
- local dy = y - h
- if dy ~= 0 then
- ky[n] = dy
- end
- if w ~= 0 or x ~= 0 then
- wx[n] = kk
- end
- rl[n] = kk[1] -- could move in test
- end
- end
- end
- end
- else
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- end
- end
- end
- if nofvalid > 0 then
- -- we can assume done == true because we have cursives and marks
- local cx = { }
- if has_kerns and next(ky) then
- for n, k in next, ky do
- n.yoffset = k
- end
- end
- -- todo: reuse t and use maxt
- if has_cursives then
- local p_cursbase, p = nil, nil
- -- since we need valid[n+1] we can also use a "while true do"
- local t, d, maxt = { }, { }, 0
- for i=1,nofvalid do -- valid == glyphs
- local n = valid[i]
- if not mk[n] then
- local n_cursbase = n[a_cursbase]
- if p_cursbase then
- local n_curscurs = n[a_curscurs]
- if p_cursbase == n_curscurs then
- local c = cursives[n_curscurs]
- if c then
- local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
- if rlmode >= 0 then
- dx = dx - ws
- else
- dx = dx + wn
- end
- if dx ~= 0 then
- cx[n] = dx
- rl[n] = rlmode
- end
- -- if rlmode and rlmode < 0 then
- dy = -dy
- -- end
- maxt = maxt + 1
- t[maxt] = p
- d[maxt] = dy
- else
- maxt = 0
- end
- end
- elseif maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ti.yoffset + ny
- end
- maxt = 0
- end
- if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- p_cursbase, p = n_cursbase, n
- end
- end
- if maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- if not keep then
- cursives = { }
- end
- end
- if has_marks then
- for i=1,nofvalid do
- local p = valid[i]
- local p_markbase = p[a_markbase]
- if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark = n[a_markmark]
- if p_markbase == n_markmark then
- local index = n[a_markdone] or 1
- local d = mrks[index]
- if d then
- local rlmode = d[3]
- --
- local k = wx[p]
- if k then
- local x = k[2]
- local w = k[4]
- if w then
- if rlmode and rlmode >= 0 then
- -- kern(x) glyph(p) kern(w-x) mark(n)
- n.xoffset = p.xoffset - p.width + d[1] - (w-x)
- else
- -- kern(w-x) glyph(p) kern(x) mark(n)
- n.xoffset = p.xoffset - d[1] - x
- end
- else
- if rlmode and rlmode >= 0 then
- -- okay for husayni
- n.xoffset = p.xoffset - p.width + d[1]
- else
- -- needs checking: is x ok here?
- n.xoffset = p.xoffset - d[1] - x
- end
- end
- else
- if rlmode and rlmode >= 0 then
- n.xoffset = p.xoffset - p.width + d[1]
- else
- n.xoffset = p.xoffset - d[1]
- end
- local w = n.width
- if w ~= 0 then
- insert_node_before(head,n,newkern(-w/2))
- insert_node_after(head,n,newkern(-w/2))
- end
- end
- -- --
- if mk[p] then
- n.yoffset = p.yoffset + d[2]
- else
- n.yoffset = n.yoffset + p.yoffset + d[2]
- end
- --
- if nofmarks == 1 then
- break
- else
- nofmarks = nofmarks - 1
- end
- end
- else
- -- KE: there can be sequences in ligatures
- end
- end
- end
- end
- if not keep then
- marks = { }
- end
- end
- -- todo : combine
- if next(wx) then
- for n, k in next, wx do
- -- only w can be nil (kernclasses), can be sped up when w == nil
- local x = k[2]
- local w = k[4]
- if w then
- local rl = k[1] -- r2l = k[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx)) -- type 0/2
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x)) -- type 0/2
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x)) -- type 0/2
- end
- if wx ~= 0 then
- insert_node_after (head,n,newkern(wx)) -- type 0/2
- end
- end
- elseif x ~= 0 then
- -- this needs checking for rl < 0 but it is unlikely that a r2l script
- -- uses kernclasses between glyphs so we're probably safe (KE has a
- -- problematic font where marks interfere with rl < 0 in the previous
- -- case)
- insert_node_before(head,n,newkern(x)) -- a real font kern, type 0
- end
- end
- end
- if next(cx) then
- for n, k in next, cx do
- if k ~= 0 then
- local rln = rl[n]
- if rln and rln < 0 then
- insert_node_before(head,n,newkern(-k)) -- type 0/2
- else
- insert_node_before(head,n,newkern(k)) -- type 0/2
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- -- if trace_injections then
- -- show_result(head)
- -- end
- return head, true
- elseif not keep then
- kerns, cursives, marks = { }, { }, { }
- end
- elseif has_kerns then
- if trace_injections then
- trace(head)
- end
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local k = n[a_kernpair]
- if k then
- local kk = kerns[k]
- if kk then
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
- if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
- end
- if w then
- -- copied from above
- -- local r2l = kk[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- else
- -- simple (e.g. kernclass kerns)
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- -- if trace_injections then
- -- show_result(head)
- -- end
- return head, true
- else
- -- no tracing needed
- end
- return head, false
-end
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 3f408b96f..24e49308c 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 02/14/14 17:07:59
+-- merge date : 01/03/14 00:40:35
do -- begin closure to overcome local limits and interference
@@ -82,9 +82,6 @@ function optionalrequire(...)
return result
end
end
-if lua then
- lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
-end
end -- closure
@@ -104,9 +101,7 @@ local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.forma
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-if setinspector then
- setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-end
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -175,11 +170,9 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
-local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
patterns.stripper=stripper
-patterns.fullstripper=fullstripper
patterns.collapser=collapser
patterns.lowercase=lowercase
patterns.uppercase=uppercase
@@ -402,7 +395,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction,isutf)
+function lpeg.finder(lst,makefunction)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -418,11 +411,7 @@ function lpeg.finder(lst,makefunction,isutf)
else
pattern=P(lst)
end
- if isutf then
- pattern=((utf8char or 1)-pattern)^0*pattern
- else
- pattern=(1-pattern)^0*pattern
- end
+ pattern=(1-pattern)^0*pattern
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -759,15 +748,11 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
-local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
-function string.fullstrip(str)
- return lpegmatch(fullstripper,str) or ""
-end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -1651,9 +1636,7 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-if setinspector then
- setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
-end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
@@ -2525,19 +2508,8 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=nil
-if _LUAVERSION<5.2 then
- loadstripped=function(str,shortcuts)
- return load(str)
- end
-else
- loadstripped=function(str,shortcuts)
- if shortcuts then
- return load(dump(load(str),true),nil,nil,shortcuts)
- else
- return load(dump(load(str),true))
- end
- end
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
end
if not number then number={} end
local stripper=patterns.stripzeros
@@ -2687,58 +2659,31 @@ function number.sparseexponent(f,n)
end
return tostring(n)
end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+local formattednumber = number.formatted
+local sparseexponent = number.sparseexponent
+]]
local template=[[
%s
%s
return function(%s) return %s end
]]
-local preamble,environment="",{}
-if _LUAVERSION<5.2 then
- preamble=[[
-local lpeg=lpeg
-local type=type
-local tostring=tostring
-local tonumber=tonumber
-local format=string.format
-local concat=table.concat
-local signed=number.signed
-local points=number.points
-local basepoints= number.basepoints
-local utfchar=utf.char
-local utfbyte=utf.byte
-local lpegmatch=lpeg.match
-local nspaces=string.nspaces
-local tracedchar=string.tracedchar
-local autosingle=string.autosingle
-local autodouble=string.autodouble
-local sequenced=table.sequenced
-local formattednumber=number.formatted
-local sparseexponent=number.sparseexponent
- ]]
-else
- environment={
- global=global or _G,
- lpeg=lpeg,
- type=type,
- tostring=tostring,
- tonumber=tonumber,
- format=string.format,
- concat=table.concat,
- signed=number.signed,
- points=number.points,
- basepoints=number.basepoints,
- utfchar=utf.char,
- utfbyte=utf.byte,
- lpegmatch=lpeg.match,
- nspaces=string.nspaces,
- tracedchar=string.tracedchar,
- autosingle=string.autosingle,
- autodouble=string.autodouble,
- sequenced=table.sequenced,
- formattednumber=number.formatted,
- sparseexponent=number.sparseexponent,
- }
-end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -3060,8 +3005,8 @@ local builder=Cs { "start",
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
-)
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
local function make(t,str)
local f
local p
@@ -3073,7 +3018,7 @@ local function make(t,str)
p=lpegmatch(builder,str,1,"..",t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p,t._environment_)()
+ f=loadstripped(p)()
else
f=function() return str end
end
@@ -3085,22 +3030,10 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-if _LUAVERSION<5.2 then
- function strings.formatters.new()
- local t={ _extensions_={},_preamble_=preamble,_environment_={},_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
-else
- function strings.formatters.new()
- local e={}
- for k,v in next,environment do
- e[k]=v
- end
- local t={ _extensions_={},_preamble_="",_environment_=e,_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
end
local formatters=strings.formatters.new()
string.formatters=formatters
@@ -3108,12 +3041,8 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if type(preamble)=="string" then
+ if preamble then
t._preamble_=preamble.."\n"..t._preamble_
- elseif type(preamble)=="table" then
- for k,v in next,preamble do
- t._environment_[k]=v
- end
end
end
end
@@ -3122,15 +3051,9 @@ patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;
patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-if _LUAVERSION<5.2 then
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
-else
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
-end
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
end -- closure
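-- A small usage sketch, assuming the util-str code above is loaded. A format
-- string is compiled once by make() into a plain Lua function (via template,
-- the shared preamble and loadstripped) and cached in the formatters table,
-- so later calls are ordinary function calls; extensions registered with
-- add() become %!name! directives.
local formatters = string.formatters
local f = formatters["%s: %0.3f"]            -- compiled and cached on first access
print(f("pi", 3.14159))                      -- pi: 3.142
print(string.formatter("%s of %s", 1, 3))    -- one-shot wrapper defined above
print(formatters["%!xml!"]("<x/>"))          -- &lt;x/&gt;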
@@ -6482,7 +6405,7 @@ local type,next,tonumber,tostring=type,next,tonumber,tostring
local abs=math.abs
local insert=table.insert
local lpegmatch=lpeg.match
-local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys
+local reversed,concat,remove=table.reversed,table.concat,table.remove
local ioflush=io.flush
local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
local formatters=string.formatters
@@ -6504,7 +6427,7 @@ local report_otf=logs.reporter("fonts","otf loading")
local fonts=fonts
local otf=fonts.handlers.otf
otf.glists={ "gsub","gpos" }
-otf.version=2.751
+otf.version=2.749
otf.cache=containers.define("fonts","otf",otf.version,true)
local fontdata=fonts.hashes.identifiers
local chardata=characters and characters.data
@@ -6656,7 +6579,6 @@ local valid_fields=table.tohash {
"upos",
"use_typo_metrics",
"uwidth",
- "validation_state",
"version",
"vert_base",
"weight",
@@ -7123,6 +7045,7 @@ actions["prepare glyphs"]=function(data,filename,raw)
}
local altuni=glyph.altuni
if altuni then
+ local d
for i=1,#altuni do
local a=altuni[i]
local u=a.unicode
@@ -7135,8 +7058,15 @@ actions["prepare glyphs"]=function(data,filename,raw)
vv={ [u]=unicode }
variants[v]=vv
end
+ elseif d then
+ d[#d+1]=u
+ else
+ d={ u }
end
end
+ if d then
+ duplicates[unicode]=d
+ end
end
else
report_otf("potential problem: glyph %U is used but empty",index)
@@ -7154,49 +7084,47 @@ actions["check encoding"]=function(data,filename,raw)
local duplicates=resources.duplicates
local mapdata=raw.map or {}
local unicodetoindex=mapdata and mapdata.map or {}
- local indextounicode=mapdata and mapdata.backmap or {}
local encname=lower(data.enc_name or mapdata.enc_name or "")
local criterium=0xFFFF
if find(encname,"unicode") then
if trace_loading then
report_otf("checking embedded unicode map %a",encname)
end
- local hash={}
- for index,unicode in next,indices do
- hash[index]=descriptions[unicode]
- end
- local reported={}
- for unicode,index in next,unicodetoindex do
- if not descriptions[unicode] then
- local d=hash[index]
- if d then
- if d.unicode~=unicode then
- local c=d.copies
- if c then
- c[unicode]=true
- else
- d.copies={ [unicode]=true }
+ for unicode,index in next,unicodetoindex do
+ if unicode<=criterium and not descriptions[unicode] then
+ local parent=indices[index]
+ if not parent then
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ else
+ local parentdescription=descriptions[parent]
+ if parentdescription then
+ local altuni=parentdescription.altuni
+ if not altuni then
+ altuni={ { unicode=unicode } }
+ parentdescription.altuni=altuni
+ duplicates[parent]={ unicode }
+ else
+ local done=false
+ for i=1,#altuni do
+ if altuni[i].unicode==unicode then
+ done=true
+ break
+ end
end
+ if not done then
+ insert(altuni,{ unicode=unicode })
+ insert(duplicates[parent],unicode)
+ end
+ end
+ if trace_loading then
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
end
- elseif not reported[i] then
- report_otf("missing index %i",index)
- reported[i]=true
+ else
+ report_otf("weird, unicode %U points to %U with index %H",unicode,index)
end
end
end
- for index,data in next,hash do
- data.copies=sortedkeys(data.copies)
- end
- for index,unicode in next,indices do
- local description=hash[index]
- local copies=description.copies
- if copies then
- duplicates[unicode]=copies
- description.copies=nil
- else
- report_otf("copies but no unicode parent %U",unicode)
- end
- end
+ end
elseif properties.cidinfo then
report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
else
@@ -7204,7 +7132,6 @@ actions["check encoding"]=function(data,filename,raw)
end
if mapdata then
mapdata.map={}
- mapdata.backmap={}
end
end
actions["add duplicates"]=function(data,filename,raw)
@@ -7215,37 +7142,28 @@ actions["add duplicates"]=function(data,filename,raw)
local indices=resources.indices
local duplicates=resources.duplicates
for unicode,d in next,duplicates do
- local nofduplicates=#d
- if nofduplicates>4 then
- if trace_loading then
- report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
- end
- else
- for i=1,nofduplicates do
- local u=d[i]
- if not descriptions[u] then
- local description=descriptions[unicode]
- local n=0
- for _,description in next,descriptions do
- if kerns then
- local kerns=description.kerns
- for _,k in next,kerns do
- local ku=k[unicode]
- if ku then
- k[u]=ku
- n=n+1
- end
+ for i=1,#d do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ local n=0
+ for _,description in next,descriptions do
+ if kerns then
+ local kerns=description.kerns
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
end
end
end
- if u>0 then
- local duplicate=table.copy(description)
- duplicate.comment=format("copy of U+%05X",unicode)
- descriptions[u]=duplicate
- if trace_loading then
- report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
- end
- end
+ end
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
end
end
end
@@ -7969,11 +7887,6 @@ actions["check metadata"]=function(data,filename,raw)
ttftables[i].data="deleted"
end
end
- if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
- local name=file.nameonly(filename)
- metadata.fontname="bad-fontname-"..name
- metadata.fullname="bad-fullname-"..name
- end
end
actions["cleanup tables"]=function(data,filename,raw)
data.resources.indices=nil
@@ -8271,24 +8184,6 @@ local function otftotfm(specification)
local features=specification.features.normal
local rawdata=otf.load(filename,sub,features and features.featurefile)
if rawdata and next(rawdata) then
- local descriptions=rawdata.descriptions
- local duplicates=rawdata.resources.duplicates
- if duplicates then
- local nofduplicates,nofduplicated=0,0
- for parent,list in next,duplicates do
- for i=1,#list do
- local unicode=list[i]
- if not descriptions[unicode] then
- descriptions[unicode]=descriptions[parent]
- nofduplicated=nofduplicated+1
- end
- end
- nofduplicates=nofduplicates+#list
- end
- if trace_otf and nofduplicated~=nofduplicates then
- report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
- end
- end
rawdata.lookuphash={}
tfmdata=copytotfm(rawdata,cache_id)
if tfmdata and next(tfmdata) then
@@ -8986,12 +8881,26 @@ nodes.injections=nodes.injections or {}
local injections=nodes.injections
local nodecodes=nodes.nodecodes
local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
local kern_code=nodecodes.kern
-local nodepool=nodes.pool
+local nuts=nodes.nuts
+local nodepool=nuts.pool
local newkern=nodepool.kern
-local traverse_id=node.traverse_id
-local insert_node_before=node.insert_before
-local insert_node_after=node.insert_after
+local tonode=nuts.tonode
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getattr=nuts.getattr
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local setfield=nuts.setfield
+local setattr=nuts.setattr
+local traverse_id=nuts.traverse_id
+local insert_node_before=nuts.insert_before
+local insert_node_after=nuts.insert_after
local a_kernpair=attributes.private('kernpair')
local a_ligacomp=attributes.private('ligacomp')
local a_markbase=attributes.private('markbase')
@@ -9010,21 +8919,21 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2])
local ws,wn=tfmstart.width,tfmnext.width
local bound=#cursives+1
- start[a_cursbase]=bound
- nxt[a_curscurs]=bound
+ setattr(start,a_cursbase,bound)
+ setattr(nxt,a_curscurs,bound)
cursives[bound]={ rlmode,dx,dy,ws,wn }
return dx,dy,bound
end
function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
if x~=0 or w~=0 or y~=0 or h~=0 then
- local bound=current[a_kernpair]
+ local bound=getattr(current,a_kernpair)
if bound then
local kb=kerns[bound]
kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h
else
bound=#kerns+1
- current[a_kernpair]=bound
+ setattr(current,a_kernpair,bound)
kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width }
end
return x,y,w,h,bound
@@ -9035,7 +8944,7 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx=factor*x
if dx~=0 then
local bound=#kerns+1
- current[a_kernpair]=bound
+ setattr(current,a_kernpair,bound)
kerns[bound]={ rlmode,dx }
return dx,bound
else
@@ -9044,25 +8953,25 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
end
function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark)
local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
- local bound=base[a_markbase]
+ local bound=getattr(base,a_markbase)
local index=1
if bound then
local mb=marks[bound]
if mb then
index=#mb+1
mb[index]={ dx,dy,rlmode }
- start[a_markmark]=bound
- start[a_markdone]=index
+ setattr(start,a_markmark,bound)
+ setattr(start,a_markdone,index)
return dx,dy,bound
else
- report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound)
end
end
index=index or 1
bound=#marks+1
- base[a_markbase]=bound
- start[a_markmark]=bound
- start[a_markdone]=index
+ setattr(base,a_markbase,bound)
+ setattr(start,a_markmark,bound)
+ setattr(start,a_markdone,index)
marks[bound]={ [index]={ dx,dy,rlmode,baseismark } }
return dx,dy,bound
end
@@ -9072,15 +8981,15 @@ end
local function trace(head)
report_injections("begin run")
for n in traverse_id(glyph_code,head) do
- if n.subtype<256 then
- local kp=n[a_kernpair]
- local mb=n[a_markbase]
- local mm=n[a_markmark]
- local md=n[a_markdone]
- local cb=n[a_cursbase]
- local cc=n[a_curscurs]
- local char=n.char
- report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if getsubtype(n)<256 then
+ local kp=getattr(n,a_kernpair)
+ local mb=getattr(n,a_markbase)
+ local mm=getattr(n,a_markmark)
+ local md=getattr(n,a_markdone)
+ local cb=getattr(n,a_cursbase)
+ local cc=getattr(n,a_curscurs)
+ local char=getchar(n)
+ report_injections("font %s, char %U, glyph %c",getfont(n),char,char)
if kp then
local k=kerns[kp]
if k[3] then
@@ -9121,21 +9030,23 @@ local function show_result(head)
local current=head
local skipping=false
while current do
- local id=current.id
+ local id=getid(current)
if id==glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
skipping=false
elseif id==kern_code then
- report_injections("kern: %p",current.kern)
+ report_injections("kern: %p",getfield(current,"kern"))
skipping=false
elseif not skipping then
report_injections()
skipping=true
end
- current=current.next
+ current=getnext(current)
end
end
function injections.handler(head,where,keep)
+ head=tonut(head)
local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns)
if has_marks or has_cursives then
if trace_injections then
@@ -9145,17 +9056,18 @@ function injections.handler(head,where,keep)
if has_kerns then
local nf,tm=nil,nil
for n in traverse_id(glyph_code,head) do
- if n.subtype<256 then
+ if getsubtype(n)<256 then
nofvalid=nofvalid+1
valid[nofvalid]=n
- if n.font~=nf then
- nf=n.font
- tm=fontdata[nf].resources.marks
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources.marks
end
if tm then
- mk[n]=tm[n.char]
+ mk[n]=tm[getchar(n)]
end
- local k=n[a_kernpair]
+ local k=getattr(n,a_kernpair)
if k then
local kk=kerns[k]
if kk then
@@ -9175,15 +9087,16 @@ function injections.handler(head,where,keep)
else
local nf,tm=nil,nil
for n in traverse_id(glyph_code,head) do
- if n.subtype<256 then
+ if getsubtype(n)<256 then
nofvalid=nofvalid+1
valid[nofvalid]=n
- if n.font~=nf then
- nf=n.font
- tm=fontdata[nf].resources.marks
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources.marks
end
if tm then
- mk[n]=tm[n.char]
+ mk[n]=tm[getchar(n)]
end
end
end
@@ -9192,7 +9105,7 @@ function injections.handler(head,where,keep)
local cx={}
if has_kerns and next(ky) then
for n,k in next,ky do
- n.yoffset=k
+ setfield(n,"yoffset",k)
end
end
if has_cursives then
@@ -9201,9 +9114,9 @@ function injections.handler(head,where,keep)
for i=1,nofvalid do
local n=valid[i]
if not mk[n] then
- local n_cursbase=n[a_cursbase]
+ local n_cursbase=getattr(n,a_cursbase)
if p_cursbase then
- local n_curscurs=n[a_curscurs]
+ local n_curscurs=getattr(n,a_curscurs)
if p_cursbase==n_curscurs then
local c=cursives[n_curscurs]
if c then
@@ -9226,20 +9139,20 @@ function injections.handler(head,where,keep)
end
end
elseif maxt>0 then
- local ny=n.yoffset
+ local ny=getfield(n,"yoffset")
for i=maxt,1,-1 do
ny=ny+d[i]
local ti=t[i]
- ti.yoffset=ti.yoffset+ny
+ setfield(ti,"yoffset",getfield(ti,"yoffset")+ny)
end
maxt=0
end
if not n_cursbase and maxt>0 then
- local ny=n.yoffset
+ local ny=getfield(n,"yoffset")
for i=maxt,1,-1 do
ny=ny+d[i]
local ti=t[i]
- ti.yoffset=ny
+ setfield(ti,"yoffset",ny)
end
maxt=0
end
@@ -9247,11 +9160,11 @@ function injections.handler(head,where,keep)
end
end
if maxt>0 then
- local ny=n.yoffset
+ local ny=getfield(n,"yoffset")
for i=maxt,1,-1 do
ny=ny+d[i]
local ti=t[i]
- ti.yoffset=ny
+ setfield(ti,"yoffset",ny)
end
maxt=0
end
@@ -9262,57 +9175,66 @@ function injections.handler(head,where,keep)
if has_marks then
for i=1,nofvalid do
local p=valid[i]
- local p_markbase=p[a_markbase]
+ local p_markbase=getattr(p,a_markbase)
if p_markbase then
local mrks=marks[p_markbase]
local nofmarks=#mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark=n[a_markmark]
+ for n in traverse_id(glyph_code,getnext(p)) do
+ local n_markmark=getattr(n,a_markmark)
if p_markbase==n_markmark then
- local index=n[a_markdone] or 1
+ local index=getattr(n,a_markdone) or 1
local d=mrks[index]
if d then
local rlmode=d[3]
local k=wx[p]
+ local px=getfield(p,"xoffset")
+ local ox=0
if k then
local x=k[2]
local w=k[4]
if w then
if rlmode and rlmode>=0 then
- n.xoffset=p.xoffset-p.width+d[1]-(w-x)
+ ox=px-getfield(p,"width")+d[1]-(w-x)
else
- n.xoffset=p.xoffset-d[1]-x
+ ox=px-d[1]-x
end
else
if rlmode and rlmode>=0 then
- n.xoffset=p.xoffset-p.width+d[1]
+ ox=px-getfield(p,"width")+d[1]
else
- n.xoffset=p.xoffset-d[1]-x
+ ox=px-d[1]-x
end
end
else
+ local wp=getfield(p,"width")
+ local wn=getfield(n,"width")
if rlmode and rlmode>=0 then
- n.xoffset=p.xoffset-p.width+d[1]
+ ox=px-wp+d[1]
else
- n.xoffset=p.xoffset-d[1]
+ ox=px-d[1]
end
- local w=n.width
- if w~=0 then
- insert_node_before(head,n,newkern(-w/2))
- insert_node_after(head,n,newkern(-w/2))
+ if wn~=0 then
+ insert_node_before(head,n,newkern(-wn/2))
+ insert_node_after(head,n,newkern(-wn/2))
end
end
+ setfield(n,"xoffset",ox)
+ local py=getfield(p,"yoffset")
+ local oy=0
if mk[p] then
- n.yoffset=p.yoffset+d[2]
+ oy=py+d[2]
else
- n.yoffset=n.yoffset+p.yoffset+d[2]
+ oy=getfield(n,"yoffset")+py+d[2]
end
+ setfield(n,"yoffset",oy)
if nofmarks==1 then
break
else
nofmarks=nofmarks-1
end
end
+ elseif not n_markmark then
+ break
else
end
end
@@ -9364,6 +9286,7 @@ function injections.handler(head,where,keep)
if not keep then
kerns={}
end
+head=tonode(head)
return head,true
elseif not keep then
kerns,cursives,marks={},{},{}
@@ -9373,14 +9296,14 @@ function injections.handler(head,where,keep)
trace(head)
end
for n in traverse_id(glyph_code,head) do
- if n.subtype<256 then
- local k=n[a_kernpair]
+ if getsubtype(n)<256 then
+ local k=getattr(n,a_kernpair)
if k then
local kk=kerns[k]
if kk then
local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4]
if y and y~=0 then
- n.yoffset=y
+ setfield(n,"yoffset",y)
end
if w then
local wx=w-x
@@ -9411,10 +9334,10 @@ function injections.handler(head,where,keep)
if not keep then
kerns={}
end
- return head,true
+ return tonode(head),true
else
end
- return head,false
+ return tonode(head),false
end
end -- closure
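-- A minimal sketch of the accessor idiom the injection code now uses,
-- assuming a LuaTeX node list in `head` and the nodes.nuts helpers defined
-- above: convert to direct nodes once, go through the get*/set* accessors
-- instead of field indexing, and convert back before handing the list to TeX.
local nuts        = nodes.nuts
local tonut       = nuts.tonut
local tonode      = nuts.tonode
local setfield    = nuts.setfield
local traverse_id = nuts.traverse_id
local glyph_code  = nodes.nodecodes.glyph

local function raise_glyphs(head, amount)  -- illustrative helper, not from the patch
  local h = tonut(head)
  for n in traverse_id(glyph_code, h) do
    setfield(n, "yoffset", amount)         -- instead of n.yoffset = amount
  end
  return tonode(h)
end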
@@ -9829,12 +9752,25 @@ registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
registertracker("otf.actions","otf.replacements,otf.positions")
registertracker("otf.injections","nodes.injections")
registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-local insert_node_after=node.insert_after
-local delete_node=nodes.delete
-local copy_node=node.copy
-local find_node_tail=node.tail or node.slide
-local flush_node_list=node.flush_list
-local end_of_math=node.end_of_math
+local nuts=nodes.nuts
+local tonode=nuts.tonode
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getattr=nuts.getattr
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local setfield=nuts.setfield
+local setattr=nuts.setattr
+local insert_node_after=nuts.insert_after
+local delete_node=nuts.delete
+local copy_node=nuts.copy
+local find_node_tail=nuts.tail
+local flush_node_list=nuts.flush_list
+local end_of_math=nuts.end_of_math
local setmetatableindex=table.setmetatableindex
local zwnj=0x200C
local zwj=0x200D
@@ -9945,83 +9881,83 @@ local function pref(kind,lookupname)
return formatters["feature %a, lookup %a"](kind,lookupname)
end
local function copy_glyph(g)
- local components=g.components
+ local components=getfield(g,"components")
if components then
- g.components=nil
+ setfield(g,"components",nil)
local n=copy_node(g)
- g.components=components
+ setfield(g,"components",components)
return n
else
return copy_node(g)
end
end
local function markstoligature(kind,lookupname,head,start,stop,char)
- if start==stop and start.char==char then
+ if start==stop and getchar(start)==char then
return head,start
else
- local prev=start.prev
- local next=stop.next
- start.prev=nil
- stop.next=nil
+ local prev=getprev(start)
+ local next=getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local base=copy_glyph(start)
if head==start then
head=base
end
- base.char=char
- base.subtype=ligature_code
- base.components=start
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
if prev then
- prev.next=base
+ setfield(prev,"next",base)
end
if next then
- next.prev=base
+ setfield(next,"prev",base)
end
- base.next=next
- base.prev=prev
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
return head,base
end
end
local function getcomponentindex(start)
- if start.id~=glyph_code then
+ if getid(start)~=glyph_code then
return 0
- elseif start.subtype==ligature_code then
+ elseif getsubtype(start)==ligature_code then
local i=0
- local components=start.components
+ local components=getfield(start,"components")
while components do
i=i+getcomponentindex(components)
- components=components.next
+ components=getnext(components)
end
return i
- elseif not marks[start.char] then
+ elseif not marks[getchar(start)] then
return 1
else
return 0
end
end
local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
- if start==stop and start.char==char then
- start.char=char
+ if start==stop and getchar(start)==char then
+ setfield(start,"char",char)
return head,start
end
- local prev=start.prev
- local next=stop.next
- start.prev=nil
- stop.next=nil
+ local prev=getprev(start)
+ local next=getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local base=copy_glyph(start)
if start==head then
head=base
end
- base.char=char
- base.subtype=ligature_code
- base.components=start
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
if prev then
- prev.next=base
+ setfield(prev,"next",base)
end
if next then
- next.prev=base
+ setfield(next,"prev",base)
end
- base.next=next
- base.prev=prev
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
if not discfound then
local deletemarks=markflag~="mark"
local components=start
@@ -10030,42 +9966,42 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
local head=base
local current=base
while start do
- local char=start.char
+ local char=getchar(start)
if not marks[char] then
baseindex=baseindex+componentindex
componentindex=getcomponentindex(start)
elseif not deletemarks then
- start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ setattr(start,a_ligacomp,baseindex+(getattr(start,a_ligacomp) or componentindex))
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp))
end
head,current=insert_node_after(head,current,copy_node(start))
elseif trace_marks then
logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
end
- start=start.next
+ start=getnext(start)
end
- local start=current.next
- while start and start.id==glyph_code do
- local char=start.char
+ local start=getnext(current)
+ while start and getid(start)==glyph_code do
+ local char=getchar(start)
if marks[char] then
- start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ setattr(start,a_ligacomp,baseindex+(getattr(start,a_ligacomp) or componentindex))
if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp))
end
else
break
end
- start=start.next
+ start=getnext(start)
end
end
return head,base
end
function handlers.gsub_single(head,start,kind,lookupname,replacement)
if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
end
- start.char=replacement
+ setfield(start,"char",replacement)
return head,start,true
end
local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
@@ -10091,7 +10027,7 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
end
elseif value==0 then
- return start.char,trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
elseif value<1 then
return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
else
@@ -10102,25 +10038,25 @@ end
local function multiple_glyphs(head,start,multiple,ignoremarks)
local nofmultiples=#multiple
if nofmultiples>0 then
- start.char=multiple[1]
+ setfield(start,"char",multiple[1])
if nofmultiples>1 then
- local sn=start.next
+ local sn=getnext(start)
for k=2,nofmultiples do
local n=copy_node(start)
- n.char=multiple[k]
- n.next=sn
- n.prev=start
+ setfield(n,"char",multiple[k])
+ setfield(n,"next",sn)
+ setfield(n,"prev",start)
if sn then
- sn.prev=n
+ setfield(sn,"prev",n)
end
- start.next=n
+ setfield(start,"next",n)
start=n
end
end
return head,start,true
else
if trace_multiples then
- logprocess("no multiple for %s",gref(start.char))
+ logprocess("no multiple for %s",gref(getchar(start)))
end
return head,start,false
end
@@ -10130,34 +10066,34 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence
local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives)
if choice then
if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
end
- start.char=choice
+ setfield(start,"char",choice)
else
if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
end
end
return head,start,true
end
function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
end
return multiple_glyphs(head,start,multiple,sequence.flags[1])
end
function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s,stop,discfound=start.next,nil,false
- local startchar=start.char
+ local s,stop,discfound=getnext(start),nil,false
+ local startchar=getchar(start)
if marks[startchar] then
while s do
- local id=s.id
- if id==glyph_code and s.font==currentfont and s.subtype<256 then
- local lg=ligature[s.char]
+ local id=getid(s)
+ if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then
+ local lg=ligature[getchar(s)]
if lg then
stop=s
ligature=lg
- s=s.next
+ s=getnext(s)
else
break
end
@@ -10169,9 +10105,9 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
local lig=ligature.ligature
if lig then
if trace_ligatures then
- local stopchar=stop.char
+ local stopchar=getchar(stop)
head,start=markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
else
head,start=markstoligature(kind,lookupname,head,start,stop,lig)
end
@@ -10182,18 +10118,18 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
else
local skipmark=sequence.flags[1]
while s do
- local id=s.id
- if id==glyph_code and s.subtype<256 then
- if s.font==currentfont then
- local char=s.char
+ local id=getid(s)
+ if id==glyph_code and getsubtype(s)<256 then
+ if getfont(s)==currentfont then
+ local char=getchar(s)
if skipmark and marks[char] then
- s=s.next
+ s=getnext(s)
else
local lg=ligature[char]
if lg then
stop=s
ligature=lg
- s=s.next
+ s=getnext(s)
else
break
end
@@ -10203,7 +10139,7 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
end
elseif id==disc_code then
discfound=true
- s=s.next
+ s=getnext(s)
else
break
end
@@ -10212,36 +10148,35 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
if lig then
if stop then
if trace_ligatures then
- local stopchar=stop.char
+ local stopchar=getchar(stop)
head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
else
head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
end
- return head,start,true
else
- start.char=lig
+ setfield(start,"char",lig)
if trace_ligatures then
logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
end
- return head,start,true
end
+ return head,start,true
else
end
end
return head,start,false
end
function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
- local base=start.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
if marks[basechar] then
while true do
- base=base.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- basechar=base.char
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
if not marks[basechar] then
break
end
@@ -10290,16 +10225,16 @@ function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence
return head,start,false
end
function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
- local base=start.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
if marks[basechar] then
while true do
- base=base.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- basechar=base.char
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
if not marks[basechar] then
break
end
@@ -10311,7 +10246,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
end
end
- local index=start[a_ligacomp]
+ local index=getattr(start,a_ligacomp)
local baseanchors=descriptions[basechar]
if baseanchors then
baseanchors=baseanchors.anchors
@@ -10356,22 +10291,22 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
return head,start,false
end
function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
- local base=start.prev
- local slc=start[a_ligacomp]
+ local base=getprev(start)
+ local slc=getattr(start,a_ligacomp)
if slc then
while base do
- local blc=base[a_ligacomp]
+ local blc=getattr(base,a_ligacomp)
if blc and blc~=slc then
- base=base.prev
+ base=getprev(base)
else
break
end
end
end
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
local baseanchors=descriptions[basechar]
if baseanchors then
baseanchors=baseanchors.anchors
@@ -10409,20 +10344,20 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
return head,start,false
end
function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
- local alreadydone=cursonce and start[a_cursbase]
+ local alreadydone=cursonce and getattr(start,a_cursbase)
if not alreadydone then
local done=false
- local startchar=start.char
+ local startchar=getchar(start)
if marks[startchar] then
if trace_cursive then
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt=start.next
- while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
- local nextchar=nxt.char
+ local nxt=getnext(start)
+ while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do
+ local nextchar=getchar(nxt)
if marks[nextchar] then
- nxt=nxt.next
+ nxt=getnext(nxt)
else
local entryanchors=descriptions[nextchar]
if entryanchors then
@@ -10456,13 +10391,13 @@ function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
return head,start,done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
end
return head,start,false
end
end
function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
- local startchar=start.char
+ local startchar=getchar(start)
local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
@@ -10470,33 +10405,33 @@ function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
return head,start,false
end
function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
- local snext=start.next
+ local snext=getnext(start)
if not snext then
return head,start,false
else
local prev,done=start,false
local factor=tfmdata.parameters.factor
local lookuptype=lookuptypes[lookupname]
- while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
- local nextchar=snext.char
+ while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do
+ local nextchar=getchar(snext)
local krn=kerns[nextchar]
if not krn and marks[nextchar] then
prev=snext
- snext=snext.next
+ snext=getnext(snext)
else
if not krn then
elseif type(krn)=="table" then
if lookuptype=="pair" then
local a,b=krn[2],krn[3]
if a and #a>0 then
- local startchar=start.char
+ local startchar=getchar(start)
local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b>0 then
- local startchar=start.char
+ local startchar=getchar(start)
local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
@@ -10509,7 +10444,7 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
elseif krn~=0 then
local k=setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
end
done=true
end
@@ -10544,13 +10479,13 @@ function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,looku
return head,start,false
end
function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char=start.char
+ local char=getchar(start)
local replacement=replacements[char]
if replacement then
if trace_singles then
logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
end
- start.char=replacement
+ setfield(start,"char",replacement)
return head,start,true
else
return head,start,false
@@ -10563,8 +10498,8 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
end
while current do
- if current.id==glyph_code then
- local currentchar=current.char
+ if getid(current)==glyph_code then
+ local currentchar=getchar(current)
local lookupname=subtables[1]
local replacement=lookuphash[lookupname]
if not replacement then
@@ -10581,21 +10516,21 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
if trace_singles then
logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
end
- current.char=replacement
+ setfield(current,"char",replacement)
end
end
return head,start,true
elseif current==stop then
break
else
- current=current.next
+ current=getnext(current)
end
end
return head,start,false
end
chainmores.gsub_single=chainprocs.gsub_single
function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local startchar=start.char
+ local startchar=getchar(start)
local subtables=currentlookup.subtables
local lookupname=subtables[1]
local replacements=lookuphash[lookupname]
@@ -10624,8 +10559,8 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
local subtables=currentlookup.subtables
local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
while current do
- if current.id==glyph_code then
- local currentchar=current.char
+ if getid(current)==glyph_code then
+ local currentchar=getchar(current)
local lookupname=subtables[1]
local alternatives=lookuphash[lookupname]
if not alternatives then
@@ -10640,7 +10575,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
end
- start.char=choice
+ setfield(start,"char",choice)
else
if trace_alternatives then
logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
@@ -10654,14 +10589,14 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
elseif current==stop then
break
else
- current=current.next
+ current=getnext(current)
end
end
return head,start,false
end
chainmores.gsub_alternate=chainprocs.gsub_alternate
function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar=start.char
+ local startchar=getchar(start)
local subtables=currentlookup.subtables
local lookupname=subtables[1]
local ligatures=lookuphash[lookupname]
@@ -10676,20 +10611,20 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
end
else
- local s=start.next
+ local s=getnext(start)
local discfound=false
local last=stop
local nofreplacements=0
local skipmark=currentlookup.flags[1]
while s do
- local id=s.id
+ local id=getid(s)
if id==disc_code then
- s=s.next
+ s=getnext(s)
discfound=true
else
- local schar=s.char
+ local schar=getchar(s)
if skipmark and marks[schar] then
- s=s.next
+ s=getnext(s)
else
local lg=ligatures[schar]
if lg then
@@ -10697,7 +10632,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if s==stop then
break
else
- s=s.next
+ s=getnext(s)
end
else
break
@@ -10714,7 +10649,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start==stop then
logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
end
end
head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
@@ -10723,7 +10658,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start==stop then
logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
end
end
end
@@ -10732,7 +10667,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
end
chainmores.gsub_ligature=chainprocs.gsub_ligature
function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
local subtables=currentlookup.subtables
local lookupname=subtables[1]
@@ -10741,14 +10676,14 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
markanchors=markanchors[markchar]
end
if markanchors then
- local base=start.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
if marks[basechar] then
while true do
- base=base.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- basechar=base.char
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
if not marks[basechar] then
break
end
@@ -10795,7 +10730,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
return head,start,false
end
function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
local subtables=currentlookup.subtables
local lookupname=subtables[1]
@@ -10804,14 +10739,14 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
markanchors=markanchors[markchar]
end
if markanchors then
- local base=start.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
if marks[basechar] then
while true do
- base=base.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- basechar=base.char
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
if not marks[basechar] then
break
end
@@ -10823,7 +10758,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
end
end
- local index=start[a_ligacomp]
+ local index=getattr(start,a_ligacomp)
local baseanchors=descriptions[basechar].anchors
if baseanchors then
local baseanchors=baseanchors['baselig']
@@ -10862,7 +10797,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
return head,start,false
end
function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
local subtables=currentlookup.subtables
local lookupname=subtables[1]
@@ -10871,20 +10806,20 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
markanchors=markanchors[markchar]
end
if markanchors then
- local base=start.prev
- local slc=start[a_ligacomp]
+ local base=getprev(start)
+ local slc=getattr(start,a_ligacomp)
if slc then
while base do
- local blc=base[a_ligacomp]
+ local blc=getattr(base,a_ligacomp)
if blc and blc~=slc then
- base=base.prev
+ base=getprev(base)
else
break
end
end
end
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
local baseanchors=descriptions[basechar].anchors
if baseanchors then
baseanchors=baseanchors['basemark']
@@ -10920,9 +10855,9 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
return head,start,false
end
function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone=cursonce and start[a_cursbase]
+ local alreadydone=cursonce and getattr(start,a_cursbase)
if not alreadydone then
- local startchar=start.char
+ local startchar=getchar(start)
local subtables=currentlookup.subtables
local lookupname=subtables[1]
local exitanchors=lookuphash[lookupname]
@@ -10936,11 +10871,11 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt=start.next
- while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
- local nextchar=nxt.char
+ local nxt=getnext(start)
+ while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do
+ local nextchar=getchar(nxt)
if marks[nextchar] then
- nxt=nxt.next
+ nxt=getnext(nxt)
else
local entryanchors=descriptions[nextchar]
if entryanchors then
@@ -10974,7 +10909,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
return head,start,done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
end
return head,start,false
end
@@ -10982,7 +10917,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
return head,start,false
end
function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local startchar=start.char
+ local startchar=getchar(start)
local subtables=currentlookup.subtables
local lookupname=subtables[1]
local kerns=lookuphash[lookupname]
@@ -10999,9 +10934,9 @@ function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lo
end
chainmores.gpos_single=chainprocs.gpos_single
function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local snext=start.next
+ local snext=getnext(start)
if snext then
- local startchar=start.char
+ local startchar=getchar(start)
local subtables=currentlookup.subtables
local lookupname=subtables[1]
local kerns=lookuphash[lookupname]
@@ -11011,26 +10946,26 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
local lookuptype=lookuptypes[lookupname]
local prev,done=start,false
local factor=tfmdata.parameters.factor
- while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
- local nextchar=snext.char
+ while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do
+ local nextchar=getchar(snext)
local krn=kerns[nextchar]
if not krn and marks[nextchar] then
prev=snext
- snext=snext.next
+ snext=getnext(snext)
else
if not krn then
elseif type(krn)=="table" then
if lookuptype=="pair" then
local a,b=krn[2],krn[3]
if a and #a>0 then
- local startchar=start.char
+ local startchar=getchar(start)
local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b>0 then
- local startchar=start.char
+ local startchar=getchar(start)
local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
@@ -11042,7 +10977,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
if a and a~=0 then
local k=setkern(snext,factor,rlmode,a)
if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
end
end
if b and b~=0 then
@@ -11053,7 +10988,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
elseif krn~=0 then
local k=setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
end
done=true
end
@@ -11074,6 +11009,10 @@ local function show_skip(kind,chainname,char,ck,class)
logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
end
end
+local quit_on_no_replacement=true
+directives.register("otf.chain.quitonnoreplacement",function(value)
+ quit_on_no_replacement=value
+end)
local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
local flags=sequence.flags
local done=false
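-- The directive registered above defaults to true: a chain rule that matches
-- but has nothing to replace still counts as handled (done). A sketch of how
-- it would typically be toggled, assuming the full trac-set interface
-- (directives.enable/disable) is available, as in a MkIV run:
--
--   directives.disable("otf.chain.quitonnoreplacement")
--   directives.enable ("otf.chain.quitonnoreplacement")  -- back to the default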
@@ -11091,7 +11030,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
local seq=ck[3]
local s=#seq
if s==1 then
- match=current.id==glyph_code and current.font==currentfont and current.subtype<256 and seq[1][current.char]
+ match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
else
local f,l=ck[4],ck[5]
if f==1 and f==l then
@@ -11099,13 +11038,13 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if f==l then
else
local n=f+1
- last=last.next
+ last=getnext(last)
while n<=l do
if last then
- local id=last.id
+ local id=getid(last)
if id==glyph_code then
- if last.font==currentfont and last.subtype<256 then
- local char=last.char
+ if getfont(last)==currentfont and getsubtype(last)<256 then
+ local char=getchar(last)
local ccd=descriptions[char]
if ccd then
local class=ccd.class
@@ -11114,10 +11053,10 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if trace_skips then
show_skip(kind,chainname,char,ck,class)
end
- last=last.next
+ last=getnext(last)
elseif seq[n][char] then
if n1 then
- local prev=start.prev
+ local prev=getprev(start)
if prev then
local n=f-1
while n>=1 do
if prev then
- local id=prev.id
+ local id=getid(prev)
if id==glyph_code then
- if prev.font==currentfont and prev.subtype<256 then
- local char=prev.char
+ if getfont(prev)==currentfont and getsubtype(prev)<256 then
+ local char=getchar(prev)
local ccd=descriptions[char]
if ccd then
local class=ccd.class
@@ -11184,7 +11123,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
match=false
break
end
- prev=prev.prev
+ prev=getprev(prev)
elseif seq[n][32] then
n=n -1
else
@@ -11204,15 +11143,15 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
if match and s>l then
- local current=last and last.next
+ local current=last and getnext(last)
if current then
local n=l+1
while n<=s do
if current then
- local id=current.id
+ local id=getid(current)
if id==glyph_code then
- if current.font==currentfont and current.subtype<256 then
- local char=current.char
+ if getfont(current)==currentfont and getsubtype(current)<256 then
+ local char=getchar(current)
local ccd=descriptions[char]
if ccd then
local class=ccd.class
@@ -11242,7 +11181,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
match=false
break
end
- current=current.next
+ current=getnext(current)
elseif seq[n][32] then
n=n+1
else
@@ -11265,7 +11204,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if match then
if trace_contexts then
local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
- local char=start.char
+ local char=getchar(start)
if ck[9] then
logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
@@ -11299,12 +11238,12 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
repeat
if skipped then
while true do
- local char=start.char
+ local char=getchar(start)
local ccd=descriptions[char]
if ccd then
local class=ccd.class
if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
- start=start.next
+ start=getnext(start)
else
break
end
@@ -11334,7 +11273,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
if start then
- start=start.next
+ start=getnext(start)
else
end
until i>nofchainlookups
@@ -11344,7 +11283,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if replacements then
head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements)
else
- done=true
+ done=quit_on_no_replacement
if trace_contexts then
logprocess("%s: skipping match",cref(kind,chainname))
end
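A minimal usage sketch for the directive registered and used above, assuming the regular ConTeXt directives interface:

    -- a minimal sketch, assuming directives.enable/disable are available (as in
    -- stock ConTeXt); the name matches the registration above
    directives.enable ("otf.chain.quitonnoreplacement")  -- default: a match without
                                                         -- replacements still counts as done
    directives.disable("otf.chain.quitonnoreplacement")  -- such matches are no longer
                                                         -- flagged as done
    -- at the TeX end the same switch can be reached with
    -- \enabledirectives[otf.chain.quitonnoreplacement]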
@@ -11461,6 +11400,7 @@ local function featuresprocessor(head,font,attr)
if not lookuphash then
return head,false
end
+ head=tonut(head)
if trace_steps then
checkstep(head)
end
@@ -11493,10 +11433,10 @@ local function featuresprocessor(head,font,attr)
local handler=handlers[typ]
local start=find_node_tail(head)
while start do
- local id=start.id
+ local id=getid(start)
if id==glyph_code then
- if start.font==font and start.subtype<256 then
- local a=start[0]
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
if a then
a=a==attr
else
@@ -11507,7 +11447,7 @@ local function featuresprocessor(head,font,attr)
local lookupname=subtables[i]
local lookupcache=lookuphash[lookupname]
if lookupcache then
- local lookupmatch=lookupcache[start.char]
+ local lookupmatch=lookupcache[getchar(start)]
if lookupmatch then
head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
if success then
@@ -11518,15 +11458,15 @@ local function featuresprocessor(head,font,attr)
report_missing_cache(typ,lookupname)
end
end
- if start then start=start.prev end
+ if start then start=getprev(start) end
else
- start=start.prev
+ start=getprev(start)
end
else
- start=start.prev
+ start=getprev(start)
end
else
- start=start.prev
+ start=getprev(start)
end
end
else
@@ -11544,16 +11484,16 @@ local function featuresprocessor(head,font,attr)
local head=start
local done=false
while start do
- local id=start.id
- if id==glyph_code and start.font==font and start.subtype<256 then
- local a=start[0]
+ local id=getid(start)
+ if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
else
- a=not attribute or start[a_state]==attribute
+ a=not attribute or getattr(start,a_state)==attribute
end
if a then
- local lookupmatch=lookupcache[start.char]
+ local lookupmatch=lookupcache[getchar(start)]
if lookupmatch then
local ok
head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
@@ -11561,12 +11501,12 @@ local function featuresprocessor(head,font,attr)
done=true
end
end
- if start then start=start.next end
+ if start then start=getnext(start) end
else
- start=start.next
+ start=getnext(start)
end
else
- start=start.next
+ start=getnext(start)
end
end
if done then
@@ -11575,18 +11515,18 @@ local function featuresprocessor(head,font,attr)
end
end
local function kerndisc(disc)
- local prev=disc.prev
- local next=disc.next
+ local prev=getprev(disc)
+ local next=getnext(disc)
if prev and next then
- prev.next=next
- local a=prev[0]
+ setfield(prev,"next",next)
+ local a=getattr(prev,0)
if a then
- a=(a==attr) and (not attribute or prev[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(prev,a_state)==attribute)
else
- a=not attribute or prev[a_state]==attribute
+ a=not attribute or getattr(prev,a_state)==attribute
end
if a then
- local lookupmatch=lookupcache[prev.char]
+ local lookupmatch=lookupcache[getchar(prev)]
if lookupmatch then
local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
if ok then
@@ -11595,22 +11535,22 @@ local function featuresprocessor(head,font,attr)
end
end
end
- prev.next=disc
+ setfield(prev,"next",disc)
end
return next
end
while start do
- local id=start.id
+ local id=getid(start)
if id==glyph_code then
- if start.font==font and start.subtype<256 then
- local a=start[0]
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
else
- a=not attribute or start[a_state]==attribute
+ a=not attribute or getattr(start,a_state)==attribute
end
if a then
- local lookupmatch=lookupcache[start.char]
+ local lookupmatch=lookupcache[getchar(start)]
if lookupmatch then
local ok
head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
@@ -11618,38 +11558,38 @@ local function featuresprocessor(head,font,attr)
success=true
end
end
- if start then start=start.next end
+ if start then start=getnext(start) end
else
- start=start.next
+ start=getnext(start)
end
else
- start=start.next
+ start=getnext(start)
end
elseif id==disc_code then
- if start.subtype==discretionary_code then
- local pre=start.pre
+ if getsubtype(start)==discretionary_code then
+ local pre=getfield(start,"pre")
if pre then
local new=subrun(pre)
- if new then start.pre=new end
+ if new then setfield(start,"pre",new) end
end
- local post=start.post
+ local post=getfield(start,"post")
if post then
local new=subrun(post)
- if new then start.post=new end
+ if new then setfield(start,"post",new) end
end
- local replace=start.replace
+ local replace=getfield(start,"replace")
if replace then
local new=subrun(replace)
- if new then start.replace=new end
+ if new then setfield(start,"replace",new) end
end
elseif typ=="gpos_single" or typ=="gpos_pair" then
kerndisc(start)
end
- start=start.next
+ start=getnext(start)
elseif id==whatsit_code then
- local subtype=start.subtype
+ local subtype=getsubtype(start)
if subtype==dir_code then
- local dir=start.dir
+ local dir=getfield(start,"dir")
if dir=="+TRT" or dir=="+TLT" then
topstack=topstack+1
dirstack[topstack]=dir
@@ -11668,7 +11608,7 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype==localpar_code then
- local dir=start.dir
+ local dir=getfield(start,"dir")
if dir=="TRT" then
rlparmode=-1
elseif dir=="TLT" then
@@ -11681,11 +11621,11 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
- start=start.next
+ start=getnext(start)
elseif id==math_code then
- start=end_of_math(start).next
+ start=getnext(end_of_math(start))
else
- start=start.next
+ start=getnext(start)
end
end
end
@@ -11694,20 +11634,20 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
local head=start
local done=false
while start do
- local id=start.id
- if id==glyph_code and start.id==font and start.subtype<256 then
- local a=start[0]
+ local id=getid(start)
+ if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
else
- a=not attribute or start[a_state]==attribute
+ a=not attribute or getattr(start,a_state)==attribute
end
if a then
for i=1,ns do
local lookupname=subtables[i]
local lookupcache=lookuphash[lookupname]
if lookupcache then
- local lookupmatch=lookupcache[start.char]
+ local lookupmatch=lookupcache[getchar(start)]
if lookupmatch then
local ok
head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
@@ -11722,12 +11662,12 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_missing_cache(typ,lookupname)
end
end
- if start then start=start.next end
+ if start then start=getnext(start) end
else
- start=start.next
+ start=getnext(start)
end
else
- start=start.next
+ start=getnext(start)
end
end
if done then
@@ -11736,22 +11676,22 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
end
end
local function kerndisc(disc)
- local prev=disc.prev
- local next=disc.next
+ local prev=getprev(disc)
+ local next=getnext(disc)
if prev and next then
- prev.next=next
- local a=prev[0]
+ setfield(prev,"next",next)
+ local a=getattr(prev,0)
if a then
- a=(a==attr) and (not attribute or prev[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(prev,a_state)==attribute)
else
- a=not attribute or prev[a_state]==attribute
+ a=not attribute or getattr(prev,a_state)==attribute
end
if a then
for i=1,ns do
local lookupname=subtables[i]
local lookupcache=lookuphash[lookupname]
if lookupcache then
- local lookupmatch=lookupcache[prev.char]
+ local lookupmatch=lookupcache[getchar(prev)]
if lookupmatch then
local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
if ok then
@@ -11764,26 +11704,26 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
end
end
end
- prev.next=disc
+ setfield(prev,"next",disc)
end
return next
end
while start do
- local id=start.id
+ local id=getid(start)
if id==glyph_code then
- if start.font==font and start.subtype<256 then
- local a=start[0]
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
else
- a=not attribute or start[a_state]==attribute
+ a=not attribute or getattr(start,a_state)==attribute
end
if a then
for i=1,ns do
local lookupname=subtables[i]
local lookupcache=lookuphash[lookupname]
if lookupcache then
- local lookupmatch=lookupcache[start.char]
+ local lookupmatch=lookupcache[getchar(start)]
if lookupmatch then
local ok
head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
@@ -11798,38 +11738,38 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_missing_cache(typ,lookupname)
end
end
- if start then start=start.next end
+ if start then start=getnext(start) end
else
- start=start.next
+ start=getnext(start)
end
else
- start=start.next
+ start=getnext(start)
end
elseif id==disc_code then
- if start.subtype==discretionary_code then
- local pre=start.pre
+ if getsubtype(start)==discretionary_code then
+ local pre=getfield(start,"pre")
if pre then
local new=subrun(pre)
- if new then start.pre=new end
+ if new then setfield(start,"pre",new) end
end
- local post=start.post
+ local post=getfield(start,"post")
if post then
local new=subrun(post)
- if new then start.post=new end
+ if new then setfield(start,"post",new) end
end
- local replace=start.replace
+ local replace=getfield(start,"replace")
if replace then
local new=subrun(replace)
- if new then start.replace=new end
+ if new then setfield(start,"replace",new) end
end
elseif typ=="gpos_single" or typ=="gpos_pair" then
kerndisc(start)
end
- start=start.next
+ start=getnext(start)
elseif id==whatsit_code then
- local subtype=start.subtype
+ local subtype=getsubtype(start)
if subtype==dir_code then
- local dir=start.dir
+ local dir=getfield(start,"dir")
if dir=="+TRT" or dir=="+TLT" then
topstack=topstack+1
dirstack[topstack]=dir
@@ -11848,7 +11788,7 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype==localpar_code then
- local dir=start.dir
+ local dir=getfield(start,"dir")
if dir=="TRT" then
rlparmode=-1
elseif dir=="TLT" then
@@ -11861,11 +11801,11 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
- start=start.next
+ start=getnext(start)
elseif id==math_code then
- start=end_of_math(start).next
+ start=getnext(end_of_math(start))
else
- start=start.next
+ start=getnext(start)
end
end
end
@@ -11877,6 +11817,7 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
registerstep(head)
end
end
+ head=tonode(head)
return head,done
end
local function generic(lookupdata,lookupname,unicode,lookuphash)
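The hunks above move the node walkers from field access on userdata nodes (start.next, start.char, start.font) to accessor calls (getnext, getchar, getfont) on direct nodes, converting once with tonut at the top of featuresprocessor and back with tonode just before returning. A minimal sketch of that access pattern, assuming LuaTeX's stock node.direct names rather than the local aliases used in the patch:

    local direct     = node.direct
    local todirect   = direct.todirect      -- the tonut(...) above
    local tonode     = direct.tonode
    local getid      = direct.getid
    local getnext    = direct.getnext
    local getchar    = direct.getchar

    local glyph_code = node.id("glyph")

    -- count the glyphs in a node list, staying in direct mode for the whole walk
    local function countglyphs(head)
        head = todirect(head)               -- convert once on entry
        local n, current = 0, head
        while current do
            if getid(current) == glyph_code then
                n = n + 1
            end
            current = getnext(current)      -- instead of current.next
        end
        return n
    end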
diff --git a/tex/generic/context/luatex/luatex-fonts-otn.lua b/tex/generic/context/luatex/luatex-fonts-otn.lua
deleted file mode 100644
index c57be5f02..000000000
--- a/tex/generic/context/luatex/luatex-fonts-otn.lua
+++ /dev/null
@@ -1,2848 +0,0 @@
-if not modules then modules = { } end modules ['font-otn'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- preprocessors = { "nodes" }
-
--- this is still somewhat preliminary and it will get better in due time;
--- much functionality could only be implemented thanks to the husayni font
--- of Idris Samawi Hamid to whom we dedicate this module.
-
--- in retrospect it always looks easy but believe it or not, it took a lot
--- of work to get proper open type support done: buggy fonts, fuzzy specs,
--- specially made testfonts, many skype sessions between taco, idris and me,
--- torture tests etc etc ... unfortunately the code does not show how much
--- time it took ...
-
--- todo:
---
--- kerning is probably not yet ok for latin around disc nodes (interesting challenge)
--- extension infrastructure (for usage out of context)
--- sorting features according to vendors/renderers
--- alternative loop quitters
--- check cursive and r2l
--- find out where ignore-mark-classes went
--- default features (per language, script)
--- handle positions (we need example fonts)
--- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
--- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
--- remove some optimizations (when I have a faster machine)
---
--- maybe redo the lot some way (more context specific)
-
---[[ldx--
-This module is a bit more split up than I'd like but since we also want to test
-with plain TeX it has to be so. This module is part of ConTeXt and discussion
-about improvements and functionality mostly happens on the ConTeXt
-mailing list.
-
-The specification of OpenType is kind of vague. Apart from the lack of a proper
-free specification there's also the problem that Microsoft and Adobe
-may have their own interpretation of how and in what order to apply features.
-In general the Microsoft website has more detailed specifications and is a
-better reference. There is also some information in the FontForge help files.
-
-Because so much is possible, fonts might contain bugs and/or be made to
-work with certain renderers. These may evolve over time which may have the side
-effect that suddenly fonts behave differently.
-
-After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
-implementation. Of course all errors are mine and of course the code can be
-improved. There are quite some optimizations going on here and processing speed
-is currently acceptable. Not all functions are implemented yet, often because I
-lack the fonts for testing. Many scripts are not yet supported either, but I will
-look into them as soon as users ask for it.
-
-Because there are different interpretations possible, I will extend the code
-with more (configurable) variants. I can also add hooks for users so that they can
-write their own extensions.
-
-Glyphs are indexed not by unicode but in their own way. This is because there is no
-relationship with unicode at all, apart from the fact that a font might cover certain
-ranges of characters. One character can have multiple shapes. However, at the
-TeX end we use unicode, so all extra glyphs are mapped into a private
-space. This is needed because we need to access them and TeX has to include
-them in the output eventually.
-
-The raw table as it comes from the font loader gets reorganized to fit our needs.
-In that process the table is packed (similar tables are shared) and cached on disk
-so that successive runs can use the optimized table (after loading the table is
-unpacked). The flattening code used later is a prelude to an even more compact table
-format (and as such it keeps evolving).
-
-This module is sparsely documented because it is a moving target. The table format
-of the reader changes and we experiment a lot with different methods for supporting
-features.
-
-As with the rest of the code, we may decide to store more information in the
-table.
-
-Incrementing the version number will force a re-cache. We jump the number by one
-when there's a fix in the loader library or in this code that
-results in different tables.
---ldx]]--
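A minimal sketch of the private-space mapping mentioned above (not the module's own code; the 0xF0000 offset is only an assumption for illustration):

    -- glyphs that have no unicode still need an addressable slot
    local privateoffset = 0xF0000           -- assumed private use offset
    local privateslot   = privateoffset

    local function privatecode(description)
        local unicode = description.unicode
        if not unicode then
            unicode = privateslot           -- park the glyph in private space
            privateslot = privateslot + 1
            description.unicode = unicode
        end
        return unicode
    end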
-
--- action handler chainproc chainmore comment
---
--- gsub_single ok ok ok
--- gsub_multiple ok ok not implemented yet
--- gsub_alternate ok ok not implemented yet
--- gsub_ligature ok ok ok
--- gsub_context ok --
--- gsub_contextchain ok --
--- gsub_reversecontextchain ok --
--- chainsub -- ok
--- reversesub -- ok
--- gpos_mark2base ok ok
--- gpos_mark2ligature ok ok
--- gpos_mark2mark ok ok
--- gpos_cursive ok untested
--- gpos_single ok ok
--- gpos_pair ok ok
--- gpos_context ok --
--- gpos_contextchain ok --
---
--- todo: contextpos and contextsub and class stuff
---
--- actions:
---
--- handler : actions triggered by lookup
--- chainproc : actions triggered by contextual lookup
--- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
---
--- remark: the 'not implemented yet' variants will be done when we have fonts that use them
--- remark: we need to check what to do with discretionaries
-
--- We used to have independent hashes for lookups but as the tags are unique
--- we now use only one hash. If needed we can have multiple again but in that
--- case I will probably prefix (i.e. rename) the lookups in the cached font file.
-
--- Todo: make plugin feature that operates on char/glyphnode arrays
-
-local concat, insert, remove = table.concat, table.insert, table.remove
-local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-local random = math.random
-local formatters = string.formatters
-
-local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
-
-local registertracker = trackers.register
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
-local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
-local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
-local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
-local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
-local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
-local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
-local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
-local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
-local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
-local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
-local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
-
-local report_direct = logs.reporter("fonts","otf direct")
-local report_subchain = logs.reporter("fonts","otf subchain")
-local report_chain = logs.reporter("fonts","otf chain")
-local report_process = logs.reporter("fonts","otf process")
-local report_prepare = logs.reporter("fonts","otf prepare")
-local report_warning = logs.reporter("fonts","otf warning")
-
-registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
-registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
-
-registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
-registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
-registertracker("otf.actions","otf.replacements,otf.positions")
-registertracker("otf.injections","nodes.injections")
-
-registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local copy_node = node.copy
-local find_node_tail = node.tail or node.slide
-local flush_node_list = node.flush_list
-local end_of_math = node.end_of_math
-
-local setmetatableindex = table.setmetatableindex
-
-local zwnj = 0x200C
-local zwj = 0x200D
-local wildcard = "*"
-local default = "dflt"
-
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-local glyphcodes = nodes.glyphcodes
-local disccodes = nodes.disccodes
-
-local glyph_code = nodecodes.glyph
-local glue_code = nodecodes.glue
-local disc_code = nodecodes.disc
-local whatsit_code = nodecodes.whatsit
-local math_code = nodecodes.math
-
-local dir_code = whatcodes.dir
-local localpar_code = whatcodes.localpar
-
-local discretionary_code = disccodes.discretionary
-
-local ligature_code = glyphcodes.ligature
-
-local privateattribute = attributes.private
-
--- Something is messed up: we have two mark / ligature indices, one at the injection
--- end and one here ... this is based on KE's patches but there is something fishy
--- there as I'm pretty sure that for husayni we need some connection (as it's much
--- more complex than an average font) but I need proper examples of all cases, not
--- of only some.
-
-local a_state = privateattribute('state')
-local a_markbase = privateattribute('markbase')
-local a_markmark = privateattribute('markmark')
-local a_markdone = privateattribute('markdone') -- assigned at the injection end
-local a_cursbase = privateattribute('cursbase')
-local a_curscurs = privateattribute('curscurs')
-local a_cursdone = privateattribute('cursdone')
-local a_kernpair = privateattribute('kernpair')
-local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
-
-local injections = nodes.injections
-local setmark = injections.setmark
-local setcursive = injections.setcursive
-local setkern = injections.setkern
-local setpair = injections.setpair
-
-local markonce = true
-local cursonce = true
-local kernonce = true
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local onetimemessage = fonts.loggers.onetimemessage or function() end
-
-otf.defaultnodealternate = "none" -- first last
-
--- we share some vars here, after all, we have no nested lookups and less code
-
-local tfmdata = false
-local characters = false
-local descriptions = false
-local resources = false
-local marks = false
-local currentfont = false
-local lookuptable = false
-local anchorlookups = false
-local lookuptypes = false
-local handlers = { }
-local rlmode = 0
-local featurevalue = false
-
--- head is always a whatsit so we can safely assume that head is not changed
-
--- we use this for special testing and documentation
-
-local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
-local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
-local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_direct(...)
-end
-
-local function logwarning(...)
- report_direct(...)
-end
-
-local f_unicode = formatters["%U"]
-local f_uniname = formatters["%U (%s)"]
-local f_unilist = formatters["% t (% t)"]
-
-local function gref(n) -- currently the same as in font-otb
- if type(n) == "number" then
- local description = descriptions[n]
- local name = description and description.name
- if name then
- return f_uniname(n,name)
- else
- return f_unicode(n)
- end
- elseif n then
- local num, nam = { }, { }
- for i=1,#n do
- local ni = n[i]
- if tonumber(ni) then -- later we will start at 2
- local di = descriptions[ni]
- num[i] = f_unicode(ni)
- nam[i] = di and di.name or "-"
- end
- end
- return f_unilist(num,nam)
- else
- return ""
- end
-end
-
-local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
- if index then
- return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
- elseif lookupname then
- return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
- elseif chainlookupname then
- return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
- elseif chainname then
- return formatters["feature %a, chain %a"](kind,chainname)
- else
- return formatters["feature %a"](kind)
- end
-end
-
-local function pref(kind,lookupname)
- return formatters["feature %a, lookup %a"](kind,lookupname)
-end
-
--- We can assume that languages that use marks are not hyphenated. We can also assume
--- that at most one discretionary is present.
-
--- We do need components in funny kerning mode but maybe I can better reconstruct them
--- as we do have the font components info available; removing components makes the
--- previous code much simpler. Also, later on copying and freeing becomes easier.
--- However, for arabic we need to keep them around for the sake of mark placement
--- and indices.
-
-local function copy_glyph(g) -- next and prev are untouched !
- local components = g.components
- if components then
- g.components = nil
- local n = copy_node(g)
- g.components = components
- return n
- else
- return copy_node(g)
- end
-end
-
--- start is a mark and we need to keep that one
-
-local function markstoligature(kind,lookupname,head,start,stop,char)
- if start == stop and start.char == char then
- return head, start
- else
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
- local base = copy_glyph(start)
- if head == start then
- head = base
- end
- base.char = char
- base.subtype = ligature_code
- base.components = start
- if prev then
- prev.next = base
- end
- if next then
- next.prev = base
- end
- base.next = next
- base.prev = prev
- return head, base
- end
-end
-
--- The next code is somewhat complicated by the fact that some fonts can have ligatures made
--- from ligatures that themselves have marks. This was identified by Kai in for instance
--- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
--- KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In the next
--- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
--- third component.
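A minimal sketch of that index bookkeeping on a plain Lua list (the real code below walks glyph nodes and their components):

    -- entries are { char = ..., mark = true|false }: every base component bumps
    -- the index, marks inherit the index of the component they belong to
    local function componentindices(list)
        local index = 0
        for i=1,#list do
            local entry = list[i]
            if not entry.mark then
                index = index + 1
            end
            entry.index = index
        end
        return list
    end

For the arabtype sequence above this gives KAF=1, LAM=2 with SHADDA=2, and ALEF=3 with FATHA=3.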
-
-local function getcomponentindex(start)
- if start.id ~= glyph_code then
- return 0
- elseif start.subtype == ligature_code then
- local i = 0
- local components = start.components
- while components do
- i = i + getcomponentindex(components)
- components = components.next
- end
- return i
- elseif not marks[start.char] then
- return 1
- else
- return 0
- end
-end
-
--- eventually we will do positioning in another way (needs additional w/h/d fields)
-
-local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
- if start == stop and start.char == char then
- start.char = char
- return head, start
- end
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
- local base = copy_glyph(start)
- if start == head then
- head = base
- end
- base.char = char
- base.subtype = ligature_code
- base.components = start -- start can have components
- if prev then
- prev.next = base
- end
- if next then
- next.prev = base
- end
- base.next = next
- base.prev = prev
- if not discfound then
- local deletemarks = markflag ~= "mark"
- local components = start
- local baseindex = 0
- local componentindex = 0
- local head = base
- local current = base
- -- first we loop over the glyphs in start .. stop
- while start do
- local char = start.char
- if not marks[char] then
- baseindex = baseindex + componentindex
- componentindex = getcomponentindex(start)
- elseif not deletemarks then -- quite fishy
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
- end
- head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
- elseif trace_marks then
- logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
- end
- start = start.next
- end
- -- we can have one accent as part of a lookup and another following
- -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added)
- local start = current.next
- while start and start.id == glyph_code do
- local char = start.char
- if marks[char] then
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
- if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
- end
- else
- break
- end
- start = start.next
- end
- end
- return head, base
-end
-
-function handlers.gsub_single(head,start,kind,lookupname,replacement)
- if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
- end
- start.char = replacement
- return head, start, true
-end
-
-local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
- local n = #alternatives
- if value == "random" then
- local r = random(1,n)
- return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
- elseif value == "first" then
- return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
- elseif value == "last" then
- return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
- else
- value = tonumber(value)
- if type(value) ~= "number" then
- return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
- elseif value > n then
- local defaultalt = otf.defaultnodealternate
- if defaultalt == "first" then
- return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
- elseif defaultalt == "last" then
- return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
- else
- return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
- end
- elseif value == 0 then
- return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
- elseif value < 1 then
- return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
- else
- return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
- end
- end
-end
-
-local function multiple_glyphs(head,start,multiple,ignoremarks)
- local nofmultiples = #multiple
- if nofmultiples > 0 then
- start.char = multiple[1]
- if nofmultiples > 1 then
- local sn = start.next
- for k=2,nofmultiples do -- todo: use insert_node
--- untested:
---
--- while ignoremarks and marks[sn.char] then
--- local sn = sn.next
--- end
- local n = copy_node(start) -- ignore components
- n.char = multiple[k]
- n.next = sn
- n.prev = start
- if sn then
- sn.prev = n
- end
- start.next = n
- start = n
- end
- end
- return head, start, true
- else
- if trace_multiples then
- logprocess("no multiple for %s",gref(start.char))
- end
- return head, start, false
- end
-end
-
-function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
- end
- start.char = choice
- else
- if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
- end
- end
- return head, start, true
-end
-
-function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
- if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
- end
- return multiple_glyphs(head,start,multiple,sequence.flags[1])
-end
-
-function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s, stop, discfound = start.next, nil, false
- local startchar = start.char
- if marks[startchar] then
- while s do
- local id = s.id
- if id == glyph_code and s.font == currentfont and s.subtype<256 then
- local lg = ligature[s.char]
- if lg then
- stop = s
- ligature = lg
- s = s.next
- else
- break
- end
- else
- break
- end
- end
- if stop then
- local lig = ligature.ligature
- if lig then
- if trace_ligatures then
- local stopchar = stop.char
- head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- end
- return head, start, true
- else
- -- ok, goto next lookup
- end
- end
- else
- local skipmark = sequence.flags[1]
- while s do
- local id = s.id
- if id == glyph_code and s.subtype<256 then
- if s.font == currentfont then
- local char = s.char
- if skipmark and marks[char] then
- s = s.next
- else
- local lg = ligature[char]
- if lg then
- stop = s
- ligature = lg
- s = s.next
- else
- break
- end
- end
- else
- break
- end
- elseif id == disc_code then
- discfound = true
- s = s.next
- else
- break
- end
- end
- local lig = ligature.ligature
- if lig then
- if stop then
- if trace_ligatures then
- local stopchar = stop.char
- head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- end
- return head, start, true
- else
- -- weird but happens (in some arabic font)
- start.char = lig
- if trace_ligatures then
- logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
- end
- return head, start, true
- end
- else
- -- weird but happens
- end
- end
- return head, start, false
-end
-
---[[ldx--
-We get hits on a mark, but we're not sure if it has to be applied, so
-we need to explicitly test for basechar, baselig and basemark entries.
---ldx]]--
-
-function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- end
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
- -- check chainpos variant
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local index = start[a_ligacomp]
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor, ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
- if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head, start, true
- else
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
- end
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = base[a_ligacomp]
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
- end
- end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and start[a_cursbase]
- if not alreadydone then
- local done = false
- local startchar = start.char
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return head, start, false
- end
-end
-
-function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
- local startchar = start.char
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
- end
- return head, start, false
-end
-
-function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
- -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
- -- todo: kerns in components of ligatures
- local snext = start.next
- if not snext then
- return head, start, false
- else
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- local lookuptype = lookuptypes[lookupname]
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then -- probably not needed
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else -- wrong ... position has different entries
- report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
- -- local a, b = krn[2], krn[6]
- -- if a and a ~= 0 then
- -- local k = setkern(snext,factor,rlmode,a)
- -- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- -- end
- -- end
- -- if b and b ~= 0 then
- -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
- -- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return head, start, done
- end
-end
-
---[[ldx--
-I will implement multiple chain replacements once I run into a font that uses
-them. It's not that complex to handle.
---ldx]]--
-
-local chainmores = { }
-local chainprocs = { }
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_subchain(...)
-end
-
-local logwarning = report_subchain
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_chain(...)
-end
-
-local logwarning = report_chain
-
--- We could share functions but that would lead to extra function calls with many
--- arguments, redundant tests and confusing messages.
-
-function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
- logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return head, start, false
-end
-
-function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
- logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return head, start, false
-end
-
--- The reversesub is a special case, which is why we need to store the replacements
--- in a somewhat weird way. There is no lookup and the replacement comes from the lookup
--- itself. It is meant mostly for dealing with Urdu.
-
-function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = start.char
- local replacement = replacements[char]
- if replacement then
- if trace_singles then
- logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
- end
- start.char = replacement
- return head, start, true
- else
- return head, start, false
- end
-end
-
---[[ldx--
-This chain stuff is somewhat tricky since we can have a sequence of actions to be
-applied: single, alternate, multiple or ligature where ligature can be an invalid
-one in the sense that it will replace multiple glyphs by one but not necessarily one
-that looks like the combination (i.e. it is the counterpart of multiple then). For
-example, the following is valid:
-
-
-xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx
-
-
-Therefore we don't really do the replacement here yet unless we have the
-single lookup case. The efficiency of the replacements can be improved by deleting
-as little as needed but that would also make the code even more messy.
---ldx]]--
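The example above can be mimicked on a plain string (purely illustrative; the handlers below operate on node lists, not strings):

    local function applychain(s)
        s = s:gsub("a","A")          -- single    a     -> A
        s = s:gsub("b","BCD")        -- multiple  b     -> B C D
        s = s:gsub("cde","E")        -- ligature  c d e -> E
        return s
    end

    print(applychain("xxxabcdexxx")) -- xxxABCDExxx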
-
--- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start
--- local n = 1
--- if start == stop then
--- -- done
--- elseif ignoremarks then
--- repeat -- start x x m x x stop => start m
--- local next = start.next
--- if not marks[next.char] then
--- local components = next.components
--- if components then -- probably not needed
--- flush_node_list(components)
--- end
--- head = delete_node(head,next)
--- end
--- n = n + 1
--- until next == stop
--- else -- start x x x stop => start
--- repeat
--- local next = start.next
--- local components = next.components
--- if components then -- probably not needed
--- flush_node_list(components)
--- end
--- head = delete_node(head,next)
--- n = n + 1
--- until next == stop
--- end
--- return head, n
--- end
-
---[[ldx--
-Here we replace start by a single variant. First we delete the rest of the
-match.
---ldx]]--
-
-function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- -- todo: marks ?
- local current = start
- local subtables = currentlookup.subtables
- if #subtables > 1 then
- logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
- end
- while current do
- if current.id == glyph_code then
- local currentchar = current.char
- local lookupname = subtables[1] -- only 1
- local replacement = lookuphash[lookupname]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- replacement = replacement[currentchar]
- if not replacement or replacement == "" then
- if trace_bugs then
- logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
- end
- else
- if trace_singles then
- logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
- end
- current.char = replacement
- end
- end
- return head, start, true
- elseif current == stop then
- break
- else
- current = current.next
- end
- end
- return head, start, false
-end
-
-chainmores.gsub_single = chainprocs.gsub_single
-
---[[ldx--
-Here we replace start by a sequence of new glyphs. First we delete the rest of
-the match.
---ldx]]--
-
-function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- -- local head, n = delete_till_stop(head,start,stop)
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local replacements = lookuphash[lookupname]
- if not replacements then
- if trace_bugs then
- logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- replacements = replacements[startchar]
- if not replacements or replacement == "" then
- if trace_bugs then
- logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
- end
- else
- if trace_multiples then
- logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
- end
- return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
- end
- end
- return head, start, false
-end
-
-chainmores.gsub_multiple = chainprocs.gsub_multiple
-
---[[ldx--
-Here we replace start by a new glyph. First we delete the rest of the match.
---ldx]]--
-
--- char_1 mark_1 -> char_x mark_1 (ignore marks)
--- char_1 mark_1 -> char_x
-
--- to be checked: do we always have just one glyph?
--- we can also have alternates for marks
--- marks come last anyway
--- are there cases where we need to delete the mark
-
-function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local current = start
- local subtables = currentlookup.subtables
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- while current do
- if current.id == glyph_code then -- is this check needed?
- local currentchar = current.char
- local lookupname = subtables[1]
- local alternatives = lookuphash[lookupname]
- if not alternatives then
- if trace_bugs then
- logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- alternatives = alternatives[currentchar]
- if alternatives then
- local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
- end
- start.char = choice
- else
- if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
- end
- end
- elseif trace_bugs then
- logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
- end
- end
- return head, start, true
- elseif current == stop then
- break
- else
- current = current.next
- end
- end
- return head, start, false
-end
-
-chainmores.gsub_alternate = chainprocs.gsub_alternate
-
---[[ldx--
-When we replace ligatures we use a helper that handles the marks. I might change
-this function (move code inline and handle the marks in a separate function). We
-assume rather stupid ligatures (no complex disc nodes).
---ldx]]--
-
-function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local ligatures = lookuphash[lookupname]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- ligatures = ligatures[startchar]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- end
- else
- local s = start.next
- local discfound = false
- local last = stop
- local nofreplacements = 0
- local skipmark = currentlookup.flags[1]
- while s do
- local id = s.id
- if id == disc_code then
- s = s.next
- discfound = true
- else
- local schar = s.char
- if skipmark and marks[schar] then -- marks
- s = s.next
- else
- local lg = ligatures[schar]
- if lg then
- ligatures, last, nofreplacements = lg, s, nofreplacements + 1
- if s == stop then
- break
- else
- s = s.next
- end
- else
- break
- end
- end
- end
- end
- local l2 = ligatures.ligature
- if l2 then
- if chainindex then
- stop = last
- end
- if trace_ligatures then
- if start == stop then
- logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
- else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
- end
- end
- head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
- return head, start, true, nofreplacements
- elseif trace_bugs then
- if start == stop then
- logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
- end
- end
- end
- end
- return head, start, false, 0
-end
-
-chainmores.gsub_ligature = chainprocs.gsub_ligature
-
-function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
- end
- return head, start, false
- end
- end
- end
- -- todo: use a ligatures hash, like we do for marks
- local index = start[a_ligacomp]
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head, start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- -- local alreadydone = markonce and start[a_markmark]
- -- if not alreadydone then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = base[a_ligacomp]
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
- end
- end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- -- elseif trace_marks and trace_details then
- -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
- -- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and start[a_cursbase]
- if not alreadydone then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local exitanchors = lookuphash[lookupname]
- if exitanchors then
- exitanchors = exitanchors[startchar]
- end
- if exitanchors then
- local done = false
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return head, start, false
- end
- end
- return head, start, false
-end
-
-function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- -- untested .. needs checking for the new model
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar] -- needed ?
- if kerns then
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
- end
- end
- end
- return head, start, false
-end
-
-chainmores.gpos_single = chainprocs.gpos_single -- okay?
-
--- when machines become faster I will make a shared function
-
-function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local snext = start.next
- if snext then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar]
- if kerns then
- local lookuptype = lookuptypes[lookupname]
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
- local a, b = krn[2], krn[6]
- if a and a ~= 0 then
- local k = setkern(snext,factor,rlmode,a)
- if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- end
- if b and b ~= 0 then
- logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return head, start, done
- end
- end
- end
- return head, start, false
-end
-
-chainmores.gpos_pair = chainprocs.gpos_pair -- okay?
-
--- what pointer to return, spec says stop
--- to be discussed ... is bidi changer a space?
--- elseif char == zwnj and sequence[n][32] then -- brrr
-
--- somehow l or f is global
--- we don't need to pass the currentcontext, saves a bit
--- make a slow variant that can be activated but with more tracing
-
-local function show_skip(kind,chainname,char,ck,class)
- if ck[9] then
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
- else
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
- end
-end
-
-local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
- -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
- local flags = sequence.flags
- local done = false
- local skipmark = flags[1]
- local skipligature = flags[2]
- local skipbase = flags[3]
- local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
- local markclass = sequence.markclass -- todo, first we need a proper test
- local skipped = false
- for k=1,#contexts do
- local match = true
- local current = start
- local last = start
- local ck = contexts[k]
- local seq = ck[3]
- local s = #seq
- -- f..l = mid string
- if s == 1 then
- -- never happens
- match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
- else
- -- maybe we need a better space check (maybe check for glue or category or combination)
- -- we cannot optimize for n=2 because there can be disc nodes
- local f, l = ck[4], ck[5]
- -- current match
- if f == 1 and f == l then -- current only
- -- already a hit
- -- match = true
- else -- before/current/after | before/current | current/after
- -- no need to test first hit (to be optimized)
- if f == l then -- new, else last out of sync (f is > 1)
- -- match = true
- else
- local n = f + 1
- last = last.next
- while n <= l do
- if last then
- local id = last.id
- if id == glyph_code then
- if last.font == currentfont and last.subtype<256 then
- local char = last.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- last = last.next
- elseif seq[n][char] then
- if n < l then
- last = last.next
- end
- n = n + 1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- last = last.next
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- end
- end
- end
- -- before
- if match and f > 1 then
- local prev = start.prev
- if prev then
- local n = f-1
- while n >= 1 do
- if prev then
- local id = prev.id
- if id == glyph_code then
- if prev.font == currentfont and prev.subtype<256 then -- normal char
- local char = prev.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n -1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- -- skip it (a disc node)
- elseif seq[n][32] then
- n = n -1
- else
- match = false
- break
- end
- prev = prev.prev
- elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
- n = n -1
- else
- match = false
- break
- end
- end
- elseif f == 2 then
- match = seq[1][32]
- else
- for n=f-1,1,-1 do -- count down
- if not seq[n][32] then
- match = false
- break
- end
- end
- end
- end
- -- after
- if match and s > l then
- local current = last and last.next
- if current then
- -- removed optimization for s-l == 1, we have to deal with marks anyway
- local n = l + 1
- while n <= s do
- if current then
- local id = current.id
- if id == glyph_code then
- if current.font == currentfont and current.subtype<256 then -- normal char
- local char = current.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n + 1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- -- skip it (a disc node)
- elseif seq[n][32] then -- brrr
- n = n + 1
- else
- match = false
- break
- end
- current = current.next
- elseif seq[n][32] then
- n = n + 1
- else
- match = false
- break
- end
- end
- elseif s-l == 1 then
- match = seq[s][32]
- else
- for n=l+1,s do
- if not seq[n][32] then
- match = false
- break
- end
- end
- end
- end
- end
- if match then
- -- ck == currentcontext
- if trace_contexts then
- local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = start.char
- if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
- else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
- end
- end
- local chainlookups = ck[6]
- if chainlookups then
- local nofchainlookups = #chainlookups
- -- we can speed this up if needed
- if nofchainlookups == 1 then
- local chainlookupname = chainlookups[1]
- local chainlookup = lookuptable[chainlookupname]
- if chainlookup then
- local cp = chainprocs[chainlookup.type]
- if cp then
- local ok
- head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- if ok then
- done = true
- end
- else
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- end
- else -- shouldn't happen
- logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
- end
- else
- local i = 1
- repeat
- if skipped then
- while true do
- local char = start.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = start.next
- else
- break
- end
- else
- break
- end
- end
- end
- local chainlookupname = chainlookups[i]
- local chainlookup = lookuptable[chainlookupname]
- if not chainlookup then
- -- okay, n matches, < n replacements
- i = i + 1
- else
- local cp = chainmores[chainlookup.type]
- if not cp then
- -- actually an error
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- i = i + 1
- else
- local ok, n
- head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
- -- messy since last can be changed !
- if ok then
- done = true
- -- skip next one(s) if ligature
- i = i + (n or 1)
- else
- i = i + 1
- end
- end
- end
- if start then
- start = start.next
- else
- -- weird
- end
- until i > nofchainlookups
- end
- else
- local replacements = ck[7]
- if replacements then
- head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
- else
- done = true -- can be meant to be skipped
- if trace_contexts then
- logprocess("%s: skipping match",cref(kind,chainname))
- end
- end
- end
- end
- end
- return head, start, done
-end
-
--- Because we want to keep this elsewhere (and because speed is less of an issue) we
--- pass the font id so that the verbose variant can access the relevant helper tables.
-
-local verbose_handle_contextchain = function(font,...)
- logwarning("no verbose handler installed, reverting to 'normal'")
- otf.setcontextchain()
- return normal_handle_contextchain(...)
-end
-
-otf.chainhandlers = {
- normal = normal_handle_contextchain,
- verbose = verbose_handle_contextchain,
-}
-
-function otf.setcontextchain(method)
- if not method or method == "normal" or not otf.chainhandlers[method] then
- if handlers.contextchain then -- no need for a message while making the format
- logwarning("installing normal contextchain handler")
- end
- handlers.contextchain = normal_handle_contextchain
- else
- logwarning("installing contextchain handler %a",method)
- local handler = otf.chainhandlers[method]
- handlers.contextchain = function(...)
- return handler(currentfont,...) -- hm, get rid of ...
- end
- end
- handlers.gsub_context = handlers.contextchain
- handlers.gsub_contextchain = handlers.contextchain
- handlers.gsub_reversecontextchain = handlers.contextchain
- handlers.gpos_contextchain = handlers.contextchain
- handlers.gpos_context = handlers.contextchain
-end
-
-otf.setcontextchain()
-
-local missing = { } -- we only report once
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_process(...)
-end
-
-local logwarning = report_process
-
-local function report_missing_cache(typ,lookup)
- local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
- local t = f[typ] if not t then t = { } f[typ] = t end
- if not t[lookup] then
- t[lookup] = true
- logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
- end
-end
-
-local resolved = { } -- we only resolve a font,script,language pair once
-
--- todo: pass all these 'locals' in a table
-
-local lookuphashes = { }
-
-setmetatableindex(lookuphashes, function(t,font)
- local lookuphash = fontdata[font].resources.lookuphash
- if not lookuphash or not next(lookuphash) then
- lookuphash = false
- end
- t[font] = lookuphash
- return lookuphash
-end)
-
--- fonts.hashes.lookups = lookuphashes
-
-local autofeatures = fonts.analyzers.features -- was: constants
-
-local function initialize(sequence,script,language,enabled)
- local features = sequence.features
- if features then
- for kind, scripts in next, features do
- local valid = enabled[kind]
- if valid then
- local languages = scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
- end
- end
- end
- end
- return false
-end
-
-function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
- local shared = tfmdata.shared
- local properties = tfmdata.properties
- local language = properties.language or "dflt"
- local script = properties.script or "dflt"
- local enabled = shared.features
- local res = resolved[font]
- if not res then
- res = { }
- resolved[font] = res
- end
- local rs = res[script]
- if not rs then
- rs = { }
- res[script] = rs
- end
- local rl = rs[language]
- if not rl then
- rl = {
- -- indexed but we can also add specific data by key
- }
- rs[language] = rl
- local sequences = tfmdata.resources.sequences
--- setmetatableindex(rl, function(t,k)
--- if type(k) == "number" then
--- local v = enabled and initialize(sequences[k],script,language,enabled)
--- t[k] = v
--- return v
--- end
--- end)
-for s=1,#sequences do
- local v = enabled and initialize(sequences[s],script,language,enabled)
- if v then
- rl[#rl+1] = v
- end
-end
- end
- return rl
-end
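-
--- editor's sketch (not part of the original file): the positional shape of one
--- entry built by initialize() above and consumed by featuresprocessor() below;
--- the feature name and sequence content are illustrative only
-local example_sequence = { type = "gsub_ligature", chain = 0, subtables = { "s_l_1" } }
-local example_dataset_entry = {
- true, -- [1] featurevalue (the enabled value for this feature)
- false, -- [2] analyzer state attribute (autofeatures[kind]) or false
- 0, -- [3] sequence.chain or 0
- "liga", -- [4] feature kind
- example_sequence, -- [5] the sequence itself
-}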
-
--- elseif id == glue_code then
--- if p[5] then -- chain
--- local pc = pp[32]
--- if pc then
--- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
--- if ok then
--- done = true
--- end
--- if start then start = start.next end
--- else
--- start = start.next
--- end
--- else
--- start = start.next
--- end
-
--- there will be a new direction parser (pre-parsed etc)
-
--- less bytecode: 290 -> 254
---
--- attr = attr or false
---
--- local a = getattr(start,0)
--- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then
--- -- the action
--- end
-
-local function featuresprocessor(head,font,attr)
-
- local lookuphash = lookuphashes[font] -- we can also check sequences here
-
- if not lookuphash then
- return head, false
- end
-
- if trace_steps then
- checkstep(head)
- end
-
- tfmdata = fontdata[font]
- descriptions = tfmdata.descriptions
- characters = tfmdata.characters
- resources = tfmdata.resources
-
- marks = resources.marks
- anchorlookups = resources.lookup_to_anchor
- lookuptable = resources.lookups
- lookuptypes = resources.lookuptypes
-
- currentfont = font
- rlmode = 0
-
- local sequences = resources.sequences
- local done = false
- local datasets = otf.dataset(tfmdata,font,attr)
-
- local dirstack = { } -- could move outside function
-
- -- We could work on sub start-stop ranges instead but I wonder if there is that
- -- much speed gain (experiments showed that it did not gain much) and we need
- -- to keep track of directions anyway. Also at some point I want to play with
- -- font interactions and then we do need the full sweeps.
-
- -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
- -- so that multiple cases are also covered).
-
- for s=1,#datasets do
- local dataset = datasets[s]
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
-
- local sequence = dataset[5] -- also sequences[s]
- local rlparmode = 0
- local topstack = 0
- local success = false
- local attribute = dataset[2]
- local chain = dataset[3] -- sequence.chain or 0
- local typ = sequence.type
- local subtables = sequence.subtables
- if chain < 0 then
- -- this is a limited case, no special treatments like 'init' etc
- local handler = handlers[typ]
- -- we need to get rid of this slide! probably no longer needed in latest luatex
- local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = a == attr
- else
- a = true
- end
- if a then
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.prev end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- end
- else
- local handler = handlers[typ]
- local ns = #subtables
- local start = head -- local ?
- rlmode = 0 -- to be checked ?
- if ns == 1 then -- happens often
- local lookupname = subtables[1]
- local lookupcache = lookuphash[lookupname]
- if not lookupcache then -- also check for empty cache
- report_missing_cache(typ,lookupname)
- else
-
- local function subrun(start)
- -- mostly for gsub, gpos would demand a more clever approach
- local head = start
- local done = false
- while start do
- local id = start.id
- if id == glyph_code and start.font == font and start.subtype <256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- the sequence argument can go
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done = true
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- end
- if done then
- success = true
- return head
- end
- end
-
- local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = disc.prev
- local next = disc.next
- if prev and next then
- prev.next = next
- -- next.prev = prev
- local a = prev[0]
- if a then
- a = (a == attr) and (not attribute or prev[a_state] == attribute)
- else
- a = not attribute or prev[a_state] == attribute
- end
- if a then
- local lookupmatch = lookupcache[prev.char]
- if lookupmatch then
- -- the sequence argument can go
- local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done = true
- success = true
- end
- end
- end
- prev.next = disc
- -- next.prev = disc
- end
- return next
- end
-
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- the sequence argument can go
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success = true
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == disc_code then
- -- mostly for gsub
- if start.subtype == discretionary_code then
- local pre = start.pre
- if pre then
- local new = subrun(pre)
- if new then start.pre = new end
- end
- local post = start.post
- if post then
- local new = subrun(post)
- if new then start.post = new end
- end
- local replace = start.replace
- if replace then
- local new = subrun(replace)
- if new then start.replace = new end
- end
-elseif typ == "gpos_single" or typ == "gpos_pair" then
- kerndisc(start)
- end
- start = start.next
- elseif id == whatsit_code then -- will be function
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- -- one might wonder if the par dir should be looked at, so we might as well drop the next line
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- end
- start = start.next
- elseif id == math_code then
- start = end_of_math(start).next
- else
- start = start.next
- end
- end
- end
- else
-
- local function subrun(start)
- -- mostly for gsub, gpos would demand a more clever approach
- local head = start
- local done = false
- while start do
- local id = start.id
- if id == glyph_code and start.font == font and start.subtype <256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- end
- if done then
- success = true
- return head
- end
- end
-
- local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = disc.prev
- local next = disc.next
- if prev and next then
- prev.next = next
- -- next.prev = prev
- local a = prev[0]
- if a then
- a = (a == attr) and (not attribute or prev[a_state] == attribute)
- else
- a = not attribute or prev[a_state] == attribute
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[prev.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done = true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- end
- prev.next = disc
- -- next.prev = disc
- end
- return next
- end
-
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == disc_code then
- -- mostly for gsub
- if start.subtype == discretionary_code then
- local pre = start.pre
- if pre then
- local new = subrun(pre)
- if new then start.pre = new end
- end
- local post = start.post
- if post then
- local new = subrun(post)
- if new then start.post = new end
- end
- local replace = start.replace
- if replace then
- local new = subrun(replace)
- if new then start.replace = new end
- end
-elseif typ == "gpos_single" or typ == "gpos_pair" then
- kerndisc(start)
- end
- start = start.next
- elseif id == whatsit_code then
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- end
- start = start.next
- elseif id == math_code then
- start = end_of_math(start).next
- else
- start = start.next
- end
- end
- end
- end
- if success then
- done = true
- end
- if trace_steps then -- ?
- registerstep(head)
- end
- end
- return head, done
-end
-
-local function generic(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if target then
- target[unicode] = lookupdata
- else
- lookuphash[lookupname] = { [unicode] = lookupdata }
- end
-end
-
-local action = {
-
- substitution = generic,
- multiple = generic,
- alternate = generic,
- position = generic,
-
- ligature = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- for i=1,#lookupdata do
- local li = lookupdata[i]
- local tu = target[li]
- if not tu then
- tu = { }
- target[li] = tu
- end
- target = tu
- end
- target.ligature = unicode
- end,
-
- pair = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- local others = target[unicode]
- local paired = lookupdata[1]
- if others then
- others[paired] = lookupdata
- else
- others = { [paired] = lookupdata }
- target[unicode] = others
- end
- end,
-
-}
-
-local function prepare_lookups(tfmdata)
-
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local anchor_to_lookup = resources.anchor_to_lookup
- local lookup_to_anchor = resources.lookup_to_anchor
- local lookuptypes = resources.lookuptypes
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
-
- -- we cannot free the entries in the descriptions as sometimes we access
- -- them directly (for instance anchors) ... selectively freeing doesn't save
- -- much memory as it's only a reference to a table and the slot in the
- -- description hash is not freed anyway
-
- for unicode, character in next, characters do -- we cannot loop over descriptions !
-
- local description = descriptions[unicode]
-
- if description then
-
- local lookups = description.slookups
- if lookups then
- for lookupname, lookupdata in next, lookups do
- action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
- end
- end
-
- local lookups = description.mlookups
- if lookups then
- for lookupname, lookuplist in next, lookups do
- local lookuptype = lookuptypes[lookupname]
- for l=1,#lookuplist do
- local lookupdata = lookuplist[l]
- action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
- end
- end
- end
-
- local list = description.kerns
- if list then
- for lookup, krn in next, list do -- ref to glyph, saves lookup
- local target = lookuphash[lookup]
- if target then
- target[unicode] = krn
- else
- lookuphash[lookup] = { [unicode] = krn }
- end
- end
- end
-
- local list = description.anchors
- if list then
- for typ, anchors in next, list do -- types
- if typ == "mark" or typ == "cexit" then -- or entry?
- for name, anchor in next, anchors do
- local lookups = anchor_to_lookup[name]
- if lookups then
- for lookup, _ in next, lookups do
- local target = lookuphash[lookup]
- if target then
- target[unicode] = anchors
- else
- lookuphash[lookup] = { [unicode] = anchors }
- end
- end
- end
- end
- end
- end
- end
-
- end
-
- end
-
-end
-
-local function split(replacement,original)
- local result = { }
- for i=1,#replacement do
- result[original[i]] = replacement[i]
- end
- return result
-end
-
-local valid = {
- coverage = { chainsub = true, chainpos = true, contextsub = true },
- reversecoverage = { reversesub = true },
- glyphs = { chainsub = true, chainpos = true },
-}
-
-local function prepare_contextchains(tfmdata)
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local lookups = rawdata.lookups
- if lookups then
- for lookupname, lookupdata in next, rawdata.lookups do
- local lookuptype = lookupdata.type
- if lookuptype then
- local rules = lookupdata.rules
- if rules then
- local format = lookupdata.format
- local validformat = valid[format]
- if not validformat then
- report_prepare("unsupported format %a",format)
- elseif not validformat[lookuptype] then
- -- todo: dejavu-serif has one (but I need to see what use it has)
- report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current = rule.current
- local before = rule.before
- local after = rule.after
- local replacements = rule.replacements
- local sequence = { }
- local nofsequences = 0
- -- Eventually we can store start, stop and sequence in the cached file
- -- but then less sharing takes place, so it's best not to do that without a lot
- -- of profiling; let's forget about it for now.
- if before then
- for n=1,#before do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = before[n]
- end
- end
- local start = nofsequences + 1
- for n=1,#current do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = current[n]
- end
- local stop = nofsequences
- if after then
- for n=1,#after do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = after[n]
- end
- end
- if sequence[1] then
- -- Replacements only happen with reverse lookups as they are single only. We
- -- could pack them into current (replacement value instead of true) and then
- -- use sequence[start] instead but it's somewhat ugly.
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
- end
- else
- -- no rules
- end
- else
- report_prepare("missing lookuptype for lookupname %a",lookupname)
- end
- end
- end
-end
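-
--- editor's sketch (not part of the original file): one cached rule as stored by
--- prepare_contextchains above and read back as 'ck' in normal_handle_contextchain;
--- the coverage sets and child lookup name are illustrative only
-local example_rule = {
- 1, -- [1] rule number
- "chainsub", -- [2] lookuptype
- { { [0x0644] = true }, { [0x0627] = true } }, -- [3] sequence of coverage sets
- 1, -- [4] first index of the 'current' part of the sequence
- 2, -- [5] last index of the 'current' part
- { "s_s_2" }, -- [6] child lookups (rule.lookups) applied on a match
- false, -- [7] replacements (reverse lookups only; absent otherwise)
-}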
-
--- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
-
-local function featuresinitializer(tfmdata,value)
- if true then -- value then
- -- beware we need to use the topmost properties table
- local rawdata = tfmdata.shared.rawdata
- local properties = rawdata.properties
- if not properties.initialized then
- local starttime = trace_preparing and os.clock()
- local resources = rawdata.resources
- resources.lookuphash = resources.lookuphash or { }
- prepare_contextchains(tfmdata)
- prepare_lookups(tfmdata)
- properties.initialized = true
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
- end
- end
- end
-end
-
-registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- position = 1,
- node = featuresinitializer,
- },
- processors = {
- node = featuresprocessor,
- }
-}
-
--- This can be used for extra handlers, but should be used with care!
-
-otf.handlers = handlers
diff --git a/tex/generic/context/luatex/luatex-fonts.lua b/tex/generic/context/luatex/luatex-fonts.lua
index 5e5c9a4cf..7995be33e 100644
--- a/tex/generic/context/luatex/luatex-fonts.lua
+++ b/tex/generic/context/luatex/luatex-fonts.lua
@@ -210,9 +210,9 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
loadmodule('font-oti.lua')
loadmodule('font-otf.lua')
loadmodule('font-otb.lua')
- loadmodule('luatex-fonts-inj.lua') -- will be replaced (luatex >= .80)
+ loadmodule('node-inj.lua') -- will be replaced (luatex >= .70)
loadmodule('font-ota.lua')
- loadmodule('luatex-fonts-otn.lua')
+ loadmodule('font-otn.lua')
loadmodule('font-otp.lua') -- optional
loadmodule('luatex-fonts-lua.lua')
loadmodule('font-def.lua')
--
cgit v1.2.3