author     Context Git Mirror Bot <phg42.2a@gmail.com>   2014-05-03 13:55:34 +0200
committer  Context Git Mirror Bot <phg42.2a@gmail.com>   2014-05-03 13:55:34 +0200
commit     624cbb5da392e9403984dd1cf368c0d408b1c2a8 (patch)
tree       489c049ac849bb5bbce7d32e4df477872c58373d
parent     088de88944c1f2254250bb448c7371a87ff7ee39 (diff)
download   context-624cbb5da392e9403984dd1cf368c0d408b1c2a8.tar.gz
2014-01-03 00:42:00
-rw-r--r--  context/data/scite/lexers/data/scite-context-data-context.lua | 2
-rw-r--r--  context/data/scite/lexers/data/scite-context-data-metafun.lua | 2
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-mps.lua | 2
-rw-r--r--  context/data/scite/scite-context-data-context.properties | 89
-rw-r--r--  context/data/scite/scite-context-data-metafun.properties | 86
-rw-r--r--  context/data/scite/scite-context-readme.pdf | bin 210958 -> 210827 bytes
-rw-r--r--  context/data/scite/scite-context-readme.tex | 6
-rw-r--r--  context/data/scite/scite-context.properties | 49
-rw-r--r--  context/data/scite/scite-ctx.properties | 19
-rw-r--r--  doc/context/manuals/allkind/mkiv-publications.bib | 34
-rw-r--r--  doc/context/manuals/allkind/mkiv-publications.tex | 1325
-rw-r--r--  doc/context/manuals/allkind/publications-en.xml | 369
-rw-r--r--  doc/context/scripts/mkiv/mtx-bibtex.html | 53
-rw-r--r--  doc/context/scripts/mkiv/mtx-bibtex.man | 30
-rw-r--r--  doc/context/scripts/mkiv/mtx-bibtex.xml | 26
-rw-r--r--  metapost/context/base/mp-base.mpii | 19
-rw-r--r--  metapost/context/base/mp-base.mpiv | 71
-rw-r--r--  metapost/context/base/mp-grap.mpiv | 268
-rw-r--r--  metapost/context/base/mp-tool.mpii | 2683
-rw-r--r--  metapost/context/base/mp-tool.mpiv | 23
-rw-r--r--  scripts/context/lua/mtx-bibtex.lua | 106
-rw-r--r--  scripts/context/lua/mtx-context.lua | 37
-rw-r--r--  scripts/context/lua/mtx-fonts.lua | 14
-rw-r--r--  scripts/context/lua/mtx-plain.lua | 2
-rw-r--r--  scripts/context/lua/mtxrun.lua | 355
-rw-r--r--  scripts/context/stubs/install/first-setup.sh | 120
-rw-r--r--  scripts/context/stubs/mswin/context.exe | bin 4608 -> 4608 bytes
-rw-r--r--  scripts/context/stubs/mswin/ctxtools.exe | bin 4608 -> 4608 bytes
-rw-r--r--  scripts/context/stubs/mswin/first-setup.bat (renamed from scripts/context/stubs/install/first-setup.bat) | 0
-rw-r--r--  scripts/context/stubs/mswin/luatools.exe | bin 4608 -> 4608 bytes
-rw-r--r--  scripts/context/stubs/mswin/metatex.exe | bin 4608 -> 4608 bytes
-rw-r--r--  scripts/context/stubs/mswin/mptopdf.exe | bin 4608 -> 4608 bytes
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.dll | bin 7680 -> 7680 bytes
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.exe | bin 4608 -> 4608 bytes
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua | 355
-rw-r--r--  scripts/context/stubs/mswin/mtxrunjit.exe | bin 4608 -> 0 bytes
-rw-r--r--  scripts/context/stubs/mswin/mtxworks.exe | bin 4608 -> 4608 bytes
-rw-r--r--  scripts/context/stubs/mswin/pstopdf.exe | bin 4608 -> 4608 bytes
-rw-r--r--  scripts/context/stubs/mswin/setuptex.bat (renamed from scripts/context/stubs/setup/setuptex.bat) | 0
-rw-r--r--  scripts/context/stubs/mswin/texexec.exe | bin 4608 -> 4608 bytes
-rw-r--r--  scripts/context/stubs/mswin/texmfstart.exe | bin 4608 -> 4608 bytes
-rw-r--r--  scripts/context/stubs/setup/setuptex | 167
-rw-r--r--  scripts/context/stubs/setup/setuptex.csh | 164
-rw-r--r--  scripts/context/stubs/source/mtxrun_dll.c | 142
-rw-r--r--  scripts/context/stubs/source/readme.txt | 42
-rw-r--r--  scripts/context/stubs/unix/contextjit | 5
-rw-r--r--  scripts/context/stubs/unix/ctxtools | 2
-rw-r--r--  scripts/context/stubs/unix/mptopdf | 2
-rw-r--r--  scripts/context/stubs/unix/mtxrun | 355
-rw-r--r--  scripts/context/stubs/unix/mtxrunjit | 5
-rw-r--r--  scripts/context/stubs/unix/pstopdf | 2
-rw-r--r--  scripts/context/stubs/win64/context.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/contextjit.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/ctxtools.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/luatools.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/metatex.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/mptopdf.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/mtxrun.dll | bin 18432 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/mtxrun.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/mtxrun.lua | 18175
-rw-r--r--  scripts/context/stubs/win64/mtxrunjit.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/mtxworks.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/pstopdf.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/texexec.exe | bin 15360 -> 0 bytes
-rw-r--r--  scripts/context/stubs/win64/texmfstart.exe | bin 15360 -> 0 bytes
-rw-r--r--  tex/context/base/anch-bar.mkiv | 8
-rw-r--r--  tex/context/base/anch-pos.lua | 72
-rw-r--r--  tex/context/base/attr-ini.mkiv | 2
-rw-r--r--  tex/context/base/back-exp.lua | 162
-rw-r--r--  tex/context/base/bibl-tra.lua | 2
-rw-r--r--  tex/context/base/buff-ver.mkiv | 4
-rw-r--r--  tex/context/base/char-def.lua | 2
-rw-r--r--  tex/context/base/char-utf.lua | 87
-rw-r--r--  tex/context/base/cont-new.mkiv | 12
-rw-r--r--  tex/context/base/context-version.pdf | bin 4096 -> 4115 bytes
-rw-r--r--  tex/context/base/context-version.png | bin 38170 -> 40350 bytes
-rw-r--r--  tex/context/base/context.mkiv | 22
-rw-r--r--  tex/context/base/core-env.lua | 14
-rw-r--r--  tex/context/base/core-sys.mkiv | 2
-rw-r--r--  tex/context/base/core-two.lua | 2
-rw-r--r--  tex/context/base/core-uti.lua | 6
-rw-r--r--  tex/context/base/data-aux.lua | 3
-rw-r--r--  tex/context/base/data-use.lua | 4
-rw-r--r--  tex/context/base/enco-ini.mkiv | 15
-rw-r--r--  tex/context/base/export-example.css | 15
-rw-r--r--  tex/context/base/file-job.lua | 44
-rw-r--r--  tex/context/base/file-job.mkvi | 2
-rw-r--r--  tex/context/base/file-res.lua | 16
-rw-r--r--  tex/context/base/font-chk.lua | 28
-rw-r--r--  tex/context/base/font-col.lua | 21
-rw-r--r--  tex/context/base/font-ctx.lua | 42
-rw-r--r--  tex/context/base/font-gds.lua | 33
-rw-r--r--  tex/context/base/font-map.lua | 30
-rw-r--r--  tex/context/base/font-mis.lua | 2
-rw-r--r--  tex/context/base/font-nod.lua | 121
-rw-r--r--  tex/context/base/font-odv.lua | 1160
-rw-r--r--  tex/context/base/font-otf.lua | 207
-rw-r--r--  tex/context/base/font-otn.lua | 648
-rw-r--r--  tex/context/base/font-ott.lua | 3
-rw-r--r--  tex/context/base/font-otx.lua | 130
-rw-r--r--  tex/context/base/font-pat.lua | 2
-rw-r--r--  tex/context/base/font-pre.mkiv | 18
-rw-r--r--  tex/context/base/font-sol.lua | 174
-rw-r--r--  tex/context/base/font-syn.lua | 68
-rw-r--r--  tex/context/base/l-dir.lua | 61
-rw-r--r--  tex/context/base/l-lpeg.lua | 30
-rw-r--r--  tex/context/base/l-lua.lua | 6
-rw-r--r--  tex/context/base/l-string.lua | 5
-rw-r--r--  tex/context/base/l-table.lua | 4
-rw-r--r--  tex/context/base/l-url.lua | 10
-rw-r--r--  tex/context/base/lang-lab.mkiv | 6
-rw-r--r--  tex/context/base/lang-rep.lua | 157
-rw-r--r--  tex/context/base/lang-rep.mkiv | 75
-rw-r--r--  tex/context/base/lang-wrd.lua | 37
-rw-r--r--  tex/context/base/lpdf-mis.lua | 74
-rw-r--r--  tex/context/base/lpdf-nod.lua | 81
-rw-r--r--  tex/context/base/lpdf-tag.lua | 82
-rw-r--r--  tex/context/base/luat-cnf.lua | 2
-rw-r--r--  tex/context/base/luat-sto.lua | 1
-rw-r--r--  tex/context/base/lxml-tab.lua | 41
-rw-r--r--  tex/context/base/m-oldbibtex.mkiv | 16
-rw-r--r--  tex/context/base/math-dir.lua | 42
-rw-r--r--  tex/context/base/math-fbk.lua | 27
-rw-r--r--  tex/context/base/math-fen.mkiv | 2
-rw-r--r--  tex/context/base/math-ini.lua | 11
-rw-r--r--  tex/context/base/math-noa.lua | 411
-rw-r--r--  tex/context/base/math-tag.lua | 129
-rw-r--r--  tex/context/base/mult-de.mkii | 2
-rw-r--r--  tex/context/base/mult-def.lua | 6
-rw-r--r--  tex/context/base/mult-def.mkiv | 4
-rw-r--r--  tex/context/base/mult-en.mkii | 2
-rw-r--r--  tex/context/base/mult-fr.mkii | 2
-rw-r--r--  tex/context/base/mult-fun.lua | 2
-rw-r--r--  tex/context/base/mult-it.mkii | 2
-rw-r--r--  tex/context/base/mult-low.lua | 6
-rw-r--r--  tex/context/base/mult-nl.mkii | 2
-rw-r--r--  tex/context/base/mult-pe.mkii | 2
-rw-r--r--  tex/context/base/mult-ro.mkii | 2
-rw-r--r--  tex/context/base/node-acc.lua | 120
-rw-r--r--  tex/context/base/node-aux.lua | 374
-rw-r--r--  tex/context/base/node-bck.lua | 111
-rw-r--r--  tex/context/base/node-fin.lua | 302
-rw-r--r--  tex/context/base/node-fnt.lua | 41
-rw-r--r--  tex/context/base/node-inj.lua | 200
-rw-r--r--  tex/context/base/node-ltp.lua | 1614
-rw-r--r--  tex/context/base/node-met.lua | 53
-rw-r--r--  tex/context/base/node-mig.lua | 97
-rw-r--r--  tex/context/base/node-nut.lua | 650
-rw-r--r--  tex/context/base/node-pro.lua | 113
-rw-r--r--  tex/context/base/node-ref.lua | 220
-rw-r--r--  tex/context/base/node-res.lua | 530
-rw-r--r--  tex/context/base/node-rul.lua | 125
-rw-r--r--  tex/context/base/node-tra.lua | 355
-rw-r--r--  tex/context/base/node-tst.lua | 69
-rw-r--r--  tex/context/base/node-typ.lua | 71
-rw-r--r--  tex/context/base/pack-rul.lua | 71
-rw-r--r--  tex/context/base/pack-rul.mkiv | 25
-rw-r--r--  tex/context/base/page-brk.mkiv | 245
-rw-r--r--  tex/context/base/page-lay.mkiv | 6
-rw-r--r--  tex/context/base/page-lin.lua | 112
-rw-r--r--  tex/context/base/page-mak.mkvi | 63
-rw-r--r--  tex/context/base/page-mix.lua | 229
-rw-r--r--  tex/context/base/page-mix.mkiv | 3
-rw-r--r--  tex/context/base/page-mul.mkiv | 8
-rw-r--r--  tex/context/base/page-str.lua | 6
-rw-r--r--  tex/context/base/page-str.mkiv | 2
-rw-r--r--  tex/context/base/publ-aut.lua | 550
-rw-r--r--  tex/context/base/publ-dat.lua | 529
-rw-r--r--  tex/context/base/publ-imp-apa.mkiv | 547
-rw-r--r--  tex/context/base/publ-imp-cite.mkiv | 74
-rw-r--r--  tex/context/base/publ-imp-commands.mkiv | 15
-rw-r--r--  tex/context/base/publ-imp-definitions.mkiv | 68
-rw-r--r--  tex/context/base/publ-ini.lua | 1425
-rw-r--r--  tex/context/base/publ-ini.mkiv | 963
-rw-r--r--  tex/context/base/publ-old.mkiv | 22
-rw-r--r--  tex/context/base/publ-oth.lua | 146
-rw-r--r--  tex/context/base/publ-tra.lua | 296
-rw-r--r--  tex/context/base/publ-tra.mkiv | 35
-rw-r--r--  tex/context/base/publ-usr.lua | 91
-rw-r--r--  tex/context/base/publ-usr.mkiv | 2
-rw-r--r--  tex/context/base/publ-xml.mkiv | 114
-rw-r--r--  tex/context/base/s-abr-01.tex | 2
-rw-r--r--  tex/context/base/s-inf-03.mkiv | 5
-rw-r--r--  tex/context/base/s-languages-hyphenation.lua | 2
-rw-r--r--  tex/context/base/s-math-coverage.lua | 4
-rw-r--r--  tex/context/base/scrp-cjk.lua | 131
-rw-r--r--  tex/context/base/scrp-eth.lua | 22
-rw-r--r--  tex/context/base/scrp-ini.lua | 85
-rw-r--r--  tex/context/base/sort-ini.lua | 54
-rw-r--r--  tex/context/base/sort-lan.lua | 2
-rw-r--r--  tex/context/base/spac-ali.lua | 46
-rw-r--r--  tex/context/base/spac-ali.mkiv | 25
-rw-r--r--  tex/context/base/spac-chr.lua | 95
-rw-r--r--  tex/context/base/spac-ver.lua | 430
-rw-r--r--  tex/context/base/status-files.pdf | bin 24795 -> 24556 bytes
-rw-r--r--  tex/context/base/status-lua.pdf | bin 226993 -> 228200 bytes
-rw-r--r--  tex/context/base/status-mkiv.lua | 98
-rw-r--r--  tex/context/base/strc-lst.mkvi | 1
-rw-r--r--  tex/context/base/strc-mar.lua | 29
-rw-r--r--  tex/context/base/strc-mat.mkiv | 8
-rw-r--r--  tex/context/base/strc-pag.lua | 14
-rw-r--r--  tex/context/base/strc-pag.mkiv | 5
-rw-r--r--  tex/context/base/supp-box.lua | 147
-rw-r--r--  tex/context/base/supp-mat.mkiv | 34
-rw-r--r--  tex/context/base/syst-ini.mkiv | 10
-rw-r--r--  tex/context/base/tabl-ntb.mkiv | 4
-rw-r--r--  tex/context/base/tabl-tbl.mkiv | 5
-rw-r--r--  tex/context/base/tabl-xtb.lua | 131
-rw-r--r--  tex/context/base/task-ini.lua | 8
-rw-r--r--  tex/context/base/trac-inf.lua | 5
-rw-r--r--  tex/context/base/trac-jus.lua | 59
-rw-r--r--  tex/context/base/trac-par.lua | 39
-rw-r--r--  tex/context/base/trac-pro.lua | 6
-rw-r--r--  tex/context/base/trac-tim.lua | 2
-rw-r--r--  tex/context/base/trac-vis.lua | 294
-rw-r--r--  tex/context/base/type-imp-buy.mkiv | 136
-rw-r--r--  tex/context/base/type-ini.lua | 4
-rw-r--r--  tex/context/base/typo-bld.lua | 28
-rw-r--r--  tex/context/base/typo-brk.lua | 122
-rw-r--r--  tex/context/base/typo-cap.lua | 103
-rw-r--r--  tex/context/base/typo-cln.lua | 17
-rw-r--r--  tex/context/base/typo-dha.lua | 75
-rw-r--r--  tex/context/base/typo-dig.lua | 58
-rw-r--r--  tex/context/base/typo-dir.lua | 32
-rw-r--r--  tex/context/base/typo-drp.lua | 375
-rw-r--r--  tex/context/base/typo-drp.mkiv | 56
-rw-r--r--  tex/context/base/typo-dua.lua | 78
-rw-r--r--  tex/context/base/typo-dub.lua | 79
-rw-r--r--  tex/context/base/typo-fln.lua | 91
-rw-r--r--  tex/context/base/typo-itc.lua | 63
-rw-r--r--  tex/context/base/typo-krn.lua | 192
-rw-r--r--  tex/context/base/typo-mar.lua | 148
-rw-r--r--  tex/context/base/typo-pag.lua | 76
-rw-r--r--  tex/context/base/typo-par.mkiv | 29
-rw-r--r--  tex/context/base/typo-rep.lua | 50
-rw-r--r--  tex/context/base/typo-spa.lua | 57
-rw-r--r--  tex/context/base/typo-tal.lua | 80
-rw-r--r--  tex/context/base/util-deb.lua | 46
-rw-r--r--  tex/context/base/util-str.lua | 173
-rw-r--r--  tex/context/base/util-tab.lua | 3
-rw-r--r--  tex/context/base/x-mathml.lua | 32
-rw-r--r--  tex/context/base/x-mathml.mkiv | 15
-rw-r--r--  tex/context/base/x-set-11.mkiv | 28
-rw-r--r--  tex/context/interface/keys-cs.xml | 2
-rw-r--r--  tex/context/interface/keys-de.xml | 2
-rw-r--r--  tex/context/interface/keys-en.xml | 2
-rw-r--r--  tex/context/interface/keys-fr.xml | 2
-rw-r--r--  tex/context/interface/keys-it.xml | 2
-rw-r--r--  tex/context/interface/keys-nl.xml | 2
-rw-r--r--  tex/context/interface/keys-pe.xml | 2
-rw-r--r--  tex/context/interface/keys-ro.xml | 2
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-inj.lua | 526
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua | 1069
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-otn.lua | 2848
-rw-r--r--  tex/generic/context/luatex/luatex-fonts.lua | 4
255 files changed, 7685 insertions, 41249 deletions
diff --git a/context/data/scite/lexers/data/scite-context-data-context.lua b/context/data/scite/lexers/data/scite-context-data-context.lua
index 0d577c8da..f167c82c1 100644
--- a/context/data/scite/lexers/data/scite-context-data-context.lua
+++ b/context/data/scite/lexers/data/scite-context-data-context.lua
@@ -1,4 +1,4 @@
return {
["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "etexversion", "pdftexversion", "xetexversion", "xetexrevision", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", 
"mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifmodeelse", "doifnotmode", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "setupdocument", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "setupmodule", "currentmoduleparameter", "moduleparameter", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es", "lefttorightmark", "righttoleftmark", "breakablethinspace", "nobreakspace", "narrownobreakspace", "zerowidthnobreakspace", "ideographicspace", "ideographichalffillspace", "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", "figurespace", "punctuationspace", "hairspace", "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj" },
- ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "doifelsecommandhandler", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", 
"doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "setconstant", "setconstantvalue", "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", 
"doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", 
"morehyphens", "nohyphens", "dohyphens", "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath" },
+ ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "doifelsecommandhandler", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", 
"doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", 
"dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", "morehyphens", "nohyphens", "dohyphens" },
}
\ No newline at end of file
diff --git a/context/data/scite/lexers/data/scite-context-data-metafun.lua b/context/data/scite/lexers/data/scite-context-data-metafun.lua
index 50b9ecec4..1ca02de97 100644
--- a/context/data/scite/lexers/data/scite-context-data-metafun.lua
+++ b/context/data/scite/lexers/data/scite-context-data-metafun.lua
@@ -1,4 +1,4 @@
return {
- ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "property", "properties", "withproperties", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", "undashed", "decorated", "redecorated", "undecorated", "passvariable", "passarrayvariable", "tostring", "format", "formatted", "startpassingvariable", "stoppassingvariable" },
+ ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "property", "properties", "withproperties", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", "undashed", "decorated", "redecorated", "undecorated", "passvariable", "passarrayvariable", "tostring", "format", "formatted", "startpassingvariable", "stoppassingvariable" },
["internals"]={ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", "shadefactor", "textextoffset", "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent", "metapostversion", "maxdimensions" },
}
\ No newline at end of file
diff --git a/context/data/scite/lexers/scite-context-lexer-mps.lua b/context/data/scite/lexers/scite-context-lexer-mps.lua
index f0d88eb3b..96c5e9c3c 100644
--- a/context/data/scite/lexers/scite-context-lexer-mps.lua
+++ b/context/data/scite/lexers/scite-context-lexer-mps.lua
@@ -98,7 +98,7 @@ local number = token('number', number)
local grouping = token('grouping', S("()[]{}")) -- can be an option
local special = token('special', S("#()[]{}<>=:\"")) -- or else := <> etc split
local texlike = token('warning', P("\\") * cstokentex^1)
-local extra = token('extra', P("+-+") + P("++") + S("`~%^&_-+*/\'|\\"))
+local extra = token('extra', S("`~%^&_-+*/\'|\\"))
local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
local texlike = token('embedded', P("\\") * (P("MP") + P("mp")) * mptoken^1)
diff --git a/context/data/scite/scite-context-data-context.properties b/context/data/scite/scite-context-data-context.properties
index fbd958f8a..140b0d96b 100644
--- a/context/data/scite/scite-context-data-context.properties
+++ b/context/data/scite/scite-context-data-context.properties
@@ -143,49 +143,48 @@ gobblethreearguments gobblefourarguments gobblefivearguments gobblesixarguments
gobbleeightarguments gobbleninearguments gobbletenarguments gobbleoneoptional gobbletwooptionals \
gobblethreeoptionals gobblefouroptionals gobblefiveoptionals dorecurse doloop \
exitloop dostepwiserecurse recurselevel recursedepth dofastloopcs \
-dowith newconstant setnewconstant setconstant setconstantvalue \
-newconditional settrue setfalse settruevalue setfalsevalue \
-newmacro setnewmacro newfraction newsignal dosingleempty \
-dodoubleempty dotripleempty doquadrupleempty doquintupleempty dosixtupleempty \
-doseventupleempty dosingleargument dodoubleargument dotripleargument doquadrupleargument \
-doquintupleargument dosixtupleargument doseventupleargument dosinglegroupempty dodoublegroupempty \
-dotriplegroupempty doquadruplegroupempty doquintuplegroupempty permitspacesbetweengroups dontpermitspacesbetweengroups \
-nopdfcompression maximumpdfcompression normalpdfcompression modulonumber dividenumber \
-getfirstcharacter doiffirstcharelse startnointerference stopnointerference twodigits \
-threedigits leftorright strut setstrut strutbox \
-strutht strutdp strutwd struthtdp begstrut \
-endstrut lineheight ordordspacing ordopspacing ordbinspacing \
-ordrelspacing ordopenspacing ordclosespacing ordpunctspacing ordinnerspacing \
-opordspacing opopspacing opbinspacing oprelspacing opopenspacing \
-opclosespacing oppunctspacing opinnerspacing binordspacing binopspacing \
-binbinspacing binrelspacing binopenspacing binclosespacing binpunctspacing \
-bininnerspacing relordspacing relopspacing relbinspacing relrelspacing \
-relopenspacing relclosespacing relpunctspacing relinnerspacing openordspacing \
-openopspacing openbinspacing openrelspacing openopenspacing openclosespacing \
-openpunctspacing openinnerspacing closeordspacing closeopspacing closebinspacing \
-closerelspacing closeopenspacing closeclosespacing closepunctspacing closeinnerspacing \
-punctordspacing punctopspacing punctbinspacing punctrelspacing punctopenspacing \
-punctclosespacing punctpunctspacing punctinnerspacing innerordspacing inneropspacing \
-innerbinspacing innerrelspacing inneropenspacing innerclosespacing innerpunctspacing \
-innerinnerspacing normalreqno startimath stopimath normalstartimath \
-normalstopimath startdmath stopdmath normalstartdmath normalstopdmath \
-uncramped cramped triggermathstyle mathstylefont mathsmallstylefont \
-mathstyleface mathsmallstyleface mathstylecommand mathpalette mathstylehbox \
-mathstylevbox mathstylevcenter mathstylevcenteredhbox mathstylevcenteredvbox mathtext \
-setmathsmalltextbox setmathtextbox triggerdisplaystyle triggertextstyle triggerscriptstyle \
-triggerscriptscriptstyle triggeruncrampedstyle triggercrampedstyle triggersmallstyle triggeruncrampedsmallstyle \
-triggercrampedsmallstyle triggerbigstyle triggeruncrampedbigstyle triggercrampedbigstyle luaexpr \
-expdoifelse expdoif expdoifnot expdoifcommonelse expdoifinsetelse \
-ctxdirectlua ctxlatelua ctxsprint ctxwrite ctxcommand \
-ctxdirectcommand ctxlatecommand ctxreport ctxlua luacode \
-lateluacode directluacode registerctxluafile ctxloadluafile luaversion \
-luamajorversion luaminorversion ctxluacode luaconditional luaexpanded \
-startluaparameterset stopluaparameterset luaparameterset definenamedlua obeylualines \
-obeyluatokens startluacode stopluacode startlua stoplua \
-carryoverpar assumelongusagecs Umathbotaccent righttolefthbox lefttorighthbox \
-righttoleftvbox lefttorightvbox righttoleftvtop lefttorightvtop rtlhbox \
-ltrhbox rtlvbox ltrvbox rtlvtop ltrvtop \
-autodirhbox autodirvbox autodirvtop lefttoright righttoleft \
-synchronizelayoutdirection synchronizedisplaydirection synchronizeinlinedirection lesshyphens morehyphens \
-nohyphens dohyphens Ucheckedstartdisplaymath Ucheckedstopdisplaymath
+dowith newconstant setnewconstant newconditional settrue \
+setfalse setconstant newmacro setnewmacro newfraction \
+newsignal dosingleempty dodoubleempty dotripleempty doquadrupleempty \
+doquintupleempty dosixtupleempty doseventupleempty dosingleargument dodoubleargument \
+dotripleargument doquadrupleargument doquintupleargument dosixtupleargument doseventupleargument \
+dosinglegroupempty dodoublegroupempty dotriplegroupempty doquadruplegroupempty doquintuplegroupempty \
+permitspacesbetweengroups dontpermitspacesbetweengroups nopdfcompression maximumpdfcompression normalpdfcompression \
+modulonumber dividenumber getfirstcharacter doiffirstcharelse startnointerference \
+stopnointerference twodigits threedigits leftorright strut \
+setstrut strutbox strutht strutdp strutwd \
+struthtdp begstrut endstrut lineheight ordordspacing \
+ordopspacing ordbinspacing ordrelspacing ordopenspacing ordclosespacing \
+ordpunctspacing ordinnerspacing opordspacing opopspacing opbinspacing \
+oprelspacing opopenspacing opclosespacing oppunctspacing opinnerspacing \
+binordspacing binopspacing binbinspacing binrelspacing binopenspacing \
+binclosespacing binpunctspacing bininnerspacing relordspacing relopspacing \
+relbinspacing relrelspacing relopenspacing relclosespacing relpunctspacing \
+relinnerspacing openordspacing openopspacing openbinspacing openrelspacing \
+openopenspacing openclosespacing openpunctspacing openinnerspacing closeordspacing \
+closeopspacing closebinspacing closerelspacing closeopenspacing closeclosespacing \
+closepunctspacing closeinnerspacing punctordspacing punctopspacing punctbinspacing \
+punctrelspacing punctopenspacing punctclosespacing punctpunctspacing punctinnerspacing \
+innerordspacing inneropspacing innerbinspacing innerrelspacing inneropenspacing \
+innerclosespacing innerpunctspacing innerinnerspacing normalreqno startimath \
+stopimath normalstartimath normalstopimath startdmath stopdmath \
+normalstartdmath normalstopdmath uncramped cramped triggermathstyle \
+mathstylefont mathsmallstylefont mathstyleface mathsmallstyleface mathstylecommand \
+mathpalette mathstylehbox mathstylevbox mathstylevcenter mathstylevcenteredhbox \
+mathstylevcenteredvbox mathtext setmathsmalltextbox setmathtextbox triggerdisplaystyle \
+triggertextstyle triggerscriptstyle triggerscriptscriptstyle triggeruncrampedstyle triggercrampedstyle \
+triggersmallstyle triggeruncrampedsmallstyle triggercrampedsmallstyle triggerbigstyle triggeruncrampedbigstyle \
+triggercrampedbigstyle luaexpr expdoifelse expdoif expdoifnot \
+expdoifcommonelse expdoifinsetelse ctxdirectlua ctxlatelua ctxsprint \
+ctxwrite ctxcommand ctxdirectcommand ctxlatecommand ctxreport \
+ctxlua luacode lateluacode directluacode registerctxluafile \
+ctxloadluafile luaversion luamajorversion luaminorversion ctxluacode \
+luaconditional luaexpanded startluaparameterset stopluaparameterset luaparameterset \
+definenamedlua obeylualines obeyluatokens startluacode stopluacode \
+startlua stoplua carryoverpar assumelongusagecs Umathbotaccent \
+righttolefthbox lefttorighthbox righttoleftvbox lefttorightvbox righttoleftvtop \
+lefttorightvtop rtlhbox ltrhbox rtlvbox ltrvbox \
+rtlvtop ltrvtop autodirhbox autodirvbox autodirvtop \
+lefttoright righttoleft synchronizelayoutdirection synchronizedisplaydirection synchronizeinlinedirection \
+lesshyphens morehyphens nohyphens dohyphens
diff --git a/context/data/scite/scite-context-data-metafun.properties b/context/data/scite/scite-context-data-metafun.properties
index 9381b4f8d..c0b080982 100644
--- a/context/data/scite/scite-context-data-metafun.properties
+++ b/context/data/scite/scite-context-data-metafun.properties
@@ -3,49 +3,49 @@ sqr log ln exp \
inv pow pi radian tand \
cotd sin cos tan cot \
atan asin acos invsin invcos \
-invtan acosh asinh sinh cosh \
-paired tripled unitcircle fulldiamond unitdiamond \
-fullsquare llcircle lrcircle urcircle ulcircle \
-tcircle bcircle lcircle rcircle lltriangle \
-lrtriangle urtriangle ultriangle smoothed cornered \
-superellipsed randomized squeezed enlonged shortened \
-punked curved unspiked simplified blownup \
-stretched enlarged leftenlarged topenlarged rightenlarged \
-bottomenlarged crossed laddered randomshifted interpolated \
-paralleled cutends peepholed llenlarged lrenlarged \
-urenlarged ulenlarged llmoved lrmoved urmoved \
-ulmoved rightarrow leftarrow centerarrow boundingbox \
-innerboundingbox outerboundingbox pushboundingbox popboundingbox bottomboundary \
-leftboundary topboundary rightboundary xsized ysized \
-xysized sized xyscaled intersection_point intersection_found \
-penpoint bbwidth bbheight withshade withlinearshading \
-withcircularshading withfromshadecolor withtoshadecolor withshading shadedinto \
-withcircularshade withlinearshade cmyk spotcolor multitonecolor \
-namedcolor drawfill undrawfill inverted uncolored \
-softened grayed greyed onlayer along \
-graphictext loadfigure externalfigure withmask figure \
-register bitmapimage colordecimals ddecimal dddecimal \
-ddddecimal textext thetextext rawtextext textextoffset \
-verbatim thelabel label autoalign transparent \
-withtransparency property properties withproperties asgroup \
-infont set_linear_vector linear_shade define_linear_shade define_circular_linear_shade \
-define_sampled_linear_shade set_circular_vector circular_shade define_circular_shade define_circular_linear_shade \
-define_sampled_circular_shade space CRLF grayscale greyscale \
-withgray withgrey colorpart readfile clearxy \
-unitvector center epsed anchored originpath \
-infinite break xstretched ystretched snapped \
-pathconnectors function constructedpath constructedpairs punkedfunction \
-curvedfunction tightfunction punkedpath curvedpath tightpath \
-punkedpairs curvedpairs tightpairs evenly oddly \
-condition pushcurrentpicture popcurrentpicture arrowpath tensecircle \
-roundedsquare colortype whitecolor blackcolor normalfill \
-normaldraw visualizepaths naturalizepaths drawboundary drawwholepath \
-visualizeddraw visualizedfill draworigin drawboundingbox drawpath \
-drawpoint drawpoints drawcontrolpoints drawcontrollines drawpointlabels \
-drawlineoptions drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions \
-drawboundoptions drawpathoptions resetdrawoptions undashed decorated \
-redecorated undecorated passvariable passarrayvariable tostring \
-format formatted startpassingvariable stoppassingvariable
+acosh asinh sinh cosh paired \
+tripled unitcircle fulldiamond unitdiamond fullsquare \
+llcircle lrcircle urcircle ulcircle tcircle \
+bcircle lcircle rcircle lltriangle lrtriangle \
+urtriangle ultriangle smoothed cornered superellipsed \
+randomized squeezed enlonged shortened punked \
+curved unspiked simplified blownup stretched \
+enlarged leftenlarged topenlarged rightenlarged bottomenlarged \
+crossed laddered randomshifted interpolated paralleled \
+cutends peepholed llenlarged lrenlarged urenlarged \
+ulenlarged llmoved lrmoved urmoved ulmoved \
+rightarrow leftarrow centerarrow boundingbox innerboundingbox \
+outerboundingbox pushboundingbox popboundingbox bottomboundary leftboundary \
+topboundary rightboundary xsized ysized xysized \
+sized xyscaled intersection_point intersection_found penpoint \
+bbwidth bbheight withshade withlinearshading withcircularshading \
+withfromshadecolor withtoshadecolor withshading shadedinto withcircularshade \
+withlinearshade cmyk spotcolor multitonecolor namedcolor \
+drawfill undrawfill inverted uncolored softened \
+grayed greyed onlayer along graphictext \
+loadfigure externalfigure withmask figure register \
+bitmapimage colordecimals ddecimal dddecimal ddddecimal \
+textext thetextext rawtextext textextoffset verbatim \
+thelabel label autoalign transparent withtransparency \
+property properties withproperties asgroup infont \
+set_linear_vector linear_shade define_linear_shade define_circular_linear_shade define_sampled_linear_shade \
+set_circular_vector circular_shade define_circular_shade define_circular_linear_shade define_sampled_circular_shade \
+space CRLF grayscale greyscale withgray \
+withgrey colorpart readfile clearxy unitvector \
+center epsed anchored originpath infinite \
+break xstretched ystretched snapped pathconnectors \
+function constructedpath constructedpairs punkedfunction curvedfunction \
+tightfunction punkedpath curvedpath tightpath punkedpairs \
+curvedpairs tightpairs evenly oddly condition \
+pushcurrentpicture popcurrentpicture arrowpath tensecircle roundedsquare \
+colortype whitecolor blackcolor normalfill normaldraw \
+visualizepaths naturalizepaths drawboundary drawwholepath visualizeddraw \
+visualizedfill draworigin drawboundingbox drawpath drawpoint \
+drawpoints drawcontrolpoints drawcontrollines drawpointlabels drawlineoptions \
+drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions drawboundoptions \
+drawpathoptions resetdrawoptions undashed decorated redecorated \
+undecorated passvariable passarrayvariable tostring format \
+formatted startpassingvariable stoppassingvariable
keywordclass.metafun.internals=\
nocolormodel greycolormodel graycolormodel rgbcolormodel \
diff --git a/context/data/scite/scite-context-readme.pdf b/context/data/scite/scite-context-readme.pdf
index 99f05a2a5..b6a751a36 100644
--- a/context/data/scite/scite-context-readme.pdf
+++ b/context/data/scite/scite-context-readme.pdf
Binary files differ
diff --git a/context/data/scite/scite-context-readme.tex b/context/data/scite/scite-context-readme.tex
index 42f5e0a98..ef1475fa2 100644
--- a/context/data/scite/scite-context-readme.tex
+++ b/context/data/scite/scite-context-readme.tex
@@ -821,18 +821,12 @@ from the on|-|line help pages.
\NC \type{Ctrl+Right} \NC next word; \type{Shift} extends selection \NC \NR
\NC \type{Ctrl+/} \NC previous word part; \type{Shift} extends selection \NC \NR
\NC \type{Ctrl+\ } \NC next word part; \type{Shift} extends selection \NC \NR
-\ML
-\NC \type{F12 / Ctrl+F7} \NC check (or process) \NC \NR
-\NC \type{Ctrl+F12 / Ctrl+F7} \NC process (run) \NC \NR
-\NC \type{Alt+F12 / Ctrl+F7} \NC process (run) using the luajit vm (if applicable) \NC \NR
\LL
\stoptabulate
\stopbuffer
\getbuffer[keybindings]
-\page
-
\subject{Affiliation}
\starttabulate[|l|l|]
diff --git a/context/data/scite/scite-context.properties b/context/data/scite/scite-context.properties
index bc1af717c..caf230de7 100644
--- a/context/data/scite/scite-context.properties
+++ b/context/data/scite/scite-context.properties
@@ -115,11 +115,9 @@ name.metafun.console=$(name.context.console)
name.example.console=$(name.context.console)
name.context.mtxrun=mtxrun --autogenerate
-name.context.mtxrunjit=mtxrunjit --autogenerate
name.context.check=$(name.context.mtxrun) --script check
name.context.run=$(name.context.mtxrun) --script context $(name.flag.pdfopen)
-name.context.runjit=$(name.context.mtxrunjit) --script context $(name.flag.pdfopen)
name.context.texshow=$(name.context.mtxrun) texshow
name.context.purge=$(name.context.mtxrun) --context --purge --all
name.context.showcase=$(name.context.mtxrun) --launch showcase.pdf
@@ -164,20 +162,20 @@ import scite-ctx
# hard coded compile / build / go
-command.build.$(file.patterns.context)=$(name.context.check) $(FileNameExt)
-command.build.$(file.patterns.metafun)=
-command.build.$(file.patterns.example)=$(name.example.xmlcheck) $(FileNameExt)
-command.build.*.fo=$(name.example.xmlcheck) $(FileNameExt)
+command.compile.$(file.patterns.context)=$(name.context.check) $(FileNameExt)
+command.compile.$(file.patterns.metafun)=
+command.compile.$(file.patterns.example)=$(name.example.xmlcheck) $(FileNameExt)
+command.compile.*.fo=$(name.example.xmlcheck) $(FileNameExt)
-command.compile.$(file.patterns.context)=$(name.context.run) $(FileNameExt)
-command.compile.$(file.patterns.metafun)=$(name.context.run) $(name.flag.pdfopen) $(FileNameExt)
-command.compile.$(file.patterns.example)=$(name.context.run) --forcexml $(FileNameExt)
-command.compile.*.fo=$(name.context.run) $(name.flag.pdfopen) --forcexml --use=foxet $(FileNameExt)
+command.build.$(file.patterns.context)=$(name.context.run) $(FileNameExt)
+command.build.$(file.patterns.metafun)=$(name.context.mtxrun) --script context $(name.flag.pdfopen) $(FileNameExt)
+command.build.$(file.patterns.example)=$(name.context.run) --forcexml $(FileNameExt)
+command.build.*.fo=$(name.context.run) $(name.flag.pdfopen) --forcexml --use=foxet $(FileNameExt)
-command.compile.subsystem.$(file.patterns.context)=1
-command.compile.subsystem.$(file.patterns.metafun)=1
-command.compile.subsystem.$(file.patterns.example)=1
-command.compile.subsystem.*.fo=1
+command.build.subsystem.$(file.patterns.context)=1
+command.build.subsystem.$(file.patterns.metafun)=1
+command.build.subsystem.$(file.patterns.example)=1
+command.build.subsystem.*.fo=1
if PLAT_WIN
command.go.$(file.patterns.context)=$(FileName).pdf
@@ -220,24 +218,6 @@ command.1.subsystem.$(file.patterns.context)=1
command.1.subsystem.$(file.patterns.metafun)=1
command.1.subsystem.$(file.patterns.example)=1
-command.name.29.*=Run with jit
-command.subsystem.29.*=1
-command.29.$(file.patterns.context)=$(name.context.runjit) $(FileNameExt)
-command.29.$(file.patterns.metafun)=$(name.context.runjit) $(FileNameExt) --metapost
-command.29.$(file.patterns.exmaple)=$(name.context.runjit) $(FileNameExt) --xml
-command.groupundo.29.*=yes
-command.save.before.29.*=2
-command.shortcut.29.*=Alt+F12
-
-command.name.30.*=Run with jit
-command.subsystem.30.*=1
-command.30.$(file.patterns.context)=$(name.context.runjit) $(FileNameExt)
-command.30.$(file.patterns.metafun)=$(name.context.runjit) $(FileNameExt) --metapost
-command.30.$(file.patterns.exmaple)=$(name.context.runjit) $(FileNameExt) --xml
-command.groupundo.30.*=yes
-command.save.before.30.*=2
-command.shortcut.30.*=Alt+F7
-
# 2 : pdf viewing
command.name.2.$(file.patterns.context)=View PDF File with GhostScript
@@ -357,9 +337,10 @@ highlight.indentation.guides=1
# Editor: keys
user.shortcuts=\
-F12|IDM_BUILD|\
-Ctrl+F12|IDM_COMPILE|\
+F12|IDM_COMPILE|\
+Ctrl+F12|IDM_BUILD|\
Shift+F12|IDM_GO|\
+Alt+F12|IDM_STOPEXECUTE|\
os.x.home.end.keys=0
diff --git a/context/data/scite/scite-ctx.properties b/context/data/scite/scite-ctx.properties
index acbb33c0b..d56ae653d 100644
--- a/context/data/scite/scite-ctx.properties
+++ b/context/data/scite/scite-ctx.properties
@@ -69,19 +69,14 @@ ctx.spellcheck.wordsize.uk=4
ctx.spellcheck.wordsize.nl=4
ctx.helpinfo=\
- Shift + F11 pop up menu with ctx options|\
+ Shift + F11 pop up menu with ctx options|\
|\
- Ctrl + B check spelling|\
- Ctrl + M wrap text (auto indent)|\
- Ctrl + R reset spelling results|\
- Ctrl + I insert template|\
- Ctrl + E open log file|\
- Ctrl + + toggle strip|\
- |\
- F7 / F12 check (or process)|\
- Ctrl + F7 / F12 process|\
- Alt + F7 / F12 process with jit|\
- shift + F7 / F12 launch
+ Ctrl + B check spelling|\
+ Ctrl + M wrap text (auto indent)|\
+ Ctrl + R reset spelling results|\
+ Ctrl + I insert template|\
+ Ctrl + E open log file|\
+ Ctrl + + toggle strip
command.name.21.$(file.patterns.context)=CTX Action List
command.subsystem.21.$(file.patterns.context)=3
diff --git a/doc/context/manuals/allkind/mkiv-publications.bib b/doc/context/manuals/allkind/mkiv-publications.bib
deleted file mode 100644
index e94f43202..000000000
--- a/doc/context/manuals/allkind/mkiv-publications.bib
+++ /dev/null
@@ -1,34 +0,0 @@
-@book{demo-001,
- author = "Hans Hagen",
- title = "\BIBTEX, the \CONTEXT\ way",
- year = "2013",
-}
-
-@book{demo-002,
- crossref = "demo-001",
- year = "2014",
-}
-
-@book{demo-003,
- author = "Hans Hagen and Ton Otten",
- title = "Typesetting education documents",
- year = "1996",
- comment = "a non-existing document",
-}
-
-@book{demo-004,
- author = "Luigi Scarso",
- title = "Designing high speed trains",
- year = "2021",
- comment = "still to be published",
-}
-
-@book{demo-005,
- author = "author",
- title = "title",
- year = "year",
- serial = "serial",
- doi = "doi",
- url = "url",
- pages = "pages"
-}
diff --git a/doc/context/manuals/allkind/mkiv-publications.tex b/doc/context/manuals/allkind/mkiv-publications.tex
deleted file mode 100644
index 3300a0f53..000000000
--- a/doc/context/manuals/allkind/mkiv-publications.tex
+++ /dev/null
@@ -1,1325 +0,0 @@
-% language=uk
-
-% \setupbtxrendering[continue=yes]
-% \btxfield{manipulator_a->manipulator_b->fieldname}
-
-% engine=luajittex
-
-% criterium: all + sorttype=cite => citex before rest
-% criterium: all + sorttype=database => database order
-% criterium: used
-%
-% numbering: label, short, indexinlist, indexused
-%
-% maybeyear
-%
-% \cite[data][whatever]
-
-% \showframe
-
-\usemodule[abr-02]
-\usemodule[set-11]
-
-\loadsetups[publications-en.xml] \enablemode[interface:setup:defaults]
-
-\setupbackend
- [export=yes,
- xhtml=yes,
- css=export-example.css]
-
-\setupexport
- [hyphen=yes,
- width=60em]
-
-% \input publ-tmp.mkiv
-
-\setupbodyfont
- [dejavu,10pt]
-
-\setuphead
- [chapter]
- [header=high,
- style=\bfc,
- color=darkmagenta]
-
-\setuplayout
- [topspace=2cm,
- bottomspace=1cm,
- header=0cm,
- width=middle,
- height=middle]
-
-\setupwhitespace
- [big]
-
-\setuptyping
- [color=darkmagenta]
-
-\setuptyping
- [keeptogether=yes]
-
-\setuptype
- [color=darkcyan]
-
-\setupfootertexts
- [pagenumber]
-
-\setupMPgraphics
- [mpy=\jobname.mpy]
-
-\setupinteraction
- [state=start,
- color=darkcyan,
- contrastcolor=darkyellow]
-
-\starttext
-
-\startMPpage
-
- StartPage ;
-
- % input "mkiv-publications.mpy" ;
-
- picture pic ; pic := image (
- path pth ; pth := ((0,0) for i=1 step 2 until 20 : -- (i,1) -- (i+1,0) endfor) ;
- for i=0 upto 9 : draw pth shifted (0,2*i) ; endfor ;
- ) ;
-
- % picture btx ; btx := textext("\ssbf BIBTEX") ;
- % picture ctx ; ctx := textext("\ssbf THE CONTEXT WAY") ;
- picture btx ; btx := image(graphictext("\ssbf BIBTEX") withfillcolor white) ;
- picture ctx ; ctx := image(graphictext("\ssbf THE CONTEXT WAY") withfillcolor white) ;
-
- pic := pic shifted - llcorner pic ;
- btx := btx shifted - llcorner btx ;
- ctx := ctx shifted - llcorner ctx ;
-
- pic := pic xysized (PaperWidth,PaperHeight) ;
- btx := btx xsized (2PaperWidth/3) shifted (.25PaperWidth,.15PaperHeight) ;
- ctx := ctx xsized (2PaperWidth/3) shifted (.25PaperWidth,.075PaperHeight) ;
-
- fill Page withcolor \MPcolor{darkcyan} ;
-
- draw pic withcolor \MPcolor{darkmagenta} ;
- draw btx withcolor \MPcolor{lightgray} ;
- draw ctx withcolor \MPcolor{lightgray} ;
-
- % draw boundingbox btx ;
- % draw boundingbox ctx ;
-
- StopPage ;
-
-\stopMPpage
-
-
-\startfrontmatter
-
-\starttitle[title=Contents]
- \placelist[chapter,section][color=black]
-\stoptitle
-
-\startchapter[title=Introduction]
-
-This manual describes how \MKIV\ handles bibliographies. Support in \CONTEXT\
-started in \MKII for \BIBTEX, using a module written by Taco Hoekwater. Later his
-code was adapted to \MKIV, but because users demanded more, I decided that
-reimplementing made more sense than patching. In particular, through the use of
-\LUA, the \BIBTEX\ data files can be easily directly parsed, thus liberating
-\CONTEXT\ from the dependency on an external \BIBTEX\ executable. The CritEd
-project (by Thomas Schmitz, Alan Braslau, Luigi Scarso and myself) was a good
-reason to undertake this rewrite. As part of that project users were invited to come
-up with ideas about extensions. Not all of them are (yet) honored, but the
-rewrite makes more functionality possible.
-
-This manual is dedicated to Taco Hoekwater who in a previous century implemented
-the first \BIBTEX\ module and saw it morph into a \TEX||\LUA\ hybrid in this
-century. The fact that there was support for bibliographies made it possible for
-users to use \CONTEXT\ in an academic environment, dominated by bibliographic
-databases encoded in the \BIBTEX\ format.
-
-\startlines
-Hans Hagen
-PRAGMA ADE
-Hasselt NL
-\stoplines
-
-\stopchapter
-
-\stopfrontmatter
-
-\startbodymatter
-
-\startchapter[title=The database]
-
-The \BIBTEX\ format is rather popular in the \TEX\ community and even with its
-shortcomings it will stay around for a while. Many publication websites can
-export and many tools are available to work with this database format. It is
-rather simple and looks a bit like \LUA\ tables. Unfortunately the content can be
-polluted with non|-|standardized \TEX\ commands which complicates pre- or
-postprocessing outside \TEX. In that sense a \BIBTEX\ database is often not coded
-neutrally. Some limitations, like the use of commands to encode accented
-characters, are rooted in the \ASCII\ world and can be bypassed by using \UTF\ instead
-(as handled somewhat in \LATEX\ through extensions such as \type {bibtex8}).
-
-The normal way to deal with a bibliography is to refer to entries using a unique
-tag or key. When a list of entries is typeset, this reference can be used for
-linking purposes. The typeset list can be processed and sorted using the \type
-{bibtex} program that converts the database into something more \TEX\ friendly (a
-\type {.bbl} file). I never used the program myself (nor bibliographies) so I
-will not go into too much detail here, if only because all I say can be wrong.
-
-In \CONTEXT\ we no longer use the \type {bibtex} program: we just use
-database files and deal with the necessary manipulations directly in \CONTEXT.
-One or more such databases can be used and combined with additional entries
-defined within the document. We can have several such datasets active at the same
-time.
-
-A \BIBTEX\ file looks like this:
-
-\starttyping
-@Article{sometag,
- author = "An Author and Another One",
- title = "A hopefully meaningful title",
- journal = maps,
- volume = "25",
- number = "2",
- pages = "5--9",
- month = mar,
- year = "2013",
- ISSN = "1234-5678",
-}
-\stoptyping
-
-Normally a value is given between quotes (or curly brackets) but single words are
-also OK (there is no real benefit in not using quotes, so we advise to always use
-them). There can be many more fields and instead of strings one can use
-predefined shortcuts. The title for example quite often contains \TEX\ macros.
-Some fields, like \type {pages} have funny characters such as the endash
-(typically as \type {--}) so we have a mixture of data and typesetting
-directives. If you are covering non||english references, you often need
-characters that are not in the \ASCII\ subset but \CONTEXT\ is quite happy with
-\UTF. If your database file uses old|-|fashioned \TEX\ accent commands then these
-will be internally converted automatically to \UTF. Commands (macros) are
-converted to an indirect call, which is quite robust.
-
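-For instance, the following two author fields (made|-|up names, just meant as
-an illustration) end up the same after loading, because the accent commands are
-replaced by their \UTF\ counterparts:
-
-\starttyping
-author = "Fran{\c c}ois M{\"u}ller",
-author = "François Müller",
-\stoptyping
-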
-The \BIBTEX\ files are loaded in memory as a \LUA\ table but can be converted to
-\XML\ so that we can access them in a more flexible way, but that is a subject
-for specialists.
-
-In the old \MKII\ setup we have two kinds of entries: the ones that come from the
-\BIBTEX\ run and user supplied ones. We no longer rely on \BIBTEX\ output but we
-do still support the user supplied definitions. These were in fact prepared in a
-way that suits the processing of \BIBTEX\ generated entries. The next variant
-reflects the \CONTEXT\ recoding of the old \BIBTEX\ output.
-
-\starttyping
-\startpublication[k=Hagen:Second,t=article,a={Hans Hagen},y=2013,s=HH01]
- \artauthor[]{Hans}[H.]{}{Hagen}
- \arttitle{Who knows more?}
- \journal{MyJournal}
- \pubyear{2013}
- \month{8}
- \volume{1}
- \issue{3}
- \issn{1234-5678}
- \pages{123--126}
-\stoppublication
-\stoptyping
-
-The split \type {\artauthor} fields are collapsed into a single \type {author}
-field as we deal with the splitting later when it gets parsed in \LUA. The \type
-{\artauthor} syntax is only kept around for backward compatibility with the
-previous use of \BIBTEX.
-
-In the new setup we support these variants as well:
-
-\starttyping
-\startpublication[k=Hagen:Third,t=article]
- \author{Hans Hagen}
- \title{Who knows who?}
- ...
-\stoppublication
-\stoptyping
-
-and
-
-\starttyping
-\startpublication[tag=Hagen:Third,category=article]
- \author{Hans Hagen}
- \title{Who knows who?}
- ...
-\stoppublication
-\stoptyping
-
-and
-
-\starttyping
-\startpublication
- \tag{Hagen:Third}
- \category{article}
- \author{Hans Hagen}
- \title{Who knows who?}
- ...
-\stoppublication
-\stoptyping
-
-Because internally the entries are \LUA\ tables, we also support loading of \LUA\
-based definitions:
-
-\starttyping
-return {
- ["Hagen:First"] = {
- author = "Hans Hagen",
- category = "article",
- issn = "1234-5678",
- issue = "3",
- journal = "MyJournal",
- month = "8",
- pages = "123--126",
- tag = "Hagen:First",
- title = "Who knows nothing?",
- volume = "1",
- year = "2013",
- },
-}
-\stoptyping
-
-Files set up like this can be loaded too. The following \XML\ input is rather
-close to this, and is also accepted as input.
-
-\starttyping
-<?xml version="2.0" standalone="yes" ?>
-<bibtex>
- <entry tag="Hagen:First" category="article">
- <field name="author">Hans Hagen</field>
- <field name="category">article</field>
- <field name="issn">1234-5678</field>
- <field name="issue">3</field>
- <field name="journal">MyJournal</field>
- <field name="month">8</field>
- <field name="pages">123--126</field>
- <field name="tag">Hagen:First</field>
- <field name="title">Who knows nothing?</field>
- <field name="volume">1</field>
- <field name="year">2013</field>
- </entry>
-</bibtex>
-\stoptyping
-
-{\em Todo: Add some remarks about loading EndNote and RIS formats, but first we
-need to complete the tag mapping (on Alan's plate).}
-
-So the user has a rather wide choice of formatting style for bibliography
-database files.
-
-You can load more data than you actually need. Only entries that are referred to
-explicitly through the \type {\cite} and \type {\nocite} commands will be shown
-in lists. We will cover these details later.
-
-\stopchapter
-
-\startchapter[title=Commands in entries]
-
-One unfortunate aspect commonly found in \BIBTEX\ files is that they often
-contain \TEX\ commands. Even worse is that there is no standard on what these
-commands can be and what they mean, at least not formally, as \BIBTEX\ is a
-program intended to be used with many variants of \TEX\ style: plain, \LATEX, and
-others. This means that we need to define our use of these typesetting commands.
-However, in most cases, they are just abbreviations or font switches and these
-are often known. Therefore, \CONTEXT\ will try to resolve them before reporting
-an issue. In the log file there is a list of commands that have been seen in the
-loaded databases. For instance, loading \type {tugboat.bib} gives a long list of
-commands of which we show a small set here:
-
-\starttyping
-publications > start used btx commands
-
-publications > standard CONTEXT 1 known
-publications > standard ConTeXt 4 known
-publications > standard TeXLive 3 KNOWN
-publications > standard eTeX 1 known
-publications > standard hbox 6 known
-publications > standard sltt 1 unknown
-
-publications > stop used btxcommands
-\stoptyping
-
-You can define unknown commands, or overload existing definitions in the
-following way:
-
-\starttyping
-\definebtxcommand\TUB {TUGboat}
-\definebtxcommand\sltt{\tt}
-\definebtxcommand\<#1>{\type{#1}}
-\stoptyping
-
-Unknown commands do not stall processing, but their names are then typeset in a
-mono|-|spaced font so they probably stand out for proofreading. You can
-access the commands with \type {\btxcommand {...}}, as in:
-
-\startbuffer
-commands like \btxcommand{MySpecialCommand} are handled in an indirect way
-\stopbuffer
-
-\typebuffer
-
-As this is an undefined command we get: \quotation {\inlinebuffer}.
-
-??
-
-\stopchapter
-
-\startchapter[title=Datasets]
-
-Normally in a document you will use only one bibliographic database, whether or
-not distributed over multiple files. Nevertheless we support multiple databases as well
-which is why we talk of datasets instead. A dataset is loaded with the \type
-{\usebtxdataset} command. Although it is currently not necessary to define a
-(default) dataset, it is best to do so because in the future we might provide more
-options. Here are some examples:
-
-\starttyping
-\definebtxdataset[standard]
-
-\usebtxdataset[standard][tugboat.bib]
-\usebtxdataset[standard][mtx-bibtex-output.xml]
-\usebtxdataset[standard][test-001-btx-standard.lua]
-\stoptyping
-
-These three suffixes are understood by the loader. Here the dataset has the name
-\type {standard} and the three database files are merged, where later entries having the
-same tag overload previous ones. Definitions in the document source (coded in \TEX\
-speak) are also added, and they are saved for successive runs. This means that if
-you load and define entries, they will already be known at the start of a next run, so that
-references to them are independent of when loading and definitions take place.
-
-\showsetup{setupbtxdataset}
-
-\showsetup{definebtxdataset}
-
-\showsetup{usebtxdataset}
-
-In this document we use some example databases, so let's load one of them now:
-
-\startbuffer
-\definebtxdataset[example]
-
-\usebtxdataset[example][mkiv-publications.bib]
-\stopbuffer
-
-\typebuffer \getbuffer
-
-You can ask for an overview of entries in a dataset with:
-
-\startbuffer
-\showbtxdatasetfields[example]
-\stopbuffer
-
-\typebuffer
-
-this gives:
-
-\getbuffer
-
-You can set the current active dataset with
-
-\starttyping
-\setbtxdataset[standard]
-\stoptyping
-
-but most publication|-|related commands accept optional arguments that denote the
-dataset and references to entries can be prefixed with a dataset identifier. More
-about that later.
-
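-For instance, with the \type {example} dataset defined earlier, a citation can
-carry the dataset prefix explicitly (just a sketch of the syntax that is
-discussed in the chapter on citations):
-
-\starttyping
-\cite[example::demo-001]
-\stoptyping
-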
-Sometimes you want to check a database. One way of doing that is the following:
-
-\startbuffer
-\definebtxdataset[check]
-
-\usebtxdataset[check][mkiv-publications-check.bib]
-
-\showbtxdatasetcompleteness[check]
-\stopbuffer
-
-\typebuffer
-
-The database looks like this:
-
-\typefile{mkiv-publications-check.bib}
-
-The completeness check shows (with green field names) the required fields and
-when one is missing this is indicated in red. In blue we show what gets
-inherited.
-
-\getbuffer
-
-\stopchapter
-
-\startchapter[title=Renderings]
-
-A list of publications can be rendered at any place in the document. A database
-can be much larger than needed for a document. The same is true for the fields
-that make up an entry. Here is the list of fields that are currently handled, but
-of course there can be additional ones:
-
-
-\startalignment[flushleft,verytolerant,nothyphenated]
-\startluacode
-local fields = publications.tracers.fields
-
-for i=1,#fields do
- if i > 1 then
- context(", ")
- end
- context.type(fields[i])
-end
-\stopluacode
-\stopalignment
-
-If you want to see what publications are in the database, the easiest way is to
-ask for a complete list:
-
-\startbuffer
-\definebtxrendering
- [example]
- [dataset=example,
- method=local,
- alternative=apa]
-\placelistofpublications % \placebtxrendering
- [example]
- [criterium=all]
-\stopbuffer
-
-\typebuffer
-
-This gives:
-
-\getbuffer
-
-The rendering itself is somewhat complex to set up because we have not only many
-different standards but also many fields that can be set up. This means that
-there are several commands involved. Often there is a prescribed style to render
-bibliographic descriptions, for example \type {apa}. A rendering is set up and
-defined with:
-
-\showsetup[setupbtxrendering]
-%showrootvalues[btxrendering]
-\showsetup[definebtxrendering]
-
-And a list of such descriptions is generated with:
-
-\showsetup[placebtxrendering]
-
-A dataset can have all kinds of entries:
-
-\startalignment[flushleft,verytolerant,nothyphenated]
-\startluacode
- local categories = publications.tracers.categories
-
- for i=1,#categories do
- if i > 1 then
- context(", ")
- end
- context.type(categories[i])
- end
-\stopluacode
-\stopalignment
-
-Each has its own rendering variant. To keep things simple we have their settings
-separated. However, these settings are shared for all rendering alternatives. In
-practice this is seldom a problem in a publication as only one rendering
-alternative will be active. If this is not sufficient, you can always group local
-settings in a setup and hook that into the specific rendering.
-
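-A minimal sketch of such a hook (the setup name and the parameter value are
-just placeholders):
-
-\starttyping
-\startsetups btx:example:local
-    \setupbtxlistvariant[etallimit=3]
-\stopsetups
-
-\setupbtxrendering
-  [example]
-  [setups=btx:example:local]
-\stoptyping
-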
-\showsetup[setupbtxlistvariant]
-%showrootvalues[btxlistvariant]
-\showsetup[definebtxlistvariant]
-
-Examples of list variants are:
-
-\startluacode
- local variants = publications.tracers.listvariants
-
- for i=1,#variants do
- context.showinstancevalues( { "btxlistvariant" }, { variants[i] })
- end
-\stopluacode
-
-The exact rendering of list entries is determined by the \type {alternative} key
-and defaults to \type {apa} which uses definitions from \type
-{publ-imp-apa.mkiv}. If you look at that file you will see that each category has
-its own setup. You may also notice that additional tests are needed to make sure
-that empty fields don't trigger separators and such.
-
-% \showsetup{setuplists}
-
-There are a couple of accessors and helpers to get the job done. When you want to
-fetch a field from the current entry you use \type {\btxfield}. In most cases
-you want to make sure this field has a value, for instance because you don't want
-fences or punctuation that belongs to a field.
-
-\starttyping
-\btxdoif {title} {
- \bold{\btxfield{title}},
-}
-\stoptyping
-
-There are three test macros:
-
-\starttyping
-\btxdoifelse{fieldname}{action when found}{action when not found}
-\btxdoif {fieldname}{action when found}
-\btxdoifnot {fieldname} {action when not found}
-\stoptyping
-
-An extra conditional is available for testing interactivity:
-
-\starttyping
-\btxdoifelseinteraction{action when true}{action when false}
-\stoptyping
-
-In addition there is also a conditional \type {\btxinteractive} which is
-more efficient, although in practice efficiency is not so important here.
-
-There are three commands to flush data:
-
-\starttabulate[|||] % Funny usage here! Could not tabulate work without
- % even specifying the number of columns?
-\NC \type {\btxfield}  \NC fetch an explicit field (e.g. \type {year})  \NC \NR
-\NC \type {\btxdetail} \NC fetch a derived field (e.g. \type {short}) \NC \NR
-\NC \type {\btxflush} \NC fetch a derived or explicit field \NC \NR
-\stoptabulate
-
-Normally you can use \type {\btxfield} or \type {\btxflush}; derived fields,
-just like the analyzed author fields, are flushed in a special way.
-
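-As a minimal sketch, using field names from the table above:
-
-\starttyping
-\btxfield{year}   % an explicit field
-\btxdetail{short} % a derived field
-\btxflush{year}   % a derived field when available, otherwise the explicit one
-\stoptyping
-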
-You can improve readability by using setups, for instance:
-
-\starttyping
-\btxdoifelse {author} {
- \btxsetup{btx:apa:author:yes}
-} {
- \btxsetup{btx:apa:author:nop}
-}
-\stoptyping
-
-Keep in mind that normally you don't need to mess with definitions like this
-because standard rendering styles are provided. These styles use a few helpers
-that inject symbols but also take care of leading and trailing spaces:
-
-\starttabulate[|||]
-\NC \type {\btxspace } \NC before \btxspace after \NC \NR
-\NC \type {\btxperiod } \NC before \btxperiod after \NC \NR
-\NC \type {\btxcomma } \NC before \btxcomma after \NC \NR
-\NC \type {\btxlparent } \NC before \btxlparent after \NC \NR
-\NC \type {\btxrparent } \NC before \btxrparent after \NC \NR
-\NC \type {\btxlbracket} \NC before \btxlbracket after \NC \NR
-\NC \type {\btxrbracket} \NC before \btxrbracket after \NC \NR
-\stoptabulate
-
-So, the previous example setup can be rewritten as:
-
-\starttyping
-\btxdoif {title} {
- \bold{\btxfield{title}}
- \btxcomma
-}
-\stoptyping
-
-There is a special command for rendering a (combination of) authors:
-
-\starttyping
-\btxflushauthor{author}
-\btxflushauthor{editor}
-\btxflushauthor[inverted]{editor}
-\stoptyping
-
-Instead of the last one you can also use:
-
-\starttyping
-\btxflushauthorinverted{editor}
-\stoptyping
-
-You can use a (configurable) default or pass directives. Valid directives are:
-
-\starttabulate
-\NC \bf conversion \NC \bf rendering \NC \NR
-\HL
-\NC \type{inverted} \NC the Frog jr, Kermit \NC \NR
-\NC \type{invertedshort} \NC the Frog jr, K \NC \NR
-\NC \type{normal} \NC Kermit, the Frog, jr \NC \NR
-\NC \type{normalshort} \NC K, the Frog, jr \NC \NR
-\stoptabulate
-
-The list itself is not a list in the sense of a regular \CONTEXT\ structure related
-list. We do use the list mechanism to keep track of used entries but that is mostly
-because we can then reuse filtering mechanisms. The actual rendering of a reference
-and entry runs on top of so called constructions (other examples of constructions are
-descriptions, enumerations and notes).
-
-\showsetup[setupbtxlist]
-
-You need to be aware of what command is used to achieve the desired result. For instance,
-in order to put parentheses around a number reference you say:
-
-\starttyping
-\setupbtxlistvariant
- [num]
- [left=(,
- right=)]
-\stoptyping
-
-If you want automated width calculations, the following does the trick:
-
-\starttyping
-\setupbtxrendering
- [standard]
- [width=auto]
-\stoptyping
-
-but if you want to control it yourself you say something like:
-
-\starttyping
-\setupbtxrendering
- [width=none]
-
-\setupbtxlist
- [standard]
- [width=3cm,
- distance=\emwidth,
- color=red,
- headcolor=blue,
- headalign=flushright]
-\stoptyping
-
-In most cases the defaults will work out fine.
-
-\stopchapter
-
-\startchapter[title=Citations]
-
-Citations are references to bibliographic entries that normally show up in lists
-someplace in the document: at the end of a chapter, in an appendix, at the end of
-an article, etc. We discussed the rendering of these lists in the previous chapter.
-A citation is normally pretty short as its main purpose is to refer uniquely to a more
-detailed description. But, there are several ways to refer, which is why the citation
-subsystem is configurable and extensible. Just look at the following commands:
-
-\startbuffer
-\cite[author][example::demo-003]
-\cite[authoryear][example::demo-003]
-\cite[authoryears][example::demo-003]
-\cite[author][example::demo-003,demo-004]
-\cite[authoryear][example::demo-003,demo-004]
-\cite[authoryears][example::demo-003,demo-004]
-\cite[author][example::demo-004,demo-003]
-\cite[authoryear][example::demo-004,demo-003]
-\cite[authoryears][example::demo-004,demo-003]
-\stopbuffer
-
-\typebuffer
-
-\startlines \getbuffer \stoplines
-
-The first argument is optional.
-% What is the default? How can one set this up?
-
-\showsetup[cite]
-
-You can tune the way a citation shows up:
-
-\startbuffer
-\setupbtxcitevariant[author] [sorttype=author,color=darkyellow]
-\setupbtxcitevariant[authoryear] [sorttype=author,color=darkyellow]
-\setupbtxcitevariant[authoryears][sorttype=author,color=darkyellow]
-
-\cite[author][example::demo-004,demo-003]
-\cite[authoryear][example::demo-004,demo-003]
-\cite[authoryears][example::demo-004,demo-003]
-\stopbuffer
-
-\typebuffer
-
-Here we sort the authors and color the citation:
-
-\startlines \getbuffer \stoplines
-
-For reasons of backward compatibility the \type {\cite} command is a bit picky
-about spaces between the two arguments, of which the first is optional. This is
-a consequence of allowing its use with the key specified between curly brackets
-as is the traditional practice. (We do encourage users to adopt the more
-coherent \CONTEXT\ syntax by using square brackets for keywords and reserving
-curly brackets to regroup text to be typeset.)
-% Just how is it picky?
-
-The \type {\citation} command is synonymous but is more flexible with respect to
-spacing of its arguments:
-
-\starttyping
-\citation[author] [example::demo-004,demo-003]
-\citation[authoryear] [example::demo-004,demo-003]
-\citation[authoryears][example::demo-004,demo-003]
-\stoptyping
-
-% The first argument of cite and citation is optional. What is the default and how does one set it?
-
-There is a whole bunch of cite options and more can be easily defined.
-
-\startluacode
-local variants = publications.tracers.citevariants
-
-context.starttabulate { "|l|p|" }
- context.NC() context.bold("key")
- context.NC() context.bold("rendering")
- context.NC() context.NR() context.FL()
- for i=1,#variants do
- local variant = variants[i]
- context.NC() context.type(variant)
- context.NC() context.citation( { variant }, { "example::demo-005" })
- context.NC() context.NR()
- end
-context.stoptabulate()
-\stopluacode
-
-Because we are dealing with database input and because we generally need to
-manipulate entries, much of the work is delegated to \LUA. This makes it easier
-to maintain and extend the code. Of course \TEX\ still does the rendering. The
-typographic details are controlled by parameters but not all are used in all
-variants. As with most \CONTEXT\ commands, it starts out with a general setup
-command:
-
-\showsetup[setupbtxcitevariant]
-
-On top of that we can define instances that inherit either from a given parent or
-from the topmost setup.
-
-\showsetup[definebtxcitevariant]
-
-% The default values are:
-
-% \showrootvalues[btxcitevariant]
-
-But, specific variants can have them overloaded:
-
-% \showinstancevalues[setupbtxcitevariant][author]
-% \showinstancevalues[setupbtxcitevariant][authornum]
-
-\startluacode
- local variants = publications.tracers.citevariants
-
- for i=1,#variants do
- context.showinstancevalues( { "btxcitevariant" }, { variants[i] })
- end
-\stopluacode
-
-A citation variant is defined in several steps and if you really want to know
-the dirty details, you should look into the \type {publ-imp-*.mkiv} files. Here
-we stick to the concept.
-
-\starttyping
-\startsetups btx:cite:author
- \btxcitevariant{author}
-\stopsetups
-\stoptyping
-
-You can overload such setups if needed, but that only makes sense when you cannot
-configure the rendering with parameters. The \type {\btxcitevariant} command is
-one of the built|-|in accessors and it calls out to \LUA\ where more complex
-manipulation takes place if needed. If no manipulation is known, the field with
-the same name (if found) will be flushed. A command like \type {\btxcitevariant}
-assumes that a dataset and specific tag has been set. This is normally done in
-the wrapper macros, like \type {\cite}. For special purposes you can use these
-commands
-
-\starttyping
-\setbtxdataset[example]
-\setbtxentry[hh2013]
-\stoptyping
-
-But don't expect too much support for such low level rendering control.
-
-Unless you use \type {criterium=all} only publications that are cited will end up
-in the lists. You can force a citation into a list using \type {\usecitation}, for
-example:
-
-\starttyping
-\usecitation[example::demo-004,demo-003]
-\stoptyping
-
-This command has two synonyms: \type {\nocite} and \type {\nocitation} so you can
-choose whatever fits you best.
-
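-For instance, the following forces the \type {demo-005} entry of the \type
-{example} dataset into the list without a visible citation:
-
-\starttyping
-\nocite[example::demo-005]
-\stoptyping
-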
-\showsetup[nocite]
-
-\stopchapter
-
-\startchapter[title=The \LUA\ view]
-
-Because we manage data at the \LUA\ end it is tempting to access it there for
-other purposes. This is fine as long as you keep in mind that aspects of the
-implementation may change over time, although this is unlikely once the modules
-become stable.
-
-The entries are collected in datasets and each set has a unique name. In this
-document we have the set named \type {example}. A dataset table has several
-fields, and probably the one of most interest is the \type {luadata} field. Each
-entry in this table describes a publication:
-
-\startluacode
- context.tocontext(publications.datasets.example.luadata["demo-001"])
-\stopluacode
-
-This is \type {publications.datasets.example.luadata["demo-001"]}. There can be
-a companion entry in the parallel \type {details} table.
-
-\startluacode
- context.tocontext(publications.datasets.example.details["demo-001"])
-\stopluacode
-
-These details are accessed as \type
-{publications.datasets.example.details["demo-001"]} and by using a separate table
-we can overload fields in the original entry without losing the original.
-
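-As a sketch, here we overload the derived \type {short} tag of an entry
-(whether a later rendering picks up such a manual overload depends on when it
-is done, so take this as an illustration only):
-
-\starttyping
-\startluacode
-local dataset = publications.datasets.example
-local details = dataset.details["demo-001"] or { }
-details.short = "HH13" -- overloads the derived tag, luadata stays untouched
-dataset.details["demo-001"] = details
-\stopluacode
-\stoptyping
-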
-You can loop over the entries using regular \LUA\ code combined with \MKIV\
-helpers:
-
-\startbuffer
-local dataset = publications.datasets.example
-
-context.starttabulate { "|l|l|l|" }
-for tag, entry in table.sortedhash(dataset.luadata) do
- local detail = dataset.details[tag] or { }
- context.NC() context.type(tag)
- context.NC() context(detail.short)
- context.NC() context(entry.title)
- context.NC() context.NR()
-end
-context.stoptabulate()
-\stopbuffer
-
-\typebuffer
-
-This results in:
-
-\ctxluabuffer
-
-You can manipulate a dataset after loading. Of course this assumes that you know
-what kind of content you have and what you need for rendering. As example we
-load a small dataset.
-
-\startbuffer
-\definebtxdataset[drumming]
-\usebtxdataset[drumming][mkiv-publications.lua]
-\stopbuffer
-
-\typebuffer \getbuffer
-
-Because we're going to do some \LUA, we could also have loaded the dataset
-with:
-
-\starttyping
-publications.load("drumming","mkiv-publications.lua","lua")
-\stoptyping
-
-The dataset has three entries:
-
-\typefile{mkiv-publications.lua}
-
-As you can see, we can have a subtitle. We will combine the title and subtitle
-into one:
-
-\startbuffer
-\startluacode
-for tag, entry in next, publications.datasets.drumming.luadata do
- if entry.subtitle then
- if entry.title then
- entry.title = entry.title .. ", " .. entry.subtitle
- else
- entry.title = entry.subtitle
- end
- entry.subtitle = nil
- logs.report("btx","combining title and subtitle of entry tagged %a",tag)
- end
-end
-\stopluacode
-\stopbuffer
-
-\typebuffer \getbuffer
-
-We can now typeset the entries with:
-
-\startbuffer
-\definebtxrendering[drumming][dataset=drumming,method=dataset]
-\placebtxrendering[drumming]
-\stopbuffer
-
-\typebuffer
-
-Because we just want to show the entries, and have no citations that force them
-to be shown, we have to set the \type {method} to \type {dataset}. \footnote {Gavin
-Harrison is in my opinion one of the most creative, diverse and interesting
-drummers of our time. It's also fascinating to watch him play and a welcome
-distraction from writing code and manuals.}
-
-\blank \getbuffer \blank
-
-\stopchapter
-
-\startchapter[title=The \XML\ view]
-
-The \type {luadata} table can be converted into an \XML\ representation. This is
-a follow up on earlier experiments with an \XML|-|only approach. I decided in the end
-to stick to a \LUA\ approach and provide some simple \XML\ support in addition.
-
-Once a dataset is accessible as an \XML\ tree, you can use the regular \type {\xml...}
-commands. We start with loading a dataset, in this case from just one file.
-
-\startbuffer
-\usebtxdataset[tugboat][tugboat.bib]
-\stopbuffer
-
-\typebuffer \getbuffer
-
-The dataset has to be converted to \XML:
-
-\startbuffer
-\convertbtxdatasettoxml[tugboat]
-\stopbuffer
-
-\typebuffer \getbuffer
-
-The tree is now accessible by its root reference \type {btx:tugboat}. If we want simple
-field access we can use a few setups:
-
-\startbuffer
-\startxmlsetups btx:initialize
- \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
- \xmlmain{#1}
-\stopxmlsetups
-
-\startxmlsetups btx:field
- \xmlflushcontext{#1}
-\stopxmlsetups
-
-\xmlsetup{btx:tugboat}{btx:initialize}
-\stopbuffer
-
-\typebuffer \getbuffer
-
-The two setups are predefined in the core already, but you might want to change them. They are
-applied in, for instance:
-
-\startbuffer
-\starttabulate[|||]
- \NC \type {tag} \NC \xmlfirst {btx:tugboat}
- {/bibtex/entry[string.find(@tag,'Hagen')]/attribute('tag')}
- \NC \NR
- \NC \type {title} \NC \xmlfirst {btx:tugboat}
- {/bibtex/entry[string.find(@tag,'Hagen')]/field[@name='title']}
- \NC \NR
-\stoptabulate
-\stopbuffer
-
-\typebuffer \getbuffer
-
-\startbuffer
-\startxmlsetups btx:demo
- \xmlcommand
- {#1}
- {/bibtex/entry[string.find(@tag,'Hagen')][1]}{btx:table}
-\stopxmlsetups
-
-\startxmlsetups btx:table
-\starttabulate[|||]
- \NC \type {tag} \NC \xmlatt{#1}{tag} \NC \NR
- \NC \type {title} \NC \xmlfirst{#1}{/field[@name='title']} \NC \NR
-\stoptabulate
-\stopxmlsetups
-
-\xmlsetup{btx:tugboat}{btx:demo}
-\stopbuffer
-
-\typebuffer \getbuffer
-
-Here is another example:
-
-\startbuffer
-\startxmlsetups btx:row
- \NC \xmlatt{#1}{tag}
- \NC \xmlfirst{#1}{/field[@name='title']}
- \NC \NR
-\stopxmlsetups
-
-\startxmlsetups btx:demo
- \xmlfilter {#1} {
- /bibtex
- /entry[@category='article']
- /field[@name='author' and (find(text(),'Knuth') or find(text(),'DEK'))]
- /../command(btx:row)
- }
-\stopxmlsetups
-
-\starttabulate[|||]
- \xmlsetup{btx:tugboat}{btx:demo}
-\stoptabulate
-\stopbuffer
-
-\typebuffer \getbuffer
-
-A more extensive example is the following. Of course this assumes that you
-know what \XML\ support mechanisms and macros are available.
-
-\startbuffer
-\startxmlsetups btx:getkeys
- \xmladdsortentry{btx}{#1}{\xmlfilter{#1}{/field[@name='author']/text()}}
- \xmladdsortentry{btx}{#1}{\xmlfilter{#1}{/field[@name='year' ]/text()}}
- \xmladdsortentry{btx}{#1}{\xmlatt{#1}{tag}}
-\stopxmlsetups
-
-\startxmlsetups btx:sorter
- \xmlresetsorter{btx}
- % \xmlfilter{#1}{entry/command(btx:getkeys)}
- \xmlfilter{#1}{
- /bibtex
- /entry[@category='article']
- /field[@name='author' and find(text(),'Knuth')]
- /../command(btx:getkeys)}
- \xmlsortentries{btx}
- \starttabulate[||||]
- \xmlflushsorter{btx}{btx:entry:flush}
- \stoptabulate
-\stopxmlsetups
-
-\startxmlsetups btx:entry:flush
- \NC \xmlfilter{#1}{/field[@name='year' ]/context()}
- \NC \xmlatt{#1}{tag}
- \NC \xmlfilter{#1}{/field[@name='author']/context()}
- \NC \NR
-\stopxmlsetups
-
-\xmlsetup{btx:tugboat}{btx:sorter}
-\stopbuffer
-
-\typebuffer \getbuffer
-
-The original data is stored in a \LUA\ table, hashed by tag. Starting with \LUA\ 5.2
-each run of \LUA\ gets a different ordering of such a hash. In older versions, when you
-looped over a hash, the order was undefined, but the same as long as you used the same
-binary. This had the advantage that successive runs, something we often have in document
-processing gave consistent results. In today's \LUA\ we need to do much more sorting of
-hashes before we loop, especially when we save multi||pass data. It is for this reason
-that the \XML\ tree is sorted by hash key by default. That way lookups (especially
-the first of a set) give consistent outcomes.
-
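-At the \LUA\ end you get the same deterministic behaviour by looping with the
-sorted hash helper that was used in an earlier example; a minimal sketch:
-
-\starttyping
-\startluacode
-local luadata = publications.datasets.example.luadata
-for tag, entry in table.sortedhash(luadata) do
-    -- entries are visited in a reproducible order
-    context.type(tag) context.par()
-end
-\stopluacode
-\stoptyping
-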
-\stopchapter
-
-\startchapter[title=Standards]
-
-The rendering of bibliographic entries is often standardized and prescribed by
-the publisher. If you submit an article to a journal, normally it will be
-reformatted (or even re|-|keyed) and the rendering will happen at the publishers
-end. In that case it may not matter how entries were rendered when writing the
-publication, because the publisher will do it his or her way.
-This means that most users probably will stick to the standard \APA\ rules and for
-them we provide some configuration. Because we use setups it is easy to overload
-specifics. If you really want to tweak, best look in the files that deal with it.
-
-Many standards exist and support for other renderings may be added to the core.
-Interested users are invited to develop and to test alternate standard renderings
-according to their needs.
-
-Todo: maybe a list of categories and fields.
-
-\stopchapter
-
-\startchapter[title=Cleaning up]
-
-Although the \BIBTEX\ format is reasonably well defined, in practice there are
-many ways to organize the data. For instance, one can use predefined string
-constants that get used (whether or not combined with other strings) later on. A string
-can be enclosed in curly braces or double quotes. The strings can contain \TEX\ commands
-but these are not standardized. The databases often have somewhat complex
-ways to deal with special characters and the use of braces in their definition is also
-not normalized.
-
-The most complex to deal with are the fields that contain names of people. At some point it
-might be needed to split a combination of names into individual ones that then get split into
-title, first name, optional inbetweens, surname(s) and additional: \type {Prof. Dr. Alfred
-B. C. von Kwik Kwak Jr. II and P. Q. Olet} is just one example of this. The convention seems
-to be not to use commas but \type {and} to separate names (often each name will be specified
-as lastname, firstname).
-
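-In a database such a field could look as follows (a made|-|up entry, only meant
-to illustrate the separators):
-
-\starttyping
-author = "von Kwik Kwak, Jr. II, Alfred B. C. and Olet, P. Q.",
-\stoptyping
-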
-We don't see it as a challenge nor as a duty to support all kinds of messy definitions. Of
-course we try to be somewhat tolerant, but you will be sure to get better results if you
-use nicely set up, consistent databases.
-
-Todo: maybe some examples of bad.
-
-\stopchapter
-
-\startchapter[title=Transition]
-
-In the original bibliography support module usage was as follows (example taken
-from the contextgarden wiki):
-
-\starttyping
-% engine=pdftex
-
-\usemodule[bib]
-\usemodule[bibltx]
-
-\setupbibtex
- [database=xampl]
-
-\setuppublications
- [numbering=yes]
-
-\starttext
- As \cite [article-full] already indicated, bibtex is a \LATEX||centric
- program.
-
- \completepublications
-\stoptext
-\stoptyping
-
-For \MKIV\ the modules were partly rewritten and ended up in the core so the two
-\type {\usemodule} commands were no longer needed. The overhead associated with the
-automatic loading of the bibliography macros can be neglected these days, so
-standardized modules such as \type {bib} are all being moved to the core and do
-not need to be explicitly loaded.
-
-The first \type {\setupbibtex} command in this example is needed to bootstrap
-the process: it tells what database has to be processed by \BIBTEX\ between
-runs. The second \type {\setuppublications} command is optional. Each citation
-(tagged with \type {\cite}) ends up in the list of publications.
-
-In the new approach we no longer use \BIBTEX\ so we don't need to set up \BIBTEX.
-Instead we define dataset(s). We also no longer set up publications with one
-command, but have split that up into rendering-, list-, and cite|-|variants. The
-basic \type {\cite} command remains. The above example becomes:
-
-\starttyping
-\definebtxdataset
- [document]
-
-\usebtxdataset
- [document]
- [mybibfile.bib]
-
-\definebtxrendering
- [document]
-
-\setupbtxrendering
- [document]
- [numbering=yes]
-
-\starttext
- As \cite [article-full] already indicated, bibtex is a \LATEX||centric
- program.
-
- \completebtxrendering[document]
-\stoptext
-\stoptyping
-
-So, we have a few more commands to set up things. If you intend to use just a
-single dataset and rendering, the above preamble can be simplified to:
-
-\starttyping
-\usebtxdataset
- [mybibfile.bib]
-
-\setupbtxrendering
- [numbering=yes]
-\stoptyping
-
-But keep in mind that compared to the old \MKII\ derived method we have moved
-some of the options to the rendering, list and cite setup variants.
-
-Another difference is now the use of lists. When you define a rendering, you
-also define a list. However, all entries are collected in a common list tagged
-\type {btx}. Although you will normally configure a rendering you can still set
-some properties of lists, but in that case you need to prefix the list
-identifier. In the case of the above example this is \type {btx:document}.
-
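-A minimal sketch (the key shown is just an example of a regular list
-parameter):
-
-\starttyping
-\setuplist
-  [btx:document]
-  [pagenumber=no]
-\stoptyping
-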
-\stopchapter
-
-\startchapter[title=\MLBIBTEX]
-
-Todo: how to plug in \MLBIBTEX\ for sorting and other advanced operations.
-
-\stopchapter
-
-\startchapter[title=Extensions]
-
-As \TEX\ and \LUA\ are both open and accessible in \CONTEXT\ it is possible to
-extend the functionality of the bibliography related code. For instance, you can add
-extra loaders.
-
-\starttyping
-function publications.loaders.myformat(dataset,filename)
- local t = { }
- -- Load data from 'filename' and convert it to a Lua table 't' with
-    -- the key as hash entry and fields conforming to the luadata table
-    -- format.
-    publications.loaders.lua(dataset,t)
-end
-\stoptyping
-
-This then permits loading a database (into a dataset) with the command:
-
-\starttyping
-\usebtxdataset[standard][myfile.myformat]
-\stoptyping
-
-The \type {myformat} suffix is recognized automatically. If you want to use another
-suffix, you can do this:
-
-\starttyping
-\usebtxdataset[standard][myformat::myfile.txt]
-\stoptyping
-
-\stopchapter
-
-\startchapter[title=Notes]
-
-The move from external \BIBTEX\ processing to internal processing has the
-advantage that we stay within the same run. In the traditional approach we had
-roughly the following steps:
-
-\startitemize[packed]
-\startitem in the first run information is collected and written to a file \stopitem
-\startitem after that run the \BIBTEX\ program converts that file to another one \stopitem
-\startitem successive runs use that data for references and producing lists \stopitem
-\stopitemize
-
-In the \MKIV\ approach the bibliographic database is loaded in memory each run
-and processing also happens each run. On paper this looks less efficient but as
-\LUA\ is quite fast, in practice performance is much better.
-
-Probably most demanding is the treatment of authors as we have to analyze names,
-split multiple authors and reassemble firstnames, vons, surnames and juniors.
-When we sort by author, sorting vectors have to be made, which also has a penalty.
-However, in practice the user will not notice a performance degradation. We did
-some tests with a list of 500.000 authors, sorted them and typeset them as a list
-(producing some 5400 dense pages in a small font and with small margins). This is
-typically one of those cases where using \LUAJITTEX\ saves quite some time. On my
-machine it took just over 100 seconds to get this done. Unfortunately not all
-operating systems performed equally well: 32 bit versions worked fine, but 64 bit
-\LINUX\ either crashed (stalled) the machine or ran out of memory rather fast,
-while \MACOSX\ and \WINDOWS\ performed fine. In practice you will never run into
-this, unless you produce massive amounts of bibliographic entries. \LUAJIT\ has
-some benefits but also some drawbacks.
-
-\stopchapter
-
-\stopbodymatter
-
-\stoptext
-
-Todo:
-
-\setuplabeltext[en][reprint=reprint]
-\setuplabeltext[de][reprint=Nachdruck]
-
-note = {\labeltext{reprint} 2004}
-
diff --git a/doc/context/manuals/allkind/publications-en.xml b/doc/context/manuals/allkind/publications-en.xml
deleted file mode 100644
index ea577ccf4..000000000
--- a/doc/context/manuals/allkind/publications-en.xml
+++ /dev/null
@@ -1,369 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!-- bibliographies -->
-
-<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="publications" language="en" version="2013.12.22">
-
- <!-- datasets -->
-
- <cd:command name="setupbtxdataset" file="publ-ini.mkiv" category="publications" hash="btxdataset">
- <cd:sequence>
- <cd:string value="setupbtxdataset"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1" optional="yes">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:assignments n="2" optional="yes">
- <!-- todo -->
- </cd:assignments>
- </cd:arguments>
- </cd:command>
-
- <cd:command name="definebtxdataset" file="publ-ini.mkiv" category="publications" hash="btxdataset">
- <cd:sequence>
- <cd:string value="definebtxdataset"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:assignments n="2" optional="yes">
- <cd:inherit name="setupbtxdataset" n="2"/>
- </cd:assignments>
- </cd:arguments>
- </cd:command>
-
- <cd:command name="usebtxdataset" file="publ-ini.mkiv" category="publications" hash="btxdataset">
- <cd:sequence>
- <cd:string value="usebtxdataset"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:keywords n="2">
- <cd:constant type="cd:file"/>
- </cd:keywords>
- </cd:arguments>
- </cd:command>
-
- <!-- rendering -->
-
- <cd:command name="setupbtxrendering" file="publ-ini.mkiv" category="publications" hash="btxrendering">
- <cd:sequence>
- <cd:string value="setupbtxrendering"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1" optional="yes">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:assignments n="2">
- <cd:parameter name="alternative">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="dataset">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="setups">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="method">
- <cd:constant type="local"/>
- <cd:constant type="global"/>
- <cd:constant type="none"/>
- <cd:constant type="force"/>
- </cd:parameter>
- <cd:parameter name="sorttype">
- <cd:constant type="short"/>
- <cd:constant type="reference"/>
- <cd:constant type="dataset"/>
- <cd:constant type="default"/>
- </cd:parameter>
- <cd:parameter name="criterium">
- <cd:constant type="cd:text"/> <!-- todo -->
- </cd:parameter>
- <cd:parameter name="refcommand">
- <cd:constant type="cd:text"/> <!-- todo -->
- </cd:parameter>
- <cd:parameter name="numbering">
- <cd:constant type="yes"/>
- <cd:constant type="cite"/>
- </cd:parameter>
- <cd:parameter name="width">
- <cd:constant type="cd:dimension"/>
- <cd:constant type="auto"/>
- </cd:parameter>
- <cd:parameter name="distance">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="continue">
- <cd:constant type="yes"/>
- <cd:constant type="no"/>
- </cd:parameter>
- </cd:assignments>
- </cd:arguments>
- </cd:command>
-
- <cd:command name="definebtxrendering" file="publ-ini.mkiv" category="publications" hash="btxrendering">
- <cd:sequence>
- <cd:string value="definebtxrendering"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:keywords n="2" optional="yes">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:assignments n="3" optional="yes">
- <cd:inherit name="setupbtxrendering" n="2"/>
- </cd:assignments>
- </cd:arguments>
- </cd:command>
-
- <cd:command name="placebtxrendering" file="publ-ini.mkiv" category="publications" hash="btxrendering">
- <cd:sequence>
- <cd:string value="placebtxrendering"/>
- </cd:sequence>
- </cd:command>
-
- <!-- lists -->
-
- <cd:command name="setupbtxlistvariant" file="publ-ini.mkiv" category="publications" hash="btxlistvariant">
- <cd:sequence>
- <cd:string value="setupbtxlistvariant"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1" optional="yes">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:assignments n="2">
- <cd:parameter name="namesep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="lastnamesep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="finalnamesep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="firstnamesep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="juniorsep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="vonsep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="surnamesep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="surnamejuniorsep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="juniorjuniorsep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="surnamefirstnamesep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="surnameinitialsep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="etallimit">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="etaldisplay">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="etaltext">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="monthconversion">
- <cd:constant type="number"/>
- <cd:constant type="month"/>
- <cd:constant type="month:mnem"/>
- </cd:parameter>
- <cd:parameter name="authorconversion">
- <cd:constant type="normal"/>
- <cd:constant type="inverted"/>
- <cd:constant type="normalshort"/>
- <cd:constant type="invertedshort"/>
- </cd:parameter>
- </cd:assignments>
- </cd:arguments>
- </cd:command>
-
- <cd:command name="definebtxlistvariant" file="publ-ini.mkiv" category="publications" hash="btxlistvariant">
- <cd:sequence>
- <cd:string value="definebtxlistvariant"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- </cd:arguments>
- </cd:command>
-
- <!-- variants -->
-
- <cd:command name="setupbtxcitevariant" file="publ-ini.mkiv" category="publications" hash="btxcitevariant">
- <cd:sequence>
- <cd:string value="setupbtxcitevariant"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1" optional="yes">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:assignments n="2">
- <cd:parameter name="alternative">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="setups">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="interaction">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="andtext">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="otherstext">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="compress">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="putsep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="lastputsep">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="inbetween">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="right">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="middle">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="left">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- </cd:assignments>
- </cd:arguments>
- </cd:command>
-
- <cd:command name="definebtxcitevariant" file="publ-ini.mkiv" category="publications" hash="btxcitevariant">
- <cd:sequence>
- <cd:string value="definebtxcitevariant"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:keywords n="2" optional="yes">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:assignments n="3" optional="yes">
- <cd:inherit name="setupbtxvariant" n="3"/>
- </cd:assignments>
- </cd:arguments>
- </cd:command>
-
- <!-- refering -->
-
- <cd:command name="cite" file="publ-ini.mkiv" category="publications">
- <cd:sequence>
- <cd:string value="cite"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1" optional="yes">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- <cd:keywords n="2">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- </cd:arguments>
- </cd:command>
-
- <cd:command name="nocite" file="publ-ini.mkiv" category="publications">
- <cd:sequence>
- <cd:string value="nocite"/>
- </cd:sequence>
- <cd:arguments>
- <cd:keywords n="1">
- <cd:constant type="cd:name"/>
- </cd:keywords>
- </cd:arguments>
- </cd:command>
-
- <!-- list entries -->
-
-
- <cd:command name="setupbtxlist" file="publ-ini.mkiv" category="publications" hash="btxlist">
- <cd:sequence>
- <cd:string value="setupbtxlist"/>
- </cd:sequence>
- <cd:arguments>
- <cd:assignments n="1">
- <cd:parameter name="alternative">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="style">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="color">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="headstyle">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="headcolor">
- <cd:constant type="cd:text"/>
- </cd:parameter>
- <cd:parameter name="width">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="distance">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="hang">
- <cd:constant type="cd:number"/>
- </cd:parameter>
- <cd:parameter name="align">
- <cd:resolve name="align"/>
- </cd:parameter>
- <cd:parameter name="headalign">
- <cd:resolve name="symalign"/>
- </cd:parameter>
- <cd:parameter name="margin">
- <cd:constant type="cd:yes"/>
- <cd:constant type="cd:no"/>
- </cd:parameter>
- <cd:parameter name="before">
- <cd:constant type="cd:command" default="\blank"/>
- </cd:parameter>
- <cd:parameter name="inbetween">
- <cd:constant type="cd:command"/>
- </cd:parameter>
- <cd:parameter name="after">
- <cd:constant type="cd:command" default="\blank"/>
- </cd:parameter>
- <cd:parameter name="display">
- <cd:constant type="cd:yes"/>
- <cd:constant type="cd:no"/>
- </cd:parameter>
- <cd:parameter name="command">
- <cd:constant type="cd:command"/>
- </cd:parameter>
- </cd:assignments>
- </cd:arguments>
- </cd:command>
-
-
-</cd:interface>
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.html b/doc/context/scripts/mkiv/mtx-bibtex.html
deleted file mode 100644
index ba1591b4b..000000000
--- a/doc/context/scripts/mkiv/mtx-bibtex.html
+++ /dev/null
@@ -1,53 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-
-<!-- compare with lmx framework variant -->
-
-<!--
- filename : context-base.xml
- comment : companion to mtx-server-ctx-startup.tex
- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
- copyright: PRAGMA ADE / ConTeXt Development Team
- license : see context related readme files
--->
-
-<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
- <head>
- <title>bibtex helpers</title>
- <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
- <style type="text/css">
- body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
- </style>
- <style type="text/css">
- </style>
- </head>
- <body>
- <div id="top"> <div id="top-one">
- <div id="top-two">bibtex helpers </div>
- </div>
- </div>
- <div id="bottom"> <div id="bottom-one">
- <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
- </div>
- </div>
- <div id="left"></div>
- <div id="right"></div>
- <div id="main">
- <div id='main-settings'>
- <h1>Command line options</h1>
-<table>
- <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
- <tr><th/><td/><td/></tr>
- <tr><th>--toxml</th><td></td><td>convert bibtex database(s) to xml</td></tr>
- <tr><th>--tolua</th><td></td><td>convert bibtex database(s) to lua</td></tr>
- </table>
-<br/>
-<h1>Example</h1>
-<tt>mtxrun --script bibtex --tolua bibl-001.bib</tt>
-<br/><tt>mtxrun --script bibtex --tolua --simple bibl-001.bib</tt>
-<br/><tt>mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml</tt>
-<br/><br/> </div>
- </div>
- </body>
- </html>
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.man b/doc/context/scripts/mkiv/mtx-bibtex.man
deleted file mode 100644
index cedf41b8b..000000000
--- a/doc/context/scripts/mkiv/mtx-bibtex.man
+++ /dev/null
@@ -1,30 +0,0 @@
-.TH "mtx-bibtex" "1" "01-01-2014" "version 1.00" "bibtex helpers"
-.SH NAME
-.B mtx-bibtex
-.SH SYNOPSIS
-.B mtxrun --script bibtex [
-.I OPTIONS ...
-.B ] [
-.I FILENAMES
-.B ]
-.SH DESCRIPTION
-.B bibtex helpers
-.SH OPTIONS
-.TP
-.B --toxml
-convert bibtex database(s) to xml
-.TP
-.B --tolua
-convert bibtex database(s) to lua
-.SH AUTHOR
-More information about ConTeXt and the tools that come with it can be found at:
-
-
-.B "maillist:"
-ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-
-.B "webpage:"
-http://www.pragma-ade.nl / http://tex.aanhet.net
-
-.B "wiki:"
-http://contextgarden.net
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.xml b/doc/context/scripts/mkiv/mtx-bibtex.xml
deleted file mode 100644
index b33e1809c..000000000
--- a/doc/context/scripts/mkiv/mtx-bibtex.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0"?>
-<application>
- <metadata>
- <entry name="name">mtx-bibtex</entry>
- <entry name="detail">bibtex helpers</entry>
- <entry name="version">1.00</entry>
- </metadata>
- <flags>
- <category name="basic">
- <subcategory>
- <flag name="toxml"><short>convert bibtex database(s) to xml</short></flag>
- <flag name="tolua"><short>convert bibtex database(s) to lua</short></flag>
- </subcategory>
- </category>
- </flags>
- <examples>
- <category>
- <title>Example</title>
- <subcategory>
- <example><command>mtxrun --script bibtex --tolua bibl-001.bib</command></example>
- <example><command>mtxrun --script bibtex --tolua --simple bibl-001.bib</command></example>
- <example><command>mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml</command></example>
- </subcategory>
- </category>
- </examples>
-</application>
diff --git a/metapost/context/base/mp-base.mpii b/metapost/context/base/mp-base.mpii
index 7af4bc436..0f8104447 100644
--- a/metapost/context/base/mp-base.mpii
+++ b/metapost/context/base/mp-base.mpii
@@ -110,15 +110,12 @@ transform identity;
for z=origin,right,up: z transformed identity = z; endfor
% color constants
-color black, white, red, green, blue, cyan, magenta, yellow, background;
+color black, white, red, green, blue, background;
black = (0,0,0);
white = (1,1,1);
red = (1,0,0);
green = (0,1,0);
blue = (0,0,1);
-cyan = (0,1,1);
-magenta = (1,0,1);
-yellow = (1,1,0);
background = white; % The user can reset this
% color part selection for within
@@ -363,17 +360,9 @@ enddef;
def filldraw expr c =
addto currentpicture contour c withpen currentpen
_op_ enddef;
-% def drawdot expr z =
-% addto currentpicture contour makepath currentpen shifted z
-% _op_ enddef;
-
-def drawdot expr p =
- if pair p :
- addto currentpicture doublepath p withpen currentpen _op_
- else :
- errmessage("drawdot only accepts a pair expression")
- fi
-enddef ;
+def drawdot expr z =
+ addto currentpicture contour makepath currentpen shifted z
+ _op_ enddef;
def unfill expr c = fill c withcolor background enddef;
def undraw expr p = draw p withcolor background enddef;
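
The hunk above removes the cyan, magenta and yellow constants from mp-base.mpii, so input that still refers to those names has to declare them itself. A minimal sketch in plain MetaPost, mirroring the deleted lines (the drawing statement is only illustrative):

    color cyan, magenta, yellow ;
    cyan    := (0,1,1) ;   % same values as the lines removed above
    magenta := (1,0,1) ;
    yellow  := (1,1,0) ;
    draw fullcircle scaled 20 withcolor magenta ;
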
diff --git a/metapost/context/base/mp-base.mpiv b/metapost/context/base/mp-base.mpiv
index 0b655ef47..2887cc462 100644
--- a/metapost/context/base/mp-base.mpiv
+++ b/metapost/context/base/mp-base.mpiv
@@ -323,7 +323,7 @@ primarydef w dotprod z =
enddef ;
primarydef x**y =
- if y = 2 :
+ if y=2 :
x*x
else :
takepower y of x
@@ -348,46 +348,11 @@ def takepower expr y of x =
endfor
fi
else :
- hide(errmessage "Undefined power: " & decimal x & "**" & decimal y)
+ hide(errmessage "Undefined power: " & decimal x&"**"&decimal y)
fi
fi
enddef ;
-% for big number systems:
-%
-% primarydef x**y =
-% if y = 1 :
-% x
-% elseif y = 2 :
-% x*x
-% elseif y = 3 :
-% x*x*x
-% else :
-% takepower y of x
-% fi
-% enddef ;
-%
-% vardef takepower expr y of x =
-% if (x=0) and (y>0) :
-% 0
-% else :
-% 1
-% if y = floor y :
-% if y >= 0 :
-% for n=1 upto y :
-% *x
-% endfor
-% else :
-% for n=-1 downto y :
-% /x
-% endfor
-% fi
-% else :
-% hide(errmessage "Undefined power: " & decimal x & "**" & decimal y)
-% fi
-% fi
-% enddef ;
-
vardef direction expr t of p =
postcontrol t of p - precontrol t of p
enddef ;
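
A quick worked example of the x**y and takepower macros kept above (a sketch: only the tail of takepower is visible in this hunk, and positive bases are assumed to go through the usual mexp/mlog route, so values come out approximate in scaled arithmetic):

    show 3**2 ;      % exactly 9, via the special-cased y=2 branch
    show 2**10 ;     % approximately 1024 (mexp/mlog rounding)
    show (-2)**3 ;   % -8, the integer-exponent loop visible above covers negative bases
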
@@ -629,36 +594,8 @@ def filldraw expr c =
addto currentpicture contour c withpen currentpen _op_
enddef ;
-% def drawdot expr z =
-% addto currentpicture contour makepath currentpen shifted z _op_
-% enddef ;
-%
-% testcase DEK:
-%
-% for j=1 upto 9 :
-% pickup pencircle xscaled .4 yscaled .2 ;
-% drawdot (10j,0) withpen pencircle xscaled .5j yscaled .25j rotated 45 ;
-% pickup pencircle xscaled .5j yscaled .25j rotated 45 ;
-% drawdot (10j,10);
-% endfor ;
-%
-% or:
-%
-%\startMPpage
-%
-% def drawdot expr z =
-% addto currentpicture contour (makepath currentpen shifted z) _op_
-% enddef;
-%
-% drawdot origin shifted (0,-3cm) withpen pencircle scaled 2cm ;
-% pickup pencircle scaled 2cm ; drawdot origin withcolor red ;
-
-def drawdot expr p =
- if pair p :
- addto currentpicture doublepath p withpen currentpen _op_
- else :
- errmessage("drawdot only accepts a pair expression")
- fi
+def drawdot expr z =
+ addto currentpicture contour makepath currentpen shifted z _op_
enddef ;
def unfill expr c = fill c withcolor background enddef ;
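
The commented DEK test case above hints at how the restored drawdot interacts with a transformed pen; a minimal sketch along those lines (figure number, pen dimensions and colour are arbitrary):

    beginfig(1) ;
      pickup pencircle xscaled 8 yscaled 4 rotated 45 ;
      drawdot (0,0) ;                      % fills the outline of the current elliptical pen
      drawdot (20,0) withcolor .5white ;   % trailing options are picked up via _op_
    endfig ;
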
diff --git a/metapost/context/base/mp-grap.mpiv b/metapost/context/base/mp-grap.mpiv
index 6d69c0b1e..417bfbe69 100644
--- a/metapost/context/base/mp-grap.mpiv
+++ b/metapost/context/base/mp-grap.mpiv
@@ -52,11 +52,11 @@ fi
% endgraph end of graph--the result is a picture
% option `plot <picture>' draws picture at each path knot, turns off pen
-% graph_template.<tickcmd> template paths for tick marks and grid lines
+% Gtemplate.<tickcmd> template paths for tick marks and grid lines
% graph_margin_fraction.low,
% graph_margin_fraction.high fractions determining margins when no setrange
-% graph_log_marks[], graph_lin_marks, graph_exp_marks loop text strings used by auto.<x or y>
-% graph_minimum_number_of_marks, graph_log_minimum numeric parameters used by auto.<x or y>
+% Glmarks[], Gumarks, Gemarks loop text strings used by auto.<x or y>
+% Gmarks, Gminlog numeric parameters used by auto.<x or y>
% Autoform is the format string used by autogrid
% Autoform_X, Autoform_Y if defined, are used instead
@@ -65,26 +65,22 @@ fi
% with `graph_'
% Depends on :
-
input string.mp
% Private version of a few marith macros, fixed for double math...
-
-newinternal Mzero ; Mzero := -16384; % Anything at least this small is treated as zero
-newinternal mlogten ; mlogten := mlog(10) ;
-newinternal largestmantissa ; largestmantissa := 2**52 ; % internal double warningcheck
-newinternal singleinfinity ; singleinfinity := 2**128 ;
-newinternal doubleinfinity ; doubleinfinity := 2**1024 ;
-Mzero := -largestmantissa ; % Note that we get arithmetic overflows if we set to -doubleinfinity
+newinternal Mzero; Mzero := -16384; % Anything at least this small is treated as zero
+newinternal mlogten ; mlogten := mlog(10) ;
+newinternal singleinfinity ; singleinfinity := 2**128 ;
+newinternal doubleinfinity ; doubleinfinity := 2**1024 ;
+% Note that we get arithmetic overflows if we set to -doubleinfinity below.
+% (but "only on odd days"...)
% Safely convert a number to mlog form, trapping zero.
-
vardef graph_mlog primary x =
if unknown x: whatever
elseif x=0: Mzero
else: mlog(abs x) fi
enddef ;
-
vardef graph_exp primary x =
if unknown x: whatever
elseif x<=Mzero: 0
@@ -93,25 +89,21 @@ enddef ;
% and add the following for utility/completeness
% (replacing the definitions in mp-tool.mpiv).
-
vardef logten primary x =
if unknown x: whatever
elseif x=0: Mzero
else: mlog(abs x)/mlog(10) fi
enddef ;
-
vardef ln primary x =
if unknown x: whatever
elseif x=0: Mzero
else: mlog(abs x)/256 fi
enddef ;
-
vardef exp primary x =
if unknown x: whatever
elseif x<= Mzero: 0
else: (mexp 256)**x fi
enddef ;
-
vardef powten primary x =
if unknown x: whatever
elseif x<= Mzero: 0
@@ -120,7 +112,6 @@ enddef ;
% Convert x from mlog form into a pair whose xpart gives a mantissa and whose
% ypart gives a power of ten.
-
vardef graph_Meform(expr x) =
if x<=Mzero : origin
else :
@@ -131,7 +122,6 @@ vardef graph_Meform(expr x) =
enddef ;
% Modified from above.
-
vardef graph_Feform(expr x) =
interim warningcheck :=0 ;
if x=0 : origin
@@ -156,7 +146,6 @@ def graph_suffix(suffix $) = % convert from x or y to X_ or Y_
enddef ;
% New :
-
save graph_background ; color graph_background ; % if defined, fill the frame.
save graph_close_file ; boolean graph_close_file ; graph_close_file = false ;
@@ -211,20 +200,17 @@ enddef ;
% user to alter the behavior of these macros.
% Not very modifiable : log, linear,
% graph_frame_pair_a, graph_frame_pair_b, graph_margin_pair
-% Modifiable : graph_template.suffix,
-% graph_log_marks[], graph_lin_marks, graph_exp_marks,
-% graph_minimum_number_of_marks,
-% graph_log_minimum, Autoform
+% Modifiable : Gtemplate.suffix, Glmarks[], Gumarks, Gemarks, Gmarks,
+% Gminlog, Autoform
newinternal log, linear ; % coordinate system codes
log :=1 ; linear :=2;
-
% note that mp-tool.mpiv defines log as log10.
%%%%%%%%%%%%%%%%%%%%%% Coordinates : setcoords, setrange %%%%%%%%%%%%%%%%%%%%%%
-% Graph-related user input is `user graph coordinates' as specified by arguments
+% Graph-related usr input is `user graph coordinates' as specified by arguments
% to setcoords.
% `Internal graph coordinates' are used for graph_current_graph, graph_current_bb, Z_.low, Z_.high.
% Their meaning depends on the appropriate component of Z_.graph_coordinate_type :
@@ -241,15 +227,14 @@ vardef graph_set_default_bounds = % Set default Z_.low, Z_.high
graph_margin_pair$ ;
endfor
enddef ;
-
pair graph_margin_pair.low, graph_margin_pair.high ;
graph_margin_pair.high = -graph_margin_pair.low = (.00002,.00002) ;
-% Set $, $$, $$$ so that shifting by $ then transforming by $$ and then $$$ maps
-% the essential bounding box of graph_current_graph into (0,0)..Z_.graph_dimensions.
-% The `essential bounding box' is either what Z_.low and Z_.high imply
-% or the result of ignoring pen widths in graph_current_graph.
+% Set $, $$, $$$ so that shifting by $ then transforming by $$ and then $$$
+% maps the essential bounding box of graph_current_graph into (0,0)..Z_.graph_dimensions. The
+% `essential bounding box' is either what Z_.low and Z_.high imply or the
+% result of ignoring pen widths in graph_current_graph.
vardef graph_remap(suffix $,$$,$$$) =
save p_ ;
graph_set_default_bounds ;
@@ -260,10 +245,10 @@ vardef graph_remap(suffix $,$$,$$$) =
(Z_.high+$) transformed $$ = p_ ;
p_ transformed $$$ = Z_.graph_dimensions ;
enddef ;
-
graph_margin_fraction.low=-.07 ; % bbox fraction for default range start
graph_margin_fraction.high=1.07 ; % bbox fraction for default range stop
+
def graph_with_pen_and_color(expr q) =
withpen penpart q withcolor
if colormodel q=1 :
@@ -283,7 +268,7 @@ enddef ;
% Pair o is the value of p that makes tp (0,0). This implements the trick
% whereby using 1 instead of 0 for the width or height or the setbounds path
% for a label picture suppresses shifting in x or y.
-
+%
%vardef graph_picture_conversion@#(expr q, o)(text tp) =
% save p ;
% if stroked q :
@@ -299,9 +284,8 @@ enddef ;
% addto @# also q shifted ((tp)-llcorner q) ;
% fi
%enddef ;
-
+%
% This new version makes gdraw clip the result to the window defined with setrange
-
vardef graph_picture_conversion@#(expr q, o)(text tp) =
save p ;
save do_clip, tp_clipped ; boolean do_clip ; do_clip := true ;
@@ -331,11 +315,12 @@ enddef ;
def graph_coordinate_multiplication(expr a,b) = (xpart a*xpart b, ypart a*ypart b) enddef ;
+
vardef graph_clear_bounds@# = numeric @#.low, @#.high ; enddef;
+
% Finalize anything drawn in the present coordinate system and set up a new
% system as requested
-
vardef setcoords(expr tx, ty) =
interim warningcheck :=0 ;
if length graph_current_graph>0 :
@@ -350,10 +335,10 @@ vardef setcoords(expr tx, ty) =
X_.graph_coordinate_type := tx ; Y_.graph_coordinate_type := ty;
enddef ;
+
% Set Z_.low and Z_.high to correspond to given range of user graph
% coordinates. The text argument should be a sequence of pairs and/or strings
% with 4 components in all.
-
vardef setrange(text t) =
interim warningcheck :=0 ;
save r_ ; r_=0;
@@ -368,8 +353,8 @@ vardef setrange(text t) =
endfor
enddef ;
-% @# is X_ or Y_ ; l and h are numeric or string
+% @# is X_ or Y_ ; l and h are numeric or string
vardef graph_set_bounds@#(expr l, h) =
graph_clear_bounds@# ;
if @#graph_coordinate_type>0 :
@@ -397,12 +382,15 @@ vardef graph_set_bounds@#(expr l, h) =
fi
enddef ;
+
+
+
+
%%%%%%%%%%%%%%%%%%%%%%%%% Converting path coordinates %%%%%%%%%%%%%%%%%%%%%%%%%
% Find the result of scanning path p and using macros tx and ty to adjust the
% x and y parts of each coordinate pair. Boolean parameter c tells whether to
% force the result to be polygonal.
-
vardef graph_scan_path(expr p, c)(suffix tx, ty) =
if (str tx="") and (str ty="") : p
else :
@@ -421,11 +409,10 @@ vardef graph_scan_path(expr p, c)(suffix tx, ty) =
if pair p : point 0 of fi r_
fi
enddef ;
-
vardef graph_pair_adjust(expr p)(suffix tx, ty) = (tx xpart p, ty ypart p) enddef ;
-% Convert path p from user graph coords to internal graph coords.
+% Convert path p from user graph coords to internal graph coords.
vardef graph_convert_user_path_to_internal primary p =
interim warningcheck :=0 ;
graph_scan_path(p,
@@ -437,11 +424,11 @@ vardef graph_convert_user_path_to_internal primary p =
if Y_.graph_coordinate_type<0 : yscaled -1 fi)
enddef ;
+
% Convert label location t_ from user graph coords to internal graph coords.
% The label location should be a pair, or two numbers/strings. If t_ is empty
% or a single item of non-pair type, just return t_. Unknown coordinates
% produce unknown components in the result.
-
vardef graph_label_convert_user_to_internal(text t_) =
save n_ ; n_=0;
interim warningcheck :=0 ;
@@ -461,12 +448,13 @@ vardef graph_label_convert_user_to_internal(text t_) =
fi
enddef ;
+
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Reading data files %%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Read a line from file f, extract whitespace-separated tokens ignoring any
% initial "%", and return true if at least one token is found. The tokens
% are stored in @#1, @#2, .. with "" in the last @#[] entry.
-
vardef graph_read_line@#(expr f) =
save n_, s_ ; string s_;
s_ = readfrom f ;
@@ -484,9 +472,9 @@ vardef graph_read_line@#(expr f) =
fi
enddef ;
+
% Execute c for each line of data read from file f, and stop at the first
% line with no data. Commands c can use line number i and tokens $1, $2, ...
-
def gdata(expr f)(suffix $)(text c) =
boolean flag ;
for i=1 upto infinity :
@@ -498,8 +486,8 @@ def gdata(expr f)(suffix $)(text c) =
fi
enddef ;
-% Read a path from file f. The path is terminated by blank line or EOF.
+% Read a path from file f. The path is terminated by blank line or EOF.
vardef graph_readpath(expr f) =
interim warningcheck :=0 ;
save s ;
@@ -509,9 +497,9 @@ vardef graph_readpath(expr f) =
)
enddef ;
+
% Append coordinates t to polygonal path @#. The coordinates can be numerics,
% strings, or a single pair.
-
vardef augment@#(text t) =
interim warningcheck := 0 ;
if not path begingroup @# endgroup :
@@ -525,11 +513,12 @@ vardef augment@#(text t) =
fi
enddef ;
+
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Drawing and filling %%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Unknown pair components are set to 0 because glabel and gdotlabel understand
% unknown coordinates as `0 in absolute units'.
-
vardef graph_unknown_pair_bbox(expr p) =
interim warningcheck:=0 ;
if known p : addto graph_current_bb doublepath p ;
@@ -546,7 +535,6 @@ enddef ;
% Initiate a gdraw or gfill command. This must be done before scanning the
% argument, because that could invoke the `if known graph_plot_picture' test in a following
% plot option .
-
def graph_addto =
def graph_errorbar_text = enddef ;
color graph_foreground ;
@@ -554,8 +542,8 @@ def graph_addto =
graph_last_drawn := graph_plot_picture := nullpicture ; addto graph_last_drawn
enddef;
-% Handle the part of a gdraw command that uses path or data file p.
+% Handle the part of a Gdraw command that uses path or data file p.
def graph_draw expr p =
if string p : hide(graph_last_path := graph_readpath(p) ;)
graph_convert_user_path_to_internal graph_last_path
@@ -568,8 +556,8 @@ def graph_draw expr p =
withpen currentpen graph_withlist _op_
enddef ;
-% Handle the part of a gdraw command that uses path or data file p.
+% Handle the part of a Gdraw command that uses path or data file p.
def graph_fill expr p =
if string p : hide(graph_last_path := graph_readpath(p) --cycle ;)
graph_convert_user_path_to_internal graph_last_path
@@ -583,8 +571,8 @@ enddef ;
def gdraw = graph_addto doublepath graph_draw enddef ;
def gfill = graph_addto contour graph_fill enddef ;
-% This is used in graph_draw and graph_fill to allow postprocessing graph_last_drawn
+% This is used in graph_draw and graph_fill to allow postprocessing graph_last_drawn
def graph_withlist text t_ = t_ ; graph_post_draw; enddef;
def witherrorbars(text t) text options =
@@ -596,8 +584,6 @@ def witherrorbars(text t) text options =
options
enddef ;
-% new feature: graph_errorbars
-
picture graph_errorbar_picture ; graph_errorbar_picture := image(draw (left--right) scaled .5 ;) ;
%picture graph_xbar_picture ; graph_xbar_picture := image(draw (down--up) scaled .5 ;) ;
%picture graph_ybar_picture ; graph_ybar_picture := image(draw (left--right) scaled .5 ;) ;
@@ -660,7 +646,6 @@ enddef ;
% Set graph_plot_picture so the postprocessing step will plot picture p at each path knot.
% Also select nullpen to suppress stroking.
-
def plot expr p =
if known graph_plot_picture :
withpen nullpen
@@ -672,19 +657,20 @@ def plot expr p =
enddef ;
% This hides a semicolon that could prematurely end graph_withlist's text argument
-
def graph_addto_currentpicture primary p = addto currentpicture also p ; enddef;
def graph_setbounds = setbounds currentpicture to enddef ;
-def gdrawarrow = graph_number_of_arrowheads := 1 ; gdraw enddef;
-def gdrawdblarrow = graph_number_of_arrowheads := 2 ; gdraw enddef;
+
+def gdrawarrow = graph_number_of_arrowheads :=1 ; gdraw enddef;
+def gdrawdblarrow = graph_number_of_arrowheads :=2 ; gdraw enddef;
+
% Post-process the filled or stroked picture graph_last_drawn as follows : (1) update
% the bounding box information ; (2) transfer it to graph_current_graph unless the pen has
% been set to nullpen to disable stroking ; (3) plot graph_plot_picture at each knot.
-
vardef graph_post_draw =
- save p ; path p ; p = pathpart graph_last_drawn ;
+ save p ;
+ path p ; p=pathpart graph_last_drawn;
graph_unknown_pair_bbox(p) ;
if filled graph_last_drawn or not graph_is_null(penpart graph_last_drawn) :
addto graph_current_graph also graph_last_drawn ;
@@ -701,23 +687,17 @@ vardef graph_post_draw =
if graph_number_of_arrowheads>1 :
graph_draw_arrowhead(reverse p, graph_with_pen_and_color(graph_last_drawn)) ;
fi
- graph_number_of_arrowheads := 0 ;
+ graph_number_of_arrowheads :=0 ;
fi
enddef ;
-
vardef graph_is_null(expr p) = (urcorner p=origin) and (llcorner p=origin) enddef ;
+
vardef graph_draw_arrowhead(expr p)(text w) = % Draw arrowhead for path p, with list w
- %save r ; r := angle(precontrol infinity of p shifted -point infinity of p) ;
addto graph_current_graph also
- image(fill arrowhead (graph_arrowhead_extent(precontrol infinity of p,point infinity of p)) w ;
- draw arrowhead (graph_arrowhead_extent(precontrol infinity of p,point infinity of p)) w
- undashed ;
-%if (r mod 90 <> 0) : % orientation can be wrong due to remapping
-% draw textext("\tfxx " & decimal r) shifted point infinity of p withcolor blue ;
-%fi
- graph_setbounds point infinity of p..cycle ;
- ) ; % rotatedabout(point infinity of p,-r) ;
+ image(filldraw arrowhead(
+ graph_arrowhead_extent(precontrol infinity of p, point infinity of p)) w ;
+ graph_setbounds point infinity of p..cycle) ;
enddef ;
vardef graph_arrowhead_extent(expr p, q) =
@@ -725,6 +705,8 @@ vardef graph_arrowhead_extent(expr p, q) =
q
enddef ;
+
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Drawing labels %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Argument c is a drawing command that needs an additional argument p that gives
@@ -732,7 +714,6 @@ enddef ;
% path. Unknown components of p cause the setbounds path to have width or height 1 instead of 0.
% Then graph_unknown_pair_bbox sets these components to 0 and graph_picture_conversion
% suppresses subsequent repositioning.
-
def graph_draw_label(expr p)(suffix $)(text c) =
save sdim_ ; pair sdim_;
sdim_ := (if unknown xpart p : 1+ fi 0, if unknown ypart p : 1+ fi 0) ;
@@ -741,13 +722,14 @@ def graph_draw_label(expr p)(suffix $)(text c) =
image(c(p) ; graph_setbounds p--p+sdim_--cycle) _op_
enddef ;
+
% Stash the result drawing command c in the graph_label table using with list w and
% an index based on angle mfun_laboff$.
-
vardef graph_stash_label(suffix $)(text c) text w =
graph_label[1.5+angle mfun_laboff$ /90] = image(c(origin) w) ;
enddef ;
+
def graph_label_location primary p =
if pair p : graph_draw_label(p)
elseif numeric p : graph_draw_label(point p of pathpart graph_last_drawn)
@@ -755,31 +737,33 @@ def graph_label_location primary p =
fi
enddef ;
+
% Place label p at user graph coords t using with list w. (t is a time, a pair
% or 2 numerics or strings).
-
vardef glabel@#(expr p)(text t) text w =
graph_label_location graph_label_convert_user_to_internal(t) (@#,label@#(p)) w ; enddef;
+
% Place label p at user graph coords t using with list w and draw a dot there.
% (t is a time, a pair, or 2 numerics or strings).
-
vardef gdotlabel@#(expr p)(text t) text w =
graph_label_location graph_label_convert_user_to_internal(t) (@#,dotlabel@#(p)) w ; enddef;
+
def OUT = enddef ; % location text for outside labels
+
+
%%%%%%%%%%%%%%%%%%%%%%%%%% Grid lines, ticks, etc. %%%%%%%%%%%%%%%%%%%%%%%%%%
% Grid lines and tick marks are transformed versions of the templates below.
% In the template paths, (0,0) is on the edge of the frame and inward is to
% the right.
-
-path graph_template.tick, graph_template.itick, graph_template.otick, graph_template.grid ;
-graph_template.tick = (-3.5bp,0)--(3.5bp,0) ;
-graph_template.itick = origin--(7bp,0) ;
-graph_template.otick = (-7bp,0)--origin ;
-graph_template.grid = origin--(1,0) ;
+path Gtemplate.tick, Gtemplate.itick, Gtemplate.otick, Gtemplate.grid ;
+Gtemplate.tick = (-3.5bp,0)--(3.5bp,0) ;
+Gtemplate.itick = origin--(7bp,0) ;
+Gtemplate.otick = (-7bp,0)--origin ;
+Gtemplate.grid = origin--(1,0) ;
vardef tick@#(expr f,u) text w = graph_tick_label(@#,@,false,f,u,w) ; enddef;
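
For orientation, a minimal sketch of how the tick, grid and label macros above are typically called from inside a graph (the coordinates, the "%g" format and the label text are made up; textext is MetaFun's typesetting macro and is assumed to be available here):

    tick.bot("%g", 0.5) ;                      % centered tick on the bottom axis at x = 0.5
    grid.lft("%g", 2) withcolor .8white ;      % grid line across the frame at y = 2
    glabel.ulft(textext("sample run"), OUT) ;  % OUT places the label relative to the whole graph
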
@@ -790,82 +774,75 @@ vardef otick@#(expr f,u) text w = graph_tick_label(@#,@,false,f,u,w) ; enddef;
vardef grid@#(expr f,u) text w = graph_tick_label(@#,@,true,f,u,w) ; enddef;
-% Produce a tick or grid mark for label suffix $, graph_template suffix $$,
-% coordinate value u, and with list w. Boolean c tells whether graph_template$$
+% Produce a tick or grid mark for label suffix $, Gtemplate suffix $$,
+% coordinate value u, and with list w. Boolean c tells whether Gtemplate$$
% needs scaling by X_.graph_dimensions or Y_.graph_dimensions,
% and f gives a format string or a label picture.
-
def graph_tick_label(suffix $,$$)(expr c, f, u)(text w) =
- graph_draw_label(graph_label_convert_user_to_internal(graph_generate_label_position($,u)),,
- draw graph_gridline_picture$($$,c,f,u,w) shifted)
+ graph_draw_label(graph_label_convert_user_to_internal(graph_generate_label_position($,u)),,draw graph_gridline_picture$($$,c,f,u,w) shifted)
enddef ;
+
% Generate label positioning arguments appropriate for label suffix $ and
% coordinate u.
-
def graph_generate_label_position(suffix $)(expr u) =
- if pair u : u elseif xpart mfun_laboff.$=0 : u,whatever else : whatever,u fi
+ if xpart mfun_laboff.$=0 : u,whatever else : whatever,u fi
enddef ;
+
% Generate a picture of a grid line labeled with coordinate value u, picture
% or format string f, and with list w. Suffix @# is bot, top, lft, or rt,
-% suffix $ identifies entries in the graph_template table, and boolean c tells
-% whether to scale graph_template$.
-
+% suffix $ identifies entries in the Gtemplate table, and boolean c tells
+% whether to scale Gtemplate$.
vardef graph_gridline_picture@#(suffix $)(expr c, f, u)(text w) =
if unknown u : graph_error(u,"Label coordinate should be known") ; nullpicture
else :
save p ; path p;
interim warningcheck :=0 ;
graph_autogrid_needed :=false ;
- p = graph_template$ zscaled -mfun_laboff@#
- if c : graph_xyscale fi
- shifted (((.5 + mfun_laboff@# dotprod (.5,.5)) * mfun_laboff@#) graph_xyscale) ;
+ p = Gtemplate$ zscaled -mfun_laboff@#
+ if c : Gxyscale fi
+ shifted (((.5 + mfun_laboff@# dotprod (.5,.5)) * mfun_laboff@#) Gxyscale) ;
image(draw p w ;
label@#(if string f : format(f,u) else : f fi, point 0 of p))
fi
enddef ;
+def Gxyscale = xscaled X_.graph_dimensions yscaled Y_.graph_dimensions enddef ;
-def graph_xyscale = xscaled X_.graph_dimensions yscaled Y_.graph_dimensions enddef ;
% Draw the frame or the part corresponding to label suffix @# using with list w.
-
vardef frame@# text w =
graph_frame_needed :=false ;
picture p_ ;
p_ = image(draw
if str@#<>"" : subpath round(angle mfun_laboff@#*graph_frame_pair_a+graph_frame_pair_b) of fi
- unitsquare graph_xyscale w) ;
+ unitsquare Gxyscale w) ;
graph_draw_label((whatever,whatever),,draw p_ shifted) ;
enddef ;
-
-pair graph_frame_pair_a ; graph_frame_pair_a=(1,1)/90; % unitsquare subpath is linear in label angle
+pair graph_frame_pair_a ; graph_frame_pair_a=(1,1)/90; % unitsquare subpath is linear in label angle
pair graph_frame_pair_b ; graph_frame_pair_b=(.75,2.25);
-%%%%%%%%%%%%%%%%%%%%%%%%%% Automatic grid selection %%%%%%%%%%%%%%%%%%%%%%%%%%
-string graph_log_marks[] ; % marking options per decade for logarithmic scales
-string graph_lin_marks ; % mark spacing options per decade for linear scales
-string graph_exp_marks ; % exponent spacing options for logarithmic scales
-newinternal graph_minimum_number_of_marks, graph_log_minimum ;
-graph_minimum_number_of_marks := 4 ; % minimum number marks generated by auto.x or auto.y
-graph_log_minimum := mlog 3 ; % revert to uniform marks when largest/smallest < this
-def Gfor(text t) = for i=t endfor enddef ; % to shorten the mark templates below
-graph_log_marks[1]="1,2,5" ;
-graph_log_marks[2]="1,1.5,2,3,4,5,7" ;
-graph_log_marks[3]="1Gfor(6upto10 :,i/5)Gfor(5upto10 :,i/2)Gfor(6upto9 :,i)" ;
-graph_log_marks[4]="1Gfor(11upto20 :,i/10)Gfor(11upto25 :,i/5)Gfor(11upto19 :,i/2)" ;
-graph_log_marks[5]="1Gfor(21upto40 :,i/20)Gfor(21upto50 :,i/10)Gfor(26upto49 :,i/5)" ;
-graph_lin_marks="10,5,2" ; % start with 10 and go down; a final `,1' is appended
-graph_exp_marks="20,10,5,2,1" ;
+%%%%%%%%%%%%%%%%%%%%%%%%%% Automatic grid selection %%%%%%%%%%%%%%%%%%%%%%%%%%
+
+string Glmarks[] ; % marking options per decade for logarithmic scales
+string Gumarks ; % mark spacing options per decade for linear scales
+string Gemarks ; % exponent spacing options for logarithmic scales
+newinternal Gmarks, Gminlog ;
+Gmarks := 4 ; % minimum number marks generated by auto.x or auto.y
+Gminlog := mlog 3 ; % revert to uniform marks when largest/smallest < this
+
+def Gfor(text t) = for i=t endfor enddef ; % to shorten the mark templates below
+Glmarks[1]="1,2,5" ;
+Glmarks[2]="1,1.5,2,3,4,5,7" ;
+Glmarks[3]="1Gfor(6upto10 :,i/5)Gfor(5upto10 :,i/2)Gfor(6upto9 :,i)" ;
+Glmarks[4]="1Gfor(11upto20 :,i/10)Gfor(11upto25 :,i/5)Gfor(11upto19 :,i/2)" ;
+Glmarks[5]="1Gfor(21upto40 :,i/20)Gfor(21upto50 :,i/10)Gfor(26upto49 :,i/5)" ;
+Gumarks="10,5,2" ; % start with 10 and go down; a final `,1' is appended
+Gemarks="20,10,5,2,1" ;
-Ten_to0 = 1 ;
-Ten_to1 = 10 ;
-Ten_to2 = 100 ;
-Ten_to3 = 1000 ;
-Ten_to4 = 10000 ;
% Determine the X_ or Y_ bounds on the range to be covered by automatic grid
% marks. Suffix @# is X_ or Y_. The result is log or linear to specify the
@@ -874,7 +851,6 @@ Ten_to4 = 10000 ;
% are upper and lower bounds in
% `modified exponential form'. In modified exponential form, (x,y) means
% (x/1000)*10^y, where 1000<=abs x<10000.
-
vardef graph_bounds@# =
interim warningcheck :=0 ;
save l, h ;
@@ -883,29 +859,28 @@ vardef graph_bounds@# =
if abs @#graph_coordinate_type=log :
graph_modified_lower := graph_Meform(l)+graph_modified_bias ;
graph_modified_higher := graph_Meform(h)+graph_modified_bias ;
- if h-l >= graph_log_minimum : log else : linear fi
+ if h-l >= Gminlog : log else : linear fi
else :
graph_modified_lower := graph_Feform(l)+graph_modified_bias ;
graph_modified_higher := graph_Feform(h)+graph_modified_bias ;
linear
fi
enddef ;
-
pair graph_modified_bias ; graph_modified_bias=(0,3);
pair graph_modified_lower, graph_modified_higher ;
-% Scan graph_log_marks[k] and evaluate tokens t for each m where l<=m<=h.
+% Scan Glmarks[k] and evaluate tokens t for each m where l<=m<=h.
def graph_scan_marks(expr k, l, h)(text t) =
- for m=scantokens graph_log_marks[k] :
+ for m=scantokens Glmarks[k] :
exitif m>h ;
if m>=l : t fi
endfor
enddef ;
-% Scan graph_log_marks[k] and evaluate tokens t for each m and e where m*10^e belongs
-% between l and h (inclusive), where both l and h are in modified exponent form.
+% Scan Gmark[k] and evaluate tokens t for each m and e where m*10^e belongs
+% between l and h (inclusive), where both l and h are in modified exponent form.
def graph_scan_mark(expr k, l, h)(text t) =
for e=ypart l upto ypart h :
graph_scan_marks(k, if e>ypart l : 1 else : xpart l/1000 fi,
@@ -913,29 +888,27 @@ def graph_scan_mark(expr k, l, h)(text t) =
endfor
enddef ;
-% Select a k for which graph_scan_mark(k,...) gives enough marks.
+% Select a k for which graph_scan_mark(k,...) gives enough marks.
vardef graph_select_mark =
save k ;
k = 0 ;
forever :
- exitif unknown graph_log_marks[k+1] ;
- exitif 0 graph_scan_mark(incr k, graph_modified_lower, graph_modified_higher, +1)
- >= graph_minimum_number_of_marks ;
+ exitif unknown Glmarks[k+1] ;
+ exitif 0 graph_scan_mark(incr k, graph_modified_lower, graph_modified_higher, +1) >= Gmarks ;
endfor
k
enddef ;
-% Try to select an exponent spacing from graph_exp_marks. If successful, set @# and
-% return true
+% Try to select an exponent spacing from Gemarks. If successful, set @# and
+% return true
vardef graph_select_exponent_mark@# =
numeric @# ;
- for e=scantokens graph_exp_marks :
+ for e=scantokens Gemarks :
@# = e ;
exitif floor(ypart graph_modified_higher/e) -
- floor(graph_modified_exponent_ypart(graph_modified_lower)/e)
- >= graph_minimum_number_of_marks ;
+ floor(graph_modified_exponent_ypart(graph_modified_lower)/e) >= Gmarks ;
numeric @# ;
endfor
known @#
@@ -943,17 +916,17 @@ enddef ;
vardef graph_modified_exponent_ypart(expr p) = ypart p if xpart p=1000 : -1 fi enddef ;
-% Compute the mark spacing d between xpart graph_modified_lower and xpart graph_modified_higher.
+% Compute the mark spacing d between xpart graph_modified_lower and xpart graph_modified_higher.
vardef graph_tick_mark_spacing =
interim warningcheck :=0 ;
save m, n, d ;
- m = graph_minimum_number_of_marks ;
+ m = Gmarks ;
n = 1 for i=1 upto
(mlog(xpart graph_modified_higher-xpart graph_modified_lower) - mlog m)/mlogten :
*10 endfor ;
if n<=1000 :
- for x=scantokens graph_lin_marks :
+ for x=scantokens Gumarks :
d = n*x ;
exitif 0 graph_generate_numbers(d,+1)>=m ;
numeric d ;
@@ -962,24 +935,25 @@ vardef graph_tick_mark_spacing =
if known d : d else : n fi
enddef ;
+
def graph_generate_numbers(expr d)(text t) =
for m = d*ceiling(xpart graph_modified_lower/d) step d until xpart graph_modified_higher :
t
endfor
enddef ;
+
% Evaluate tokens t for exponents e in multiples of d in the range determined
% by graph_modified_lower and graph_modified_higher.
-
def graph_generate_exponents(expr d)(text t) =
for e = d*floor(graph_modified_exponent_ypart(graph_modified_lower)/d+1)
step d until d*floor(ypart graph_modified_higher/d) : t
endfor
enddef ;
+
% Adjust graph_modified_lower and graph_modified_higher so their exponent parts match
% and they are in true exponent form ((x,y) means x*10^y). Return the new exponent.
-
vardef graph_match_exponents =
interim warningcheck := 0 ;
save e ;
@@ -992,10 +966,10 @@ vardef graph_match_exponents =
e
enddef ;
+
% Assume e is an integer and either m=0 or 1<=abs(m)<10000. Find m*(10^e)
% and represent the result as a string if its absolute value would be at least
% 4096 or less than .1. It is OK to return 0 as a string or a numeric.
-
vardef graph_factor_and_exponent_to_string(expr m, e) =
if (e>3)or(e<-4) :
decimal m & "e" & decimal e
@@ -1010,6 +984,7 @@ vardef graph_factor_and_exponent_to_string(expr m, e) =
fi
enddef ;
+
def auto suffix $ =
hide(def graph_comma= hide(def graph_comma=,enddef) enddef)
if graph_bounds.graph_suffix($)=log :
@@ -1027,6 +1002,7 @@ def auto suffix $ =
fi
enddef ;
+
string Autoform ; Autoform = "%g";
%vardef autogrid(suffix tx, ty) text w =
@@ -1077,11 +1053,12 @@ vardef autogrid(suffix tx, ty) text w =
fi
enddef ;
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% endgraph %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
def endgraph =
if graph_autogrid_needed : autogrid(otick.bot, otick.lft) ; fi
- if graph_frame_needed : frame ; fi
+ if graph_frame_needed : frame ; fi
setcoords(linear,linear) ;
interim truecorners :=1 ;
for b=bbox graph_finished_graph :
@@ -1098,9 +1075,14 @@ enddef ;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-% We format in luatex (using \mathematics{}) ...
% we could pass via variables and save escaping as that is inefficient
+Ten_to0 = 1 ;
+Ten_to1 = 10 ;
+Ten_to2 = 100 ;
+Ten_to3 = 1000 ;
+Ten_to4 = 10000 ;
+
if unknown context_mlib :
vardef escaped_format(expr s) =
@@ -1126,8 +1108,6 @@ if unknown context_mlib :
fi ;
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
% A couple of extensions :
% Define a function plotsymbol() returning a picture : 10 different shapes,
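
Taken together, the renamed knobs above (Gmarks, Gtemplate, Gxyscale, Glmarks and friends) are what a user graph adjusts. A minimal sketch, assuming the patched mp-grap.mpiv is loaded under MetaFun and using the standard begingraph/endgraph pair (the data file name, dimensions and settings are made up):

    Gmarks := 6 ;                           % ask auto.x / auto.y for at least six marks
    Gtemplate.otick := (-5bp,0)--origin ;   % slightly shorter outward ticks
    draw begingraph(6cm,4cm) ;
      gdraw "test.dat" plot image(draw fullcircle scaled 2bp ;) ;  % x y pairs, one per line
    endgraph ;

Since no ticks or frame are issued explicitly, endgraph falls back on autogrid(otick.bot, otick.lft) and frame, as the endgraph hunk above shows.
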
diff --git a/metapost/context/base/mp-tool.mpii b/metapost/context/base/mp-tool.mpii
index 62bd122f5..f363f655e 100644
--- a/metapost/context/base/mp-tool.mpii
+++ b/metapost/context/base/mp-tool.mpii
@@ -5,13 +5,17 @@
%D subtitle=auxiliary macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% def loadfile(expr name) = scantokens("input " & name & ";") enddef ;
+% a cleanup is needed, like using image and alike
+% use a few more "newinternal"'s
+
+%D This module is rather preliminary and subjected to
+%D changes.
if known context_tool : endinput ; fi ;
@@ -28,8 +32,6 @@ let @## = @# ;
if not known mpversion : string mpversion ; mpversion := "0.641" ; fi ;
-newinternal metapostversion ; metapostversion := scantokens(mpversion) ;
-
% vardef mpversiongt(expr s) =
% scantokens (mpversion & " > " & if numeric s : decimal s else : s fi)
% enddef ;
@@ -83,14 +85,20 @@ mpprocset := 1 ;
%
% protect ;
-string space ; space := char 32 ;
-string CRLF ; CRLF := char 10 & char 13 ;
+%D By including this module, \METAPOST\ automatically writes a
+%D high resolution boundingbox to the \POSTSCRIPT\ file. This
+%D hack is due to John Hobby himself.
+
+% When somehow the first one gets no HiRes, then make sure
+% that the format matches the mem sizes in the config file.
+
+string space ; space = char 32 ;
vardef ddecimal primary p =
- decimal xpart p & " " & decimal ypart p
+ decimal xpart p & " " & decimal ypart p
enddef ;
-%D Plain compatibility:
+%D Plain compatibility
string plain_compatibility_data ; plain_compatibility_data := "" ;
@@ -104,7 +112,7 @@ def stopplaincompatibility =
enddef ;
% is now built in
-%
+
% extra_endfig := extra_endfig
% & "special "
% & "("
@@ -116,11 +124,6 @@ enddef ;
% & "&ddecimal urcorner currentpicture"
% & ");";
-%D More neutral:
-
-let triplet = rgbcolor ;
-let quadruplet = cmykcolor ;
-
%D Crap (experimental, not used):
def forcemultipass =
@@ -129,28 +132,12 @@ enddef ;
%D Colors:
-newinternal nocolormodel ; nocolormodel := 1 ;
-newinternal greycolormodel ; greycolormodel := 3 ;
-newinternal graycolormodel ; graycolormodel := 3 ;
-newinternal rgbcolormodel ; rgbcolormodel := 5 ;
-newinternal cmykcolormodel ; cmykcolormodel := 7 ;
+nocolormodel := 1 ;
+greycolormodel := 3 ;
+rgbcolormodel := 5 ;
+cmykcolormodel := 7 ;
let grayscale = numeric ;
-let greyscale = numeric ;
-
-vardef colorpart expr c =
- if not picture c :
- 0
- elseif colormodel c = greycolormodel :
- greypart c
- elseif colormodel c = rgbcolormodel :
- (redpart c,greenpart c,bluepart c)
- elseif colormodel c = cmykcolormodel :
- (cyanpart c,magentapart c,yellowpart c,blackpart c)
- else :
- 0 % black
- fi
-enddef ;
vardef colorlike(text c) text v = % colorlike(a) b, c, d ;
save _p_ ; picture _p_ ;
@@ -161,19 +148,25 @@ vardef colorlike(text c) text v = % colorlike(a) b, c, d ;
elseif (colormodel _p_ = rgbcolormodel) :
rgbcolor i ;
else :
- greycolor i ;
+ grayscale i ;
fi ;
endfor ;
enddef ;
+% if (unknown colormodel) :
+% def colormodel =
+% rgbcolormodel
+% enddef ;
+% fi ;
+
%D Also handy (when we flush colors):
vardef dddecimal primary c =
- decimal redpart c & " " & decimal greenpart c & " " & decimal bluepart c
+ decimal redpart c & " " & decimal greenpart c & " " & decimal bluepart c
enddef ;
vardef ddddecimal primary c =
- decimal cyanpart c & " " & decimal magentapart c & " " & decimal yellowpart c & " " & decimal blackpart c
+ decimal cyanpart c & " " & decimal magentapart c & " " & decimal yellowpart c & " " & decimal blackpart c
enddef ;
vardef colordecimals primary c =
@@ -200,34 +193,39 @@ enddef ;
%D we need some trickery when we have multiple files.
if unknown collapse_data :
- boolean collapse_data ;
- collapse_data := false ;
+ boolean collapse_data ; collapse_data := false ;
fi ;
boolean savingdata ; savingdata := false ;
boolean savingdatadone ; savingdatadone := false ;
def savedata expr txt =
- write if collapse_data :
- txt
- else :
- if savingdata : txt else : "\MPdata{" & decimal charcode & "}{" & txt & "}" fi & "%"
- fi to data_mpd_file ;
+ if collapse_data :
+ write txt to data_mpd_file ;
+ else :
+ write if savingdata : txt else :
+ "\MPdata{" & decimal charcode & "}{" & txt & "}"
+ fi
+ & "%" to data_mpd_file ;
+ fi ;
enddef ;
def startsavingdata =
- savingdata := true ;
- savingdatadone := true ;
- if collapse_data :
- write "\MPdata{" & decimal charcode & "}{%" to data_mpd_file ;
- fi ;
+ savingdata := true ;
+ savingdatadone := true ;
+ if collapse_data :
+ write
+ "\MPdata{" & decimal charcode & "}{%"
+ to
+ data_mpd_file ;
+ fi ;
enddef ;
def stopsavingdata =
- if collapse_data :
- write "}%" to data_mpd_file ;
- fi ;
- savingdata := false ;
+ if collapse_data :
+ write "}%" to data_mpd_file ;
+ fi ;
+ savingdata := false ;
enddef ;
def finishsavingdata =
@@ -255,130 +253,119 @@ def newpair text v = forsuffixes i=v : save i ; pair i ; endfor ; endd
%D box, draw the graphics that may not count, and restore the
%D bounding box.
%D
-%D \starttyping
+%D \starttypen
%D push_boundingbox currentpicture;
%D pop_boundingbox currentpicture;
-%D \stoptyping
+%D \stoptypen
%D
%D The bounding box can be called with:
%D
-%D \starttyping
+%D \starttypen
%D boundingbox currentpicture
%D inner_boundingbox currentpicture
%D outer_boundingbox currentpicture
-%D \stoptyping
+%D \stoptypen
%D
%D Especially the latter one can be of use when we include
%D the graphic in a document that is clipped to the bounding
%D box. In such occasions one can use:
%D
-%D \starttyping
+%D \starttypen
%D set_outer_boundingbox currentpicture;
-%D \stoptyping
+%D \stoptypen
%D
%D Its counterpart is:
%D
-%D \starttyping
+%D \starttypen
%D set_inner_boundingbox p
-%D \stoptyping
-
-path mfun_boundingbox_stack ;
-numeric mfun_boundingbox_stack_depth ;
-
-mfun_boundingbox_stack_depth := 0 ;
+%D \stoptypen
-def pushboundingbox text p =
- mfun_boundingbox_stack_depth := mfun_boundingbox_stack_depth + 1 ;
- mfun_boundingbox_stack[mfun_boundingbox_stack_depth] := boundingbox p ;
-enddef ;
+path pushed_boundingbox;
-def popboundingbox text p =
- setbounds p to mfun_boundingbox_stack[mfun_boundingbox_stack_depth] ;
- mfun_boundingbox_stack[mfun_boundingbox_stack_depth] := origin ;
- mfun_boundingbox_stack_depth := mfun_boundingbox_stack_depth - 1 ;
-enddef ;
+def push_boundingbox text p =
+ pushed_boundingbox := boundingbox p;
+enddef;
-let push_boundingbox = pushboundingbox ; % downward compatible
-let pop_boundingbox = popboundingbox ; % downward compatible
+def pop_boundingbox text p =
+ setbounds p to pushed_boundingbox;
+enddef;
vardef boundingbox primary p =
- if (path p) or (picture p) :
- llcorner p -- lrcorner p -- urcorner p -- ulcorner p
- else :
- origin
- fi -- cycle
+ if (path p) or (picture p) :
+ llcorner p -- lrcorner p -- urcorner p -- ulcorner p
+ else :
+ origin
+ fi -- cycle
enddef;
-vardef innerboundingbox primary p =
- top rt llcorner p --
- top lft lrcorner p --
- bot lft urcorner p --
- bot rt ulcorner p -- cycle
+vardef inner_boundingbox primary p =
+ top rt llcorner p --
+ top lft lrcorner p --
+ bot lft urcorner p --
+ bot rt ulcorner p -- cycle
enddef;
-vardef outerboundingbox primary p =
- bot lft llcorner p --
- bot rt lrcorner p --
- top rt urcorner p --
- top lft ulcorner p -- cycle
+vardef outer_boundingbox primary p =
+ bot lft llcorner p --
+ bot rt lrcorner p --
+ top rt urcorner p --
+ top lft ulcorner p -- cycle
enddef;
-def inner_boundingbox = innerboundingbox enddef ;
-def outer_boundingbox = outerboundingbox enddef ;
+def innerboundingbox = inner_boundingbox enddef ;
+def outerboundingbox = outer_boundingbox enddef ;
-vardef set_inner_boundingbox text q = % obsolete
- setbounds q to innerboundingbox q;
+vardef set_inner_boundingbox text q =
+ setbounds q to inner_boundingbox q;
enddef;
-vardef set_outer_boundingbox text q = % obsolete
- setbounds q to outerboundingbox q;
+vardef set_outer_boundingbox text q =
+ setbounds q to outer_boundingbox q;
enddef;
-%D Some missing functions can be implemented rather straightforward (thanks to
-%D Taco and others):
+%D Some missing functions can be implemented rather
+%D straightforward:
+
+numeric Pi ; Pi := 3.1415926 ;
-pi := 3.14159265358979323846 ; radian := 180/pi ; % 2pi*radian = 360 ;
+vardef sqr primary x = (x*x) enddef ;
+vardef log primary x = (if x=0: 0 else: mlog(x)/mlog(10) fi) enddef ;
+vardef ln primary x = (if x=0: 0 else: mlog(x)/256 fi) enddef ;
+vardef exp primary x = ((mexp 256)**x) enddef ;
+vardef inv primary x = (if x=0: 0 else: x**-1 fi) enddef ;
-% let +++ = ++ ;
+vardef pow (expr x,p) = (x**p) enddef ;
-numeric Pi ; Pi := pi ; % for some old compatibility reasons i guess
+vardef asin primary x = (x+(x**3)/6+3(x**5)/40) enddef ;
+vardef acos primary x = (asin(-x)) enddef ;
+vardef atan primary x = (x-(x**3)/3+(x**5)/5-(x**7)/7) enddef ;
+vardef tand primary x = (sind(x)/cosd(x)) enddef ;
-vardef sqr primary x = x*x enddef ;
-vardef log primary x = if x=0: 0 else: mlog(x)/mlog(10) fi enddef ;
-vardef ln primary x = if x=0: 0 else: mlog(x)/256 fi enddef ;
-vardef exp primary x = (mexp 256)**x enddef ;
-vardef inv primary x = if x=0: 0 else: x**-1 fi enddef ;
+%D Here are Taco Hoekwater's alternatives (but
+%D vardef'd and primaried).
-vardef pow (expr x,p) = x**p enddef ;
+pi := 3.1415926 ; radian := 180/pi ; % 2pi*radian = 360 ;
-vardef tand primary x = sind(x)/cosd(x) enddef ;
-vardef cotd primary x = cosd(x)/sind(x) enddef ;
+vardef tand primary x = (sind(x)/cosd(x)) enddef ;
+vardef cotd primary x = (cosd(x)/sind(x)) enddef ;
-vardef sin primary x = sind(x*radian) enddef ;
-vardef cos primary x = cosd(x*radian) enddef ;
-vardef tan primary x = sin(x)/cos(x) enddef ;
-vardef cot primary x = cos(x)/sin(x) enddef ;
+vardef sin primary x = (sind(x*radian)) enddef ;
+vardef cos primary x = (cosd(x*radian)) enddef ;
+vardef tan primary x = (sin(x)/cos(x)) enddef ;
+vardef cot primary x = (cos(x)/sin(x)) enddef ;
-vardef asin primary x = angle((1+-+x,x)) enddef ;
-vardef acos primary x = angle((x,1+-+x)) enddef ;
-vardef atan primary x = angle(1,x) enddef ;
+vardef asin primary x = angle((1+-+x,x)) enddef ;
+vardef acos primary x = angle((x,1+-+x)) enddef ;
-vardef invsin primary x = (asin(x))/radian enddef ;
-vardef invcos primary x = (acos(x))/radian enddef ;
-vardef invtan primary x = (atan(x))/radian enddef ;
+vardef invsin primary x = ((asin(x))/radian) enddef ;
+vardef invcos primary x = ((acos(x))/radian) enddef ;
-vardef acosh primary x = ln(x+(x+-+1)) enddef ;
-vardef asinh primary x = ln(x+(x++1)) enddef ;
+vardef acosh primary x = ln(x+(x+-+1)) enddef ;
+vardef asinh primary x = ln(x+(x++1)) enddef ;
vardef sinh primary x = save xx ; xx = exp x ; (xx-1/xx)/2 enddef ;
vardef cosh primary x = save xx ; xx = exp x ; (xx+1/xx)/2 enddef ;
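+%D
+%D For example, one period of the sine, sampled in radians with
+%D the redefined \type{sin} (a small sketch):
+%D
+%D \starttypen
+%D draw (0,0) for i=1 upto 36 :
+%D   -- (i/36*4cm, sin(i/36*2*pi)*cm)
+%D endfor ;
+%D \stoptypen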
-%D Sometimes this is handy:
-
-def undashed =
- dashed nullpicture
-enddef ;
-
%D We provide two macros for drawing stripes across a shape.
%D The first method (with the n suffix) uses an approach that is
%D slower in calculation, but more efficient when drawn. The
@@ -387,218 +374,101 @@ enddef ;
%D the second argument identifies the way the shape is to be
%D drawn.
%D
-%D \starttyping
+%D \starttypen
%D stripe_path_n
%D (dashed evenly withcolor blue)
%D (filldraw)
%D fullcircle xscaled 100 yscaled 40 shifted (50,50) withpen pencircle scaled 4;
-%D \stoptyping
+%D \stoptypen
%D
%D The a (or angle) alternative supports arbitrary angles and
%D is therefore more versatile.
%D
-%D \starttyping
+%D \starttypen
%D stripe_path_a
%D (withpen pencircle scaled 2 withcolor red)
%D (draw)
%D fullcircle xscaled 100 yscaled 40 withcolor blue;
-%D \stoptyping
-%D
-%D We have two alternatives, controlled by arguments or defaults (when arguments
-%D are zero).
-%D
-%D The newer and nicer interface is used as follows (triggered by a question by Mari):
-%D
-%D \starttyping
-%D draw image (draw fullcircle scaled 3cm shifted (0cm,0cm) withcolor green) numberstriped (1,10,3) withcolor red ;
-%D draw image (draw fullcircle scaled 3cm shifted (3cm,0cm) withcolor green) numberstriped (2,20,3) withcolor green ;
-%D draw image (draw fullcircle scaled 3cm shifted (3cm,3cm) withcolor green) numberstriped (3,10,5) withcolor blue ;
-%D draw image (draw fullcircle scaled 3cm shifted (0cm,3cm) withcolor green) numberstriped (4,20,5) withcolor yellow ;
-%D
-%D draw image (draw fullcircle scaled 3cm shifted (6cm,0cm) withcolor green) anglestriped (1,20,2) withcolor red ;
-%D draw image (draw fullcircle scaled 3cm shifted (9cm,0cm) withcolor green) anglestriped (2,40,2) withcolor green ;
-%D draw image (draw fullcircle scaled 3cm shifted (9cm,3cm) withcolor green) anglestriped (3,60,2) withcolor blue ;
-%D draw image (draw fullcircle scaled 3cm shifted (6cm,3cm) withcolor green) anglestriped (4,80,2) withcolor yellow ;
-%D
-%D draw image (
-%D draw fullcircle scaled 3cm shifted (0cm,0cm) withcolor green withpen pencircle scaled 2mm ;
-%D draw fullcircle scaled 2cm shifted (0cm,1cm) withcolor blue withpen pencircle scaled 3mm ;
-%D ) shifted (9cm,0cm) numberstriped (1,10,3) withcolor red ;
-%D
-%D draw image (
-%D draw fullcircle scaled 3cm shifted (0cm,0cm) withcolor green withpen pencircle scaled 2mm ;
-%D draw fullcircle scaled 2cm shifted (0cm,1cm) withcolor blue withpen pencircle scaled 3mm ;
-%D ) shifted (12cm,0cm) numberstriped (2,10,3) withcolor red ;
-%D
-%D draw image (
-%D draw fullcircle scaled 3cm shifted (0cm,0cm) withcolor green withpen pencircle scaled 2mm ;
-%D draw fullcircle scaled 2cm shifted (0cm,1cm) withcolor blue withpen pencircle scaled 3mm ;
-%D ) shifted (9cm,5cm) numberstriped (3,10,3) withcolor red ;
+%D \stoptypen
%D
-%D draw image (
-%D draw fullcircle scaled 3cm shifted (0cm,0cm) withcolor green withpen pencircle scaled 2mm ;
-%D draw fullcircle scaled 2cm shifted (0cm,1cm) withcolor blue withpen pencircle scaled 3mm ;
-%D ) shifted (12cm,5cm) numberstriped (4,10,3) withcolor red ;
-%D \stoptyping
+%D The first alternative is controlled by:
stripe_n := 10;
stripe_slot := 3;
-stripe_gap := 5;
-stripe_angle := 45;
-def mfun_tool_striped_number_action text extra =
- for i = 1/used_n step 1/used_n until 1 :
- draw point (1+i) of bounds -- point (3-i) of bounds withpen pencircle scaled penwidth extra ;
- endfor ;
- for i = 0 step 1/used_n until 1 :
- draw point (3+i) of bounds -- point (1-i) of bounds withpen pencircle scaled penwidth extra ;
- endfor ;
-enddef ;
-
-def mfun_tool_striped_set_options(expr option) =
- save isinner, swapped ;
- boolean isinner, swapped ;
- if option = 1 :
- isinner := false ;
- swapped := false ;
- elseif option = 2 :
- isinner := true ;
- swapped := false ;
- elseif option = 3 :
- isinner := false ;
- swapped := true ;
- elseif option = 4 :
- isinner := true ;
- swapped := true ;
- else :
- isinner := false ;
- swapped := false ;
- fi ;
-enddef ;
-
-vardef mfun_tool_striped_number(expr option, p, s_n, s_slot) text extra =
- image (
- begingroup ;
- save pattern, shape, bounds, penwidth, used_n, used_slot ;
- picture pattern, shape ; path bounds ; numeric used_s, used_slot ;
- mfun_tool_striped_set_options(option) ;
- used_slot := if s_slot = 0 : stripe_slot else : s_slot fi ;
- used_n := if s_n = 0 : stripe_n else : s_n fi ;
- shape := image(draw p) ;
- bounds := boundingbox shape ;
- penwidth := min(ypart urcorner shape - ypart llcorner shape, xpart urcorner shape - xpart llcorner shape) / (used_slot * used_n) ;
- pattern := image (
- if isinner :
- mfun_tool_striped_number_action extra ;
- for s within shape :
- if stroked s or filled s :
- clip currentpicture to pathpart s ;
- fi
- endfor ;
- else :
- for s within shape :
- if stroked s or filled s :
- draw image (
- mfun_tool_striped_number_action extra ;
- clip currentpicture to pathpart s ;
- ) ;
- fi ;
- endfor ;
- fi ;
- ) ;
- if swapped :
- addto currentpicture also shape ;
- addto currentpicture also pattern ;
- else :
- addto currentpicture also pattern ;
- addto currentpicture also shape ;
- fi ;
- endgroup ;
- )
-enddef ;
-
-def mfun_tool_striped_angle_action text extra =
- for i = minimum -.5used_gap step used_gap until maximum :
- draw (minimum,i) -- (maximum,i) extra ;
- endfor ;
- currentpicture := currentpicture rotated used_angle ;
-enddef ;
-
-vardef mfun_tool_striped_angle(expr option, p, s_angle, s_gap) text extra =
- image (
- begingroup ;
- save pattern, shape, mask, maximum, minimum, centrum, used_angle, used_gap ;
- picture pattern, shape, mask ; numeric maximum, minimum ; pair centrum ; numeric used_angle, used_gap ;
- mfun_tool_striped_set_options(option) ;
- used_angle := if s_angle = 0 : stripe_angle else : s_angle fi ;
- used_gap := if s_gap = 0 : stripe_gap else : s_gap fi ;
- shape := image(draw p) ;
- centrum := center shape ;
- shape := shape shifted - centrum ;
- mask := shape rotated used_angle ;
- maximum := max (xpart llcorner mask, xpart urcorner mask, ypart llcorner mask, ypart urcorner mask) ;
- minimum := min (xpart llcorner mask, xpart urcorner mask, ypart llcorner mask, ypart urcorner mask) ;
- pattern := image (
- if isinner :
- mfun_tool_striped_angle_action extra ;
- for s within shape :
- if stroked s or filled s :
- clip currentpicture to pathpart s ;
- fi
- endfor ;
- else :
- for s within shape :
- if stroked s or filled s :
- draw image (
- mfun_tool_striped_angle_action extra ;
- clip currentpicture to pathpart s ;
- ) ;
- fi ;
- endfor ;
- fi ;
- ) ;
- if swapped :
- addto currentpicture also shape ;
- addto currentpicture also pattern ;
- else :
- addto currentpicture also pattern ;
- addto currentpicture also shape ;
- fi ;
- currentpicture := currentpicture shifted - centrum ;
- endgroup ;
- )
-enddef;
-
-newinternal striped_normal_inner ; striped_normal_inner := 1 ;
-newinternal striped_reverse_inner ; striped_reverse_inner := 2 ;
-newinternal striped_normal_outer ; striped_normal_outer := 3 ;
-newinternal striped_reverse_outer ; striped_reverse_outer := 4 ;
-
-secondarydef p anglestriped s =
- mfun_tool_striped_angle(redpart s,p,greenpart s,bluepart s)
-enddef ;
-
-secondarydef p numberstriped s =
- mfun_tool_striped_number(redpart s,p,greenpart s,bluepart s)
-enddef ;
+%D When no pen dimensions are passed, the slot determines
+%D the spacing.
+%D
+%D The angle alternative is influenced by:
-% for old times sake:
+stripe_gap := 5;
+stripe_angle := 45;
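+%D
+%D For example, steeper and denser stripes (a sketch that reuses
+%D the call shown above):
+%D
+%D \starttypen
+%D stripe_gap   := 2 ;
+%D stripe_angle := 60 ;
+%D stripe_path_a
+%D   (withpen pencircle scaled 1 withcolor red)
+%D   (draw)
+%D   fullcircle xscaled 100 yscaled 40 ;
+%D \stoptypen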
def stripe_path_n (text s_spec) (text s_draw) expr s_path =
- do_stripe_path_n (s_spec) (s_draw) (s_path)
+ do_stripe_path_n (s_spec) (s_draw) (s_path)
enddef;
def do_stripe_path_n (text s_spec) (text s_draw) (expr s_path) text s_text =
- draw image(s_draw s_path s_text) numberstriped(3,0,0) s_spec ;
-enddef ;
+ begingroup
+ save curpic, newpic, bb, pp, ww;
+ picture curpic, newpic;
+ path bb, pp;
+ pp := s_path;
+ curpic := currentpicture;
+ currentpicture := nullpicture;
+ s_draw pp s_text;
+ bb := boundingbox currentpicture;
+ newpic := currentpicture;
+ currentpicture := nullpicture;
+ ww := min(ypart urcorner newpic - ypart llcorner newpic,
+ xpart urcorner newpic - xpart llcorner newpic);
+ ww := ww/(stripe_slot*stripe_n);
+ for i=1/stripe_n step 1/stripe_n until 1:
+ draw point (1+i) of bb -- point (3-i) of bb
+ withpen pencircle scaled ww s_spec ;
+ endfor;
+ for i=0 step 1/stripe_n until 1:
+ draw point (3+i) of bb -- point (1-i) of bb
+ withpen pencircle scaled ww s_spec;
+ endfor;
+ clip currentpicture to pp;
+ addto newpic also currentpicture;
+ currentpicture := curpic;
+ addto currentpicture also newpic;
+ endgroup
+enddef;
def stripe_path_a (text s_spec) (text s_draw) expr s_path =
- do_stripe_path_a (s_spec) (s_draw) (s_path)
+ do_stripe_path_a (s_spec) (s_draw) (s_path)
enddef;
def do_stripe_path_a (text s_spec) (text s_draw) (expr s_path) text s_text =
- draw image(s_draw s_path s_text) anglestriped(3,0,0) s_spec ;
-enddef ;
+ begingroup
+ save curpic, newpic, pp; picture curpic, newpic; path pp ;
+ pp := s_path ;
+ curpic := currentpicture;
+ currentpicture := nullpicture;
+ s_draw pp s_text ;
+ def do_stripe_rotation (expr p) =
+ (currentpicture rotatedaround(center p,stripe_angle))
+ enddef ;
+ s_max := max
+ (xpart llcorner do_stripe_rotation(currentpicture),
+ xpart urcorner do_stripe_rotation(currentpicture),
+ ypart llcorner do_stripe_rotation(currentpicture),
+ ypart urcorner do_stripe_rotation(currentpicture));
+ newpic := currentpicture;
+ currentpicture := nullpicture;
+ for i=-s_max-.5stripe_gap step stripe_gap until s_max:
+ draw (-s_max,i)--(s_max,i) s_spec;
+ endfor;
+ currentpicture := do_stripe_rotation(newpic);
+ clip currentpicture to pp ;
+ addto newpic also currentpicture;
+ currentpicture := curpic;
+ addto currentpicture also newpic;
+ endgroup
+enddef;
%D A few normalizing macros:
%D
@@ -637,43 +507,46 @@ enddef ;
% natural_width := xpart urcorner currentpicture - xpart llcorner currentpicture;
% currentpicture := currentpicture scaled (the_width/natural_width) ;
+% TODO TODO TODO TODO, not yet ok
+
primarydef p xsized w =
- (p if (bbwidth (p)>0) and (w>0) : scaled (w/bbwidth (p)) fi)
+ (p if (bbwidth (p)>0) and (w>0) : scaled (w/bbwidth (p)) fi)
enddef ;
primarydef p ysized h =
- (p if (bbheight(p)>0) and (h>0) : scaled (h/bbheight(p)) fi)
+ (p if (bbheight(p)>0) and (h>0) : scaled (h/bbheight(p)) fi)
enddef ;
primarydef p xysized s =
- begingroup
+ begingroup ;
save wh, w, h ; pair wh ; numeric w, h ;
wh := paired (s) ; w := bbwidth(p) ; h := bbheight(p) ;
- p
- if (w>0) and (h>0) :
- if xpart wh > 0 : xscaled (xpart wh/w) fi
- if ypart wh > 0 : yscaled (ypart wh/h) fi
- fi
- endgroup
+ (p if (w>0) and (h>0) :
+ if xpart wh > 0 : xscaled (xpart wh/w) fi
+ if ypart wh > 0 : yscaled (ypart wh/h) fi
+ fi)
+ endgroup
enddef ;
-let sized = xysized ;
+primarydef p sized wh =
+ (p xysized wh)
+enddef ;
-def xscale_currentpicture(expr w) = % obsolete
- currentpicture := currentpicture xsized w ;
+def xscale_currentpicture(expr w) =
+ currentpicture := currentpicture xsized w ;
enddef;
-def yscale_currentpicture(expr h) = % obsolete
- currentpicture := currentpicture ysized h ;
+def yscale_currentpicture(expr h) =
+ currentpicture := currentpicture ysized h ;
enddef;
-def xyscale_currentpicture(expr w, h) = % obsolete
- currentpicture := currentpicture xysized (w,h) ;
+def xyscale_currentpicture(expr w, h) =
+ currentpicture := currentpicture xysized (w,h) ;
enddef;
-def scale_currentpicture(expr w, h) = % obsolete
- currentpicture := currentpicture xsized w ;
- currentpicture := currentpicture ysized h ;
+def scale_currentpicture(expr w, h) =
+ currentpicture := currentpicture xsized w ;
+ currentpicture := currentpicture ysized h ;
enddef;
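+%D
+%D The intended use is to scale to a target width, height, or
+%D both (a small sketch, given the TODO above):
+%D
+%D \starttypen
+%D picture p ; p := image(draw fullcircle scaled 1cm) ;
+%D draw p xsized 3cm ;
+%D draw p xysized (3cm,2cm) shifted (4cm,0) ;
+%D \stoptypen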
%D A full circle is centered at the origin, while a unitsquare
@@ -689,28 +562,28 @@ unitcircle := fullcircle shifted urcorner fullcircle ;
path urcircle, ulcircle, llcircle, lrcircle ;
-urcircle := origin -- (+.5,0) & (+.5,0){up} .. (0,+.5) & (0,+.5) -- cycle ;
-ulcircle := origin -- (0,+.5) & (0,+.5){left} .. (-.5,0) & (-.5,0) -- cycle ;
-llcircle := origin -- (-.5,0) & (-.5,0){down} .. (0,-.5) & (0,-.5) -- cycle ;
-lrcircle := origin -- (0,-.5) & (0,-.5){right} .. (+.5,0) & (+.5,0) -- cycle ;
+urcircle := origin--(+.5,0)&(+.5,0){up} ..(0,+.5)&(0,+.5)--cycle ;
+ulcircle := origin--(0,+.5)&(0,+.5){left} ..(-.5,0)&(-.5,0)--cycle ;
+llcircle := origin--(-.5,0)&(-.5,0){down} ..(0,-.5)&(0,-.5)--cycle ;
+lrcircle := origin--(0,-.5)&(0,-.5){right}..(+.5,0)&(+.5,0)--cycle ;
path tcircle, bcircle, lcircle, rcircle ;
-tcircle = origin -- (+.5,0) & (+.5,0) {up} .. (0,+.5) .. {down} (-.5,0) -- cycle ;
-bcircle = origin -- (-.5,0) & (-.5,0) {down} .. (0,-.5) .. {up} (+.5,0) -- cycle ;
-lcircle = origin -- (0,+.5) & (0,+.5) {left} .. (-.5,0) .. {right} (0,-.5) -- cycle ;
-rcircle = origin -- (0,-.5) & (0,-.5) {right} .. (+.5,0) .. {left} (0,+.5) -- cycle ;
+tcircle = origin--(+.5,0)&(+.5,0){up} ..(0,+.5)..{down} (-.5,0)--cycle ;
+bcircle = origin--(-.5,0)&(-.5,0){down} ..(0,-.5)..{up} (+.5,0)--cycle ;
+lcircle = origin--(0,+.5)&(0,+.5){left} ..(-.5,0)..{right}(0,-.5)--cycle ;
+rcircle = origin--(0,-.5)&(0,-.5){right}..(+.5,0)..{left} (0,+.5)--cycle ;
-path urtriangle, ultriangle, lltriangle, lrtriangle ; % watch out: it's contrary to what you expect and starts in the origin
+path urtriangle, ultriangle, lltriangle, lrtriangle ;
-urtriangle := origin -- (+.5,0) -- (0,+.5) -- cycle ;
-ultriangle := origin -- (0,+.5) -- (-.5,0) -- cycle ;
-lltriangle := origin -- (-.5,0) -- (0,-.5) -- cycle ;
-lrtriangle := origin -- (0,-.5) -- (+.5,0) -- cycle ;
+urtriangle := origin--(+.5,0)--(0,+.5)--cycle ;
+ultriangle := origin--(0,+.5)--(-.5,0)--cycle ;
+lltriangle := origin--(-.5,0)--(0,-.5)--cycle ;
+lrtriangle := origin--(0,-.5)--(+.5,0)--cycle ;
path unitdiamond, fulldiamond ;
-unitdiamond := (.5,0) -- (1,.5) -- (.5,1) -- (0,.5) -- cycle ;
+unitdiamond := (.5,0)--(1,.5)--(.5,1)--(0,.5)--cycle ;
fulldiamond := unitdiamond shifted - center unitdiamond ;
%D More robust:
@@ -725,49 +598,46 @@ fulldiamond := unitdiamond shifted - center unitdiamond ;
%D Shorter
-primarydef p xyscaled q = % secundarydef does not work out well
- begingroup
- save qq ; pair qq ;
- qq = paired(q) ;
- p
- if xpart qq <> 0 : xscaled (xpart qq) fi
- if ypart qq <> 0 : yscaled (ypart qq) fi
- endgroup
+primarydef p xyscaled q =
+ begingroup ; save qq ; pair qq ; qq = paired(q) ;
+ ( p
+ if xpart qq<>0 : xscaled (xpart qq) fi
+ if ypart qq<>0 : yscaled (ypart qq) fi )
+ endgroup
enddef ;
-%D Some personal code that might move to another module
+%D Experimental, see folder-3.tex.
def set_grid(expr w, h, nx, ny) =
- boolean grid[][] ; boolean grid_full ;
- numeric grid_w, grid_h, grid_nx, grid_ny, grid_x, grid_y, grid_left ;
- grid_w := w ;
- grid_h := h ;
- grid_nx := nx ;
- grid_ny := ny ;
- grid_x := round(w/grid_nx) ; % +.5) ;
- grid_y := round(h/grid_ny) ; % +.5) ;
- grid_left := (1+grid_x)*(1+grid_y) ;
- grid_full := false ;
- for i=0 upto grid_x :
- for j=0 upto grid_y :
- grid[i][j] := false ;
- endfor ;
+ boolean grid[][] ; boolean grid_full ;
+ grid_w := w ;
+ grid_h := h ;
+ grid_nx := nx ;
+ grid_ny := ny ;
+ grid_x := round(w/grid_nx) ; % +.5) ;
+ grid_y := round(h/grid_ny) ; % +.5) ;
+ grid_left := (1+grid_x)*(1+grid_y) ;
+ grid_full := false ;
+ for i=0 upto grid_x:
+ for j=0 upto grid_y:
+ grid[i][j] := false ;
endfor ;
+ endfor ;
enddef ;
vardef new_on_grid(expr _dx_, _dy_) =
- dx := _dx_ ;
- dy := _dy_ ;
- ddx := min(round(dx/grid_nx),grid_x) ; % +.5),grid_x) ;
- ddy := min(round(dy/grid_ny),grid_y) ; % +.5),grid_y) ;
- if not grid_full and not grid[ddx][ddy] :
- grid[ddx][ddy] := true ;
- grid_left := grid_left-1 ;
- grid_full := (grid_left=0) ;
- true
- else :
- false
- fi
+ dx := _dx_ ;
+ dy := _dy_ ;
+ ddx := min(round(dx/grid_nx),grid_x) ; % +.5),grid_x) ;
+ ddy := min(round(dy/grid_ny),grid_y) ; % +.5),grid_y) ;
+ if not grid_full and not grid[ddx][ddy]:
+ grid[ddx][ddy] := true ;
+ grid_left := grid_left-1 ;
+ grid_full := (grid_left=0) ;
+ true
+ else:
+ false
+ fi
enddef ;
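+%D
+%D A sketch of the intended use: reserve cells on a grid and only
+%D draw when a cell is still free (\type{new_on_grid} stores its
+%D arguments in \type{dx} and \type{dy}):
+%D
+%D \starttypen
+%D set_grid(10cm,10cm,1cm,1cm) ;
+%D for i=1 upto 100 :
+%D   if new_on_grid(uniformdeviate 10cm,uniformdeviate 10cm) :
+%D     draw (dx,dy) withpen pencircle scaled 2mm ;
+%D   fi ;
+%D endfor ;
+%D \stoptypen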
%D usage: \type{innerpath peepholed outerpath}.
@@ -780,71 +650,79 @@ enddef ;
%D endfig;
secondarydef p peepholed q =
- begingroup
- save start ; pair start ;
- start := point 0 of p ;
- if xpart start >= xpart center p :
- if ypart start >= ypart center p :
- urcorner q -- ulcorner q -- llcorner q -- lrcorner q --
- reverse p -- lrcorner q -- cycle
- else :
- lrcorner q -- urcorner q -- ulcorner q -- llcorner q --
- reverse p -- llcorner q -- cycle
- fi
+ begingroup ;
+ save start ; pair start ; start := point 0 of p ;
+ if xpart start >= xpart center p :
+ if ypart start >= ypart center p :
+ urcorner q -- ulcorner q -- llcorner q -- lrcorner q --
+ reverse p -- lrcorner q -- cycle
else :
- if ypart start > ypart center p :
- ulcorner q -- llcorner q -- lrcorner q -- urcorner q --
- reverse p -- urcorner q -- cycle
- else :
- llcorner q -- lrcorner q -- urcorner q -- ulcorner q --
- reverse p -- ulcorner q -- cycle
- fi
+ lrcorner q -- urcorner q -- ulcorner q -- llcorner q --
+ reverse p -- llcorner q -- cycle
fi
- endgroup
+ else :
+ if ypart start > ypart center p :
+ ulcorner q -- llcorner q -- lrcorner q -- urcorner q --
+ reverse p -- urcorner q -- cycle
+ else :
+ llcorner q -- lrcorner q -- urcorner q -- ulcorner q --
+ reverse p -- ulcorner q -- cycle
+ fi
+ fi
+ endgroup
enddef ;
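+%D
+%D For example, punching a circular hole in a square (a small
+%D sketch; \type{fullcircle} and \type{unitsquare} are plain
+%D \METAPOST\ paths):
+%D
+%D \starttypen
+%D fill (fullcircle scaled 2cm shifted (2cm,2cm))
+%D   peepholed (unitsquare scaled 4cm) withcolor .8white ;
+%D \stoptypen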
boolean intersection_found ;
secondarydef p intersection_point q =
- begingroup
+ begingroup
save x_, y_ ;
(x_,y_) = p intersectiontimes q ;
if x_<0 :
- intersection_found := false ;
- center p % origin
+ intersection_found := false ;
+ center p % origin
else :
- intersection_found := true ;
- .5[point x_ of p, point y_ of q]
+ intersection_found := true ;
+ .5[point x_ of p, point y_ of q]
fi
- endgroup
+ endgroup
enddef ;
%D New, undocumented, experimental:
vardef tensecircle (expr width, height, offset) =
- (-width/2,-height/2) ... (0,-height/2-offset) ...
- (+width/2,-height/2) ... (+width/2+offset,0) ...
- (+width/2,+height/2) ... (0,+height/2+offset) ...
- (-width/2,+height/2) ... (-width/2-offset,0) ... cycle
+ ((-width/2,-height/2) ... (0,-height/2-offset) ...
+ (+width/2,-height/2) ... (+width/2+offset,0) ...
+ (+width/2,+height/2) ... (0,+height/2+offset) ...
+ (-width/2,+height/2) ... (-width/2-offset,0) ... cycle)
enddef ;
+%vardef tensecircle (expr width, height, offset) =
+% ((-width/2,-height/2)..(0,-height/2-offset)..(+width/2,-height/2) &
+% (+width/2,-height/2)..(+width/2+offset,0)..(+width/2,+height/2) &
+% (+width/2,+height/2)..(0,+height/2+offset)..(-width/2,+height/2) &
+% (-width/2,+height/2)..(-width/2-offset,0)..(-width/2,-height/2)..cycle)
+%enddef ;
+
vardef roundedsquare (expr width, height, offset) =
- (offset,0) -- (width-offset,0) {right} ..
- (width,offset) -- (width,height-offset) {up} ..
- (width-offset,height) -- (offset,height) {left} ..
- (0,height-offset) -- (0,offset) {down} .. cycle
+ ((offset,0)--(width-offset,0){right} ..
+ (width,offset)--(width,height-offset){up} ..
+ (width-offset,height)--(offset,height){left} ..
+ (0,height-offset)--(0,offset){down} .. cycle)
enddef ;
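+%D
+%D Both take a width, a height and an offset, for instance:
+%D
+%D \starttypen
+%D draw tensecircle  (3cm,2cm,.5cm) ;
+%D draw roundedsquare(3cm,2cm,.5cm) shifted (4cm,0) ;
+%D \stoptypen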
%D Some colors.
+color cyan ; cyan = (0,1,1) ;
+color magenta ; magenta = (1,0,1) ;
+color yellow ; yellow = (1,1,0) ;
+
def colortype(expr c) =
if cmykcolor c : cmykcolor elseif rgbcolor c : rgbcolor else : grayscale fi
enddef ;
-
vardef whitecolor(expr c) =
if cmykcolor c : (0,0,0,0) elseif rgbcolor c : (1,1,1) else : 1 fi
enddef ;
-
vardef blackcolor(expr c) =
if cmykcolor c : (0,0,0,1) elseif rgbcolor c : (0,0,0) else : 0 fi
enddef ;
@@ -852,74 +730,112 @@ enddef ;
%D Well, this is the dangerous and naive version:
def drawfill text t =
- fill t ;
- draw t ;
+ fill t ;
+ draw t ;
enddef;
%D This two step approach saves the path first, since it can
%D be a function. Attributes must not be randomized.
def drawfill expr c =
- path _c_ ; _c_ := c ;
- mfun_do_drawfill
+ path _c_ ; _c_ := c ;
+ do_drawfill
enddef ;
-def mfun_do_drawfill text t =
- draw _c_ t ;
- fill _c_ t ;
+def do_drawfill text t =
+ draw _c_ t ;
+ fill _c_ t ;
enddef;
def undrawfill expr c =
- drawfill c withcolor background % rather useless
+ drawfill c withcolor background
enddef ;
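+%D
+%D For instance, the options given after the path apply to both
+%D the fill and the outline:
+%D
+%D \starttypen
+%D drawfill fullcircle scaled 2cm withcolor .6white ;
+%D \stoptypen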
%D Moved from mp-char.mp
-vardef paired primary d =
- if pair d : d else : (d,d) fi
+vardef paired (expr d) =
+ if pair d : d else : (d,d) fi
+enddef ;
+
+vardef tripled (expr d) =
+ if color d : d else : (d,d,d) fi
+enddef ;
+
+primarydef p enlarged d =
+ (p llmoved d -- p lrmoved d -- p urmoved d -- p ulmoved d -- cycle)
+enddef;
+
+primarydef p llenlarged d =
+ (p llmoved d -- lrcorner p -- urcorner p -- ulcorner p -- cycle)
+enddef ;
+
+primarydef p lrenlarged d =
+ (llcorner p -- p lrmoved d -- urcorner p -- ulcorner p -- cycle)
enddef ;
-vardef tripled primary d =
- if color d : d else : (d,d,d) fi
+primarydef p urenlarged d =
+ (llcorner p -- lrcorner p -- p urmoved d -- ulcorner p -- cycle)
enddef ;
-% maybe secondaries:
+primarydef p ulenlarged d =
+ (llcorner p -- lrcorner p -- urcorner p -- p ulmoved d -- cycle)
+enddef ;
-primarydef p enlarged d = ( p llmoved d -- p lrmoved d -- p urmoved d -- p ulmoved d -- cycle ) enddef ;
-primarydef p llenlarged d = ( p llmoved d -- lrcorner p -- urcorner p -- ulcorner p -- cycle ) enddef ;
-primarydef p lrenlarged d = ( llcorner p -- p lrmoved d -- urcorner p -- ulcorner p -- cycle ) enddef ;
-primarydef p urenlarged d = ( llcorner p -- lrcorner p -- p urmoved d -- ulcorner p -- cycle ) enddef ;
-primarydef p ulenlarged d = ( llcorner p -- lrcorner p -- urcorner p -- p ulmoved d -- cycle ) enddef ;
+primarydef p llmoved d =
+ ((llcorner p) shifted (-xpart paired(d),-ypart paired(d)))
+enddef ;
-primarydef p llmoved d = ( (llcorner p) shifted (-xpart paired(d),-ypart paired(d)) ) enddef ;
-primarydef p lrmoved d = ( (lrcorner p) shifted (+xpart paired(d),-ypart paired(d)) ) enddef ;
-primarydef p urmoved d = ( (urcorner p) shifted (+xpart paired(d),+ypart paired(d)) ) enddef ;
-primarydef p ulmoved d = ( (ulcorner p) shifted (-xpart paired(d),+ypart paired(d)) ) enddef ;
+primarydef p lrmoved d =
+ ((lrcorner p) shifted (+xpart paired(d),-ypart paired(d)))
+enddef ;
+
+primarydef p urmoved d =
+ ((urcorner p) shifted (+xpart paired(d),+ypart paired(d)))
+enddef ;
+
+primarydef p ulmoved d =
+ ((ulcorner p) shifted (-xpart paired(d),+ypart paired(d)))
+enddef ;
+
+primarydef p leftenlarged d =
+ ((llcorner p) shifted (-d,0) -- lrcorner p --
+ urcorner p -- (ulcorner p) shifted (-d,0) -- cycle)
+enddef ;
+
+primarydef p rightenlarged d =
+ (llcorner p -- (lrcorner p) shifted (d,0) --
+ (urcorner p) shifted (d,0) -- ulcorner p -- cycle)
+enddef ;
+
+primarydef p topenlarged d =
+ (llcorner p -- lrcorner p --
+ (urcorner p) shifted (0,d) -- (ulcorner p) shifted (0,d) -- cycle)
+enddef ;
-primarydef p leftenlarged d = ( (llcorner p) shifted (-d,0) -- lrcorner p -- urcorner p -- (ulcorner p) shifted (-d,0) -- cycle ) enddef ;
-primarydef p rightenlarged d = ( llcorner p -- (lrcorner p) shifted (d,0) -- (urcorner p) shifted (d,0) -- ulcorner p -- cycle ) enddef ;
-primarydef p topenlarged d = ( llcorner p -- lrcorner p -- (urcorner p) shifted (0,d) -- (ulcorner p) shifted (0,d) -- cycle ) enddef ;
-primarydef p bottomenlarged d = ( llcorner p shifted (0,-d) -- lrcorner p shifted (0,-d) -- urcorner p -- ulcorner p -- cycle ) enddef ;
+primarydef p bottomenlarged d =
+ (llcorner p shifted (0,-d) -- lrcorner p shifted (0,-d) --
+ urcorner p -- ulcorner p -- cycle)
+enddef ;
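+%D
+%D These all operate on the bounding box corners, for instance:
+%D
+%D \starttypen
+%D path p ; p := fullcircle scaled 2cm ;
+%D draw p ;
+%D draw (boundingbox p) enlarged 3pt dashed evenly ;
+%D draw p topenlarged 6pt withcolor .5white ;
+%D \stoptypen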
%D Handy for testing/debugging:
-primarydef p crossed d = (
+primarydef p crossed d =
if pair p :
- p shifted (-d, 0) -- p --
- p shifted ( 0,-d) -- p --
- p shifted (+d, 0) -- p --
- p shifted ( 0,+d) -- p -- cycle
+ (p shifted (-d, 0) -- p --
+ p shifted ( 0,-d) -- p --
+ p shifted (+d, 0) -- p --
+ p shifted ( 0,+d) -- p -- cycle)
else :
- center p shifted (-d, 0) -- llcorner p --
- center p shifted ( 0,-d) -- lrcorner p --
- center p shifted (+d, 0) -- urcorner p --
- center p shifted ( 0,+d) -- ulcorner p -- cycle
+ (center p shifted (-d, 0) -- llcorner p --
+ center p shifted ( 0,-d) -- lrcorner p --
+ center p shifted (+d, 0) -- urcorner p --
+ center p shifted ( 0,+d) -- ulcorner p -- cycle)
fi
-) enddef ;
+enddef ;
%D Also handy (math ladders):
-vardef laddered primary p = % was expr
+vardef laddered expr p =
point 0 of p
for i=1 upto length(p) :
-- (xpart (point i of p), ypart (point (i-1) of p)) -- (point i of p)
@@ -933,143 +849,182 @@ enddef ;
% vardef topboundary primary p = (urcorner p -- ulcorner p) enddef ;
% vardef leftboundary primary p = (ulcorner p -- llcorner p) enddef ;
-vardef bottomboundary primary p = if pair p : p else : (llcorner p -- lrcorner p) fi enddef ;
-vardef rightboundary primary p = if pair p : p else : (lrcorner p -- urcorner p) fi enddef ;
-vardef topboundary primary p = if pair p : p else : (urcorner p -- ulcorner p) fi enddef ;
-vardef leftboundary primary p = if pair p : p else : (ulcorner p -- llcorner p) fi enddef ;
+vardef bottomboundary primary p =
+ if pair p : p else : (llcorner p -- lrcorner p) fi
+enddef ;
+
+vardef rightboundary primary p =
+ if pair p : p else : (lrcorner p -- urcorner p) fi
+enddef ;
+
+vardef topboundary primary p =
+ if pair p : p else : (urcorner p -- ulcorner p) fi
+enddef ;
+
+vardef leftboundary primary p =
+ if pair p : p else : (ulcorner p -- llcorner p) fi
+enddef ;
%D Nice too:
primarydef p superellipsed s =
- superellipse (
- .5[lrcorner p,urcorner p],
- .5[urcorner p,ulcorner p],
- .5[ulcorner p,llcorner p],
- .5[llcorner p,lrcorner p],
- s
- )
-enddef ;
-
-primarydef p squeezed s = (
- (llcorner p .. .5[llcorner p,lrcorner p] shifted ( 0, ypart paired(s)) .. lrcorner p) &
- (lrcorner p .. .5[lrcorner p,urcorner p] shifted (-xpart paired(s), 0) .. urcorner p) &
- (urcorner p .. .5[urcorner p,ulcorner p] shifted ( 0,-ypart paired(s)) .. ulcorner p) &
- (ulcorner p .. .5[ulcorner p,llcorner p] shifted ( xpart paired(s), 0) .. llcorner p) & cycle
-) enddef ;
+ superellipse
+ (.5[lrcorner p,urcorner p],
+ .5[urcorner p,ulcorner p],
+ .5[ulcorner p,llcorner p],
+ .5[llcorner p,lrcorner p],
+ s)
+enddef ;
-primarydef p randomshifted s =
- begingroup ;
- save ss ; pair ss ;
- ss := paired(s) ;
- p shifted (-.5xpart ss + uniformdeviate xpart ss,-.5ypart ss + uniformdeviate ypart ss)
- endgroup
+primarydef p squeezed s =
+ ((llcorner p .. .5[llcorner p,lrcorner p] shifted ( 0, ypart paired(s)) .. lrcorner p) &
+ (lrcorner p .. .5[lrcorner p,urcorner p] shifted (-xpart paired(s), 0) .. urcorner p) &
+ (urcorner p .. .5[urcorner p,ulcorner p] shifted ( 0,-ypart paired(s)) .. ulcorner p) &
+ (ulcorner p .. .5[ulcorner p,llcorner p] shifted ( xpart paired(s), 0) .. llcorner p) & cycle)
enddef ;
-primarydef p randomized s = (
- if path p :
- for i=0 upto length(p)-1 :
- ((point i of p) randomshifted s) .. controls
- ((postcontrol i of p) randomshifted s) and
- ((precontrol (i+1) of p) randomshifted s) ..
- endfor
- if cycle p :
- cycle
- else :
- ((point length(p) of p) randomshifted s)
- fi
- elseif pair p :
- p randomshifted s
- elseif cmykcolor p :
- if color s :
- ((uniformdeviate cyanpart s) * cyanpart p,
- (uniformdeviate magentapart s) * magentapart p,
- (uniformdeviate yellowpart s) * yellowpart p,
- (uniformdeviate blackpart s) * blackpart p)
- elseif pair s :
- ((xpart s + (uniformdeviate (ypart s - xpart s))) * p)
- else :
- ((uniformdeviate s) * p)
- fi
- elseif rgbcolor p :
- if color s :
- ((uniformdeviate redpart s) * redpart p,
- (uniformdeviate greenpart s) * greenpart p,
- (uniformdeviate bluepart s) * bluepart p)
- elseif pair s :
- ((xpart s + (uniformdeviate (ypart s - xpart s))) * p)
- else :
- ((uniformdeviate s) * p)
- fi
- elseif color p :
- if color s :
- ((uniformdeviate greypart s) * greypart p)
- elseif pair s :
- ((xpart s + (uniformdeviate (ypart s - xpart s))) * p)
- else :
- ((uniformdeviate s) * p)
- fi
+primarydef p randomshifted s =
+ begingroup ; save ss ; pair ss ; ss := paired(s) ;
+ p shifted (-.5xpart ss + uniformdeviate xpart ss,
+ -.5ypart ss + uniformdeviate ypart ss)
+ endgroup
+enddef ;
+
+%primarydef p randomized s =
+% for i=0 upto length(p)-1 :
+% ((point i of p) randomshifted s) .. controls
+% ((postcontrol i of p) randomshifted s) and
+% ((precontrol (i+1) of p) randomshifted s) ..
+% endfor cycle
+%enddef ;
+
+primarydef p randomized s =
+ (if path p :
+ for i=0 upto length(p)-1 :
+ ((point i of p) randomshifted s) .. controls
+ ((postcontrol i of p) randomshifted s) and
+ ((precontrol (i+1) of p) randomshifted s) ..
+ endfor
+ if cycle p :
+ cycle
else :
- p + uniformdeviate s
+ ((point length(p) of p) randomshifted s)
fi
-) enddef ;
+ elseif pair p :
+ p randomshifted s
+ elseif cmykcolor p :
+ if color s :
+ (uniformdeviate cyanpart s * cyanpart p,
+ uniformdeviate magentapart s * magentapart p,
+ uniformdeviate yellowpart s * yellowpart p,
+ uniformdeviate blackpart s * blackpart p)
+ elseif pair s :
+ ((xpart s + uniformdeviate (ypart s - xpart s)) * p)
+ else :
+ (uniformdeviate s * p)
+ fi
+ elseif rgbcolor p :
+ if color s :
+ (uniformdeviate redpart s * redpart p,
+ uniformdeviate greenpart s * greenpart p,
+ uniformdeviate bluepart s * bluepart p)
+ elseif pair s :
+ ((xpart s + uniformdeviate (ypart s - xpart s)) * p)
+ else :
+ (uniformdeviate s * p)
+ fi
+ elseif color p :
+ if color s :
+ (uniformdeviate graypart s * graypart p)
+ elseif pair s :
+ ((xpart s + uniformdeviate (ypart s - xpart s)) * p)
+ else :
+ (uniformdeviate s * p)
+ fi
+ else :
+ p + uniformdeviate s
+ fi)
+enddef ;
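+%D
+%D Typical usage (a small sketch; \type{randomized} accepts
+%D paths, pairs and colors, as the definition above shows):
+%D
+%D \starttypen
+%D draw fullcircle scaled 3cm randomized 5pt ;
+%D draw unitsquare scaled 3cm randomshifted (1cm,1cm) ;
+%D fill fullcircle scaled 1cm withcolor red randomized .5 ;
+%D \stoptypen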
%D Not perfect (alternative for interpath)
vardef interpolated(expr s, p, q) =
- save m ; numeric m ;
- m := max(length(p),length(q)) ;
- if path p :
- for i=0 upto m-1 :
- s[point (i /m) along p,point (i /m) along q] .. controls
- s[postcontrol (i /m) along p,postcontrol (i /m) along q] and
- s[precontrol ((i+1)/m) along p,precontrol ((i+1)/m) along q] ..
- endfor
- if cycle p :
- cycle
- else :
- s[point infinity of p,point infinity of q]
- fi
- else :
- a[p,q]
- fi
+ save m ; m := max(length(p),length(q)) ;
+ (if path p :
+ for i=0 upto m-1 :
+ s[point (i /m) along p,
+ point (i /m) along q] .. controls
+ s[postcontrol (i /m) along p,
+ postcontrol (i /m) along q] and
+ s[precontrol ((i+1)/m) along p,
+ precontrol ((i+1)/m) along q] ..
+ endfor
+ if cycle p :
+ cycle
+ else :
+ s[point infinity of p,
+ point infinity of q]
+ fi
+ else :
+ a[p,q]
+ fi)
enddef ;
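+%D
+%D For example, stepping from a circle to a square (a small
+%D sketch):
+%D
+%D \starttypen
+%D path p, q ;
+%D p := fullcircle scaled 3cm ;
+%D q := unitsquare scaled 3cm shifted (-1.5cm,-1.5cm) ;
+%D for s=0 step .25 until 1 :
+%D   draw interpolated(s,p,q) ;
+%D endfor ;
+%D \stoptypen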
%D Interesting too:
-primarydef p paralleled d = (
+% primarydef p parallel s =
+% begingroup ; save q, b ; path q ; numeric b ;
+% b := xpart (lrcorner p - llcorner p) ;
+% q := p if b>0 : scaled ((b+2s)/b) fi ;
+% (q shifted (center p-center q))
+% endgroup
+% enddef ;
+
+%primarydef p parallel s =
+% begingroup ; save q, w,h ; path q ; numeric w, h ;
+% w := bbwidth(p) ; h := bbheight(p) ;
+% q := p if (w>0) and (h>0) :
+% xyscaled ((w+2*xpart paired(s))/w,(h+2*ypart paired(s))/h) fi ;
+% (q shifted (center p-center q))
+% endgroup
+%enddef ;
+
+primarydef p paralleled d =
p shifted if d < 0 : - fi ((point abs(d) on (p rotatedaround(point 0 of p,90))) - point 0 of p)
-) enddef ;
+enddef ;
vardef punked primary p =
- point 0 of p for i=1 upto length(p)-1 : -- point i of p endfor
- if cycle p : -- cycle else : -- point length(p) of p fi
+ (point 0 of p for i=1 upto length(p)-1 : -- point i of p endfor
+ if cycle p : -- cycle else : -- point length(p) of p fi)
enddef ;
vardef curved primary p =
- point 0 of p for i=1 upto length(p)-1 : .. point i of p endfor
- if cycle p : .. cycle else : .. point length(p) of p fi
+ (point 0 of p for i=1 upto length(p)-1 : .. point i of p endfor
+ if cycle p : .. cycle else : .. point length(p) of p fi)
enddef ;
primarydef p blownup s =
- begingroup
- save _p_ ; path _p_ ;
- _p_ := p xysized (bbwidth(p)+2(xpart paired(s)),bbheight(p)+2(ypart paired(s))) ;
- (_p_ shifted (center p - center _p_))
- endgroup
+ begingroup
+ save _p_ ; path _p_ ; _p_ := p xysized
+ (bbwidth (p)+2(xpart paired(s)),
+ bbheight(p)+2(ypart paired(s))) ;
+ (_p_ shifted (center p - center _p_))
+ endgroup
enddef ;
%D Rather fundamental.
% not yet ok
-vardef leftrightpath(expr p, l) = % used in s-pre-19
- save q, r, t, b ; path q, r ; pair t, b ;
- t := (ulcorner p -- urcorner p) intersection_point p ;
- b := (llcorner p -- lrcorner p) intersection_point p ;
- r := if xpart directionpoint t of p < 0 : reverse p else : p fi ; % r is needed, else problems when reverse is fed
- q := r cutbefore if l: t else: b fi ;
- q := q if xpart point 0 of r > 0 : & r fi cutafter if l: b else: t fi ;
- q
+def leftrightpath(expr p, l) = % used in s-pre-19
+ save q, r, t, b ; path q, r ; pair t, b ;
+ t := (ulcorner p -- urcorner p) intersection_point p ;
+ b := (llcorner p -- lrcorner p) intersection_point p ;
+ r := if xpart directionpoint t of p < 0 : reverse p else : p fi ; % r is needed, else problems when reverse is fed
+ q := r cutbefore if l: t else: b fi ;
+ q := q if xpart point 0 of r > 0 : &
+ r fi cutafter if l: b else: t fi ;
+ q
enddef ;
vardef leftpath expr p = leftrightpath(p,true ) enddef ;
@@ -1078,10 +1033,10 @@ vardef rightpath expr p = leftrightpath(p,false) enddef ;
%D Drawoptions
def saveoptions =
- save _op_ ; def _op_ = enddef ;
+ save _op_ ; def _op_ = enddef ;
enddef ;
-%D Tracing. (not yet in lexer)
+%D Tracing.
let normaldraw = draw ;
let normalfill = fill ;
@@ -1091,6 +1046,7 @@ let normalfill = fill ;
def normalfill expr c = addto currentpicture contour c _op_ enddef ;
def normaldraw expr p = addto currentpicture if picture p: also p else: doublepath p withpen currentpen fi _op_ enddef ;
+
def drawlineoptions (text t) = def _lin_opt_ = t enddef ; enddef ;
def drawpointoptions (text t) = def _pnt_opt_ = t enddef ; enddef ;
def drawcontroloptions(text t) = def _ctr_opt_ = t enddef ; enddef ;
@@ -1100,13 +1056,13 @@ def drawboundoptions (text t) = def _bnd_opt_ = t enddef ; enddef ;
def drawpathoptions (text t) = def _pth_opt_ = t enddef ; enddef ;
def resetdrawoptions =
- drawlineoptions (withpen pencircle scaled 1pt withcolor .5white) ;
- drawpointoptions (withpen pencircle scaled 4pt withcolor black) ;
- drawcontroloptions(withpen pencircle scaled 2.5pt withcolor black) ;
- drawlabeloptions () ;
- draworiginoptions (withpen pencircle scaled 1pt withcolor .5white) ;
- drawboundoptions (dashed evenly _ori_opt_) ;
- drawpathoptions (withpen pencircle scaled 5pt withcolor .8white) ;
+ drawlineoptions (withpen pencircle scaled 1pt withcolor .5white) ;
+ drawpointoptions (withpen pencircle scaled 4pt withcolor black) ;
+ drawcontroloptions(withpen pencircle scaled 2.5pt withcolor black) ;
+ drawlabeloptions () ;
+ draworiginoptions (withpen pencircle scaled 1pt withcolor .5white) ;
+ drawboundoptions (dashed evenly _ori_opt_) ;
+ drawpathoptions (withpen pencircle scaled 5pt withcolor .8white) ;
enddef ;
resetdrawoptions ;
@@ -1114,95 +1070,96 @@ resetdrawoptions ;
%D Path.
def drawpath expr p =
- normaldraw p _pth_opt_
+ normaldraw p _pth_opt_
enddef ;
%D Arrow.
vardef drawarrowpath expr p =
- save autoarrows ; boolean autoarrows ; autoarrows := true ;
- drawarrow p _pth_opt_
+ save autoarrows ; boolean autoarrows ; autoarrows := true ;
+ drawarrow p _pth_opt_
enddef ;
-% def drawarrowpath expr p =
-% begingroup ;
-% save autoarrows ; boolean autoarrows ; autoarrows := true ;
-% save arrowpath ; path arrowpath ; arrowpath := p ;
-% _drawarrowpath_
-% enddef ;
+%def drawarrowpath expr p =
+% begingroup ;
+% save autoarrows ; boolean autoarrows ; autoarrows := true ;
+% save arrowpath ; path arrowpath ; arrowpath := p ;
+% _drawarrowpath_
+%enddef ;
%
-% def _drawarrowpath_ text t =
-% drawarrow arrowpath _pth_opt_ t ;
-% endgroup ;
-% enddef ;
+%def _drawarrowpath_ text t =
+% drawarrow arrowpath _pth_opt_ t ;
+% endgroup ;
+%enddef ;
def midarrowhead expr p =
- arrowhead p cutafter (point length(p cutafter point .5 along p)+ahlength on p)
+ arrowhead p cutafter
+ (point length(p cutafter point .5 along p)+ahlength on p)
enddef ;
vardef arrowheadonpath (expr p, s) =
- save autoarrows ; boolean autoarrows ;
- autoarrows := true ;
- set_ahlength(scaled ahfactor) ; % added
- arrowhead p if s<1 : cutafter (point (s*arclength(p)+.5ahlength) on p) fi
+ save autoarrows ; boolean autoarrows ; autoarrows := true ;
+ set_ahlength(scaled ahfactor) ; % added
+ arrowhead p if s<1 : cutafter (point (s*arclength(p)+.5ahlength) on p) fi
enddef ;
%D Points.
def drawpoint expr c =
- if string c :
- string _c_ ;
- _c_ := "(" & c & ")" ;
- dotlabel.urt(_c_, scantokens _c_) ;
- drawdot scantokens _c_
- else :
- dotlabel.urt("(" & decimal xpart c & "," & decimal ypart c & ")", c) ;
- drawdot c
- fi _pnt_opt_
+ if string c :
+ string _c_ ; _c_ := "(" & c & ")" ;
+ dotlabel.urt(_c_, scantokens _c_) ;
+ drawdot scantokens _c_
+ else :
+ dotlabel.urt("(" & decimal xpart c & "," & decimal ypart c & ")", c) ;
+ drawdot c
+ fi _pnt_opt_
enddef ;
%D PathPoints.
-def drawpoints expr c = path _c_ ; _c_ := c ; mfun_draw_points enddef ;
-def drawcontrolpoints expr c = path _c_ ; _c_ := c ; mfun_draw_controlpoints enddef ;
-def drawcontrollines expr c = path _c_ ; _c_ := c ; mfun_draw_controllines enddef ;
-def drawpointlabels expr c = path _c_ ; _c_ := c ; mfun_draw_pointlabels enddef ;
+def drawpoints expr c = path _c_ ; _c_ := c ; do_drawpoints enddef ;
+def drawcontrolpoints expr c = path _c_ ; _c_ := c ; do_drawcontrolpoints enddef ;
+def drawcontrollines expr c = path _c_ ; _c_ := c ; do_drawcontrollines enddef ;
+def drawpointlabels expr c = path _c_ ; _c_ := c ; do_drawpointlabels enddef ;
-def mfun_draw_points text t =
- for _i_=0 upto length(_c_) :
- normaldraw point _i_ of _c_ _pnt_opt_ t ;
- endfor ;
+def do_drawpoints text t =
+ for _i_=0 upto length(_c_) :
+ normaldraw point _i_ of _c_ _pnt_opt_ t ;
+ endfor ;
enddef;
-def mfun_draw_controlpoints text t =
- for _i_=0 upto length(_c_) :
- normaldraw precontrol _i_ of _c_ _ctr_opt_ t ;
- normaldraw postcontrol _i_ of _c_ _ctr_opt_ t ;
- endfor ;
+def do_drawcontrolpoints text t =
+ for _i_=0 upto length(_c_) :
+ normaldraw precontrol _i_ of _c_ _ctr_opt_ t ;
+ normaldraw postcontrol _i_ of _c_ _ctr_opt_ t ;
+ endfor ;
enddef;
-def mfun_draw_controllines text t =
- for _i_=0 upto length(_c_) :
- normaldraw point _i_ of _c_ -- precontrol _i_ of _c_ _lin_opt_ t ;
- normaldraw point _i_ of _c_ -- postcontrol _i_ of _c_ _lin_opt_ t ;
- endfor ;
+def do_drawcontrollines text t =
+ for _i_=0 upto length(_c_) :
+ normaldraw point _i_ of _c_ -- precontrol _i_ of _c_ _lin_opt_ t ;
+ normaldraw point _i_ of _c_ -- postcontrol _i_ of _c_ _lin_opt_ t ;
+ endfor ;
enddef;
boolean swappointlabels ; swappointlabels := false ;
-def mfun_draw_pointlabels text t =
- for _i_=0 upto length(_c_) :
- pair _u_ ; _u_ := unitvector(direction _i_ of _c_) rotated if swappointlabels : - fi 90 ;
- pair _p_ ; _p_ := (point _i_ of _c_) ;
- _u_ := 12 * defaultscale * _u_ ;
- normaldraw thelabel ( decimal _i_, _p_ shifted if cycle _c_ and (_i_=0) : - fi _u_ ) _lab_opt_ t ;
- endfor ;
+def do_drawpointlabels text t =
+ for _i_=0 upto length(_c_) :
+ pair _u_ ; _u_ := unitvector(direction _i_ of _c_)
+ rotated if swappointlabels : - fi 90 ;
+ pair _p_ ; _p_ := (point _i_ of _c_) ;
+ _u_ := 12 * defaultscale * _u_ ;
+ normaldraw thelabel ( decimal _i_,
+ _p_ shifted if cycle _c_ and (_i_=0) : - fi _u_ ) _lab_opt_ t ;
+ endfor ;
enddef;
%D Bounding box.
def drawboundingbox expr p =
- normaldraw boundingbox p _bnd_opt_
+ normaldraw boundingbox p _bnd_opt_
enddef ;
%D Origin.
@@ -1210,8 +1167,10 @@ enddef ;
numeric originlength ; originlength := .5cm ;
def draworigin text t =
- normaldraw (origin shifted (0, originlength) -- origin shifted (0,-originlength)) _ori_opt_ t ;
- normaldraw (origin shifted ( originlength,0) -- origin shifted (-originlength,0)) _ori_opt_ t ;
+ normaldraw (origin shifted (0, originlength) --
+ origin shifted (0,-originlength)) _ori_opt_ t ;
+ normaldraw (origin shifted ( originlength,0) --
+ origin shifted (-originlength,0)) _ori_opt_ t ;
enddef;
%D Axis.
@@ -1219,108 +1178,101 @@ enddef;
numeric tickstep ; tickstep := 5mm ;
numeric ticklength ; ticklength := 2mm ;
-def drawxticks expr c = path _c_ ; _c_ := c ; mfun_draw_xticks enddef ;
-def drawyticks expr c = path _c_ ; _c_ := c ; mfun_draw_yticks enddef ;
-def drawticks expr c = path _c_ ; _c_ := c ; mfun_draw_ticks enddef ;
+def drawxticks expr c = path _c_ ; _c_ := c ; do_drawxticks enddef ;
+def drawyticks expr c = path _c_ ; _c_ := c ; do_drawyticks enddef ;
+def drawticks expr c = path _c_ ; _c_ := c ; do_drawticks enddef ;
% Adding eps prevents disappearance due to rounding errors.
-def mfun_draw_xticks text t =
- for i=0 step -tickstep until xpart llcorner _c_ - eps :
- if (i<=xpart lrcorner _c_) :
- normaldraw (i,-ticklength)--(i,ticklength) _ori_opt_ t ;
- fi ;
- endfor ;
- for i=0 step tickstep until xpart lrcorner _c_ + eps :
- if (i>=xpart llcorner _c_) :
- normaldraw (i,-ticklength)--(i,ticklength) _ori_opt_ t ;
- fi ;
- endfor ;
- normaldraw (llcorner _c_ -- ulcorner _c_) shifted (-xpart llcorner _c_,0) _ori_opt_ t ;
+def do_drawxticks text t =
+ for i=0 step -tickstep until xpart llcorner _c_ - eps :
+ if (i<=xpart lrcorner _c_) :
+ normaldraw (i,-ticklength)--(i,ticklength) _ori_opt_ t ;
+ fi ;
+ endfor ;
+ for i=0 step tickstep until xpart lrcorner _c_ + eps :
+ if (i>=xpart llcorner _c_) :
+ normaldraw (i,-ticklength)--(i,ticklength) _ori_opt_ t ;
+ fi ;
+ endfor ;
+ normaldraw (llcorner _c_ -- ulcorner _c_)
+ shifted (-xpart llcorner _c_,0) _ori_opt_ t ;
enddef ;
-def mfun_draw_yticks text t =
- for i=0 step -tickstep until ypart llcorner _c_ - eps :
- if (i<=ypart ulcorner _c_) :
- normaldraw (-ticklength,i)--(ticklength,i) _ori_opt_ t ;
- fi ;
- endfor ;
- for i=0 step tickstep until ypart ulcorner _c_ + eps :
- if (i>=ypart llcorner _c_) :
- normaldraw (-ticklength,i)--(ticklength,i) _ori_opt_ t ;
- fi ;
- endfor ;
- normaldraw (llcorner _c_ -- lrcorner _c_) shifted (0,-ypart llcorner _c_) _ori_opt_ t ;
+def do_drawyticks text t =
+ for i=0 step -tickstep until ypart llcorner _c_ - eps :
+ if (i<=ypart ulcorner _c_) :
+ normaldraw (-ticklength,i)--(ticklength,i) _ori_opt_ t ;
+ fi ;
+ endfor ;
+ for i=0 step tickstep until ypart ulcorner _c_ + eps :
+ if (i>=ypart llcorner _c_) :
+ normaldraw (-ticklength,i)--(ticklength,i) _ori_opt_ t ;
+ fi ;
+ endfor ;
+ normaldraw (llcorner _c_ -- lrcorner _c_)
+ shifted (0,-ypart llcorner _c_) _ori_opt_ t ;
enddef ;
-def mfun_draw_ticks text t =
- drawxticks _c_ t ;
- drawyticks _c_ t ;
+def do_drawticks text t =
+ drawxticks _c_ t ;
+ drawyticks _c_ t ;
enddef ;
%D All of it except axis.
def drawwholepath expr p =
- draworigin ;
- drawpath p ;
- drawcontrollines p ;
- drawcontrolpoints p ;
- drawpoints p ;
- drawboundingbox p ;
- drawpointlabels p ;
+ draworigin ;
+ drawpath p ;
+ drawcontrollines p ;
+ drawcontrolpoints p ;
+ drawpoints p ;
+ drawboundingbox p ;
+ drawpointlabels p ;
enddef ;
%D Tracing.
def visualizeddraw expr c =
- if picture c : normaldraw c else : path _c_ ; _c_ := c ; do_visualizeddraw fi
+ if picture c : normaldraw c else : path _c_ ; _c_ := c ; do_visualizeddraw fi
enddef ;
def visualizedfill expr c =
- if picture c : normalfill c else : path _c_ ; _c_ := c ; do_visualizedfill fi
+ if picture c : normalfill c else : path _c_ ; _c_ := c ; do_visualizedfill fi
enddef ;
def do_visualizeddraw text t =
- draworigin ;
- drawpath _c_ t ;
- drawcontrollines _c_ ;
- drawcontrolpoints _c_ ;
- drawpoints _c_ ;
- drawboundingbox _c_ ;
- drawpointlabels _c_ ;
+ draworigin ;
+ drawpath _c_ t ;
+ drawcontrollines _c_ ;
+ drawcontrolpoints _c_ ;
+ drawpoints _c_ ;
+ drawboundingbox _c_ ;
+ drawpointlabels _c_ ;
enddef ;
def do_visualizedfill text t =
- if cycle _c_ : normalfill _c_ t fi ;
- draworigin ;
- drawcontrollines _c_ ;
- drawcontrolpoints _c_ ;
- drawpoints _c_ ;
- drawboundingbox _c_ ;
- drawpointlabels _c_ ;
+ if cycle _c_ : normalfill _c_ t fi ;
+ draworigin ;
+ drawcontrollines _c_ ;
+ drawcontrolpoints _c_ ;
+ drawpoints _c_ ;
+ drawboundingbox _c_ ;
+ drawpointlabels _c_ ;
enddef ;
def visualizepaths =
- let fill = visualizedfill ;
- let draw = visualizeddraw ;
+ let fill = visualizedfill ;
+ let draw = visualizeddraw ;
enddef ;
def naturalizepaths =
- let fill = normalfill ;
- let draw = normaldraw ;
+ let fill = normalfill ;
+ let draw = normaldraw ;
enddef ;
extra_endfig := extra_endfig & " naturalizepaths ; " ;
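+%D
+%D For instance, to inspect a path with its points, control
+%D points and bounding box, and then switch back:
+%D
+%D \starttypen
+%D visualizepaths ;
+%D draw fullcircle scaled 3cm ;
+%D naturalizepaths ;
+%D \stoptypen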
-%D Nice tracer:
-
-def drawboundary primary p =
- draw p dashed evenly withcolor white ;
- draw p dashed oddly withcolor black ;
- draw (- llcorner p) withpen pencircle scaled 3 withcolor white ;
- draw (- llcorner p) withpen pencircle scaled 1.5 withcolor black ;
-enddef ;
-
%D Also handy:
extra_beginfig := extra_beginfig & " truecorners := 0 ; " ; % restores
@@ -1335,57 +1287,46 @@ boolean autoarrows ; autoarrows := false ;
numeric ahfactor ; ahfactor := 2.5 ;
def set_ahlength (text t) =
- % ahlength := (ahfactor*pen_size(_op_ t)) ; % _op_ added
- % problem: _op_ can contain color so a no-go, we could apply the transform
- % but i need to figure out the best way (fakepicture and take components).
- ahlength := (ahfactor*pen_size(t)) ;
+% ahlength := (ahfactor*pen_size(_op_ t)) ; % _op_ added
+% problem: _op_ can contain color so a no-go, we could apply the transform
+% but i need to figure out the best way (fakepicture and take components).
+ ahlength := (ahfactor*pen_size(t)) ;
enddef ;
vardef pen_size (text t) =
- save p ; picture p ; p := nullpicture ;
- addto p doublepath (top origin -- bot origin) t ;
- (ypart urcorner p - ypart lrcorner p)
+ save p ; picture p ; p := nullpicture ;
+ addto p doublepath (top origin -- bot origin) t ;
+ (ypart urcorner p - ypart lrcorner p)
enddef ;
%D The next two macros are adapted versions of plain
%D \METAPOST\ definitions.
-vardef arrowpath expr p = % patch by Peter Rolf: supports squared pen and shifting (hh: maybe just use center of head as first)
- (p cutafter makepath(pencircle scaled 2(ahlength*cosd(.5ahangle)) shifted point length p of p))
-enddef;
-
-% def _finarr text t =
-% if autoarrows : set_ahlength (t) fi ;
-% draw arrowpath _apth t ; % arrowpath added
-% filldraw arrowhead _apth t ;
-% enddef;
-
def _finarr text t =
- if autoarrows : set_ahlength (t) fi ;
- draw arrowpath _apth t ; % arrowpath added
- fill arrowhead _apth t ;
- draw arrowhead _apth t ;
+ if autoarrows : set_ahlength (t) fi ;
+ draw _apth t ;
+ filldraw arrowhead _apth t ;
enddef;
-def _finarr text t =
- if autoarrows : set_ahlength (t) fi ;
- draw arrowpath _apth t ; % arrowpath added
- fill arrowhead _apth t ;
- draw arrowhead _apth t undashed ;
-enddef;
+def _findarr text t =
+ if autoarrows : set_ahlength (t) fi ;
+ draw _apth t ;
+ fill arrowhead _apth withpen currentpen t ;
+ fill arrowhead reverse _apth withpen currentpen t ;
+enddef ;
%D Handy too ......
vardef pointarrow (expr pat, loc, len, off) =
- save l, r, s, t ; path l, r ; numeric s ; pair t ;
- t := if pair loc : loc else : point loc along pat fi ;
- s := len/2 - off ; if s<=0 : s := 0 elseif s>len : s := len fi ;
- r := pat cutbefore t ;
- r := (r cutafter point (arctime s of r) of r) ;
- s := len/2 + off ; if s<=0 : s := 0 elseif s>len : s := len fi ;
- l := reverse (pat cutafter t) ;
- l := (reverse (l cutafter point (arctime s of l) of l)) ;
- (l..r)
+ save l, r, s, t ; path l, r ; numeric s ; pair t ;
+ t := if pair loc : loc else : point loc along pat fi ;
+ s := len/2 - off ; if s<=0 : s := 0 elseif s>len : s := len fi ;
+ r := pat cutbefore t ;
+ r := (r cutafter point (arctime s of r) of r) ;
+ s := len/2 + off ; if s<=0 : s := 0 elseif s>len : s := len fi ;
+ l := reverse (pat cutafter t) ;
+ l := (reverse (l cutafter point (arctime s of l) of l)) ;
+ (l..r)
enddef ;
def rightarrow (expr pat,tim,len) = pointarrow(pat,tim,len,-len) enddef ;
@@ -1395,19 +1336,23 @@ def centerarrow (expr pat,tim,len) = pointarrow(pat,tim,len, 0) enddef ;
%D The \type {along} and \type {on} operators can be used
%D as follows:
%D
-%D \starttyping
+%D \starttypen
%D drawdot point .5 along somepath ;
%D drawdot point 3cm on somepath ;
-%D \stoptyping
+%D \stoptypen
%D
%D The number denotes a percentage (fraction).
primarydef pct along pat = % also negative
- (arctime (pct * (arclength pat)) of pat) of pat
+ (arctime (pct * (arclength pat)) of pat) of pat
enddef ;
-primarydef len on pat = % no outer ( ) .. somehow fails
- (arctime if len>0 : len else : (arclength(pat)+len) fi of pat) of pat
+% primarydef len on pat =
+% (arctime len of pat) of pat
+% enddef ;
+
+primarydef len on pat =
+ (arctime if len>0 : len else : (arclength(pat)+len) fi of pat) of pat
enddef ;
% this cuts off a piece from both ends
@@ -1420,55 +1365,69 @@ enddef ;
% enddef ;
tertiarydef pat cutends len =
- begingroup
- save tap ; path tap ;
- tap := pat cutbefore (point (xpart paired(len)) on pat) ;
- (tap cutafter (point -(ypart paired(len)) on tap))
- endgroup
+ begingroup ; save tap ; path tap ;
+ tap := pat cutbefore (point (xpart paired(len)) on pat) ;
+ (tap cutafter (point -(ypart paired(len)) on tap))
+ endgroup
enddef ;
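+%D
+%D For instance (a numeric cuts the same length from both ends,
+%D a pair cuts different lengths):
+%D
+%D \starttypen
+%D path p ; p := (0,0) -- (5cm,0) ;
+%D draw p cutends 1cm ;
+%D draw (p cutends (5mm,2cm)) shifted (0,1cm) ;
+%D \stoptypen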
%D To be documented.
path freesquare ;
-freesquare := (
- (-1,0) -- (-1,-1) -- (0,-1) -- (+1,-1) --
- (+1,0) -- (+1,+1) -- (0,+1) -- (-1,+1) -- cycle
-) scaled .5 ;
+freesquare := ((-1,0)--(-1,-1)--(0,-1)--(+1,-1)--
+ (+1,0)--(+1,+1)--(0,+1)--(-1,+1)--cycle) scaled .5 ;
numeric freelabeloffset ; freelabeloffset := 3pt ;
numeric freedotlabelsize ; freedotlabelsize := 3pt ;
vardef thefreelabel (expr str, loc, ori) =
- save s, p, q, l ; picture s ; path p, q ; pair l ;
- interim labeloffset := freelabeloffset ;
- s := if string str : thelabel(str,loc) else : str shifted -center str shifted loc fi ;
- setbounds s to boundingbox s enlarged freelabeloffset ;
- p := fullcircle scaled (2*length(loc-ori)) shifted ori ;
- q := freesquare xyscaled (urcorner s - llcorner s) ;
- l := point xpart (p intersectiontimes (ori--loc shifted (loc-ori))) of q ;
- setbounds s to boundingbox s enlarged -freelabeloffset ; % new
- % draw boundingbox s shifted -l withpen pencircle scaled .5pt withcolor red ;
- (s shifted -l)
+ save s, p, q, l ; picture s ; path p, q ; pair l ;
+ interim labeloffset := freelabeloffset ;
+ s := if string str : thelabel(str,loc) else : str shifted -center str shifted loc fi ;
+ setbounds s to boundingbox s enlarged freelabeloffset ;
+ p := fullcircle scaled (2*length(loc-ori)) shifted ori ;
+ q := freesquare xyscaled (urcorner s - llcorner s) ;
+% l := point (xpart (p intersectiontimes (ori--loc))) of q ;
+ l := point xpart (p intersectiontimes
+ (ori--((1+eps)*arclength(ori--loc)*unitvector(loc-ori)))) of q ;
+ setbounds s to boundingbox s enlarged -freelabeloffset ; % new
+ %draw boundingbox s shifted -l withpen pencircle scaled .5pt withcolor red ;
+ (s shifted -l)
+enddef ;
+
+% better?
+
+vardef thefreelabel (expr str, loc, ori) =
+ save s, p, q, l ; picture s ; path p, q ; pair l ;
+ interim labeloffset := freelabeloffset ;
+ s := if string str : thelabel(str,loc) else : str shifted -center str shifted loc fi ;
+ setbounds s to boundingbox s enlarged freelabeloffset ;
+ p := fullcircle scaled (2*length(loc-ori)) shifted ori ;
+ q := freesquare xyscaled (urcorner s - llcorner s) ;
+ l := point xpart (p intersectiontimes (ori--loc shifted (loc-ori))) of q ;
+ setbounds s to boundingbox s enlarged -freelabeloffset ; % new
+ %draw boundingbox s shifted -l withpen pencircle scaled .5pt withcolor red ;
+ (s shifted -l)
enddef ;
vardef freelabel (expr str, loc, ori) =
- draw thefreelabel(str,loc,ori) ;
+ draw thefreelabel(str,loc,ori) ;
enddef ;
vardef freedotlabel (expr str, loc, ori) =
- interim linecap := rounded ;
- draw loc withpen pencircle scaled freedotlabelsize ;
- draw thefreelabel(str,loc,ori) ;
+ interim linecap:=rounded ;
+ draw loc withpen pencircle scaled freedotlabelsize ;
+ draw thefreelabel(str,loc,ori) ;
enddef ;
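+%D
+%D A small sketch of the intended use: the dot is placed at the
+%D second argument and the label is pushed away from the third:
+%D
+%D \starttypen
+%D path p ; p := fullcircle scaled 3cm ;
+%D draw p ;
+%D freedotlabel("top"  , point 2 of p, center p) ;
+%D freedotlabel("right", point 0 of p, center p) ;
+%D \stoptypen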
-%D \starttyping
+%D \starttypen
%D drawarrow anglebetween(line_a,line_b,somelabel) ;
-%D \stoptyping
+%D \stoptypen
-newinternal angleoffset ; angleoffset := 0pt ;
-newinternal anglelength ; anglelength := 20pt ;
-newinternal anglemethod ; anglemethod := 1 ;
+% angleoffset ; angleoffset := 0pt ;
+numeric anglelength ; anglelength := 20pt ;
+numeric anglemethod ; anglemethod := 1 ;
% vardef anglebetween (expr a, b, str) = % path path string
% save pointa, pointb, common, middle, offset ;
@@ -1507,66 +1466,63 @@ newinternal anglemethod ; anglemethod := 1 ;
% enddef ;
vardef anglebetween (expr a, b, str) = % path path string
- save pointa, pointb, common, middle, offset ;
- pair pointa, pointb, common, middle, offset ;
- save curve ; path curve ;
- save where ; numeric where ;
- if round point 0 of a = round point 0 of b :
- common := point 0 of a ;
- else :
- common := a intersectionpoint b ;
- fi ;
- pointa := point anglelength on a ;
- pointb := point anglelength on b ;
- where := turningnumber (common--pointa--pointb--cycle) ;
- middle := (reverse(common--pointa) rotatedaround (pointa,-where*90))
- intersection_point
- (reverse(common--pointb) rotatedaround (pointb, where*90)) ;
- if not intersection_found :
- middle := point .5 along
- ((reverse(common--pointa) rotatedaround (pointa,-where*90)) --
- ( (common--pointb) rotatedaround (pointb, where*90))) ;
- fi ;
- if anglemethod = 0 :
- curve := pointa{unitvector(middle-pointa)}.. pointb;
- middle := point .5 along curve ;
- curve := common ;
- elseif anglemethod = 1 :
- curve := pointa{unitvector(middle-pointa)}.. pointb;
- middle := point .5 along curve ;
- elseif anglemethod = 2 :
- middle := common rotatedaround(.5[pointa,pointb],180) ;
- curve := pointa--middle--pointb ;
- elseif anglemethod = 3 :
- curve := pointa--middle--pointb ;
- elseif anglemethod = 4 :
- curve := pointa..controls middle..pointb ;
- middle := point .5 along curve ;
- fi ;
- draw thefreelabel(str, middle, common) ; % withcolor black ;
- curve
+ save pointa, pointb, common, middle, offset ;
+ pair pointa, pointb, common, middle, offset ;
+ save curve ; path curve ;
+ save where ; numeric where ;
+ if round point 0 of a = round point 0 of b :
+ common := point 0 of a ;
+ else :
+ common := a intersectionpoint b ;
+ fi ;
+ pointa := point anglelength on a ;
+ pointb := point anglelength on b ;
+ where := turningnumber (common--pointa--pointb--cycle) ;
+ middle := (reverse(common--pointa) rotatedaround (pointa,-where*90))
+ intersection_point
+ (reverse(common--pointb) rotatedaround (pointb, where*90)) ;
+ if not intersection_found :
+ middle := point .5 along
+ ((reverse(common--pointa) rotatedaround (pointa,-where*90)) --
+ ( (common--pointb) rotatedaround (pointb, where*90))) ;
+ fi ;
+ if anglemethod = 0 :
+ curve := pointa{unitvector(middle-pointa)}.. pointb;
+ middle := point .5 along curve ;
+ curve := common ;
+ elseif anglemethod = 1 :
+ curve := pointa{unitvector(middle-pointa)}.. pointb;
+ middle := point .5 along curve ;
+ elseif anglemethod = 2 :
+ middle := common rotatedaround(.5[pointa,pointb],180) ;
+ curve := pointa--middle--pointb ;
+ elseif anglemethod = 3 :
+ curve := pointa--middle--pointb ;
+ elseif anglemethod = 4 :
+ curve := pointa..controls middle..pointb ;
+ middle := point .5 along curve ;
+ fi ;
+ draw thefreelabel(str, middle, common) ; % withcolor black ;
+ curve
enddef ;
% Stack
-picture mfun_current_picture_stack[] ;
-numeric mfun_current_picture_depth ;
-
-mfun_current_picture_depth := 0 ;
+picture currentpicturestack[] ;
+numeric currentpicturedepth ; currentpicturedepth := 0 ;
def pushcurrentpicture =
- mfun_current_picture_depth := mfun_current_picture_depth + 1 ;
- mfun_current_picture_stack[mfun_current_picture_depth] := currentpicture ;
- currentpicture := nullpicture ;
+ currentpicturedepth := currentpicturedepth + 1 ;
+ currentpicturestack[currentpicturedepth] := currentpicture ;
+ currentpicture := nullpicture ;
enddef ;
def popcurrentpicture text t = % optional text
- if mfun_current_picture_depth > 0 :
- addto mfun_current_picture_stack[mfun_current_picture_depth] also currentpicture t ;
- currentpicture := mfun_current_picture_stack[mfun_current_picture_depth] ;
- mfun_current_picture_stack[mfun_current_picture_depth] := nullpicture ;
- mfun_current_picture_depth := mfun_current_picture_depth - 1 ;
- fi ;
+ if currentpicturedepth > 0 :
+ addto currentpicturestack[currentpicturedepth] also currentpicture t ;
+ currentpicture := currentpicturestack[currentpicturedepth] ;
+ currentpicturedepth := currentpicturedepth - 1 ;
+ fi ;
enddef ;
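% a sketch of the intended use : whatever is drawn between the push and the
% pop is added to the saved picture, with the optional text applied to it :
%
% pushcurrentpicture ;
%   fill fullcircle scaled 1cm withcolor red ;
% popcurrentpicture shifted (2cm,0) ;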
%D colorcircle(size, red, green, blue) ;
@@ -1647,76 +1603,71 @@ enddef ;
% popcurrentpicture ;
% enddef ;
-vardef colorcircle (expr size, red, green, blue) = % might move
- save r, g, b, c, m, y, w ; save radius ;
- path r, g, b, c, m, y, w ; numeric radius ;
+vardef colorcircle (expr size, red, green, blue) =
+ save r, g, b, c, m, y, w ; save radius ;
+ path r, g, b, c, m, y, w ; numeric radius ;
- radius := 5cm ; pickup pencircle scaled (radius/25) ;
+ radius := 5cm ; pickup pencircle scaled (radius/25) ;
- transform t ; t := identity rotatedaround(origin,120) ;
+ transform t ; t := identity rotatedaround(origin,120) ;
- r := fullcircle rotated 90 scaled radius shifted (0,radius/4) rotatedaround(origin,135) ;
+ r := fullcircle rotated 90 scaled radius
+ shifted (0,radius/4) rotatedaround(origin,135) ;
- b := r transformed t ; g := b transformed t ;
+ b := r transformed t ; g := b transformed t ;
- c := buildcycle(subpath(1,7) of g,subpath(1,7) of b) ;
- y := c transformed t ; m := y transformed t ;
+ c := buildcycle(subpath(1,7) of g,subpath(1,7) of b) ;
+ y := c transformed t ; m := y transformed t ;
- w := buildcycle(subpath(3,5) of r, subpath(3,5) of g,subpath(3,5) of b) ;
+ w := buildcycle(subpath(3,5) of r, subpath(3,5) of g,subpath(3,5) of b) ;
- pushcurrentpicture ;
+ pushcurrentpicture ;
- fill r withcolor red ;
- fill g withcolor green ;
- fill b withcolor blue ;
- fill c withcolor white - red ;
- fill m withcolor white - green ;
- fill y withcolor white - blue ;
- fill w withcolor white ;
+ fill r withcolor red ;
+ fill g withcolor green ;
+ fill b withcolor blue ;
+ fill c withcolor white-red ;
+ fill m withcolor white-green ;
+ fill y withcolor white-blue ;
+ fill w withcolor white ;
- for i = r,g,b,c,m,y : draw i withcolor .5white ; endfor ;
+ for i = r,g,b,c,m,y : draw i withcolor .5white ; endfor ;
- currentpicture := currentpicture xsized size ;
+ currentpicture := currentpicture xsized size ;
- popcurrentpicture ;
+ popcurrentpicture ;
enddef ;
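% for instance (a sketch, assuming the usual rgb primaries) :
%
% colorcircle(4cm, red, green, blue) ;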
% penpoint (i,2) of somepath -> inner / outer point
vardef penpoint expr pnt of p =
- save n, d ; numeric n, d ;
- (n,d) = if pair pnt : pnt else : (pnt,1) fi ;
- (point n of p shifted ((penoffset direction n of p of currentpen) scaled d))
+ save n, d ; numeric n, d ;
+ (n,d) = if pair pnt : pnt else : (pnt,1) fi ;
+ (point n of p shifted ((penoffset direction n of p of currentpen) scaled d))
enddef ;
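% a sketch : with a fat pen picked up, the second component selects the side
% of the stroke (positive one way, negative the other) :
%
% path p ; p := fullcircle scaled 3cm ;
% pickup pencircle scaled 2mm ; draw p ;
% drawdot penpoint (2, 1) of p withcolor red ;
% drawdot penpoint (2,-1) of p withcolor blue ;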
% nice: currentpicture := inverted currentpicture ;
primarydef p uncolored c =
- if color p :
- c - p
- else :
- image (
- for i within p :
- addto currentpicture
- if stroked i or filled i :
- if filled i :
- contour
- else :
- doublepath
- fi
- pathpart i
- dashed dashpart i withpen penpart i
- else :
- also i
- fi
- withcolor c-(redpart i, greenpart i, bluepart i) ;
- endfor ;
- )
+ if color p :
+ c - p
+ else :
+ image
+ (for i within p :
+ addto currentpicture
+ if stroked i or filled i :
+ if filled i : contour else : doublepath fi pathpart i
+ dashed dashpart i withpen penpart i
+ else :
+ also i
+ fi
+ withcolor c-(redpart i, greenpart i, bluepart i) ;
+ endfor ; )
fi
enddef ;
vardef inverted primary p =
- p uncolored white
+ (p uncolored white)
enddef ;
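% a sketch : uncolored subtracts every color in a picture from a given one,
% so uncoloring with white is the same as inverting :
%
% picture p ; p := image(fill fullcircle scaled 1cm withcolor (.2,.4,.6) ;) ;
% draw p uncolored white ; % the fill comes out as (.8,.6,.4)
% draw (inverted p) shifted (2cm,0) ;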
% primarydef p softened c =
@@ -1741,54 +1692,45 @@ enddef ;
% enddef ;
primarydef p softened c =
- begingroup
- save cc ; color cc ; cc := tripled(c) ;
- if color p :
- (redpart cc * redpart p,greenpart cc * greenpart p, bluepart cc * bluepart p)
- else :
- image (
- for i within p :
- addto currentpicture
- if stroked i or filled i :
- if filled i :
- contour
- else :
- doublepath
- fi
- pathpart i
- dashed dashpart i withpen penpart i
- else :
- also i
- fi
- withcolor (redpart cc * redpart i, greenpart cc * greenpart i, bluepart cc * bluepart i) ;
- endfor ;
- )
- fi
- endgroup
+ begingroup
+ save cc ; color cc ; cc := tripled(c) ;
+ if color p :
+ (redpart cc * redpart p,
+ greenpart cc * greenpart p,
+ bluepart cc * bluepart p)
+ else :
+ image
+ (for i within p :
+ addto currentpicture
+ if stroked i or filled i :
+ if filled i : contour else : doublepath fi pathpart i
+ dashed dashpart i withpen penpart i
+ else :
+ also i
+ fi
+ withcolor (redpart cc * redpart i,
+ greenpart cc * greenpart i,
+ bluepart cc * bluepart i) ;
+ endfor ;)
+ fi
+ endgroup
enddef ;
vardef grayed primary p =
- if color p :
- tripled(.30redpart p+.59greenpart p+.11bluepart p)
- else :
- image (
- for i within p :
- addto currentpicture
- if stroked i or filled i :
- if filled i :
- contour
- else :
- doublepath
- fi
- pathpart i
- dashed dashpart i
- withpen penpart i
- else :
- also i
- fi
- withcolor tripled(.30redpart i+.59greenpart i+.11bluepart i) ;
- endfor ;
- )
+ if color p :
+ tripled(.30redpart p+.59greenpart p+.11bluepart p)
+ else :
+ image
+ (for i within p :
+ addto currentpicture
+ if stroked i or filled i :
+ if filled i : contour else : doublepath fi pathpart i
+ dashed dashpart i withpen penpart i
+ else :
+ also i
+ fi
+ withcolor tripled(.30redpart i+.59greenpart i+.11bluepart i) ;
+ endfor ; )
fi
enddef ;
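% a sketch : grayed maps colors to their luma gray (assuming tripled yields an
% equal rgb triplet), both for plain colors and for whole pictures :
%
% fill fullcircle scaled 1cm withcolor grayed red ; % about (.3,.3,.3)
% currentpicture := grayed currentpicture ;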
@@ -1816,10 +1758,10 @@ def condition primary b = if b : "true" else : "false" fi enddef ;
% undocumented
primarydef p stretched s =
- begingroup
- save pp ; path pp ; pp := p xyscaled s ;
- (pp shifted ((point 0 of p) - (point 0 of pp)))
- endgroup
+ begingroup
+ save pp ; path pp ; pp := p xyscaled s ;
+ (pp shifted ((point 0 of p) - (point 0 of pp)))
+ endgroup
enddef ;
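% a sketch : xyscale a path while keeping its starting point where it was :
%
% path p ; p := (0,0) -- (2cm,1cm) ;
% draw p stretched (2,1.5) ; % twice as wide, 1.5 times as high, same point 0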
% primarydef p enlonged len =
@@ -1891,40 +1833,40 @@ def yshifted expr dy = shifted(0,dy) enddef ;
%
def readfile (expr name) =
- begingroup ; save ok ; boolean ok ;
- if (readfrom (name) <> EOF) :
- ok := false ;
- elseif (readfrom (name) <> EOF) :
- ok := false ;
- else :
- ok := true ;
- fi ;
- if not ok :
- scantokens("input " & name & " ") ;
- fi ;
- closefrom (name) ;
- endgroup ;
+ begingroup ; save ok ; boolean ok ;
+ if (readfrom (name) <> EOF) :
+ ok := false ;
+ elseif (readfrom (name) <> EOF) :
+ ok := false ;
+ else :
+ ok := true ;
+ fi ;
+ if not ok :
+ scantokens("input " & name & " ") ;
+ fi ;
+ closefrom (name) ;
+ endgroup ;
enddef ;
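% a sketch (the filename is just an example) : input a file, but only when it
% can actually be opened and read :
%
% readfile("mylib.mp") ;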
% permits redefinition of end in macro
inner end ;
-% this will be redone (when needed) using scripts and backend handling
+% real fun
let normalwithcolor = withcolor ;
def remapcolors =
- def withcolor primary c = normalwithcolor remappedcolor(c) enddef ;
+ def withcolor primary c = normalwithcolor remappedcolor(c) enddef ;
enddef ;
def normalcolors =
- let withcolor = normalwithcolor ;
+ let withcolor = normalwithcolor ;
enddef ;
def resetcolormap =
- color color_map[][][] ;
- normalcolors ;
+ color color_map[][][] ;
+ normalcolors ;
enddef ;
resetcolormap ;
@@ -1940,15 +1882,15 @@ def g_color primary c = greenpart c enddef ;
def b_color primary c = bluepart c enddef ;
def remapcolor(expr old, new) =
- color_map[redpart old][greenpart old][bluepart old] := new ;
+ color_map[r_color old][g_color old][b_color old] := new ;
enddef ;
def remappedcolor(expr c) =
- if known color_map[redpart c][greenpart c][bluepart c] :
- color_map[redpart c][greenpart c][bluepart c]
- else :
- c
- fi
+ if known color_map[r_color c][g_color c][b_color c] :
+ color_map[r_color c][g_color c][b_color c]
+ else :
+ c
+ fi
enddef ;
% def refill suffix c = do_repath (1) (c) enddef ;
@@ -1988,11 +1930,11 @@ enddef ;
% Thanks to Jens-Uwe Morawski for pointing out that we need
% to treat bounded and clipped components as local pictures.
-def recolor suffix p = p := repathed (0,p) enddef ;
-def refill suffix p = p := repathed (1,p) enddef ;
-def redraw suffix p = p := repathed (2,p) enddef ;
-def retext suffix p = p := repathed (3,p) enddef ;
-def untext suffix p = p := repathed (4,p) enddef ;
+def recolor suffix p = p := repathed (0,p) enddef ;
+def refill suffix p = p := repathed (1,p) enddef ;
+def redraw suffix p = p := repathed (2,p) enddef ;
+def retext suffix p = p := repathed (3,p) enddef ;
+def untext suffix p = p := repathed (4,p) enddef ;
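% a sketch of how these are meant to be used (they change the picture in place) :
%
% picture p ; p := image(draw fullcircle scaled 2cm ; fill fullcircle scaled 1cm withcolor red ;) ;
% redraw p withpen pencircle scaled 1mm ; % applies to the stroked parts
% refill p withcolor .8white ;            % applies to the filled parts
% draw p ;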
% primarydef p recolored t = repathed(0,p) t enddef ;
% primarydef p refilled t = repathed(1,p) t enddef ;
@@ -2055,80 +1997,69 @@ def reprocess suffix p = p := repathed (22,p) enddef ; % no attributes
% also 11 and 12
vardef repathed (expr mode, p) text t =
- begingroup ;
- if mode = 0 :
- save withcolor ;
- remapcolors ;
+ begingroup ;
+ if mode=0 : save withcolor ; remapcolors ; fi ;
+ save _p_, _pp_, _ppp_, _f_, _b_, _t_ ;
+ picture _p_, _pp_, _ppp_ ; color _f_ ; path _b_ ; transform _t_ ;
+ _b_ := boundingbox p ; _p_ := nullpicture ;
+ for i within p :
+ _f_ := (redpart i, greenpart i, bluepart i) ;
+ if bounded i :
+ _pp_ := repathed(mode,i) t ;
+ setbounds _pp_ to pathpart i ;
+ addto _p_ also _pp_ ;
+ elseif clipped i :
+ _pp_ := repathed(mode,i) t ;
+ clip _pp_ to pathpart i ;
+ addto _p_ also _pp_ ;
+ elseif stroked i :
+ if mode=21 :
+ _ppp_ := i ; % indirectness is needed
+ addto _p_ also image(scantokens(t & " pathpart _ppp_")
+ dashed dashpart i withpen penpart i
+ withcolor _f_ ; ) ;
+ elseif mode=22 :
+ _ppp_ := i ; % indirectness is needed
+ addto _p_ also image(scantokens(t & " pathpart _ppp_")) ;
+ else :
+ addto _p_ doublepath pathpart i
+ dashed dashpart i withpen penpart i
+ withcolor _f_ % (redpart i, greenpart i, bluepart i)
+ if mode=2 : t fi ;
+ fi ;
+ elseif filled i :
+ if mode=11 :
+ _ppp_ := i ; % indirectness is needed
+ addto _p_ also image(scantokens(t & " pathpart _ppp_")
+ withcolor _f_ ; ) ;
+ elseif mode=12 :
+ _ppp_ := i ; % indirectness is needed
+ addto _p_ also image(scantokens(t & " pathpart _ppp_")) ;
+ else :
+ addto _p_ contour pathpart i
+ withcolor _f_
+ if (mode=1) and (_f_<>refillbackground) : t fi ;
+ fi ;
+ elseif textual i : % textpart i <> "" :
+ if mode <> 4 :
+ % transform _t_ ;
+ % (xpart _t_, xxpart _t_, xypart _t_) = (xpart i, xxpart i, xypart i) ;
+ % (ypart _t_, yypart _t_, yxpart _t_) = (ypart i, yypart i, yxpart i) ;
+ % addto _p_ also
+ % textpart i infont fontpart i % todo : other font
+ % transformed _t_
+ % withpen penpart i
+ % withcolor _f_
+ % if mode=3 : t fi ;
+ addto _p_ also i if mode=3 : t fi ;
+ fi ;
+ else :
+ addto _p_ also i ;
fi ;
- save _p_, _pp_, _ppp_, _f_, _b_, _t_ ;
- picture _p_, _pp_, _ppp_ ; color _f_ ; path _b_ ; transform _t_ ;
- _b_ := boundingbox p ;
- _p_ := nullpicture ;
- for i within p :
- _f_ := (redpart i, greenpart i, bluepart i) ;
- if bounded i :
- _pp_ := repathed(mode,i) t ;
- setbounds _pp_ to pathpart i ;
- addto _p_ also _pp_ ;
- elseif clipped i :
- _pp_ := repathed(mode,i) t ;
- clip _pp_ to pathpart i ;
- addto _p_ also _pp_ ;
- elseif stroked i :
- if mode=21 :
- _ppp_ := i ; % indirectness is needed
- addto _p_ also image(scantokens(t & " pathpart _ppp_")
- dashed dashpart i withpen penpart i
- withcolor _f_ ; ) ;
- elseif mode=22 :
- _ppp_ := i ; % indirectness is needed
- addto _p_ also image(scantokens(t & " pathpart _ppp_")) ;
- else :
- addto _p_ doublepath pathpart i
- dashed dashpart i withpen penpart i
- withcolor _f_ % (redpart i, greenpart i, bluepart i)
- if mode = 2 :
- t
- fi ;
- fi ;
- elseif filled i :
- if mode=11 :
- _ppp_ := i ; % indirectness is needed
- addto _p_ also image(scantokens(t & " pathpart _ppp_")
- withcolor _f_ ; ) ;
- elseif mode=12 :
- _ppp_ := i ; % indirectness is needed
- addto _p_ also image(scantokens(t & " pathpart _ppp_")) ;
- else :
- addto _p_ contour pathpart i
- withcolor _f_
- if (mode=1) and (_f_<>refillbackground) :
- t
- fi ;
- fi ;
- elseif textual i : % textpart i <> "" :
- if mode <> 4 :
- % transform _t_ ;
- % (xpart _t_, xxpart _t_, xypart _t_) = (xpart i, xxpart i, xypart i) ;
- % (ypart _t_, yypart _t_, yxpart _t_) = (ypart i, yypart i, yxpart i) ;
- % addto _p_ also
- % textpart i infont fontpart i % todo : other font
- % transformed _t_
- % withpen penpart i
- % withcolor _f_
- % if mode=3 : t fi ;
- addto _p_ also i
- if mode=3 :
- t
- fi ;
- fi ;
- else :
- addto _p_ also i ;
- fi ;
- endfor ;
- setbounds _p_ to _b_ ;
- _p_
- endgroup
+ endfor ;
+ setbounds _p_ to _b_ ;
+ _p_
+ endgroup
enddef ;
% After a question of Denis on how to erase a z variable, Jacko
@@ -2156,11 +2087,11 @@ enddef ;
% which i decided to simplify to:
def clearxy text s =
- if false for $ := s : or true endfor :
- forsuffixes $ := s : x$ := whatever ; y$ := whatever ; endfor ;
- else :
- save x, y ;
- fi
+ if false for $ := s : or true endfor :
+ forsuffixes $ := s : x$ := whatever ; y$ := whatever ; endfor ;
+ else :
+ save x, y ;
+ fi
enddef ;
% so now we can say: clearxy ; as well as clearxy 1, 2, 3 ;
@@ -2172,68 +2103,48 @@ enddef ;
% show x0 ; z0 = (30,30) ;
primarydef p smoothed d =
- (p llmoved (-xpart paired(d),0) -- p lrmoved (-xpart paired(d),0) {right} ..
- p lrmoved (0,-ypart paired(d)) -- p urmoved (0,-ypart paired(d)) {up} ..
- p urmoved (-xpart paired(d),0) -- p ulmoved (-xpart paired(d),0) {left} ..
- p ulmoved (0,-ypart paired(d)) -- p llmoved (0,-ypart paired(d)) {down} .. cycle)
+ (p llmoved (-xpart paired(d),0) -- p lrmoved (-xpart paired(d),0) {right} ..
+ p lrmoved (0,-ypart paired(d)) -- p urmoved (0,-ypart paired(d)) {up} ..
+ p urmoved (-xpart paired(d),0) -- p ulmoved (-xpart paired(d),0) {left} ..
+ p ulmoved (0,-ypart paired(d)) -- p llmoved (0,-ypart paired(d)) {down} .. cycle)
enddef ;
primarydef p cornered c =
- ((point 0 of p) shifted (c*(unitvector(point 1 of p - point 0 of p))) --
- for i=1 upto length(p) :
- (point i-1 of p) shifted (c*(unitvector(point i of p - point i-1 of p))) --
- (point i of p) shifted (c*(unitvector(point i-1 of p - point i of p))) ..
- controls point i of p ..
- endfor cycle)
+ ((point 0 of p) shifted (c*(unitvector(point 1 of p - point 0 of p))) --
+ for i=1 upto length(p) :
+ (point i-1 of p) shifted (c*(unitvector(point i of p - point i-1 of p))) --
+ (point i of p) shifted (c*(unitvector(point i-1 of p - point i of p))) ..
+ controls point i of p ..
+ endfor cycle)
enddef ;
% cmyk color support
vardef cmyk(expr c,m,y,k) =
- (1-c-k,1-m-k,1-y-k)
+ (1-c-k,1-m-k,1-y-k)
enddef ;
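% for instance : cmyk(1,0,0,0) gives (0,1,1) i.e. cyan, cmyk(0,0,0,1) gives black :
%
% fill fullcircle scaled 1cm withcolor cmyk(.1,.2,.3,.4) ; % = (.5,.4,.3)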
% handy
-% vardef bbwidth (expr p) = % vardef width_of primary p =
-% if known p :
-% if path p or picture p :
-% xpart (lrcorner p - llcorner p)
-% else :
-% 0
-% fi
-% else :
-% 0
-% fi
-% enddef ;
-
-vardef bbwidth primary p =
- if unknown p :
- 0
- elseif path p or picture p :
- xpart (lrcorner p - llcorner p)
+vardef bbwidth (expr p) = % vardef width_of primary p =
+ if known p :
+ if path p or picture p :
+ xpart (lrcorner p - llcorner p)
+ else :
+ 0
+ fi
else :
0
fi
enddef ;
-% vardef bbheight (expr p) = % vardef height_of primary p =
-% if known p :
-% if path p or picture p :
-% ypart (urcorner p - lrcorner p)
-% else :
-% 0
-% fi
-% else :
-% 0
-% fi
-% enddef ;
-
-vardef bbheight primary p =
- if unknown p :
- 0
- elseif path p or picture p :
- ypart (urcorner p - lrcorner p)
+vardef bbheight (expr p) = % vardef height_of primary p =
+ if known p :
+ if path p or picture p :
+ ypart (urcorner p - lrcorner p)
+ else :
+ 0
+ fi
else :
0
fi
@@ -2242,87 +2153,122 @@ enddef ;
color nocolor ; numeric noline ; % both unknown signals
def dowithpath (expr p, lw, lc, bc) =
- if known p :
- if known bc :
- fill p withcolor bc ;
- fi ;
- if known lw and known lc :
- draw p withpen pencircle scaled lw withcolor lc ;
- elseif known lw :
- draw p withpen pencircle scaled lw ;
- elseif known lc :
- draw p withcolor lc ;
- fi ;
+ if known p :
+ if known bc :
+ fill p withcolor bc ;
+ fi ;
+ if known lw and known lc :
+ draw p withpen pencircle scaled lw withcolor lc ;
+ elseif known lw :
+ draw p withpen pencircle scaled lw ;
+ elseif known lc :
+ draw p withcolor lc ;
fi ;
+ fi ;
enddef ;
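% a sketch : the unknowns nocolor and noline act as "skip this one" signals :
%
% dowithpath (fullcircle scaled 2cm, 2pt, red, lightgray) ; % fill plus outline
% dowithpath (fullcircle scaled 2cm, noline, nocolor, lightgray) ; % fill only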
% result from metafont discussion list (denisr/boguslawj)
-def [[ = [ [ enddef ; def [[[ = [ [ [ enddef ;
-def ]] = ] ] enddef ; def ]]] = ] ] ] enddef ;
+def ]] = ] ] enddef ; def ]]] = ] ] ] enddef ;
+def [[ = [ [ enddef ; def [[[ = [ [ [ enddef ;
-let == = = ;
-
-% added
+% not perfect, but useful since it removes redundant points.
-picture oddly ; % evenly already defined
+% vardef dostraightened(expr sign, p) =
+% if length(p)>2 : % was 1, but straight lines are ok
+% save pp ; path pp ;
+% pp := point 0 of p ;
+% for i=1 upto length(p)-1 :
+% if round(point i of p) <> round(point length(pp) of pp) :
+% pp := pp -- point i of p ;
+% fi ;
+% endfor ;
+% save n, ok ; numeric n ; boolean ok ;
+% n := length(pp) ; ok := false ;
+% if n>2 :
+% for i=0 upto n : % maybe also round here
+% if unitvector(round(point i of pp -
+% point if i=0 : n else : i-1 fi of pp)) <>
+% sign * unitvector(round(point if i=n : 0 else : i+1 fi of pp -
+% point i of pp)) :
+% if ok : -- else : ok := true ; fi point i of pp
+% fi
+% endfor
+% if ok and (cycle p) : -- cycle fi
+% else :
+% pp
+% fi
+% else :
+% p
+% fi
+% enddef ;
-evenly := dashpattern(on 3 off 3) ;
-oddly := dashpattern(off 3 on 3) ;
+% vardef simplified expr p =
+% (reverse dostraightened(+1,dostraightened(+1,reverse p)))
+% enddef ;
-% not perfect, but useful since it removes redundant points.
+% vardef unspiked expr p =
+% (reverse dostraightened(-1,dostraightened(-1,reverse p)))
+% enddef ;
-vardef mfun_straightened(expr sign, p) =
- save _p_, _q_ ; path _p_, _q_ ;
- _p_ := p ;
- forever :
- _q_ := mfun_do_straightened(sign, _p_) ;
- exitif length(_p_) = length(_q_) ;
- _p_ := _q_ ;
+% simplified : remove same points as well as redundant points
+% unspiked : remove same points as well as areas with zero distance
+
+vardef dostraightened(expr sign, p) =
+ save _p_, _q_ ; path _p_, _q_ ;
+ _p_ := p ;
+ forever :
+ _q_ := dodostraightened(sign, _p_) ;
+ exitif length(_p_) = length(_q_) ;
+ _p_ := _q_ ;
+ endfor ;
+ _q_
+enddef ;
+
+vardef dodostraightened(expr sign, p) =
+ if length(p)>2 : % was 1, but straight lines are ok
+ save pp ; path pp ;
+ pp := point 0 of p ;
+ for i=1 upto length(p)-1 :
+ if round(point i of p) <> round(point length(pp) of pp) :
+ pp := pp -- point i of p ;
+ fi ;
endfor ;
- _q_
-enddef ;
-
-vardef mfun_do_straightened(expr sign, p) =
- if length(p)>2 : % was 1, but straight lines are ok
- save pp ; path pp ;
- pp := point 0 of p ;
- for i=1 upto length(p)-1 :
- if round(point i of p) <> round(point length(pp) of pp) :
- pp := pp -- point i of p ;
- fi ;
- endfor ;
- save n, ok ; numeric n ; boolean ok ;
- n := length(pp) ; ok := false ;
- if n>2 :
- for i=0 upto n : % maybe also round here
- if unitvector(round(point i of pp - point if i=0 : n else : i-1 fi of pp)) <>
- sign * unitvector(round(point if i=n : 0 else : i+1 fi of pp - point i of pp)) :
- if ok :
- --
- else :
- ok := true ;
- fi point i of pp
- fi
- endfor
- if ok and (cycle p) :
- -- cycle
- fi
- else :
- pp
+ save n, ok ; numeric n ; boolean ok ;
+ n := length(pp) ; ok := false ;
+ if n>2 :
+ for i=0 upto n : % maybe also round here
+ if unitvector(round(point i of pp -
+ point if i=0 : n else : i-1 fi of pp)) <>
+ sign * unitvector(round(point if i=n : 0 else : i+1 fi of pp -
+ point i of pp)) :
+ if ok : -- else : ok := true ; fi point i of pp
fi
+ endfor
+ if ok and (cycle p) : -- cycle fi
else :
- p
+ pp
fi
+ else :
+ p
+ fi
enddef ;
-vardef simplified expr p = (
- reverse mfun_straightened(+1,mfun_straightened(+1,reverse p))
-) enddef ;
+% vardef simplified expr p =
+% dostraightened(+1,p)
+% enddef ;
+
+% vardef unspiked expr p =
+% dostraightened(-1,p)
+% enddef ;
+
+vardef simplified expr p =
+ (reverse dostraightened(+1,dostraightened(+1,reverse p)))
+enddef ;
-vardef unspiked expr p = (
- reverse mfun_straightened(-1,mfun_straightened(-1,reverse p))
-) enddef ;
+vardef unspiked expr p =
+ (reverse dostraightened(-1,dostraightened(-1,reverse p)))
+enddef ;
% path p ;
% p := (2cm,1cm) -- (2cm,1cm) -- (2cm,1cm) -- (3cm,1cm) --
@@ -2343,197 +2289,213 @@ vardef unspiked expr p = (
path originpath ; originpath := origin -- cycle ;
vardef unitvector primary z =
- if abs z = abs origin : z else : z/abs z fi
+ if abs z = abs origin : z else : z/abs z fi
enddef;
% also new
-% vardef anchored@#(expr p, z) = % maybe use the textext variant
-% p shifted (z + (labxf@#*lrcorner p + labyf@#*ulcorner p + (1-labxf@#-labyf@#)*llcorner p))
-% enddef ;
+vardef anchored@#(expr p, z) =
+ p shifted (z + (labxf@#*lrcorner p + labyf@#*ulcorner p
+ + (1-labxf@#-labyf@#)*llcorner p))
+enddef ;
% epsed(1.2345)
vardef epsed (expr e) =
- e if e>0 : + eps elseif e<0 : - eps fi
+ e if e>0 : + eps elseif e<0 : - eps fi
enddef ;
% handy
def withgray primary g =
- withcolor (g,g,g)
+ withcolor (g,g,g)
enddef ;
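% for instance (same as withcolor (.7,.7,.7)) :
%
% draw fullcircle scaled 1cm withgray .7 ;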
% for metafun
-if unknown darkred : color darkred ; darkred := .625(1,0,0) fi ;
-if unknown darkgreen : color darkgreen ; darkgreen := .625(0,1,0) fi ;
-if unknown darkblue : color darkblue ; darkblue := .625(0,0,1) fi ;
-if unknown darkcyan : color darkcyan ; darkcyan := .625(0,1,1) fi ;
-if unknown darkmagenta : color darkmagenta ; darkmagenta := .625(1,0,1) fi ;
-if unknown darkyellow : color darkyellow ; darkyellow := .625(1,1,0) fi ;
-if unknown darkgray : color darkgray ; darkgray := .625(1,1,1) fi ;
-if unknown lightgray : color lightgray ; lightgray := .875(1,1,1) fi ;
+if unknown darkred : color darkred ; darkred := .625(1,0,0) fi ;
+if unknown darkyellow : color darkyellow ; darkyellow := .625(1,1,0) fi ;
+if unknown darkgray : color darkgray ; darkgray := .625(1,1,1) fi ;
+if unknown lightgray : color lightgray ; lightgray := .875(1,1,1) fi ;
% an improved plain mp macro
vardef center primary p =
- if pair p :
- p
- else :
- .5[llcorner p, urcorner p]
- fi
+ if pair p : p else : .5[llcorner p, urcorner p] fi
enddef;
% new, yet undocumented
vardef rangepath (expr p, d, a) =
- if length p>0 :
- (d*unitvector(direction 0 of p) rotated a) shifted point 0 of p
- -- p --
- (d*unitvector(direction length(p) of p) rotated a) shifted point length(p) of p
- else :
- p
- fi
+ (if length p>0 :
+ (d*unitvector(direction 0 of p) rotated a)
+ shifted point 0 of p
+ -- p --
+ (d*unitvector(direction length(p) of p) rotated a)
+ shifted point length(p) of p
+ else :
+ p
+ fi)
enddef ;
% under construction
-vardef straightpath (expr a, b, method) =
- if (method<1) or (method>6) :
- (a--b)
- elseif method = 1 :
- (a --
- if xpart a > xpart b :
- if ypart a > ypart b :
- (xpart b,ypart a) --
- elseif ypart a < ypart b :
- (xpart a,ypart b) --
- fi
- elseif xpart a < xpart b :
- if ypart a > ypart b :
- (xpart a,ypart b) --
- elseif ypart a < ypart b :
- (xpart b,ypart a) --
- fi
- fi
- b)
- elseif method = 3 :
- (a --
- if xpart a > xpart b :
- (xpart b,ypart a) --
- elseif xpart a < xpart b :
- (xpart a,ypart b) --
- fi
- b)
- elseif method = 5 :
- (a --
- if ypart a > ypart b :
- (xpart b,ypart a) --
- elseif ypart a < ypart b :
- (xpart a,ypart b) --
- fi
- b)
- else :
- (reverse straightpath(b,a,method-1))
+vardef straightpath(expr a, b, method) =
+ if (method<1) or (method>6) :
+ (a--b)
+ elseif method = 1 :
+ (a --
+ if xpart a > xpart b :
+ if ypart a > ypart b :
+ (xpart b,ypart a) --
+ elseif ypart a < ypart b :
+ (xpart a,ypart b) --
+ fi
+ elseif xpart a < xpart b :
+ if ypart a > ypart b :
+ (xpart a,ypart b) --
+ elseif ypart a < ypart b :
+ (xpart b,ypart a) --
+ fi
fi
+ b)
+ elseif method = 3 :
+ (a --
+ if xpart a > xpart b :
+ (xpart b,ypart a) --
+ elseif xpart a < xpart b :
+ (xpart a,ypart b) --
+ fi
+ b)
+ elseif method = 5 :
+ (a --
+ if ypart a > ypart b :
+ (xpart b,ypart a) --
+ elseif ypart a < ypart b :
+ (xpart a,ypart b) --
+ fi
+ b)
+ else :
+ (reverse straightpath(b,a,method-1))
+ fi
enddef ;
% handy for myself
def addbackground text t =
- begingroup ;
- save p, b ; picture p ; path b ;
- b := boundingbox currentpicture ;
- p := currentpicture ; currentpicture := nullpicture ;
- fill b t ;
- setbounds currentpicture to b ;
- addto currentpicture also p ;
- endgroup ;
+ begingroup ; save p, b ; picture p ; path b ;
+ b := boundingbox currentpicture ;
+ p := currentpicture ; currentpicture := nullpicture ;
+ fill b t ; setbounds currentpicture to b ; addto currentpicture also p ;
+ endgroup ;
enddef ;
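% a sketch : fill the current bounding box behind what is already there :
%
% fill fullcircle scaled 2cm withcolor red ;
% addbackground withcolor .85white ;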
% makes a (line) into an infinite one (handy for calculating
% intersection points)
vardef infinite expr p =
- (-infinity*unitvector(direction 0 of p)
+ (-infinity*unitvector(direction 0 of p)
shifted point 0 of p
- -- p --
- +infinity*unitvector(direction length(p) of p)
- shifted point length(p) of p)
+ -- p --
+ +infinity*unitvector(direction length(p) of p)
+ shifted point length(p) of p)
enddef ;
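% a sketch : the two segments below do not cross, but their extensions do :
%
% path p, q ; p := (0,0) -- (1cm,1cm) ; q := (3cm,0) -- (2cm,1cm) ;
% pair c ; c := (infinite p) intersectionpoint (infinite q) ; % (1.5cm,1.5cm)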
% obscure macros: create var from string and replace - and :
-% (needed for process color id's) .. will go away
+% (needed for process color id's)
-string mfun_clean_ascii[] ;
+string _clean_ascii_[] ;
def register_dirty_chars(expr str) =
for i = 0 upto length(str)-1 :
- mfun_clean_ascii[ASCII substring(i,i+1) of str] := "_" ;
+ _clean_ascii_[ASCII substring(i,i+1) of str] := "_" ;
endfor ;
enddef ;
register_dirty_chars("+-*/:;., ") ;
vardef cleanstring (expr s) =
- save ss ; string ss, si ; ss = "" ; save i ;
- for i=0 upto length(s) :
- si := substring(i,i+1) of s ;
- ss := ss & if known mfun_clean_ascii[ASCII si] : mfun_clean_ascii[ASCII si] else : si fi ;
- endfor ;
- ss
+ save ss ; string ss, si ; ss = "" ; save i ;
+ for i=0 upto length(s) :
+ si := substring(i,i+1) of s ;
+ ss := ss & if known _clean_ascii_[ASCII si] : _clean_ascii_[ASCII si] else : si fi ;
+ endfor ;
+ ss
enddef ;
vardef asciistring (expr s) =
- save ss ; string ss, si ; ss = "" ; save i ;
- for i=0 upto length(s) :
- si := substring(i,i+1) of s ;
- if (ASCII si >= ASCII "0") and (ASCII si <= ASCII "9") :
- ss := ss & char(scantokens(si) + ASCII "A") ;
- else :
- ss := ss & si ;
- fi ;
- endfor ;
- ss
+ save ss ; string ss, si ; ss = "" ; save i ;
+ for i=0 upto length(s) :
+ si := substring(i,i+1) of s ;
+ if (ASCII si >= ASCII "0") and (ASCII si <= ASCII "9") :
+ ss := ss & char(scantokens(si) + ASCII "A") ;
+ else :
+ ss := ss & si ;
+ fi ;
+ endfor ;
+ ss
enddef ;
vardef setunstringed (expr s, v) =
- scantokens(cleanstring(s)) := v ;
+ scantokens(cleanstring(s)) := v ;
+enddef ;
+
+vardef setunstringed (expr s, v) =
+ scantokens(cleanstring(s)) := v ;
enddef ;
vardef getunstringed (expr s) =
- scantokens(cleanstring(s))
+ scantokens(cleanstring(s))
enddef ;
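% a sketch (my_value is just an example name) : dirty characters are replaced
% by underscores, so "my-value" ends up as the variable my_value :
%
% setunstringed("my-value", 123) ;
% show getunstringed("my-value") ; % 123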
vardef unstringed (expr s) =
- expandafter known scantokens(cleanstring(s))
+ expandafter known scantokens(cleanstring(s))
+enddef ;
+
+% new
+
+% vardef colorpart(expr i) =
+% (redpart i, greenpart i,bluepart i)
+% enddef ;
+
+vardef colorpart(expr c) =
+ if colormodel c = 3 :
+ graypart c
+ elseif colormodel c = 5 :
+ (redpart c,greenpart c,bluepart c)
+ elseif colormodel c = 7 :
+ (cyanpart c,magentapart c,yellowpart c,blackpart c)
+ fi
enddef ;
% for David Arnold:
% showgrid(-5,10,1cm,-10,10,1cm);
-def showgrid (expr MinX, MaxX, DeltaX, MinY, MaxY, DeltaY) = % will move
- begingroup
- save size ; numeric size ; size := 2pt ;
+def showgrid (expr MinX, MaxX, DeltaX, MinY, MaxY, DeltaY)=
+ begingroup
+ save defaultfont, defaultscale, size ;
+ string defaultfont ; defaultfont := "cmtt10"; % i.e. infofont
+ numeric defaultscale ; defaultscale := 8pt / fontsize defaultfont;
+ numeric size ; size := 2pt ;
for x=MinX upto MaxX :
- for y=MinY upto MaxY :
- draw (x*DeltaX, y*DeltaY) withpen pencircle scaled
- if (x mod 5 = 0) and (y mod 5 = 0) :
- 1.5size withcolor .50white
- else :
- size withcolor .75white
- fi ;
- endfor ;
+ for y=MinY upto MaxY :
+ draw (x*DeltaX, y*DeltaY)
+ withpen pencircle scaled
+ if (x mod 5 = 0) and (y mod 5 = 0) :
+ 1.5size withcolor .50white
+ else :
+ size withcolor .75white
+ fi ;
+ endfor ;
endfor ;
for x=MinX upto MaxX:
- label.bot(textext("\infofont " & decimal x), (x*DeltaX,-size)) ;
+ label.bot(decimal x, (x*DeltaX,-size));
endfor ;
for y=MinY upto MaxY:
- label.lft(textext("\infofont " & decimal y), (-size,y*DeltaY)) ;
+ label.lft(decimal y, (-size,y*DeltaY)) ;
endfor ;
- endgroup
+ endgroup
enddef;
% new, handy for:
@@ -2563,24 +2525,26 @@ enddef;
%
% \useMPgraphic{map}{n=3}
-vardef phantom (text t) = % to be checked
- picture _p_ ;
- _p_ := image(t) ;
- addto _p_ also currentpicture ;
- setbounds currentpicture to boundingbox _p_ ;
+vardef phantom (text t) =
+ picture _p_ ; _p_ := image(t) ; addto _p_ also currentpicture ;
+ setbounds currentpicture to boundingbox _p_ ;
enddef ;
vardef c_phantom (expr b) (text t) =
- if b :
- picture _p_ ;
- _p_ := image(t) ;
- addto _p_ also currentpicture ;
- setbounds currentpicture to boundingbox _p_ ;
- else :
- t ;
- fi ;
+ if b :
+ picture _p_ ; _p_ := image(t) ; addto _p_ also currentpicture ;
+ setbounds currentpicture to boundingbox _p_ ;
+ else :
+ t ;
+ fi ;
enddef ;
+% mark paths (for external progs to split)
+
+% def somepath(expr p)
+% p
+% enddef ;
+
%D Handy:
def break =
@@ -2589,228 +2553,27 @@ enddef ;
%D New too:
-primarydef p xstretched w = (
- p if (bbwidth (p)>0) and (w>0) : xscaled (w/bbwidth (p)) fi
-) enddef ;
-
-primarydef p ystretched h = (
- p if (bbheight(p)>0) and (h>0) : yscaled (h/bbheight(p)) fi
-) enddef ;
-
-primarydef p snapped s =
- hide (
- if path p :
- forever :
- exitif (bbheight(p) <= s) and (bbwidth(p) <= s) ;
- p := p scaled (1/2) ;
- endfor ;
- elseif numeric p :
- forever :
- exitif p <= s ;
- p := p scaled (1/2) ;
- endfor ;
- fi ;
- )
- p
+primarydef p xstretched w =
+ (p if (bbwidth (p)>0) and (w>0) : xscaled (w/bbwidth (p)) fi)
enddef ;
-% vardef somecolor = (1,1,0,0) enddef ;
-
-% fill OverlayBox withcolor (rcomponent somecolor,gcomponent somecolor,bcomponent somecolor) ;
-% fill OverlayBox withcolor (ccomponent somecolor,mcomponent somecolor,ycomponent somecolor,bcomponent somecolor) ;
-
-% This could be standard mplib 2 behaviour:
-
-vardef rcomponent expr p = if rgbcolor p : redpart p elseif cmykcolor p : 1 - cyanpart p else : p fi enddef ;
-vardef gcomponent expr p = if rgbcolor p : greenpart p elseif cmykcolor p : 1 - magentapart p else : p fi enddef ;
-vardef bcomponent expr p = if rgbcolor p : bluepart p elseif cmykcolor p : 1 - yellowpart p else : p fi enddef ;
-vardef ccomponent expr p = if cmykcolor p : cyanpart p elseif rgbcolor p : 1 - redpart p else : p fi enddef ;
-vardef mcomponent expr p = if cmykcolor p : magentapart p elseif rgbcolor p : 1 - greenpart p else : p fi enddef ;
-vardef ycomponent expr p = if cmykcolor p : yellowpart p elseif rgbcolor p : 1 - bluepart p else : p fi enddef ;
-vardef bcomponent expr p = if cmykcolor p : blackpart p elseif rgbcolor p : 0 else : p fi enddef ;
-
-% draw image (...) ... ; % prescripts prepended to first, postscripts appended to last
-% draw decorated (...) ... ; % prescripts prepended to each, postscripts appended to each
-% draw redecorated (...) ... ; % prescripts assigned to each, postscripts assigned to each
-% draw undecorated (...) ... ; % following properties are ignored, existing properties are kept
-%
-% draw decorated (
-% draw fullcircle scaled 20cm withpen pencircle scaled 20mm withcolor red withtransparency (1,.40) ;
-% draw fullcircle scaled 15cm withpen pencircle scaled 15mm withcolor green withtransparency (1,.30) ;
-% draw fullcircle scaled 10cm withpen pencircle scaled 10mm withcolor blue withtransparency (1,.20) ;
-% )
-% withcolor blue
-% withtransparency (1,.125) % selectively applied
-% withpen pencircle scaled 10mm
-% ;
-
-% vardef image (text imagedata) = % already defined
-% save currentpicture ;
-% picture currentpicture ;
-% currentpicture := nullpicture ;
-% imagedata ;
-% currentpicture
-% enddef ;
-
-vardef undecorated (text imagedata) text decoration =
- save currentpicture ;
- picture currentpicture ;
- currentpicture := nullpicture ;
- imagedata ;
- currentpicture
-enddef ;
-
-
-if metapostversion < 1.770 :
-
- vardef decorated (text imagedata) text decoration =
- save mfun_decorated_path, currentpicture ;
- picture mfun_decorated_path, currentpicture ;
- currentpicture := nullpicture ;
- imagedata ;
- mfun_decorated_path := currentpicture ;
- currentpicture := nullpicture ;
- for i within mfun_decorated_path :
- addto currentpicture
- if stroked i :
- doublepath pathpart i
- dashed dashpart i
- withpen penpart i
- withcolor colorpart i
- decoration
- elseif filled i :
- contour pathpart i
- withpen penpart i
- withcolor colorpart i
- decoration
- elseif textual i :
- also i
- withcolor colorpart i
- decoration
- else :
- also i
- fi
- ;
- endfor ;
- currentpicture
- enddef ;
-
-else:
-
- vardef decorated (text imagedata) text decoration =
- save mfun_decorated_path, currentpicture ;
- picture mfun_decorated_path, currentpicture ;
- currentpicture := nullpicture ;
- imagedata ;
- mfun_decorated_path := currentpicture ;
- currentpicture := nullpicture ;
- for i within mfun_decorated_path :
- addto currentpicture
- if stroked i :
- doublepath pathpart i
- dashed dashpart i
- withpen penpart i
- withcolor colorpart i
- withprescript prescriptpart i
- withpostscript postscriptpart i
- decoration
- elseif filled i :
- contour pathpart i
- withpen penpart i
- withcolor colorpart i
- withprescript prescriptpart i
- withpostscript postscriptpart i
- decoration
- elseif textual i :
- also i
- withcolor colorpart i
- withprescript prescriptpart i
- withpostscript postscriptpart i
- decoration
- else :
- also i
- fi
- ;
- endfor ;
- currentpicture
- enddef ;
-
-fi ;
-
-vardef redecorated (text imagedata) text decoration =
- save mfun_decorated_path, currentpicture ;
- picture mfun_decorated_path, currentpicture ;
- currentpicture := nullpicture ;
- imagedata ;
- mfun_decorated_path := currentpicture ;
- currentpicture := nullpicture ;
- for i within mfun_decorated_path :
- addto currentpicture
- if stroked i :
- doublepath pathpart i
- dashed dashpart i
- withpen penpart i
- decoration
- elseif filled i :
- contour pathpart i
- withpen penpart i
- decoration
- elseif textual i :
- also i
- decoration
- else :
- also i
- fi
- ;
- endfor ;
- currentpicture
-enddef ;
-
-% path mfun_bleed_box ;
-
-% primarydef p bleeded d =
-% image (
-% mfun_bleed_box := boundingbox p ;
-% if pair d :
-% draw p xysized (bbwidth(p)+2*xpart d,bbheight(p)+2*ypart d) shifted -d ;
-% else :
-% draw p xysized (bbwidth(p)+2d,bbheight(p)+2d) shifted (-d,-d) ;
-% fi ;
-% setbounds currentpicture to mfun_bleed_box ;
-% )
-% enddef ;
-
-%D New helpers:
-
-def beginglyph(expr unicode, width, height, depth) =
- beginfig(unicode) ; % the number is irrelevant
- charcode := unicode ;
- charwd := width ;
- charht := height ;
- chardp := depth ;
+primarydef p ystretched h =
+ (p if (bbheight(p)>0) and (h>0) : yscaled (h/bbheight(p)) fi)
enddef ;
-def endglyph =
- setbounds currentpicture to (boundingbox unitsquare xscaled charwd yscaled (charht + chardp) shifted (0,-chardp)) ;
- if known charscale :
- currentpicture := currentpicture scaled charscale ;
- fi ;
- endfig ;
+primarydef p snapped s =
+ hide ( if path p :
+ forever :
+ exitif (bbheight(p) <= s) and (bbwidth(p) <= s) ;
+ p := p scaled (1/2) ;
+ endfor ;
+ elseif numeric p :
+ forever :
+ exitif p <= s ;
+ p := p scaled (1/2) ;
+ endfor ;
+ fi ; )
+ p
enddef ;
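% a sketch : force a shape to a given width or height, leaving the other
% dimension alone :
%
% draw (fullcircle scaled 1cm) xstretched 4cm ;
% draw (fullcircle scaled 1cm) ystretched 3cm ;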
-%D Dimensions have never been an issue as traditional MP can't make such large
-%D pictures, but with double mode we need a catch:
-
-newinternal maxdimensions ; maxdimensions := 14000 ;
-
-def mfun_apply_max_dimensions = % not a generic helper, we want to protect this one
- if bbwidth currentpicture > maxdimensions :
- currentpicture := currentpicture if bbheight currentpicture > bbwidth currentpicture : ysized else : xsized fi maxdimensions ;
- elseif bbheight currentpicture > maxdimensions :
- currentpicture := currentpicture ysized maxdimensions ;
- fi ;
-enddef;
-
-extra_endfig := extra_endfig & "mfun_apply_max_dimensions ;" ;
-
let dump = relax ;
diff --git a/metapost/context/base/mp-tool.mpiv b/metapost/context/base/mp-tool.mpiv
index 4ca2ea0f9..672a051c2 100644
--- a/metapost/context/base/mp-tool.mpiv
+++ b/metapost/context/base/mp-tool.mpiv
@@ -289,14 +289,10 @@ vardef set_outer_boundingbox text q = % obsolete
setbounds q to outerboundingbox q;
enddef;
-%D Some missing functions can be implemented rather straightforward (thanks to
-%D Taco and others):
+%D Some missing functions can be implemented rather
+%D straightforwardly:
-pi := 3.14159265358979323846 ; radian := 180/pi ; % 2pi*radian = 360 ;
-
-% let +++ = ++ ;
-
-numeric Pi ; Pi := pi ; % for some old compatibility reasons i guess
+numeric Pi ; Pi := 3.1415926 ;
vardef sqr primary x = x*x enddef ;
vardef log primary x = if x=0: 0 else: mlog(x)/mlog(10) fi enddef ;
@@ -306,6 +302,15 @@ vardef inv primary x = if x=0: 0 else: x**-1 fi enddef ;
vardef pow (expr x,p) = x**p enddef ;
+vardef asin primary x = x+(x**3)/6+3(x**5)/40 enddef ;
+vardef acos primary x = asin(-x) enddef ;
+vardef atan primary x = x-(x**3)/3+(x**5)/5-(x**7)/7 enddef ;
+vardef tand primary x = sind(x)/cosd(x) enddef ;
+
+%D Here are Taco Hoekwater's alternatives (but vardef'd and primaried).
+
+pi := 3.1415926 ; radian := 180/pi ; % 2pi*radian = 360 ;
+
vardef tand primary x = sind(x)/cosd(x) enddef ;
vardef cotd primary x = cosd(x)/sind(x) enddef ;
@@ -316,11 +321,9 @@ vardef cot primary x = cos(x)/sin(x) enddef ;
vardef asin primary x = angle((1+-+x,x)) enddef ;
vardef acos primary x = angle((x,1+-+x)) enddef ;
-vardef atan primary x = angle(1,x) enddef ;
vardef invsin primary x = (asin(x))/radian enddef ;
vardef invcos primary x = (acos(x))/radian enddef ;
-vardef invtan primary x = (atan(x))/radian enddef ;
vardef acosh primary x = ln(x+(x+-+1)) enddef ;
vardef asinh primary x = ln(x+(x++1)) enddef ;
@@ -1220,7 +1223,7 @@ enddef ;
extra_endfig := extra_endfig & " naturalizepaths ; " ;
-%D Nice tracer:
+%D Nice tracer:
def drawboundary primary p =
draw p dashed evenly withcolor white ;
diff --git a/scripts/context/lua/mtx-bibtex.lua b/scripts/context/lua/mtx-bibtex.lua
deleted file mode 100644
index c81fd596f..000000000
--- a/scripts/context/lua/mtx-bibtex.lua
+++ /dev/null
@@ -1,106 +0,0 @@
-if not modules then modules = { } end modules ['mtx-bibtex'] = {
- version = 1.002,
- comment = "this script is part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE",
- license = "see context related readme files"
-}
-
-local helpinfo = [[
-<?xml version="1.0"?>
-<application>
- <metadata>
- <entry name="name">mtx-bibtex</entry>
- <entry name="detail">bibtex helpers</entry>
- <entry name="version">1.00</entry>
- </metadata>
- <flags>
- <category name="basic">
- <subcategory>
- <flag name="toxml"><short>convert bibtex database(s) to xml</short></flag>
- <flag name="tolua"><short>convert bibtex database(s) to lua</short></flag>
- </subcategory>
- </category>
- </flags>
- <examples>
- <category>
- <title>Example</title>
- <subcategory>
- <example><command>mtxrun --script bibtex --tolua bibl-001.bib</command></example>
- <example><command>mtxrun --script bibtex --tolua --simple bibl-001.bib</command></example>
- <example><command>mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml</command></example>
- </subcategory>
- </category>
- </examples>
-</application>
-]]
-
-local application = logs.application {
- name = "mtx-bibtex",
- banner = "bibtex helpers",
- helpinfo = helpinfo,
-}
-
-local report = application.report
-
-require("publ-dat")
-
-scripts = scripts or { }
-scripts.bibtex = scripts.bibtex or { }
-
-function scripts.bibtex.toxml(files)
- local instance = bibtex.new()
- local target = "mtx-bibtex-output.xml"
- for i=1,#files do
- local filename = files[i]
- local filetype = file.suffix(filename)
- if filetype == "xml" then
- target = filename
- elseif filetype == "bib" then
- bibtex.load(instance,filename)
- else
- -- not supported
- end
- end
- bibtex.converttoxml(instance,true)
- instance.shortcuts = nil
- instance.luadata = nil
- xml.save(instance.xmldata,target)
-end
-
-function scripts.bibtex.tolua(files)
- local instance = bibtex.new()
- local target = "mtx-bibtex-output.lua"
- for i=1,#files do
- local filename = files[i]
- local filetype = file.suffix(filename)
- if filetype == "lua" then
- target = filename
- elseif filetype == "bib" then
- bibtex.load(instance,filename)
- else
- -- not supported
- end
- end
- instance.shortcuts = nil
- instance.xmldata = nil
- bibtex.analyze(instance)
- if environment.arguments.simple then
- table.save(target,instance)
- else
- table.save(target,instance.luadata)
- end
-end
-
-if environment.arguments.toxml then
- scripts.bibtex.toxml(environment.files)
-elseif environment.arguments.tolua then
- scripts.bibtex.tolua(environment.files)
-elseif environment.arguments.exporthelp then
- application.export(environment.arguments.exporthelp,environment.files[1])
-else
- application.help()
-end
-
--- scripts.bibtex.toxml { "tugboat.bib" }
--- scripts.bibtex.tolua { "tugboat.bib" }
diff --git a/scripts/context/lua/mtx-context.lua b/scripts/context/lua/mtx-context.lua
index 90efb5225..4c6672051 100644
--- a/scripts/context/lua/mtx-context.lua
+++ b/scripts/context/lua/mtx-context.lua
@@ -87,17 +87,14 @@ scripts.context = scripts.context or { }
-- for the moment here
-if jit then -- already luajittex
- setargument("engine","luajittex")
- setargument("jit",nil)
-elseif getargument("jit") or getargument("jiton") then -- relaunch luajittex
+if getargument("jit") or getargument("jiton") then
-- bonus shortcut, we assume that --jit also indicates the engine
-- although --jit and --engine=luajittex are independent
setargument("engine","luajittex")
end
-local engine_new = file.nameonly(getargument("engine") or directives.value("system.engine"))
-local engine_old = file.nameonly(environment.ownbin)
+local engine_new = getargument("engine") or directives.value("system.engine")
+local engine_old = environment.ownbin
local function restart(engine_old,engine_new)
local command = format("%s --luaonly %q %s --redirected",engine_new,environment.ownname,environment.reconstructcommandline())
@@ -256,9 +253,8 @@ end
-- multipass control
-local multipass_suffixes = { ".tuc" }
-local multipass_nofruns = 8 -- or 7 to test oscillation
-local multipass_forcedruns = false
+local multipass_suffixes = { ".tuc" }
+local multipass_nofruns = 8 -- or 7 to test oscillation
local function multipass_hashfiles(jobname)
local hash = { }
@@ -694,7 +690,6 @@ function scripts.context.run(ctxdata,filename)
c_flags.final = false
c_flags.kindofrun = (a_once and 3) or (currentrun==1 and 1) or (currentrun==maxnofruns and 4) or 2
c_flags.maxnofruns = maxnofruns
- c_flags.forcedruns = multipass_forcedruns and multipass_forcedruns > 0 and multipass_forcedruns or nil
c_flags.currentrun = currentrun
c_flags.noarrange = a_noarrange or a_arrange or nil
--
@@ -712,15 +707,10 @@ function scripts.context.run(ctxdata,filename)
break
elseif returncode == 0 then
multipass_copyluafile(jobname)
- if not multipass_forcedruns then
- newhash = multipass_hashfiles(jobname)
- if multipass_changed(oldhash,newhash) then
- oldhash = newhash
- else
- break
- end
- elseif currentrun == multipass_forcedruns then
- report("quitting after force %i runs",multipass_forcedruns)
+ newhash = multipass_hashfiles(jobname)
+ if multipass_changed(oldhash,newhash) then
+ oldhash = newhash
+ else
break
end
else
@@ -1488,12 +1478,9 @@ do
end
if getargument("once") then
- multipass_nofruns = 1
-else
- if getargument("runs") then
- multipass_nofruns = tonumber(getargument("runs")) or nil
- end
- multipass_forcedruns = tonumber(getargument("forcedruns")) or nil
+ multipass_nofruns = 1
+elseif getargument("runs") then
+ multipass_nofruns = tonumber(getargument("runs")) or nil
end
if getargument("run") then
diff --git a/scripts/context/lua/mtx-fonts.lua b/scripts/context/lua/mtx-fonts.lua
index 675d9fb12..4340cb357 100644
--- a/scripts/context/lua/mtx-fonts.lua
+++ b/scripts/context/lua/mtx-fonts.lua
@@ -413,12 +413,8 @@ function scripts.fonts.save()
local sub = givenfiles[2] or ""
local function save(savename,fontblob)
if fontblob then
- if fontblob.validation_state and table.contains(fontblob.validation_state,"bad_ps_fontname") then
- report("ignoring bad fontname for %a",name)
- savename = file.nameonly(name) .. "-bad-ps-name"
- end
savename = file.addsuffix(string.lower(savename),"lua")
- report("fontsave, saving data in %a",savename)
+ report("fontsave, saving data in %s",savename)
table.tofile(savename,fontloader.to_table(fontblob),"return")
fontloader.close(fontblob)
end
@@ -430,7 +426,7 @@ function scripts.fonts.save()
if suffix == 'ttf' or suffix == 'otf' or suffix == 'ttc' or suffix == "dfont" then
local fontinfo = fontloader.info(filename)
if fontinfo then
- report("font: %a located as %a",name,filename)
+ report("font: %s located as %s",name,filename)
if #fontinfo > 0 then
for k=1,#fontinfo do
local v = fontinfo[k]
@@ -440,13 +436,13 @@ function scripts.fonts.save()
save(fontinfo.fullname,fontloader.open(filename))
end
else
- report("font: %a cannot be read",filename)
+ report("font: %s cannot be read",filename)
end
else
- report("font: %a not saved",filename)
+ report("font: %s not saved",filename)
end
else
- report("font: %a not found",name)
+ report("font: %s not found",name)
end
else
report("font: no name given")
diff --git a/scripts/context/lua/mtx-plain.lua b/scripts/context/lua/mtx-plain.lua
index de13717d3..d10c21375 100644
--- a/scripts/context/lua/mtx-plain.lua
+++ b/scripts/context/lua/mtx-plain.lua
@@ -114,7 +114,7 @@ local texformat = environment.arguments.texformat or environment.arguments.forma
local texengine = environment.arguments.texengine or environment.arguments.engine
if type(texengine) ~= "string" or texengine == "" then
- texengine = (jit or environment.arguments.jit) and "luajittex" or "luatex"
+ texengine = environment.arguments.jit and "luajittex" or "luatex"
end
if type(texformat) ~= "string" or texformat == "" then
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 3372831b3..0ff2d2897 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -56,7 +56,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lua"] = package.loaded["l-lua"] or true
--- original size: 3247, stripped down to: 1763
+-- original size: 3123, stripped down to: 1694
if not modules then modules={} end modules ['l-lua']={
version=1.001,
@@ -136,9 +136,6 @@ function optionalrequire(...)
return result
end
end
-if lua then
- lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
-end
end -- of closure
@@ -437,7 +434,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
--- original size: 29808, stripped down to: 16182
+-- original size: 29245, stripped down to: 15964
if not modules then modules={} end modules ['l-lpeg']={
version=1.001,
@@ -453,9 +450,7 @@ local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.forma
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-if setinspector then
- setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-end
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -524,11 +519,9 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
-local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
patterns.stripper=stripper
-patterns.fullstripper=fullstripper
patterns.collapser=collapser
patterns.lowercase=lowercase
patterns.uppercase=uppercase
@@ -751,7 +744,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction,isutf)
+function lpeg.finder(lst,makefunction)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -767,11 +760,7 @@ function lpeg.finder(lst,makefunction,isutf)
else
pattern=P(lst)
end
- if isutf then
- pattern=((utf8char or 1)-pattern)^0*pattern
- else
- pattern=(1-pattern)^0*pattern
- end
+ pattern=(1-pattern)^0*pattern
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -1082,7 +1071,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-string"] = package.loaded["l-string"] or true
--- original size: 5671, stripped down to: 2827
+-- original size: 5547, stripped down to: 2708
if not modules then modules={} end modules ['l-string']={
version=1.001,
@@ -1118,15 +1107,11 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
-local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
-function string.fullstrip(str)
- return lpegmatch(fullstripper,str) or ""
-end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -1187,7 +1172,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-table"] = package.loaded["l-table"] or true
--- original size: 31142, stripped down to: 20283
+-- original size: 31113, stripped down to: 20256
if not modules then modules={} end modules ['l-table']={
version=1.001,
@@ -2015,9 +2000,7 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-if setinspector then
- setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
-end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
@@ -3524,7 +3507,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-url"] = package.loaded["l-url"] or true
--- original size: 12292, stripped down to: 5585
+-- original size: 11993, stripped down to: 5584
if not modules then modules={} end modules ['l-url']={
version=1.001,
@@ -3551,7 +3534,7 @@ local hexdigit=R("09","AF","af")
local plus=P("+")
local nothing=Cc("")
local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
-local escaped=(plus/" ")+escapedchar
+local escaped=(plus/" ")+escapedchar
local noslash=P("/")/""
local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
@@ -3735,7 +3718,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-dir"] = package.loaded["l-dir"] or true
--- original size: 14768, stripped down to: 9107
+-- original size: 14229, stripped down to: 8740
if not modules then modules={} end modules ['l-dir']={
version=1.001,
@@ -3758,7 +3741,6 @@ local isdir=lfs.isdir
local isfile=lfs.isfile
local currentdir=lfs.currentdir
local chdir=lfs.chdir
-local mkdir=lfs.mkdir
local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
if not isdir then
function isdir(name)
@@ -3931,27 +3913,16 @@ end
local make_indeed=true
if onwindows then
function dir.mkdirs(...)
- local n=select("#",...)
- local str
- if n==1 then
- str=select(1,...)
- if isdir(str) then
- return str,true
- end
- else
- str=""
- for i=1,n do
- local s=select(i,...)
- local s=select(i,...)
- if s=="" then
- elseif str=="" then
- str=s
- else
- str=str.."/"..s
- end
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
end
end
- local pth=""
local drive=false
local first,middle,last=match(str,"^(//)(//*)(.*)$")
if first then
@@ -3986,30 +3957,21 @@ if onwindows then
pth=pth.."/"..s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth,(isdir(pth)==true)
end
else
function dir.mkdirs(...)
- local n=select("#",...)
- local str,pth
- if n==1 then
- str=select(1,...)
- if isdir(str) then
- return str,true
- end
- else
- str=""
- for i=1,n do
- local s=select(i,...)
- if s and s~="" then
- if str~="" then
- str=str.."/"..s
- else
- str=s
- end
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
+ else
+ str=s
end
end
end
@@ -4024,7 +3986,7 @@ else
pth=pth.."/"..s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -4032,7 +3994,7 @@ else
for s in gmatch(str,"[^/]+") do
pth=pth.."/"..s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -4840,7 +4802,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-str"] = package.loaded["util-str"] or true
--- original size: 29502, stripped down to: 16632
+-- original size: 26857, stripped down to: 15062
if not modules then modules={} end modules ['util-str']={
version=1.001,
@@ -4859,19 +4821,8 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=nil
-if _LUAVERSION<5.2 then
- loadstripped=function(str,shortcuts)
- return load(str)
- end
-else
- loadstripped=function(str,shortcuts)
- if shortcuts then
- return load(dump(load(str),true),nil,nil,shortcuts)
- else
- return load(dump(load(str),true))
- end
- end
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
end
if not number then number={} end
local stripper=patterns.stripzeros
@@ -5021,58 +4972,31 @@ function number.sparseexponent(f,n)
end
return tostring(n)
end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+local formattednumber = number.formatted
+local sparseexponent = number.sparseexponent
+]]
local template=[[
%s
%s
return function(%s) return %s end
]]
-local preamble,environment="",{}
-if _LUAVERSION<5.2 then
- preamble=[[
-local lpeg=lpeg
-local type=type
-local tostring=tostring
-local tonumber=tonumber
-local format=string.format
-local concat=table.concat
-local signed=number.signed
-local points=number.points
-local basepoints= number.basepoints
-local utfchar=utf.char
-local utfbyte=utf.byte
-local lpegmatch=lpeg.match
-local nspaces=string.nspaces
-local tracedchar=string.tracedchar
-local autosingle=string.autosingle
-local autodouble=string.autodouble
-local sequenced=table.sequenced
-local formattednumber=number.formatted
-local sparseexponent=number.sparseexponent
- ]]
-else
- environment={
- global=global or _G,
- lpeg=lpeg,
- type=type,
- tostring=tostring,
- tonumber=tonumber,
- format=string.format,
- concat=table.concat,
- signed=number.signed,
- points=number.points,
- basepoints=number.basepoints,
- utfchar=utf.char,
- utfbyte=utf.byte,
- lpegmatch=lpeg.match,
- nspaces=string.nspaces,
- tracedchar=string.tracedchar,
- autosingle=string.autosingle,
- autodouble=string.autodouble,
- sequenced=table.sequenced,
- formattednumber=number.formatted,
- sparseexponent=number.sparseexponent,
- }
-end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -5394,8 +5318,8 @@ local builder=Cs { "start",
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
-)
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
local function make(t,str)
local f
local p
@@ -5407,7 +5331,7 @@ local function make(t,str)
p=lpegmatch(builder,str,1,"..",t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p,t._environment_)()
+ f=loadstripped(p)()
else
f=function() return str end
end
@@ -5419,22 +5343,10 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-if _LUAVERSION<5.2 then
- function strings.formatters.new()
- local t={ _extensions_={},_preamble_=preamble,_environment_={},_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
-else
- function strings.formatters.new()
- local e={}
- for k,v in next,environment do
- e[k]=v
- end
- local t={ _extensions_={},_preamble_="",_environment_=e,_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
end
local formatters=strings.formatters.new()
string.formatters=formatters
@@ -5442,12 +5354,8 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if type(preamble)=="string" then
+ if preamble then
t._preamble_=preamble.."\n"..t._preamble_
- elseif type(preamble)=="table" then
- for k,v in next,preamble do
- t._environment_[k]=v
- end
end
end
end
@@ -5456,15 +5364,9 @@ patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;
patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-if _LUAVERSION<5.2 then
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
-else
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
-end
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
end -- of closure
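
Editor's note: with the environment-table variant removed, the three escape extensions are registered with plain string preambles, so one code path covers all Lua versions. A hedged usage sketch; it assumes the merged mtxrun environment where this module has installed string.formatters:

local formatters = string.formatters
-- %!xml! runs its argument through lpeg.patterns.xmlescape before insertion
print(formatters["value: %!xml!"]("<a&b>"))   --> value: &lt;a&amp;b&gt;
-- %!tex! backslash-escapes TeX specials via lpeg.patterns.texescape
print(formatters["tex: %!tex!"]("50% sure"))  --> tex: 50\% sure
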
@@ -5473,7 +5375,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 23980, stripped down to: 16119
+-- original size: 23952, stripped down to: 16092
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -5708,7 +5610,7 @@ local f_ordered_string=formatters["%q,"]
local f_ordered_number=formatters["%s,"]
local f_ordered_boolean=formatters["%l,"]
function table.fastserialize(t,prefix)
- local r={ type(prefix)=="string" and prefix or "return" }
+ local r={ prefix or "return" }
local m=1
local function fastserialize(t,outer)
local n=#t
@@ -7747,7 +7649,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
--- original size: 6643, stripped down to: 5272
+-- original size: 6501, stripped down to: 5156
if not modules then modules={} end modules ['trac-inf']={
version=1.001,
@@ -7848,10 +7750,7 @@ function statistics.show()
if statistics.enable then
local register=statistics.register
register("used platform",function()
- local mask=lua.mask or "ascii"
- return format("%s, type: %s, binary subtree: %s, symbol mask: %s (%s)",
- os.platform or "unknown",os.type or "unknown",environment.texos or "unknown",
- mask,mask=="utf" and "τεχ" or "tex")
+ return format("%s, type: %s, binary subtree: %s",os.platform or "unknown",os.type or "unknown",environment.texos or "unknown")
end)
register("luatex banner",function()
return lower(status.banner)
@@ -7930,7 +7829,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
--- original size: 5829, stripped down to: 3501
+-- original size: 5773, stripped down to: 3453
if not modules then modules={} end modules ['trac-pro']={
version=1.001,
@@ -7947,16 +7846,14 @@ local namespaces=namespaces
local registered={}
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
else
report_system("reference to %a in protected namespace %a",k,name)
end
end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
else
report_system("assignment to %a in protected namespace %a",k,name)
end
@@ -8207,7 +8104,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-deb"] = package.loaded["util-deb"] or true
--- original size: 3898, stripped down to: 2644
+-- original size: 3708, stripped down to: 2568
if not modules then modules={} end modules ['util-deb']={
version=1.001,
@@ -8287,22 +8184,20 @@ end
function debugger.disable()
debug.sethook()
end
-local function showtraceback(rep)
- local level=2
- local reporter=rep or report
+function traceback()
+ local level=1
while true do
- local info=getinfo(level,"Sl")
+ local info=debug.getinfo(level,"Sl")
if not info then
break
elseif info.what=="C" then
- reporter("%2i : %s",level-1,"C function")
+ print(format("%3i : C function",level))
else
- reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
end
level=level+1
end
end
-debugger.showtraceback=showtraceback
end -- of closure
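
Editor's note: the debugger module now exposes a plain `traceback` that prints directly instead of routing through a reporter callback; since debugger.showtraceback is gone, trac-pro above falls back to the standard debug.traceback(). A standalone sketch of the same loop, runnable with stock Lua:

local format = string.format

local function traceback()
  local level = 1
  while true do
    local info = debug.getinfo(level, "Sl")   -- source and current-line info only
    if not info then
      break
    elseif info.what == "C" then
      print(format("%3i : C function", level))
    else
      print(format("%3i : [%s]:%d", level, info.short_src, info.currentline))
    end
    level = level + 1
  end
end

local function inner() traceback() end
local function outer() inner() end
outer()   -- prints one line per active level, innermost (traceback itself) first
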
@@ -8968,7 +8863,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 42614, stripped down to: 26694
+-- original size: 42447, stripped down to: 26589
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -9494,11 +9389,8 @@ local function _xmlconvert_(data,settings)
end
if errorstr and errorstr~="" then
result.error=true
- else
- errorstr=nil
end
result.statistics={
- errormessage=errorstr,
entities={
decimals=dcache,
hexadecimals=hcache,
@@ -9696,26 +9588,24 @@ local function verbose_document(e,handlers)
end
end
local function serialize(e,handlers,...)
- if e then
- local initialize=handlers.initialize
- local finalize=handlers.finalize
- local functions=handlers.functions
- if initialize then
- local state=initialize(...)
- if not state==true then
- return state
- end
- end
- local etg=e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
- if finalize then
- return finalize()
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state==true then
+ return state
end
end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
+ end
end
local function xserialize(e,handlers)
local functions=handlers.functions
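
Editor's note: serialize() now assumes it is handed a proper element, so the handlers contract carries the remaining logic: initialize must return a truthy value to continue, finalize's return value becomes the result, and functions["@el@"]/["@dc@"] are the element/document fallbacks. A hedged sketch of such a table; the element field `tg` comes from the hunk, everything else is illustrative:

local seen = {}

local handlers = {
  initialize = function()
    seen = {}
    return true                       -- a falsy state would be returned early
  end,
  finalize = function()
    return table.concat(seen, ",")
  end,
  functions = {
    ["@dc@"] = function(e, h) seen[#seen+1] = "document" end,  -- document wrapper
    ["@el@"] = function(e, h) seen[#seen+1] = e.tg       end,  -- generic element
  },
}

-- serialize(element, handlers) would then return the collected tag list
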
@@ -15485,7 +15375,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-use"] = package.loaded["data-use"] or true
--- original size: 3899, stripped down to: 2984
+-- original size: 3913, stripped down to: 2998
if not modules then modules={} end modules ['data-use']={
version=1.001,
@@ -15531,7 +15421,7 @@ end
statistics.register("used config file",function() return caches.configfiles() end)
statistics.register("used cache path",function() return caches.usedpaths() end)
function statistics.savefmtstatus(texname,formatbanner,sourcefile)
- local enginebanner=status.banner
+ local enginebanner=status.list().banner
if formatbanner and enginebanner and sourcefile then
local luvname=file.replacesuffix(texname,"luv")
local luvdata={
@@ -15544,7 +15434,7 @@ function statistics.savefmtstatus(texname,formatbanner,sourcefile)
end
end
function statistics.checkfmtstatus(texname)
- local enginebanner=status.banner
+ local enginebanner=status.list().banner
if enginebanner and texname then
local luvname=file.replacesuffix(texname,"luv")
if lfs.isfile(luvname) then
@@ -16168,7 +16058,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-aux"] = package.loaded["data-aux"] or true
--- original size: 2431, stripped down to: 1996
+-- original size: 2394, stripped down to: 2005
if not modules then modules={} end modules ['data-aux']={
version=1.001,
@@ -16182,8 +16072,8 @@ local type,next=type,next
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local resolvers=resolvers
local report_scripts=logs.reporter("resolvers","scripts")
-function resolvers.updatescript(oldname,newname)
- local scriptpath="context/lua"
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="scripts/context/lua"
newname=file.addsuffix(newname,"lua")
local oldscript=resolvers.cleanpath(oldname)
if trace_locating then
@@ -16791,8 +16681,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 689993
--- stripped bytes : 244562
+-- original bytes : 685064
+-- stripped bytes : 242353
-- end library merge
@@ -16891,18 +16781,17 @@ local ownlibs = { -- order can be made better
}
--- c:/data/develop/tex-context/tex/texmf-win64/bin/../../texmf-context/tex/context/base/data-tmf.lua
--- c:/data/develop/context/sources/data-tmf.lua
-
local ownlist = {
- -- '.',
- -- ownpath ,
- owntree .. "/../../../../context/sources", -- HH's development path
+ '.',
+ ownpath ,
+ ownpath .. "/../sources", -- HH's development path
owntree .. "/../../texmf-local/tex/context/base",
owntree .. "/../../texmf-context/tex/context/base",
+ owntree .. "/../../texmf-dist/tex/context/base",
owntree .. "/../../texmf/tex/context/base",
owntree .. "/../../../texmf-local/tex/context/base",
owntree .. "/../../../texmf-context/tex/context/base",
+ owntree .. "/../../../texmf-dist/tex/context/base",
owntree .. "/../../../texmf/tex/context/base",
}
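
Editor's note: the search list again starts with the current directory and the runner's own path, and now also covers texmf-dist trees. A sketch of how such a list is typically probed for the unmerged libraries; the helper below is illustrative and not the script's actual locate_libs:

local lfs = require("lfs")

local function isfile(path)
  return lfs.attributes(path, "mode") == "file"
end

local function locate(list, probefile)
  for i = 1, #list do
    if isfile(list[i] .. "/" .. probefile) then
      return list[i]                  -- first tree that carries the library wins
    end
  end
end

-- local found = locate(ownlist, "l-lua.lua")
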
@@ -17809,12 +17698,6 @@ if e_argument("ansi") then
status_nop = formatters["%-15s :\n"],
}
- local script = e_argument("script") or e_argument("scripts")
-
- if type(script) == "string" then
- logs.writer("]0;"..script.."") -- for Alan to test
- end
-
end
if e_argument("script") or e_argument("scripts") then
@@ -17838,16 +17721,8 @@ elseif e_argument("selfmerge") then
runners.loadbase()
local found = locate_libs()
-
if found then
- local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
- if lfs.isfile(mtxrun) then
- utilities.merger.selfmerge(mtxrun,own.libs,{ found })
- application.report("runner updated on resolved path: %s",mtxrun)
- else
- utilities.merger.selfmerge(own.name,own.libs,{ found })
- application.report("runner updated on relative path: %s",own.name)
- end
+ utilities.merger.selfmerge(own.name,own.libs,{ found })
end
elseif e_argument("selfclean") then
@@ -17855,15 +17730,7 @@ elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
-
- local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
- if lfs.isfile(mtxrun) then
- utilities.merger.selfclean(mtxrun)
- application.report("runner cleaned on resolved path: %s",mtxrun)
- else
- utilities.merger.selfclean(own.name)
- application.report("runner cleaned on relative path: %s",own.name)
- end
+ utilities.merger.selfclean(own.name)
elseif e_argument("selfupdate") then
@@ -18105,8 +17972,6 @@ elseif e_argument("version") then
application.version()
- application.report("source path",environment.ownbin)
-
elseif e_argument("directives") then
directives.show()
diff --git a/scripts/context/stubs/install/first-setup.sh b/scripts/context/stubs/install/first-setup.sh
deleted file mode 100644
index 9249fd2e0..000000000
--- a/scripts/context/stubs/install/first-setup.sh
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/bin/sh
-
-# Takes the same arguments as mtx-update
-
-# you may change this if you want ...
-CONTEXTROOT="$PWD/tex"
-
-# suggested by Tobias Florek to check for ruby & rsync
-if [ ! -x "`which rsync`" ]; then
- echo "You need to install rsync first."
- exit 1
-fi
-if [ ! -x "`which ruby`" ]; then
- echo "You might want to install Ruby first if you want to use pdfTeX or XeTeX."
-fi
-
-system=`uname -s`
-cpu=`uname -m`
-
-case "$system" in
- # linux
- Linux)
- case "$cpu" in
- i*86) platform="linux" ;;
- x86_64|ia64) platform="linux-64" ;;
- # a little bit of cheating with ppc64 (won't work on Gentoo)
- ppc|ppc64) platform="linux-ppc" ;;
- # we currently support just mipsel, but Debian is lying (reports mips64)
- # we need more hacks to fix the situation, this is just a temporary solution
- mips|mips64|mipsel|mips64el) platform="linux-mipsel" ;;
- *) platform="unknown" ;;
- esac ;;
- # Mac OS X
- Darwin)
- case "$cpu" in
- i*86) platform="osx-intel" ;;
- x86_64) platform="osx-64" ;;
- ppc*|powerpc|power*|Power*) platform="osx-ppc" ;;
- *) platform="unknown" ;;
- esac ;;
- # FreeBSD
- FreeBSD|freebsd)
- case "$cpu" in
- i*86) platform="freebsd" ;;
- x86_64) platform="freebsd" ;; # no special binaries are available yet
- amd64) platform="freebsd-amd64" ;;
- *) platform="unknown" ;;
- esac ;;
- # kFreeBSD (debian)
- GNU/kFreeBSD)
- case "$cpu" in
- i*86) platform="kfreebsd-i386" ;;
- x86_64|amd64) platform="kfreebsd-amd64" ;;
- *) platform="unknown" ;;
- esac ;;
- # cygwin
- CYGWIN*)
- case "$cpu" in
- i*86) platform="cygwin" ;;
- x86_64|ia64) platform="cygwin-64" ;;
- *) platform="unknown" ;;
- esac ;;
- # SunOS/Solaris
- SunOS)
- case "$cpu" in
- sparc) platform="solaris-sparc" ;;
- i86pc) platform="solaris-intel" ;;
- *) platform="unknown" ;;
- esac ;;
- *) platform="unknown"
-esac
-
-# temporary patch for 64-bit Leopard with 32-bit kernel
-if test "$platform" = "osx-intel"; then
- # if running Snow Leopard or later
- # better: /usr/bin/sw_vers -productVersion
- if test `uname -r|cut -f1 -d"."` -ge 10 ; then
- # if working on 64-bit hardware
- if test `sysctl -n hw.cpu64bit_capable` = 1; then
- # snowleopard32=TRUE
- platform="osx-64"
- fi
- fi
-fi
-
-if test "$platform" = "unknown" ; then
- echo "Error: your system \"$system $cpu\" is not supported yet."
- echo "Please report to the ConTeXt mailing-list (ntg-context@ntg.nl)"
- exit
-fi
-
-# if you want to enforce some specific platform
-# (when 'uname' doesn't agree with true architecture), uncomment and modify next line:
-# platform=linux
-
-# download or rsync the latest scripts first
-rsync -rlptv rsync://contextgarden.net/minimals/setup/$platform/bin .
-
-# download or update the distribution
-# you may remove the --context=beta switch if you want to use "current"
-# you can use --engine=luatex if you want just mkiv
-env PATH="$PWD/bin:$CONTEXTROOT/texmf-$platform/bin:$PATH" \
-mtxrun --script ./bin/mtx-update.lua --force --update --make --context=beta --platform=$platform --texroot="$CONTEXTROOT" $@
-
-echo
-echo "When you want to use context, you need to initialize the tree by typing:"
-echo
-echo " . $CONTEXTROOT/setuptex"
-echo
-echo "in your shell or add"
-echo " \"$CONTEXTROOT/texmf-$platform/bin\""
-echo "to PATH variable if you want to set it permanently."
-echo "This can usually be done in .bashrc, .bash_profile"
-echo "(or whatever file is used to initialize your shell)."
-echo
-
-if [ ! -x "`which ruby`" ]; then
- echo "You might want to install Ruby first if you want to use pdfTeX or XeTeX."
- echo
-fi
diff --git a/scripts/context/stubs/mswin/context.exe b/scripts/context/stubs/mswin/context.exe
index 0e7882cf9..faae5caa7 100644
--- a/scripts/context/stubs/mswin/context.exe
+++ b/scripts/context/stubs/mswin/context.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/ctxtools.exe b/scripts/context/stubs/mswin/ctxtools.exe
index 0e7882cf9..faae5caa7 100644
--- a/scripts/context/stubs/mswin/ctxtools.exe
+++ b/scripts/context/stubs/mswin/ctxtools.exe
Binary files differ
diff --git a/scripts/context/stubs/install/first-setup.bat b/scripts/context/stubs/mswin/first-setup.bat
index f06ad0e6b..f06ad0e6b 100644
--- a/scripts/context/stubs/install/first-setup.bat
+++ b/scripts/context/stubs/mswin/first-setup.bat
diff --git a/scripts/context/stubs/mswin/luatools.exe b/scripts/context/stubs/mswin/luatools.exe
index 0e7882cf9..faae5caa7 100644
--- a/scripts/context/stubs/mswin/luatools.exe
+++ b/scripts/context/stubs/mswin/luatools.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/metatex.exe b/scripts/context/stubs/mswin/metatex.exe
index 0e7882cf9..faae5caa7 100644
--- a/scripts/context/stubs/mswin/metatex.exe
+++ b/scripts/context/stubs/mswin/metatex.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/mptopdf.exe b/scripts/context/stubs/mswin/mptopdf.exe
index 0e7882cf9..faae5caa7 100644
--- a/scripts/context/stubs/mswin/mptopdf.exe
+++ b/scripts/context/stubs/mswin/mptopdf.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxrun.dll b/scripts/context/stubs/mswin/mtxrun.dll
index 3c4481c31..5a79e1bad 100644
--- a/scripts/context/stubs/mswin/mtxrun.dll
+++ b/scripts/context/stubs/mswin/mtxrun.dll
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxrun.exe b/scripts/context/stubs/mswin/mtxrun.exe
index 0e7882cf9..faae5caa7 100644
--- a/scripts/context/stubs/mswin/mtxrun.exe
+++ b/scripts/context/stubs/mswin/mtxrun.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 3372831b3..0ff2d2897 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -56,7 +56,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lua"] = package.loaded["l-lua"] or true
--- original size: 3247, stripped down to: 1763
+-- original size: 3123, stripped down to: 1694
if not modules then modules={} end modules ['l-lua']={
version=1.001,
@@ -136,9 +136,6 @@ function optionalrequire(...)
return result
end
end
-if lua then
- lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
-end
end -- of closure
@@ -437,7 +434,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
--- original size: 29808, stripped down to: 16182
+-- original size: 29245, stripped down to: 15964
if not modules then modules={} end modules ['l-lpeg']={
version=1.001,
@@ -453,9 +450,7 @@ local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.forma
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-if setinspector then
- setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-end
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -524,11 +519,9 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
-local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
patterns.stripper=stripper
-patterns.fullstripper=fullstripper
patterns.collapser=collapser
patterns.lowercase=lowercase
patterns.uppercase=uppercase
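
Editor's note: dropping fullstripper here (and string.fullstrip further down) leaves only the space/tab stripper in this copy. A standalone sketch of what that pattern does, assuming stock lpeg; `spacer` below is an approximation of the pattern defined earlier in l-lpeg:

local lpeg = require("lpeg")
local S, C = lpeg.S, lpeg.C

local spacer    = S(" \t")                     -- horizontal whitespace only (assumed)
local nonspacer = 1 - spacer
local stripper  = spacer^0 * C((spacer^0 * nonspacer^1)^0)

-- leading and trailing spaces go, embedded runs are kept as-is
print("[" .. lpeg.match(stripper, "  hello   world  ") .. "]")   --> [hello   world]
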
@@ -751,7 +744,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction,isutf)
+function lpeg.finder(lst,makefunction)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -767,11 +760,7 @@ function lpeg.finder(lst,makefunction,isutf)
else
pattern=P(lst)
end
- if isutf then
- pattern=((utf8char or 1)-pattern)^0*pattern
- else
- pattern=(1-pattern)^0*pattern
- end
+ pattern=(1-pattern)^0*pattern
if makefunction then
return function(str)
return lpegmatch(pattern,str)
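
Editor's note: lpeg.finder loses its utf branch; what remains is the plain "skip until one of the given strings matches" shape. A self-contained sketch of that shape, assuming stock lpeg; with no captures, lpeg.match returns the position just past the first hit:

local lpeg = require("lpeg")
local P = lpeg.P

local function finder(lst)
  local pattern = P(false)
  if type(lst) == "table" then
    for i = 1, #lst do
      pattern = pattern + P(lst[i])    -- ordered choice over all needles
    end
  else
    pattern = P(lst)
  end
  pattern = (1 - pattern)^0 * pattern  -- skip non-hits, then match
  return function(str)
    return lpeg.match(pattern, str)
  end
end

local find_mk = finder { "mkiv", "mkvi" }
print(find_mk("context mkiv"))         --> 13 (one past the end of "mkiv")
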
@@ -1082,7 +1071,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-string"] = package.loaded["l-string"] or true
--- original size: 5671, stripped down to: 2827
+-- original size: 5547, stripped down to: 2708
if not modules then modules={} end modules ['l-string']={
version=1.001,
@@ -1118,15 +1107,11 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
-local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
-function string.fullstrip(str)
- return lpegmatch(fullstripper,str) or ""
-end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -1187,7 +1172,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-table"] = package.loaded["l-table"] or true
--- original size: 31142, stripped down to: 20283
+-- original size: 31113, stripped down to: 20256
if not modules then modules={} end modules ['l-table']={
version=1.001,
@@ -2015,9 +2000,7 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-if setinspector then
- setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
-end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
@@ -3524,7 +3507,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-url"] = package.loaded["l-url"] or true
--- original size: 12292, stripped down to: 5585
+-- original size: 11993, stripped down to: 5584
if not modules then modules={} end modules ['l-url']={
version=1.001,
@@ -3551,7 +3534,7 @@ local hexdigit=R("09","AF","af")
local plus=P("+")
local nothing=Cc("")
local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
-local escaped=(plus/" ")+escapedchar
+local escaped=(plus/" ")+escapedchar
local noslash=P("/")/""
local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
@@ -3735,7 +3718,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-dir"] = package.loaded["l-dir"] or true
--- original size: 14768, stripped down to: 9107
+-- original size: 14229, stripped down to: 8740
if not modules then modules={} end modules ['l-dir']={
version=1.001,
@@ -3758,7 +3741,6 @@ local isdir=lfs.isdir
local isfile=lfs.isfile
local currentdir=lfs.currentdir
local chdir=lfs.chdir
-local mkdir=lfs.mkdir
local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
if not isdir then
function isdir(name)
@@ -3931,27 +3913,16 @@ end
local make_indeed=true
if onwindows then
function dir.mkdirs(...)
- local n=select("#",...)
- local str
- if n==1 then
- str=select(1,...)
- if isdir(str) then
- return str,true
- end
- else
- str=""
- for i=1,n do
- local s=select(i,...)
- local s=select(i,...)
- if s=="" then
- elseif str=="" then
- str=s
- else
- str=str.."/"..s
- end
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
end
end
- local pth=""
local drive=false
local first,middle,last=match(str,"^(//)(//*)(.*)$")
if first then
@@ -3986,30 +3957,21 @@ if onwindows then
pth=pth.."/"..s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth,(isdir(pth)==true)
end
else
function dir.mkdirs(...)
- local n=select("#",...)
- local str,pth
- if n==1 then
- str=select(1,...)
- if isdir(str) then
- return str,true
- end
- else
- str=""
- for i=1,n do
- local s=select(i,...)
- if s and s~="" then
- if str~="" then
- str=str.."/"..s
- else
- str=s
- end
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
+ else
+ str=s
end
end
end
@@ -4024,7 +3986,7 @@ else
pth=pth.."/"..s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -4032,7 +3994,7 @@ else
for s in gmatch(str,"[^/]+") do
pth=pth.."/"..s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
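
Editor's note: both mkdirs variants now join all their arguments first and then create the missing levels one by one with lfs.mkdir. A simplified, runnable sketch of that joining-and-creating loop for relative paths; drive letters, absolute paths and error handling are left out:

local lfs = require("lfs")

local function isdir(path)
  return lfs.attributes(path, "mode") == "directory"
end

local function mkdirs(...)
  local str = ""
  for i = 1, select("#", ...) do
    local s = select(i, ...)
    if s and s ~= "" then
      str = (str == "") and s or (str .. "/" .. s)
    end
  end
  local pth = ""
  for s in string.gmatch(str, "[^/]+") do
    pth = (pth == "") and s or (pth .. "/" .. s)
    if not isdir(pth) then
      lfs.mkdir(pth)                  -- create this level if it is missing
    end
  end
  return pth, isdir(pth)
end

print(mkdirs("build", "cache", "fonts"))   --> build/cache/fonts   true
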
@@ -4840,7 +4802,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-str"] = package.loaded["util-str"] or true
--- original size: 29502, stripped down to: 16632
+-- original size: 26857, stripped down to: 15062
if not modules then modules={} end modules ['util-str']={
version=1.001,
@@ -4859,19 +4821,8 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=nil
-if _LUAVERSION<5.2 then
- loadstripped=function(str,shortcuts)
- return load(str)
- end
-else
- loadstripped=function(str,shortcuts)
- if shortcuts then
- return load(dump(load(str),true),nil,nil,shortcuts)
- else
- return load(dump(load(str),true))
- end
- end
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
end
if not number then number={} end
local stripper=patterns.stripzeros
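
Editor's note: loadstripped collapses to a single definition: on Lua 5.1 it is just load, on newer engines the chunk is compiled, dumped without debug information and reloaded, so the many cached formatter functions stay small. A minimal sketch of that second branch; it assumes an engine whose string.dump honours a strip flag (LuaJIT or Lua 5.3+):

local dump = string.dump

local function loadstripped(str)
  -- compile, dump with debug info stripped, then load the stripped bytecode
  return load(dump(load(str), true))
end

local f = loadstripped("return function(a) return a + 1 end")()
print(f(41))   --> 42
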
@@ -5021,58 +4972,31 @@ function number.sparseexponent(f,n)
end
return tostring(n)
end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+local formattednumber = number.formatted
+local sparseexponent = number.sparseexponent
+]]
local template=[[
%s
%s
return function(%s) return %s end
]]
-local preamble,environment="",{}
-if _LUAVERSION<5.2 then
- preamble=[[
-local lpeg=lpeg
-local type=type
-local tostring=tostring
-local tonumber=tonumber
-local format=string.format
-local concat=table.concat
-local signed=number.signed
-local points=number.points
-local basepoints= number.basepoints
-local utfchar=utf.char
-local utfbyte=utf.byte
-local lpegmatch=lpeg.match
-local nspaces=string.nspaces
-local tracedchar=string.tracedchar
-local autosingle=string.autosingle
-local autodouble=string.autodouble
-local sequenced=table.sequenced
-local formattednumber=number.formatted
-local sparseexponent=number.sparseexponent
- ]]
-else
- environment={
- global=global or _G,
- lpeg=lpeg,
- type=type,
- tostring=tostring,
- tonumber=tonumber,
- format=string.format,
- concat=table.concat,
- signed=number.signed,
- points=number.points,
- basepoints=number.basepoints,
- utfchar=utf.char,
- utfbyte=utf.byte,
- lpegmatch=lpeg.match,
- nspaces=string.nspaces,
- tracedchar=string.tracedchar,
- autosingle=string.autosingle,
- autodouble=string.autodouble,
- sequenced=table.sequenced,
- formattednumber=number.formatted,
- sparseexponent=number.sparseexponent,
- }
-end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -5394,8 +5318,8 @@ local builder=Cs { "start",
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
-)
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
local function make(t,str)
local f
local p
@@ -5407,7 +5331,7 @@ local function make(t,str)
p=lpegmatch(builder,str,1,"..",t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p,t._environment_)()
+ f=loadstripped(p)()
else
f=function() return str end
end
@@ -5419,22 +5343,10 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-if _LUAVERSION<5.2 then
- function strings.formatters.new()
- local t={ _extensions_={},_preamble_=preamble,_environment_={},_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
-else
- function strings.formatters.new()
- local e={}
- for k,v in next,environment do
- e[k]=v
- end
- local t={ _extensions_={},_preamble_="",_environment_=e,_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
end
local formatters=strings.formatters.new()
string.formatters=formatters
@@ -5442,12 +5354,8 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if type(preamble)=="string" then
+ if preamble then
t._preamble_=preamble.."\n"..t._preamble_
- elseif type(preamble)=="table" then
- for k,v in next,preamble do
- t._environment_[k]=v
- end
end
end
end
@@ -5456,15 +5364,9 @@ patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;
patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-if _LUAVERSION<5.2 then
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
-else
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
-end
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
end -- of closure
@@ -5473,7 +5375,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 23980, stripped down to: 16119
+-- original size: 23952, stripped down to: 16092
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -5708,7 +5610,7 @@ local f_ordered_string=formatters["%q,"]
local f_ordered_number=formatters["%s,"]
local f_ordered_boolean=formatters["%l,"]
function table.fastserialize(t,prefix)
- local r={ type(prefix)=="string" and prefix or "return" }
+ local r={ prefix or "return" }
local m=1
local function fastserialize(t,outer)
local n=#t
@@ -7747,7 +7649,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
--- original size: 6643, stripped down to: 5272
+-- original size: 6501, stripped down to: 5156
if not modules then modules={} end modules ['trac-inf']={
version=1.001,
@@ -7848,10 +7750,7 @@ function statistics.show()
if statistics.enable then
local register=statistics.register
register("used platform",function()
- local mask=lua.mask or "ascii"
- return format("%s, type: %s, binary subtree: %s, symbol mask: %s (%s)",
- os.platform or "unknown",os.type or "unknown",environment.texos or "unknown",
- mask,mask=="utf" and "τεχ" or "tex")
+ return format("%s, type: %s, binary subtree: %s",os.platform or "unknown",os.type or "unknown",environment.texos or "unknown")
end)
register("luatex banner",function()
return lower(status.banner)
@@ -7930,7 +7829,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
--- original size: 5829, stripped down to: 3501
+-- original size: 5773, stripped down to: 3453
if not modules then modules={} end modules ['trac-pro']={
version=1.001,
@@ -7947,16 +7846,14 @@ local namespaces=namespaces
local registered={}
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
else
report_system("reference to %a in protected namespace %a",k,name)
end
end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
else
report_system("assignment to %a in protected namespace %a",k,name)
end
@@ -8207,7 +8104,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-deb"] = package.loaded["util-deb"] or true
--- original size: 3898, stripped down to: 2644
+-- original size: 3708, stripped down to: 2568
if not modules then modules={} end modules ['util-deb']={
version=1.001,
@@ -8287,22 +8184,20 @@ end
function debugger.disable()
debug.sethook()
end
-local function showtraceback(rep)
- local level=2
- local reporter=rep or report
+function traceback()
+ local level=1
while true do
- local info=getinfo(level,"Sl")
+ local info=debug.getinfo(level,"Sl")
if not info then
break
elseif info.what=="C" then
- reporter("%2i : %s",level-1,"C function")
+ print(format("%3i : C function",level))
else
- reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
end
level=level+1
end
end
-debugger.showtraceback=showtraceback
end -- of closure
@@ -8968,7 +8863,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 42614, stripped down to: 26694
+-- original size: 42447, stripped down to: 26589
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -9494,11 +9389,8 @@ local function _xmlconvert_(data,settings)
end
if errorstr and errorstr~="" then
result.error=true
- else
- errorstr=nil
end
result.statistics={
- errormessage=errorstr,
entities={
decimals=dcache,
hexadecimals=hcache,
@@ -9696,26 +9588,24 @@ local function verbose_document(e,handlers)
end
end
local function serialize(e,handlers,...)
- if e then
- local initialize=handlers.initialize
- local finalize=handlers.finalize
- local functions=handlers.functions
- if initialize then
- local state=initialize(...)
- if not state==true then
- return state
- end
- end
- local etg=e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
- if finalize then
- return finalize()
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state==true then
+ return state
end
end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
+ end
end
local function xserialize(e,handlers)
local functions=handlers.functions
@@ -15485,7 +15375,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-use"] = package.loaded["data-use"] or true
--- original size: 3899, stripped down to: 2984
+-- original size: 3913, stripped down to: 2998
if not modules then modules={} end modules ['data-use']={
version=1.001,
@@ -15531,7 +15421,7 @@ end
statistics.register("used config file",function() return caches.configfiles() end)
statistics.register("used cache path",function() return caches.usedpaths() end)
function statistics.savefmtstatus(texname,formatbanner,sourcefile)
- local enginebanner=status.banner
+ local enginebanner=status.list().banner
if formatbanner and enginebanner and sourcefile then
local luvname=file.replacesuffix(texname,"luv")
local luvdata={
@@ -15544,7 +15434,7 @@ function statistics.savefmtstatus(texname,formatbanner,sourcefile)
end
end
function statistics.checkfmtstatus(texname)
- local enginebanner=status.banner
+ local enginebanner=status.list().banner
if enginebanner and texname then
local luvname=file.replacesuffix(texname,"luv")
if lfs.isfile(luvname) then
@@ -16168,7 +16058,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-aux"] = package.loaded["data-aux"] or true
--- original size: 2431, stripped down to: 1996
+-- original size: 2394, stripped down to: 2005
if not modules then modules={} end modules ['data-aux']={
version=1.001,
@@ -16182,8 +16072,8 @@ local type,next=type,next
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local resolvers=resolvers
local report_scripts=logs.reporter("resolvers","scripts")
-function resolvers.updatescript(oldname,newname)
- local scriptpath="context/lua"
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="scripts/context/lua"
newname=file.addsuffix(newname,"lua")
local oldscript=resolvers.cleanpath(oldname)
if trace_locating then
@@ -16791,8 +16681,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 689993
--- stripped bytes : 244562
+-- original bytes : 685064
+-- stripped bytes : 242353
-- end library merge
@@ -16891,18 +16781,17 @@ local ownlibs = { -- order can be made better
}
--- c:/data/develop/tex-context/tex/texmf-win64/bin/../../texmf-context/tex/context/base/data-tmf.lua
--- c:/data/develop/context/sources/data-tmf.lua
-
local ownlist = {
- -- '.',
- -- ownpath ,
- owntree .. "/../../../../context/sources", -- HH's development path
+ '.',
+ ownpath ,
+ ownpath .. "/../sources", -- HH's development path
owntree .. "/../../texmf-local/tex/context/base",
owntree .. "/../../texmf-context/tex/context/base",
+ owntree .. "/../../texmf-dist/tex/context/base",
owntree .. "/../../texmf/tex/context/base",
owntree .. "/../../../texmf-local/tex/context/base",
owntree .. "/../../../texmf-context/tex/context/base",
+ owntree .. "/../../../texmf-dist/tex/context/base",
owntree .. "/../../../texmf/tex/context/base",
}
@@ -17809,12 +17698,6 @@ if e_argument("ansi") then
status_nop = formatters["%-15s :\n"],
}
- local script = e_argument("script") or e_argument("scripts")
-
- if type(script) == "string" then
- logs.writer("]0;"..script.."") -- for Alan to test
- end
-
end
if e_argument("script") or e_argument("scripts") then
@@ -17838,16 +17721,8 @@ elseif e_argument("selfmerge") then
runners.loadbase()
local found = locate_libs()
-
if found then
- local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
- if lfs.isfile(mtxrun) then
- utilities.merger.selfmerge(mtxrun,own.libs,{ found })
- application.report("runner updated on resolved path: %s",mtxrun)
- else
- utilities.merger.selfmerge(own.name,own.libs,{ found })
- application.report("runner updated on relative path: %s",own.name)
- end
+ utilities.merger.selfmerge(own.name,own.libs,{ found })
end
elseif e_argument("selfclean") then
@@ -17855,15 +17730,7 @@ elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
-
- local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
- if lfs.isfile(mtxrun) then
- utilities.merger.selfclean(mtxrun)
- application.report("runner cleaned on resolved path: %s",mtxrun)
- else
- utilities.merger.selfclean(own.name)
- application.report("runner cleaned on relative path: %s",own.name)
- end
+ utilities.merger.selfclean(own.name)
elseif e_argument("selfupdate") then
@@ -18105,8 +17972,6 @@ elseif e_argument("version") then
application.version()
- application.report("source path",environment.ownbin)
-
elseif e_argument("directives") then
directives.show()
diff --git a/scripts/context/stubs/mswin/mtxrunjit.exe b/scripts/context/stubs/mswin/mtxrunjit.exe
deleted file mode 100644
index 0e7882cf9..000000000
--- a/scripts/context/stubs/mswin/mtxrunjit.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxworks.exe b/scripts/context/stubs/mswin/mtxworks.exe
index 0e7882cf9..faae5caa7 100644
--- a/scripts/context/stubs/mswin/mtxworks.exe
+++ b/scripts/context/stubs/mswin/mtxworks.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/pstopdf.exe b/scripts/context/stubs/mswin/pstopdf.exe
index 0e7882cf9..faae5caa7 100644
--- a/scripts/context/stubs/mswin/pstopdf.exe
+++ b/scripts/context/stubs/mswin/pstopdf.exe
Binary files differ
diff --git a/scripts/context/stubs/setup/setuptex.bat b/scripts/context/stubs/mswin/setuptex.bat
index b61fd4494..b61fd4494 100644
--- a/scripts/context/stubs/setup/setuptex.bat
+++ b/scripts/context/stubs/mswin/setuptex.bat
diff --git a/scripts/context/stubs/mswin/texexec.exe b/scripts/context/stubs/mswin/texexec.exe
index 0e7882cf9..faae5caa7 100644
--- a/scripts/context/stubs/mswin/texexec.exe
+++ b/scripts/context/stubs/mswin/texexec.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/texmfstart.exe b/scripts/context/stubs/mswin/texmfstart.exe
index 0e7882cf9..faae5caa7 100644
--- a/scripts/context/stubs/mswin/texmfstart.exe
+++ b/scripts/context/stubs/mswin/texmfstart.exe
Binary files differ
diff --git a/scripts/context/stubs/setup/setuptex b/scripts/context/stubs/setup/setuptex
deleted file mode 100644
index d41e36707..000000000
--- a/scripts/context/stubs/setup/setuptex
+++ /dev/null
@@ -1,167 +0,0 @@
-# Example setup file for ConTeXt distribution
-#
-# Author: Hans Hagen
-# Patches: Arthur R. & Mojca M.
-#
-# Usage:
-# . setuptex [texroot]
-#
-# On the first run also execute:
-# mktexlsr
-# texexec --make --alone
-
-#
-# PLATFORM
-#
-
-# we will try to guess the platform first
-# (needs to be kept in sync with first-setup.sh and mtxrun)
-# if yours is missing, let us know
-
-system=`uname -s`
-cpu=`uname -m`
-
-case "$system" in
- # linux
- Linux)
- case "$cpu" in
- i*86) platform="linux" ;;
- x86_64|ia64) platform="linux-64" ;;
- # a little bit of cheating with ppc64 (won't work on Gentoo)
- ppc|ppc64) platform="linux-ppc" ;;
- *) platform="unknown" ;;
- esac ;;
- # Mac OS X
- Darwin)
- case "$cpu" in
- i*86) platform="osx-intel" ;;
- x86_64) platform="osx-64" ;;
- ppc*|powerpc|power*|Power*) platform="osx-ppc" ;;
- *) platform="unknown" ;;
- esac ;;
- # FreeBSD
- FreeBSD|freebsd)
- case "$cpu" in
- i*86) platform="freebsd" ;;
- x86_64) platform="freebsd" ;;
- amd64) platform="freebsd-amd64" ;;
- *) platform="unknown" ;;
- esac ;;
- # kFreeBSD (Debian)
- GNU/kFreeBSD)
- case "$cpu" in
- i*86) platform="kfreebsd-i386" ;;
- x86_64|amd64) platform="kfreebsd-amd64" ;;
- *) platform="unknown" ;;
- esac ;;
- # cygwin
- CYGWIN)
- case "$cpu" in
- i*86) platform="cygwin" ;;
- x86_64|ia64) platform="cygwin-64" ;;
- *) platform="unknown" ;;
- esac ;;
- # SunOS/Solaris
- SunOS)
- case "$cpu" in
- sparc) platform="solaris-sparc" ;;
- i86pc) platform="solaris-intel" ;;
- *) platform="unknown" ;;
- esac ;;
- *) platform="unknown"
-esac
-
-# temporary fix for Snow Leopard
-if test "$platform" = "osx-intel"; then
- # running Snow Leopard or later
- if test `uname -r|cut -f1 -d"."` -ge 10 ; then
- # working on 64-bit hardware
- if test `sysctl -n hw.cpu64bit_capable` = 1; then
- platform="osx-64"
- fi
- fi
-fi
-
-if test "$platform" = "unknown" ; then
- echo "Error: your system \"$system $cpu\" is not supported yet."
- echo "Please report to the ConTeXt mailing-list (ntg-context@ntg.nl)"
-fi
-
-#
-# PATH
-#
-
-# this resolves to path of the setuptex script
-# We use $0 for determine the path to the script, except for:
-# * bash where $0 always is bash; here we use BASH_SOURCE
-# * ksh93 where we use ${.sh.file}
-# Thanks to Vasile Gaburici and Alessandro Perucchi for reporting this
-# * http://www.ntg.nl/pipermail/ntg-context/2008/033953.html
-# * http://www.ntg.nl/pipermail/ntg-context/2012/068658.html
-if [ z"$BASH_SOURCE" != z ]; then
- SCRIPTPATH="$BASH_SOURCE"
-elif [ z"$KSH_VERSION" != z ]; then
- SCRIPTPATH="${.sh.file}"
-else
- SCRIPTPATH="$0"
-fi
-
-OWNPATH=$(cd -P -- "$(dirname -- "$SCRIPTPATH")" && pwd -P)
-
-# but one can also call
-# . setuptex path-to-tree
-
-TEXROOT=""
-# first check if any path has been provided in the argument, and try to use that one
-if [ $# -ne 0 ] ; then
- # TODO: resolve any errors
- ARGPATH=$(cd -P -- "$(dirname -- "$1")" && pwd -P) && ARGPATH=$ARGPATH/$(basename -- "$1")
- if test -f "$ARGPATH/texmf/tex/plain/base/plain.tex" ; then
- if [ -d "$ARGPATH/texmf-$platform/bin" ]; then
- TEXROOT="$ARGPATH"
- else
- echo "Binaries for platform '$platform' are missing."
- echo "(There is no folder \"$ARGPATH/texmf-$platform/bin\")"
- fi
- else
- echo "The argument \"$ARGPATH\" is not a valid TEXROOT path."
- echo "(There is no file \"$ARGPATH/texmf/tex/plain/base/plain.tex\")"
-
- if [ -f "$OWNPATH/texmf/tex/plain/base/plain.tex" ]; then
- TEXROOT="$OWNPATH"
- fi
- fi
-else
- if [ -f "$OWNPATH/texmf/tex/plain/base/plain.tex" ]; then
- if [ -d "$OWNPATH/texmf-$platform/bin" ]; then
- TEXROOT="$OWNPATH"
- else
- echo "Binaries for platform '$platform' are missing."
- echo "(There is no folder \"$OWNPATH/texmf-$platform/bin\")"
- fi
- else
- echo "\"$OWNPATH\" is not a valid TEXROOT path."
- echo "(There is no file \"$OWNPATH/texmf/tex/plain/base/plain.tex\")"
- fi
-fi
-
-if [ "$TEXROOT" != "" ]; then
- # for Alan Braslau's server :)
- if [ "x$PS1" != "x" ] ; then
- echo "Setting \"$TEXROOT\" as ConTeXt root."
- fi
-
-# ConTeXt binaries have to be added to PATH
-TEXMFOS=$TEXROOT/texmf-$platform
-export PATH=$TEXMFOS/bin:$PATH
-
-# unset variables that won't be used lately
-unset platform cpu system OWNPATH SCRIPTPATH ARGPATH TEXMFOS
-
-# not sure why this would be needed
-# export CTXMINIMAL=yes
-
-else
- echo "provide a proper tex root (like '. setuptex /something/tex')" ;
-fi
-
diff --git a/scripts/context/stubs/setup/setuptex.csh b/scripts/context/stubs/setup/setuptex.csh
deleted file mode 100644
index c1160675f..000000000
--- a/scripts/context/stubs/setup/setuptex.csh
+++ /dev/null
@@ -1,164 +0,0 @@
-# Example setup file for ConTeXt distribution
-#
-# Author: Hans Hagen
-# Patches: Arthur R. & Mojca M.
-# (t)csh version: Alan B.
-#
-# Usage :
-# source setuptex.csh [texroot]
-#
-# On the first run also execute:
-# mktexlsr
-# texexec --make --alone
-
-echo "We are considering removing setuptex.csh in case that nobody uses it."
-echo "If you still use this file please drop us some mail at"
-echo " gardeners (at) contextgarden (dot) net"
-echo "If we don't get any response, we will delete it in near future."
-
-#
-# PLATFORM
-#
-
-# we will try to guess the platform first
-# (needs to be kept in sync with first-setup.sh and mtxrun)
-# if yours is missing, let us know
-
-set system=`uname -s`
-set cpu=`uname -m`
-
-switch ( $system )
- # linux
- case Linux:
- switch ( $cpu )
- case i*86:
- set platform="linux"
- breaksw
- case x86_64:
- case ia64:
- set platform="linux-64"
- breaksw
- case ppc:
- case ppc64:
- set platform="linux-ppc"
- breaksw
- default:
- set platform="unknown"
- endsw
- breaksw
- # Mac OS X
- case Darwin:
- switch ( $cpu )
- case i*86:
- set platform="osx-intel"
- breaksw
- case x86_64:
- set platform="osx-64"
- breaksw
- case ppc*:
- case powerpc:
- case power*:
- case Power*:
- set platform="osx-ppc"
- breaksw
- default:
- set platform="unknown"
- endsw
- breaksw
- # FreeBSD
- case FreeBSD:
- case freebsd:
- switch ( $cpu )
- case i*86:
- set platform="freebsd"
- breaksw
- case x86_64:
- set platform="freebsd"
- breaksw
- case amd64:
- set platform="freebsd-amd64"
- breaksw
- default:
- set platform="unknown"
- endsw
- breaksw
- # cygwin
- case CYGWIN:
- switch ( $cpu )
- case i*86:
- set platform="cygwin"
- breaksw
- case x86_64:
- case ia64:
- set platform="cygwin-64"
- breaksw
- default:
- set platform="unknown"
- endsw
- breaksw
- # SunOS/Solaris
- case SunOS:
- switch ( $cpu )
- case sparc:
- set platform="solaris-sparc"
- breaksw
- case i86pc:
- set platform="solaris-intel"
- default:
- set platform="unknown"
- endsw
- breaksw
- # Other
- default:
- set platform="unknown"
-endsw
-
-if ( $platform == "unknown" ) then
- echo Error: your system \"$system $cpu\" is not supported yet.
- echo Please report to the ConTeXt mailing-list (ntg-context@ntg.nl).
-endif
-
-#
-# PATH
-#
-
-# this resolves to path of the setuptex script
-# We use $0 for determine the path to the script, except for bash and (t)csh where $0
-# always is bash or (t)csh.
-
-# but one can also call
-# . setuptex path-to-tex-tree
-
-# first check if any path has been provided in the argument, and try to use that one
-if ( $# > 0 ) then
- setenv TEXROOT $1
-else
- # $_ should be `history -h 1` but doesn't seem to work...
- set cmd=`history -h 1`
- if ( $cmd[2]:h == $cmd[2]:t ) then
- setenv TEXROOT $cwd
- else
- setenv TEXROOT $cmd[2]:h
- endif
- unset cmd
-endif
-cd $TEXROOT; setenv TEXROOT $cwd; cd -
-
-if ( -f "$TEXROOT/texmf/tex/plain/base/plain.tex" ) then
- echo Setting \"$TEXROOT\" as TEXROOT.
-else
- echo \"$TEXROOT\" is not a valid TEXROOT path.
- echo There is no file \"$TEXROOT/texmf/tex/plain/base/plain.tex\".
- echo Please provide a proper tex root (like \"source setuptex /path/tex\")
- unsetenv TEXROOT
- exit
-endif
-
-unsetenv TEXINPUTS MPINPUTS MFINPUTS
-
-# ConTeXt binaries have to be added to PATH
-setenv TEXMFOS $TEXROOT/texmf-$platform
-setenv PATH $TEXMFOS/bin:$PATH
-# TODO: we could set OSFONTDIR on Mac for example
-
-# setenv CTXMINIMAL yes
diff --git a/scripts/context/stubs/source/mtxrun_dll.c b/scripts/context/stubs/source/mtxrun_dll.c
index fc2e260f5..400ed6778 100644
--- a/scripts/context/stubs/source/mtxrun_dll.c
+++ b/scripts/context/stubs/source/mtxrun_dll.c
@@ -55,6 +55,7 @@
return 1; \
}
+char texlua_name[] = "texlua"; // just a bare name, luatex strips the rest anyway
static char cmdline[MAX_CMD];
static char dirpath[MAX_PATH];
static char progname[MAX_PATH];
@@ -69,12 +70,12 @@ int main( int argc, char *argv[] )
__declspec(dllexport) int dllrunscript( int argc, char *argv[] )
#endif
{
- char *binary, *s, *luatexfname, *argstr, **lua_argv;
+ char *s, *luatexfname, *argstr, **lua_argv;
int k, quoted, lua_argc;
int passprogname = 0;
- unsigned char is_jit=0;
// directory of this module/executable
+
HMODULE module_handle = GetModuleHandle( "mtxrun.dll" );
// if ( module_handle == NULL ) exe path will be used, which is OK too
k = (int) GetModuleFileName( module_handle, dirpath, MAX_PATH );
@@ -85,20 +86,13 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
*(++s) = '\0'; //remove file name, leave trailing backslash
// program name
+
k = strlen(argv[0]);
while ( k && (argv[0][k-1] != '/') && (argv[0][k-1] != '\\') ) k--;
strcpy(progname, &argv[0][k]);
s = progname;
if ( s = strrchr(s, '.') ) *s = '\0'; // remove file extension part
- /* check "jit" : strlen("jit") = 3 */
- if (strncmp(progname + strlen(progname) - 3, "jit", 3) == 0) {
- is_jit = 1;
- progname[strlen(progname) - 3]='\0';
- }
- else
- is_jit = 0;
-
// script path
strcpy( scriptpath, dirpath );
@@ -120,110 +114,44 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
strcat( scriptpath, "mtxrun.lua" );
passprogname = 1;
}
+
if ( GetFileAttributes(scriptpath) == INVALID_FILE_ATTRIBUTES )
DIE( "file not found: %s\n", scriptpath );
- // find luatex.exe /luajittex.exe
- if ( SearchPath(
- dirpath, // was getenv( "PATH" ), // path to search (optional)
- (is_jit ? "luajittex.exe":"luatex.exe"), // file name to search
- NULL, // file extension to add (optional)
- MAX_PATH, // output buffer size
- luatexpath, // output buffer pointer
- &luatexfname ) // pointer to a file part in the output buffer (optional)
- ) {
- binary = (is_jit ? "luajittex.exe":"luatex.exe");
- } else if ( SearchPath(
- dirpath, // was getenv( "PATH" ), // path to search (optional)
- (is_jit ? "texluajit.exe":"texlua.exe"), // file name to search
- NULL, // file extension to add (optional)
- MAX_PATH, // output buffer size
- luatexpath, // output buffer pointer
- &luatexfname ) // pointer to a file part in the output buffer (optional)
- ) {
- binary = (is_jit ? "texluajit.exe":"texlua.exe");
- } else if ( SearchPath(
- getenv("PATH"), // was dirpath, // path to search (optional)
- (is_jit ? "luajittex.exe":"luatex.exe"), // file name to search
- NULL, // file extension to add (optional)
- MAX_PATH, // output buffer size
- luatexpath, // output buffer pointer
- &luatexfname ) // pointer to a file part in the output buffer (optional)
- ) {
- binary = (is_jit ? "luajittex.exe":"luatex.exe");
- } else if ( SearchPath(
- getenv("PATH") , // was dirpath, // path to search (optional)
- (is_jit ? "texluajit.exe":"texlua.exe"), // file name to search
- NULL, // file extension to add (optional)
- MAX_PATH, // output buffer size
- luatexpath, // output buffer pointer
- &luatexfname ) // pointer to a file part in the output buffer (optional)
- ) {
- binary = (is_jit ? "texluajit.exe":"texlua.exe");
- }else {
- DIE( "unable to locate texlua.exe on the search path" );
- }
-
- /* if ( SearchPath( */
- /* dirpath, // was getenv( "PATH" ), // path to search (optional) */
- /* (is_jit ? "luajittex.exe":"luatex.exe"), // file name to search */
- /* NULL, // file extension to add (optional) */
- /* MAX_PATH, // output buffer size */
- /* luatexpath, // output buffer pointer */
- /* &luatexfname ) // pointer to a file part in the output buffer (optional) */
- /* ) { */
- /* binary = (is_jit ? "luajittex.exe":"luatex.exe"); */
- /* }else if ( SearchPath( */
- /* getenv("PATH"), // was dirpath, // path to search (optional) */
- /* (is_jit ? "luajittex.exe":"luatex.exe"), // file name to search */
- /* NULL, // file extension to add (optional) */
- /* MAX_PATH, // output buffer size */
- /* luatexpath, // output buffer pointer */
- /* &luatexfname ) // pointer to a file part in the output buffer (optional) */
- /* ) { */
- /* binary = (is_jit ? "luajittex.exe":"luatex.exe"); */
- /* }else if ( SearchPath( */
- /* dirpath, // was getenv( "PATH" ), // path to search (optional) */
- /* (is_jit ? "texluajit.exe":"texlua.exe"), // file name to search */
- /* NULL, // file extension to add (optional) */
- /* MAX_PATH, // output buffer size */
- /* luatexpath, // output buffer pointer */
- /* &luatexfname ) // pointer to a file part in the output buffer (optional) */
- /* ) { */
- /* binary = (is_jit ? "texluajit.exe":"texlua.exe"); */
- /* }else if ( SearchPath( */
- /* getenv("PATH") , // was dirpath, // path to search (optional) */
- /* (is_jit ? "texluajit.exe":"texlua.exe"), // file name to search */
- /* NULL, // file extension to add (optional) */
- /* MAX_PATH, // output buffer size */
- /* luatexpath, // output buffer pointer */
- /* &luatexfname ) // pointer to a file part in the output buffer (optional) */
- /* ) { */
- /* binary = (is_jit ? "texluajit.exe":"texlua.exe"); */
- /* }else { */
- /* DIE( "unable to locate texlua.exe on the search path" ); */
- /* } */
-
-
-
+ // find texlua.exe
+
+ if ( !SearchPath(
+ getenv( "PATH" ), // path to search (optional)
+ "texlua.exe", // file name to search
+ NULL, // file extension to add (optional)
+ MAX_PATH, // output buffer size
+ luatexpath, // output buffer pointer
+ &luatexfname ) // pointer to a file part in the output buffer (optional)
+ )
+ if ( !SearchPath(
+ dirpath, // path to search (optional)
+ "texlua.exe", // file name to search
+ NULL, // file extension to add (optional)
+ MAX_PATH, // output buffer size
+ luatexpath, // output buffer pointer
+ &luatexfname ) // pointer to a file part in the output buffer (optional)
+ )
+ DIE( "unable to locate texlua.exe on the search path" );
// link directly with luatex.dll if available in texlua's dir
- strcpy( luatexfname, (is_jit ? "luajittex.dll":"luatex.dll") );
+
+ strcpy( luatexfname, "luatex.dll" );
if ( dllluatex = LoadLibrary(luatexpath) )
{
- mainlikeproc dllluatexmain = (mainlikeproc) GetProcAddress( dllluatex, (is_jit ? "dllluajittexmain": "dllluatexmain" ));
+ mainlikeproc dllluatexmain = (mainlikeproc) GetProcAddress( dllluatex, "dllluatexmain" );
if ( dllluatexmain == NULL )
- if (is_jit)
- DIE( "unable to locate dllluatexmain procedure in luajittex.dll" )
- else
- DIE( "unable to locate dllluatexmain procedure in luatex.dll" );
+ DIE( "unable to locate dllluatexmain procedure in luatex.dll" );
// set up argument list for texlua script
- lua_argv = (char **)malloc( (argc + 5) * sizeof(char *) );
+ lua_argv = (char **)malloc( (argc + 4) * sizeof(char *) );
if ( lua_argv == NULL ) DIE( "out of memory\n" );
- lua_argv[lua_argc=0] = luatexfname;
- lua_argv[++lua_argc] = "--luaonly";
+ lua_argv[lua_argc=0] = texlua_name;
lua_argv[++lua_argc] = scriptpath; // script to execute
if (passprogname) {
lua_argv[++lua_argc] = "--script";
@@ -234,15 +162,15 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
// call texlua interpreter
// dllluatexmain never returns, but we pretend that it does
-
+
k = dllluatexmain( lua_argc, lua_argv );
if (lua_argv) free( lua_argv );
return k;
}
+
// we are still here, so no luatex.dll; spawn texlua.exe instead
- strcpy( luatexfname,binary);
- strcpy( cmdline, " --luaonly " );
+ strcpy( luatexfname, "texlua.exe" );
strcpy( cmdline, "\"" );
strcat( cmdline, luatexpath );
strcat( cmdline, "\" \"" );
@@ -252,6 +180,7 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
strcat( cmdline, " --script " );
strcat( cmdline, progname );
}
+
argstr = GetCommandLine(); // get the command line of this process
if ( argstr == NULL ) DIE( "unable to retrieve the command line string\n" );
@@ -280,6 +209,7 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
si.hStdOutput = GetStdHandle( STD_OUTPUT_HANDLE );
si.hStdError = GetStdHandle( STD_ERROR_HANDLE );
ZeroMemory( &pi, sizeof(pi) );
+
if( !CreateProcess(
NULL, // module name (uses command line if NULL)
cmdline, // command line
@@ -292,6 +222,7 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
&si, // STARTUPINFO structure
&pi ) // PROCESS_INFORMATION structure
) DIE( "command execution failed: %s\n", cmdline );
+
DWORD ret = 0;
CloseHandle( pi.hThread ); // thread handle is not needed
if ( WaitForSingleObject( pi.hProcess, INFINITE ) == WAIT_OBJECT_0 ) {
@@ -301,6 +232,7 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
CloseHandle( pi.hProcess );
// propagate exit code from the child process
+
return ret;
}
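
The hunk above drops the luajit handling from mtxrun_dll.c: the runner now always looks for texlua.exe (first on PATH, then in its own directory), prefers calling dllluatexmain from a luatex.dll sitting next to it, and only otherwise spawns texlua.exe. A minimal standalone sketch of that lookup-and-fallback flow follows; it is not the real mtxrun_dll.c, it assumes an ANSI (non-UNICODE) build, the helper locate_texlua and the printed messages are illustrative only, and it simplifies the second lookup to "." where the real runner uses its own directory (dirpath).

/* sketch only: locate texlua.exe, prefer an in-place luatex.dll,
   otherwise fall back to spawning the exe */
#include <windows.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static DWORD locate_texlua( char *path, char **namepart )
{
    /* try the regular search path first; the real runner then falls
       back to its own directory, here simplified to "." */
    DWORD n = SearchPathA( getenv( "PATH" ), "texlua.exe", NULL,
                           MAX_PATH, path, namepart );
    if ( n == 0 )
        n = SearchPathA( ".", "texlua.exe", NULL, MAX_PATH, path, namepart );
    return n;
}

int main( void )
{
    char luatexpath[MAX_PATH];
    char *luatexfname = NULL;

    if ( !locate_texlua( luatexpath, &luatexfname ) ) {
        fprintf( stderr, "unable to locate texlua.exe on the search path\n" );
        return 1;
    }

    /* namepart points into luatexpath, so overwriting it swaps the
       file name while keeping the directory part of the path */
    strcpy( luatexfname, "luatex.dll" );
    HMODULE dll = LoadLibraryA( luatexpath );
    if ( dll != NULL && GetProcAddress( dll, "dllluatexmain" ) != NULL ) {
        printf( "would call dllluatexmain from %s\n", luatexpath );
    } else {
        strcpy( luatexfname, "texlua.exe" );
        printf( "would spawn %s via CreateProcess\n", luatexpath );
    }
    return 0;
}

Reusing the file-name pointer returned by SearchPath is what lets the runner swap texlua.exe for luatex.dll in place, exactly as the diff does with strcpy( luatexfname, "luatex.dll" ).
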
diff --git a/scripts/context/stubs/source/readme.txt b/scripts/context/stubs/source/readme.txt
index 72892ee2f..354d85b09 100644
--- a/scripts/context/stubs/source/readme.txt
+++ b/scripts/context/stubs/source/readme.txt
@@ -1,40 +1,36 @@
Copyright:
-The originally 'runscript' program was written by in 2009 by T.M.Trzeciak and is
-public domain. This derived mtxrun program is an adapted version by Hans Hagen and
-Luigi Scarso.
+The original 'runscript' program was written in 2009 by
+T.M.Trzeciak and is in the public domain. This derived mtxrun
+program is an adapted version by Hans Hagen.
Comment:
-In ConTeXt MkIV we have two core scripts: luatools.lua and mtxrun.lua where the
-second one is used to launch other scripts. The mtxrun.exe program calls luatex.exe.
-
+In ConTeXt MkIV we have two core scripts: luatools.lua and
+mtxrun.lua, of which the second is used to launch other scripts.
Normally a user will use a call like:
- mtxrun --script font --reload
-
-Here mtxrun is a lua script. In order to avoid the usage of a cmd file on windows this
-runner will start texlua directly. In TeXlive a runner is added for each cmd file but
-we don't want that overhead (and extra files). By using an exe we can call these
-scripts in batch files without the need for using call.
-
-The mtxrun.exe file can be copied to a mtxrunjit.exe file in which case luajittex.exe
-is called.
+mtxrun --script font --reload
- mtxrunjit --script font --reload
+Here mtxrun is a Lua script. To avoid the need for a cmd file on
+Windows, this runner starts texlua directly. In TeX Live a runner
+is added for each cmd file, but we don't want that overhead (and
+the extra files). By using an exe we can call these scripts from
+batch files without needing 'call'.
-We also don't want to use other runners, like those that use kpse to locate the script
-as this is exactly what mtxrun itself is doing already. Therefore the runscript program
-is adapted to a more direct approach suitable for mtxrun.
+We also don't want to use other runners, like those that use kpse
+to locate the script, as that is exactly what mtxrun itself already
+does. Therefore the runscript program is adapted to a more direct
+approach suitable for mtxrun.
Compilation:
with gcc (size optimized):
- gcc -Os -s -shared -o mtxrun.dll mtxrun_dll.c
- gcc -Os -s -o mtxrun.exe mtxrun_exe.c -L./ -lmtxrun
+gcc -Os -s -shared -o mtxrun.dll mtxrun_dll.c
+gcc -Os -s -o mtxrun.exe mtxrun_exe.c -L./ -lmtxrun
with tcc (ver. 0.9.24), extra small size
- tcc -shared -o runscript.dll runscript_dll.c
- tcc -o runscript.exe runscript_exe.c runscript.def
+tcc -shared -o runscript.dll runscript_dll.c
+tcc -o runscript.exe runscript_exe.c runscript.def
diff --git a/scripts/context/stubs/unix/contextjit b/scripts/context/stubs/unix/contextjit
deleted file mode 100644
index 5ac1947c7..000000000
--- a/scripts/context/stubs/unix/contextjit
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-
-luajittex --luaonly $(dirname $0)/mtxrun --script context "$@"
-
-# luajittex --luaonly ${0%contextjit}mtxrun --script context "$@"
diff --git a/scripts/context/stubs/unix/ctxtools b/scripts/context/stubs/unix/ctxtools
new file mode 100644
index 000000000..2e6bd4afa
--- /dev/null
+++ b/scripts/context/stubs/unix/ctxtools
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --script ctxtools "$@"
diff --git a/scripts/context/stubs/unix/mptopdf b/scripts/context/stubs/unix/mptopdf
new file mode 100644
index 000000000..147333740
--- /dev/null
+++ b/scripts/context/stubs/unix/mptopdf
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --script mptopdf "$@"
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 3372831b3..0ff2d2897 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -56,7 +56,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lua"] = package.loaded["l-lua"] or true
--- original size: 3247, stripped down to: 1763
+-- original size: 3123, stripped down to: 1694
if not modules then modules={} end modules ['l-lua']={
version=1.001,
@@ -136,9 +136,6 @@ function optionalrequire(...)
return result
end
end
-if lua then
- lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
-end
end -- of closure
@@ -437,7 +434,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
--- original size: 29808, stripped down to: 16182
+-- original size: 29245, stripped down to: 15964
if not modules then modules={} end modules ['l-lpeg']={
version=1.001,
@@ -453,9 +450,7 @@ local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.forma
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-if setinspector then
- setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-end
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -524,11 +519,9 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
-local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
patterns.stripper=stripper
-patterns.fullstripper=fullstripper
patterns.collapser=collapser
patterns.lowercase=lowercase
patterns.uppercase=uppercase
@@ -751,7 +744,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction,isutf)
+function lpeg.finder(lst,makefunction)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -767,11 +760,7 @@ function lpeg.finder(lst,makefunction,isutf)
else
pattern=P(lst)
end
- if isutf then
- pattern=((utf8char or 1)-pattern)^0*pattern
- else
- pattern=(1-pattern)^0*pattern
- end
+ pattern=(1-pattern)^0*pattern
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -1082,7 +1071,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-string"] = package.loaded["l-string"] or true
--- original size: 5671, stripped down to: 2827
+-- original size: 5547, stripped down to: 2708
if not modules then modules={} end modules ['l-string']={
version=1.001,
@@ -1118,15 +1107,11 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
-local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
-function string.fullstrip(str)
- return lpegmatch(fullstripper,str) or ""
-end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -1187,7 +1172,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-table"] = package.loaded["l-table"] or true
--- original size: 31142, stripped down to: 20283
+-- original size: 31113, stripped down to: 20256
if not modules then modules={} end modules ['l-table']={
version=1.001,
@@ -2015,9 +2000,7 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-if setinspector then
- setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
-end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
@@ -3524,7 +3507,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-url"] = package.loaded["l-url"] or true
--- original size: 12292, stripped down to: 5585
+-- original size: 11993, stripped down to: 5584
if not modules then modules={} end modules ['l-url']={
version=1.001,
@@ -3551,7 +3534,7 @@ local hexdigit=R("09","AF","af")
local plus=P("+")
local nothing=Cc("")
local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
-local escaped=(plus/" ")+escapedchar
+local escaped=(plus/" ")+escapedchar
local noslash=P("/")/""
local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
@@ -3735,7 +3718,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-dir"] = package.loaded["l-dir"] or true
--- original size: 14768, stripped down to: 9107
+-- original size: 14229, stripped down to: 8740
if not modules then modules={} end modules ['l-dir']={
version=1.001,
@@ -3758,7 +3741,6 @@ local isdir=lfs.isdir
local isfile=lfs.isfile
local currentdir=lfs.currentdir
local chdir=lfs.chdir
-local mkdir=lfs.mkdir
local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
if not isdir then
function isdir(name)
@@ -3931,27 +3913,16 @@ end
local make_indeed=true
if onwindows then
function dir.mkdirs(...)
- local n=select("#",...)
- local str
- if n==1 then
- str=select(1,...)
- if isdir(str) then
- return str,true
- end
- else
- str=""
- for i=1,n do
- local s=select(i,...)
- local s=select(i,...)
- if s=="" then
- elseif str=="" then
- str=s
- else
- str=str.."/"..s
- end
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
end
end
- local pth=""
local drive=false
local first,middle,last=match(str,"^(//)(//*)(.*)$")
if first then
@@ -3986,30 +3957,21 @@ if onwindows then
pth=pth.."/"..s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth,(isdir(pth)==true)
end
else
function dir.mkdirs(...)
- local n=select("#",...)
- local str,pth
- if n==1 then
- str=select(1,...)
- if isdir(str) then
- return str,true
- end
- else
- str=""
- for i=1,n do
- local s=select(i,...)
- if s and s~="" then
- if str~="" then
- str=str.."/"..s
- else
- str=s
- end
+ local str,pth="",""
+ for i=1,select("#",...) do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
+ else
+ str=s
end
end
end
@@ -4024,7 +3986,7 @@ else
pth=pth.."/"..s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -4032,7 +3994,7 @@ else
for s in gmatch(str,"[^/]+") do
pth=pth.."/"..s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -4840,7 +4802,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-str"] = package.loaded["util-str"] or true
--- original size: 29502, stripped down to: 16632
+-- original size: 26857, stripped down to: 15062
if not modules then modules={} end modules ['util-str']={
version=1.001,
@@ -4859,19 +4821,8 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=nil
-if _LUAVERSION<5.2 then
- loadstripped=function(str,shortcuts)
- return load(str)
- end
-else
- loadstripped=function(str,shortcuts)
- if shortcuts then
- return load(dump(load(str),true),nil,nil,shortcuts)
- else
- return load(dump(load(str),true))
- end
- end
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
end
if not number then number={} end
local stripper=patterns.stripzeros
@@ -5021,58 +4972,31 @@ function number.sparseexponent(f,n)
end
return tostring(n)
end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+local formattednumber = number.formatted
+local sparseexponent = number.sparseexponent
+]]
local template=[[
%s
%s
return function(%s) return %s end
]]
-local preamble,environment="",{}
-if _LUAVERSION<5.2 then
- preamble=[[
-local lpeg=lpeg
-local type=type
-local tostring=tostring
-local tonumber=tonumber
-local format=string.format
-local concat=table.concat
-local signed=number.signed
-local points=number.points
-local basepoints= number.basepoints
-local utfchar=utf.char
-local utfbyte=utf.byte
-local lpegmatch=lpeg.match
-local nspaces=string.nspaces
-local tracedchar=string.tracedchar
-local autosingle=string.autosingle
-local autodouble=string.autodouble
-local sequenced=table.sequenced
-local formattednumber=number.formatted
-local sparseexponent=number.sparseexponent
- ]]
-else
- environment={
- global=global or _G,
- lpeg=lpeg,
- type=type,
- tostring=tostring,
- tonumber=tonumber,
- format=string.format,
- concat=table.concat,
- signed=number.signed,
- points=number.points,
- basepoints=number.basepoints,
- utfchar=utf.char,
- utfbyte=utf.byte,
- lpegmatch=lpeg.match,
- nspaces=string.nspaces,
- tracedchar=string.tracedchar,
- autosingle=string.autosingle,
- autodouble=string.autodouble,
- sequenced=table.sequenced,
- formattednumber=number.formatted,
- sparseexponent=number.sparseexponent,
- }
-end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -5394,8 +5318,8 @@ local builder=Cs { "start",
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
-)
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
local function make(t,str)
local f
local p
@@ -5407,7 +5331,7 @@ local function make(t,str)
p=lpegmatch(builder,str,1,"..",t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p,t._environment_)()
+ f=loadstripped(p)()
else
f=function() return str end
end
@@ -5419,22 +5343,10 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-if _LUAVERSION<5.2 then
- function strings.formatters.new()
- local t={ _extensions_={},_preamble_=preamble,_environment_={},_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
-else
- function strings.formatters.new()
- local e={}
- for k,v in next,environment do
- e[k]=v
- end
- local t={ _extensions_={},_preamble_="",_environment_=e,_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
end
local formatters=strings.formatters.new()
string.formatters=formatters
@@ -5442,12 +5354,8 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if type(preamble)=="string" then
+ if preamble then
t._preamble_=preamble.."\n"..t._preamble_
- elseif type(preamble)=="table" then
- for k,v in next,preamble do
- t._environment_[k]=v
- end
end
end
end
@@ -5456,15 +5364,9 @@ patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;
patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-if _LUAVERSION<5.2 then
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
-else
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
-end
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
end -- of closure
@@ -5473,7 +5375,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 23980, stripped down to: 16119
+-- original size: 23952, stripped down to: 16092
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -5708,7 +5610,7 @@ local f_ordered_string=formatters["%q,"]
local f_ordered_number=formatters["%s,"]
local f_ordered_boolean=formatters["%l,"]
function table.fastserialize(t,prefix)
- local r={ type(prefix)=="string" and prefix or "return" }
+ local r={ prefix or "return" }
local m=1
local function fastserialize(t,outer)
local n=#t
@@ -7747,7 +7649,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
--- original size: 6643, stripped down to: 5272
+-- original size: 6501, stripped down to: 5156
if not modules then modules={} end modules ['trac-inf']={
version=1.001,
@@ -7848,10 +7750,7 @@ function statistics.show()
if statistics.enable then
local register=statistics.register
register("used platform",function()
- local mask=lua.mask or "ascii"
- return format("%s, type: %s, binary subtree: %s, symbol mask: %s (%s)",
- os.platform or "unknown",os.type or "unknown",environment.texos or "unknown",
- mask,mask=="utf" and "τεχ" or "tex")
+ return format("%s, type: %s, binary subtree: %s",os.platform or "unknown",os.type or "unknown",environment.texos or "unknown")
end)
register("luatex banner",function()
return lower(status.banner)
@@ -7930,7 +7829,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
--- original size: 5829, stripped down to: 3501
+-- original size: 5773, stripped down to: 3453
if not modules then modules={} end modules ['trac-pro']={
version=1.001,
@@ -7947,16 +7846,14 @@ local namespaces=namespaces
local registered={}
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
else
report_system("reference to %a in protected namespace %a",k,name)
end
end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
else
report_system("assignment to %a in protected namespace %a",k,name)
end
@@ -8207,7 +8104,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-deb"] = package.loaded["util-deb"] or true
--- original size: 3898, stripped down to: 2644
+-- original size: 3708, stripped down to: 2568
if not modules then modules={} end modules ['util-deb']={
version=1.001,
@@ -8287,22 +8184,20 @@ end
function debugger.disable()
debug.sethook()
end
-local function showtraceback(rep)
- local level=2
- local reporter=rep or report
+function traceback()
+ local level=1
while true do
- local info=getinfo(level,"Sl")
+ local info=debug.getinfo(level,"Sl")
if not info then
break
elseif info.what=="C" then
- reporter("%2i : %s",level-1,"C function")
+ print(format("%3i : C function",level))
else
- reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
end
level=level+1
end
end
-debugger.showtraceback=showtraceback
end -- of closure
@@ -8968,7 +8863,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 42614, stripped down to: 26694
+-- original size: 42447, stripped down to: 26589
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -9494,11 +9389,8 @@ local function _xmlconvert_(data,settings)
end
if errorstr and errorstr~="" then
result.error=true
- else
- errorstr=nil
end
result.statistics={
- errormessage=errorstr,
entities={
decimals=dcache,
hexadecimals=hcache,
@@ -9696,26 +9588,24 @@ local function verbose_document(e,handlers)
end
end
local function serialize(e,handlers,...)
- if e then
- local initialize=handlers.initialize
- local finalize=handlers.finalize
- local functions=handlers.functions
- if initialize then
- local state=initialize(...)
- if not state==true then
- return state
- end
- end
- local etg=e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
- if finalize then
- return finalize()
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state==true then
+ return state
end
end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
+ end
end
local function xserialize(e,handlers)
local functions=handlers.functions
@@ -15485,7 +15375,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-use"] = package.loaded["data-use"] or true
--- original size: 3899, stripped down to: 2984
+-- original size: 3913, stripped down to: 2998
if not modules then modules={} end modules ['data-use']={
version=1.001,
@@ -15531,7 +15421,7 @@ end
statistics.register("used config file",function() return caches.configfiles() end)
statistics.register("used cache path",function() return caches.usedpaths() end)
function statistics.savefmtstatus(texname,formatbanner,sourcefile)
- local enginebanner=status.banner
+ local enginebanner=status.list().banner
if formatbanner and enginebanner and sourcefile then
local luvname=file.replacesuffix(texname,"luv")
local luvdata={
@@ -15544,7 +15434,7 @@ function statistics.savefmtstatus(texname,formatbanner,sourcefile)
end
end
function statistics.checkfmtstatus(texname)
- local enginebanner=status.banner
+ local enginebanner=status.list().banner
if enginebanner and texname then
local luvname=file.replacesuffix(texname,"luv")
if lfs.isfile(luvname) then
@@ -16168,7 +16058,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-aux"] = package.loaded["data-aux"] or true
--- original size: 2431, stripped down to: 1996
+-- original size: 2394, stripped down to: 2005
if not modules then modules={} end modules ['data-aux']={
version=1.001,
@@ -16182,8 +16072,8 @@ local type,next=type,next
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local resolvers=resolvers
local report_scripts=logs.reporter("resolvers","scripts")
-function resolvers.updatescript(oldname,newname)
- local scriptpath="context/lua"
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="scripts/context/lua"
newname=file.addsuffix(newname,"lua")
local oldscript=resolvers.cleanpath(oldname)
if trace_locating then
@@ -16791,8 +16681,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 689993
--- stripped bytes : 244562
+-- original bytes : 685064
+-- stripped bytes : 242353
-- end library merge
@@ -16891,18 +16781,17 @@ local ownlibs = { -- order can be made better
}
--- c:/data/develop/tex-context/tex/texmf-win64/bin/../../texmf-context/tex/context/base/data-tmf.lua
--- c:/data/develop/context/sources/data-tmf.lua
-
local ownlist = {
- -- '.',
- -- ownpath ,
- owntree .. "/../../../../context/sources", -- HH's development path
+ '.',
+ ownpath ,
+ ownpath .. "/../sources", -- HH's development path
owntree .. "/../../texmf-local/tex/context/base",
owntree .. "/../../texmf-context/tex/context/base",
+ owntree .. "/../../texmf-dist/tex/context/base",
owntree .. "/../../texmf/tex/context/base",
owntree .. "/../../../texmf-local/tex/context/base",
owntree .. "/../../../texmf-context/tex/context/base",
+ owntree .. "/../../../texmf-dist/tex/context/base",
owntree .. "/../../../texmf/tex/context/base",
}
@@ -17809,12 +17698,6 @@ if e_argument("ansi") then
status_nop = formatters["%-15s :\n"],
}
- local script = e_argument("script") or e_argument("scripts")
-
- if type(script) == "string" then
- logs.writer("]0;"..script.."") -- for Alan to test
- end
-
end
if e_argument("script") or e_argument("scripts") then
@@ -17838,16 +17721,8 @@ elseif e_argument("selfmerge") then
runners.loadbase()
local found = locate_libs()
-
if found then
- local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
- if lfs.isfile(mtxrun) then
- utilities.merger.selfmerge(mtxrun,own.libs,{ found })
- application.report("runner updated on resolved path: %s",mtxrun)
- else
- utilities.merger.selfmerge(own.name,own.libs,{ found })
- application.report("runner updated on relative path: %s",own.name)
- end
+ utilities.merger.selfmerge(own.name,own.libs,{ found })
end
elseif e_argument("selfclean") then
@@ -17855,15 +17730,7 @@ elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
-
- local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
- if lfs.isfile(mtxrun) then
- utilities.merger.selfclean(mtxrun)
- application.report("runner cleaned on resolved path: %s",mtxrun)
- else
- utilities.merger.selfclean(own.name)
- application.report("runner cleaned on relative path: %s",own.name)
- end
+ utilities.merger.selfclean(own.name)
elseif e_argument("selfupdate") then
@@ -18105,8 +17972,6 @@ elseif e_argument("version") then
application.version()
- application.report("source path",environment.ownbin)
-
elseif e_argument("directives") then
directives.show()
diff --git a/scripts/context/stubs/unix/mtxrunjit b/scripts/context/stubs/unix/mtxrunjit
deleted file mode 100644
index 117105aa5..000000000
--- a/scripts/context/stubs/unix/mtxrunjit
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-
-luajittex --luaonly $(dirname $0)/mtxrun "$@"
-
-# luajittex --luaonly ${0%jit} "$@"
diff --git a/scripts/context/stubs/unix/pstopdf b/scripts/context/stubs/unix/pstopdf
new file mode 100644
index 000000000..116f5f4a3
--- /dev/null
+++ b/scripts/context/stubs/unix/pstopdf
@@ -0,0 +1,2 @@
+#!/bin/sh
+mtxrun --script pstopdf "$@"
diff --git a/scripts/context/stubs/win64/context.exe b/scripts/context/stubs/win64/context.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/context.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/contextjit.exe b/scripts/context/stubs/win64/contextjit.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/contextjit.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/ctxtools.exe b/scripts/context/stubs/win64/ctxtools.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/ctxtools.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/luatools.exe b/scripts/context/stubs/win64/luatools.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/luatools.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/metatex.exe b/scripts/context/stubs/win64/metatex.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/metatex.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/mptopdf.exe b/scripts/context/stubs/win64/mptopdf.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/mptopdf.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/mtxrun.dll b/scripts/context/stubs/win64/mtxrun.dll
deleted file mode 100644
index 910502735..000000000
--- a/scripts/context/stubs/win64/mtxrun.dll
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/mtxrun.exe b/scripts/context/stubs/win64/mtxrun.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/mtxrun.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/mtxrun.lua b/scripts/context/stubs/win64/mtxrun.lua
deleted file mode 100644
index 3372831b3..000000000
--- a/scripts/context/stubs/win64/mtxrun.lua
+++ /dev/null
@@ -1,18175 +0,0 @@
-#!/usr/bin/env texlua
-
--- for k, v in next, _G.string do
--- local tv = type(v)
--- if tv == "table" then
--- for kk, vv in next, v do
--- print(k,kk,vv)
--- end
--- else
--- print(tv,k,v)
--- end
--- end
-
-if not modules then modules = { } end modules ['mtxrun'] = {
- version = 1.001,
- comment = "runner, lua replacement for texmfstart.rb",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- one can make a stub:
---
--- #!/bin/sh
--- env LUATEXDIR=/....../texmf/scripts/context/lua luatex --luaonly mtxrun.lua "$@"
-
--- filename : mtxrun.lua
--- comment : companion to context.tex
--- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
--- copyright: PRAGMA ADE / ConTeXt Development Team
--- license : see context related readme files
-
--- This script is based on texmfstart.rb but does not use kpsewhich to
--- locate files. Although kpse is a library it never came to opening up
--- its interface to other programs (esp scripting languages) and so we
--- do it ourselves. The lua variant evolved out of an experimental ruby
--- one. Interesting is that using a scripting language instead of c does
--- not have a speed penalty. Actually the lua variant is more efficient,
--- especially when multiple calls to kpsewhich are involved. The lua
--- library also gives way more control.
-
--- to be done / considered
---
--- support for --exec or make it default
--- support for jar files (or maybe not, never used, too messy)
--- support for $RUBYINPUTS cum suis (if still needed)
--- remember for subruns: _CTX_K_V_#{original}_
--- remember for subruns: _CTX_K_S_#{original}_
--- remember for subruns: TEXMFSTART.#{original} [tex.rb texmfstart.rb]
-
--- begin library merge
-
-
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-lua"] = package.loaded["l-lua"] or true
-
--- original size: 3247, stripped down to: 1763
-
-if not modules then modules={} end modules ['l-lua']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
-_MAJORVERSION=tonumber(major) or 5
-_MINORVERSION=tonumber(minor) or 1
-_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
-if not lpeg then
- lpeg=require("lpeg")
-end
-if loadstring then
- local loadnormal=load
- function load(first,...)
- if type(first)=="string" then
- return loadstring(first,...)
- else
- return loadnormal(first,...)
- end
- end
-else
- loadstring=load
-end
-if not ipairs then
- local function iterate(a,i)
- i=i+1
- local v=a[i]
- if v~=nil then
- return i,v
- end
- end
- function ipairs(a)
- return iterate,a,0
- end
-end
-if not pairs then
- function pairs(t)
- return next,t
- end
-end
-if not table.unpack then
- table.unpack=_G.unpack
-elseif not unpack then
- _G.unpack=table.unpack
-end
-if not package.loaders then
- package.loaders=package.searchers
-end
-local print,select,tostring=print,select,tostring
-local inspectors={}
-function setinspector(inspector)
- inspectors[#inspectors+1]=inspector
-end
-function inspect(...)
- for s=1,select("#",...) do
- local value=select(s,...)
- local done=false
- for i=1,#inspectors do
- done=inspectors[i](value)
- if done then
- break
- end
- end
- if not done then
- print(tostring(value))
- end
- end
-end
-local dummy=function() end
-function optionalrequire(...)
- local ok,result=xpcall(require,dummy,...)
- if ok then
- return result
- end
-end
-if lua then
- lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-package"] = package.loaded["l-package"] or true
-
--- original size: 10587, stripped down to: 7815
-
-if not modules then modules={} end modules ['l-package']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local type=type
-local gsub,format,find=string.gsub,string.format,string.find
-local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
-local package=package
-local searchers=package.searchers or package.loaders
-local filejoin=file and file.join or function(path,name) return path.."/"..name end
-local isreadable=file and file.is_readable or function(name) local f=io.open(name) if f then f:close() return true end end
-local addsuffix=file and file.addsuffix or function(name,suffix) return name.."."..suffix end
-local function cleanpath(path)
- return path
-end
-local pattern=Cs((((1-S("\\/"))^0*(S("\\/")^1/"/"))^0*(P(".")^1/"/"+P(1))^1)*-1)
-local function lualibfile(name)
- return lpegmatch(pattern,name) or name
-end
-local offset=luarocks and 1 or 0
-local helpers=package.helpers or {
- cleanpath=cleanpath,
- lualibfile=lualibfile,
- trace=false,
- report=function(...) print(format(...)) end,
- builtin={
- ["preload table"]=searchers[1+offset],
- ["path specification"]=searchers[2+offset],
- ["cpath specification"]=searchers[3+offset],
- ["all in one fallback"]=searchers[4+offset],
- },
- methods={},
- sequence={
- "already loaded",
- "preload table",
- "qualified path",
- "lua extra list",
- "lib extra list",
- "path specification",
- "cpath specification",
- "all in one fallback",
- "not loaded",
- }
-}
-package.helpers=helpers
-local methods=helpers.methods
-local builtin=helpers.builtin
-local extraluapaths={}
-local extralibpaths={}
-local luapaths=nil
-local libpaths=nil
-local oldluapath=nil
-local oldlibpath=nil
-local nofextralua=-1
-local nofextralib=-1
-local nofpathlua=-1
-local nofpathlib=-1
-local function listpaths(what,paths)
- local nofpaths=#paths
- if nofpaths>0 then
- for i=1,nofpaths do
- helpers.report("using %s path %i: %s",what,i,paths[i])
- end
- else
- helpers.report("no %s paths defined",what)
- end
- return nofpaths
-end
-local function getextraluapaths()
- if helpers.trace and #extraluapaths~=nofextralua then
- nofextralua=listpaths("extra lua",extraluapaths)
- end
- return extraluapaths
-end
-local function getextralibpaths()
- if helpers.trace and #extralibpaths~=nofextralib then
- nofextralib=listpaths("extra lib",extralibpaths)
- end
- return extralibpaths
-end
-local function getluapaths()
- local luapath=package.path or ""
- if oldluapath~=luapath then
- luapaths=file.splitpath(luapath,";")
- oldluapath=luapath
- nofpathlua=-1
- end
- if helpers.trace and #luapaths~=nofpathlua then
- nofpathlua=listpaths("builtin lua",luapaths)
- end
- return luapaths
-end
-local function getlibpaths()
- local libpath=package.cpath or ""
- if oldlibpath~=libpath then
- libpaths=file.splitpath(libpath,";")
- oldlibpath=libpath
- nofpathlib=-1
- end
- if helpers.trace and #libpaths~=nofpathlib then
- nofpathlib=listpaths("builtin lib",libpaths)
- end
- return libpaths
-end
-package.luapaths=getluapaths
-package.libpaths=getlibpaths
-package.extraluapaths=getextraluapaths
-package.extralibpaths=getextralibpaths
-local hashes={
- lua={},
- lib={},
-}
-local function registerpath(tag,what,target,...)
- local pathlist={... }
- local cleanpath=helpers.cleanpath
- local trace=helpers.trace
- local report=helpers.report
- local hash=hashes[what]
- local function add(path)
- local path=cleanpath(path)
- if not hash[path] then
- target[#target+1]=path
- hash[path]=true
- if trace then
- report("registered %s path %s: %s",tag,#target,path)
- end
- else
- if trace then
- report("duplicate %s path: %s",tag,path)
- end
- end
- end
- for p=1,#pathlist do
- local path=pathlist[p]
- if type(path)=="table" then
- for i=1,#path do
- add(path[i])
- end
- else
- add(path)
- end
- end
- return paths
-end
-helpers.registerpath=registerpath
-function package.extraluapath(...)
- registerpath("extra lua","lua",extraluapaths,...)
-end
-function package.extralibpath(...)
- registerpath("extra lib","lib",extralibpaths,...)
-end
-local function loadedaslib(resolved,rawname)
- local base=gsub(rawname,"%.","_")
- local init="luaopen_"..gsub(base,"%.","_")
- if helpers.trace then
- helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
- end
- return package.loadlib(resolved,init)
-end
-helpers.loadedaslib=loadedaslib
-local function loadedbypath(name,rawname,paths,islib,what)
- local trace=helpers.trace
- for p=1,#paths do
- local path=paths[p]
- local resolved=filejoin(path,name)
- if trace then
- helpers.report("%s path, identifying '%s' on '%s'",what,name,path)
- end
- if isreadable(resolved) then
- if trace then
- helpers.report("%s path, '%s' found on '%s'",what,name,resolved)
- end
- if islib then
- return loadedaslib(resolved,rawname)
- else
- return loadfile(resolved)
- end
- end
- end
-end
-helpers.loadedbypath=loadedbypath
-local function loadedbyname(name,rawname)
- if find(name,"^/") or find(name,"^[a-zA-Z]:/") then
- local trace=helpers.trace
- if trace then
- helpers.report("qualified name, identifying '%s'",what,name)
- end
- if isreadable(name) then
- if trace then
- helpers.report("qualified name, '%s' found",what,name)
- end
- return loadfile(name)
- end
- end
-end
-helpers.loadedbyname=loadedbyname
-methods["already loaded"]=function(name)
- return package.loaded[name]
-end
-methods["preload table"]=function(name)
- return builtin["preload table"](name)
-end
-methods["qualified path"]=function(name)
- return loadedbyname(addsuffix(lualibfile(name),"lua"),name)
-end
-methods["lua extra list"]=function(name)
- return loadedbypath(addsuffix(lualibfile(name),"lua"),name,getextraluapaths(),false,"lua")
-end
-methods["lib extra list"]=function(name)
- return loadedbypath(addsuffix(lualibfile(name),os.libsuffix),name,getextralibpaths(),true,"lib")
-end
-methods["path specification"]=function(name)
- getluapaths()
- return builtin["path specification"](name)
-end
-methods["cpath specification"]=function(name)
- getlibpaths()
- return builtin["cpath specification"](name)
-end
-methods["all in one fallback"]=function(name)
- return builtin["all in one fallback"](name)
-end
-methods["not loaded"]=function(name)
- if helpers.trace then
- helpers.report("unable to locate '%s'",name or "?")
- end
- return nil
-end
-local level=0
-local used={}
-helpers.traceused=false
-function helpers.loaded(name)
- local sequence=helpers.sequence
- level=level+1
- for i=1,#sequence do
- local method=sequence[i]
- if helpers.trace then
- helpers.report("%s, level '%s', method '%s', name '%s'","locating",level,method,name)
- end
- local result,rest=methods[method](name)
- if type(result)=="function" then
- if helpers.trace then
- helpers.report("%s, level '%s', method '%s', name '%s'","found",level,method,name)
- end
- if helpers.traceused then
- used[#used+1]={ level=level,name=name }
- end
- level=level-1
- return result,rest
- end
- end
- level=level-1
- return nil
-end
-function helpers.showused()
- local n=#used
- if n>0 then
- helpers.report("%s libraries loaded:",n)
- helpers.report()
- for i=1,n do
- local u=used[i]
- helpers.report("%i %a",u.level,u.name)
- end
- helpers.report()
- end
-end
-function helpers.unload(name)
- if helpers.trace then
- if package.loaded[name] then
- helpers.report("unloading, name '%s', %s",name,"done")
- else
- helpers.report("unloading, name '%s', %s",name,"not loaded")
- end
- end
- package.loaded[name]=nil
-end
-table.insert(searchers,1,helpers.loaded)
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
-
--- original size: 29808, stripped down to: 16182
-
-if not modules then modules={} end modules ['l-lpeg']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-lpeg=require("lpeg")
-if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
-local type,next,tostring=type,next,tostring
-local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
-local floor=math.floor
-local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
-local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-if setinspector then
- setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-end
-lpeg.patterns=lpeg.patterns or {}
-local patterns=lpeg.patterns
-local anything=P(1)
-local endofstring=P(-1)
-local alwaysmatched=P(true)
-patterns.anything=anything
-patterns.endofstring=endofstring
-patterns.beginofstring=alwaysmatched
-patterns.alwaysmatched=alwaysmatched
-local sign=S('+-')
-local zero=P('0')
-local digit=R('09')
-local octdigit=R("07")
-local lowercase=R("az")
-local uppercase=R("AZ")
-local underscore=P("_")
-local hexdigit=digit+lowercase+uppercase
-local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
-local newline=crlf+S("\r\n")
-local escaped=P("\\")*anything
-local squote=P("'")
-local dquote=P('"')
-local space=P(" ")
-local period=P(".")
-local comma=P(",")
-local utfbom_32_be=P('\000\000\254\255')
-local utfbom_32_le=P('\255\254\000\000')
-local utfbom_16_be=P('\254\255')
-local utfbom_16_le=P('\255\254')
-local utfbom_8=P('\239\187\191')
-local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
-local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
-local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")
-local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
-local utf8next=R("\128\191")
-patterns.utfbom_32_be=utfbom_32_be
-patterns.utfbom_32_le=utfbom_32_le
-patterns.utfbom_16_be=utfbom_16_be
-patterns.utfbom_16_le=utfbom_16_le
-patterns.utfbom_8=utfbom_8
-patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
-patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
-patterns.utf8one=R("\000\127")
-patterns.utf8two=R("\194\223")*utf8next
-patterns.utf8three=R("\224\239")*utf8next*utf8next
-patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
-patterns.utfbom=utfbom
-patterns.utftype=utftype
-patterns.utfstricttype=utfstricttype
-patterns.utfoffset=utfoffset
-local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
-local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
-local utf8character=P(1)*R("\128\191")^0
-patterns.utf8=utf8char
-patterns.utf8char=utf8char
-patterns.utf8character=utf8character
-patterns.validutf8=validutf8char
-patterns.validutf8char=validutf8char
-local eol=S("\n\r")
-local spacer=S(" \t\f\v")
-local whitespace=eol+spacer
-local nonspacer=1-spacer
-local nonwhitespace=1-whitespace
-patterns.eol=eol
-patterns.spacer=spacer
-patterns.whitespace=whitespace
-patterns.nonspacer=nonspacer
-patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
-local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
-local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
-patterns.stripper=stripper
-patterns.fullstripper=fullstripper
-patterns.collapser=collapser
-patterns.lowercase=lowercase
-patterns.uppercase=uppercase
-patterns.letter=patterns.lowercase+patterns.uppercase
-patterns.space=space
-patterns.tab=P("\t")
-patterns.spaceortab=patterns.space+patterns.tab
-patterns.newline=newline
-patterns.emptyline=newline^1
-patterns.equal=P("=")
-patterns.comma=comma
-patterns.commaspacer=comma*spacer^0
-patterns.period=period
-patterns.colon=P(":")
-patterns.semicolon=P(";")
-patterns.underscore=underscore
-patterns.escaped=escaped
-patterns.squote=squote
-patterns.dquote=dquote
-patterns.nosquote=(escaped+(1-squote))^0
-patterns.nodquote=(escaped+(1-dquote))^0
-patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
-patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
-patterns.unquoted=patterns.undouble+patterns.unsingle
-patterns.unspacer=((patterns.spacer^1)/"")^0
-patterns.singlequoted=squote*patterns.nosquote*squote
-patterns.doublequoted=dquote*patterns.nodquote*dquote
-patterns.quoted=patterns.doublequoted+patterns.singlequoted
-patterns.digit=digit
-patterns.octdigit=octdigit
-patterns.hexdigit=hexdigit
-patterns.sign=sign
-patterns.cardinal=digit^1
-patterns.integer=sign^-1*digit^1
-patterns.unsigned=digit^0*period*digit^1
-patterns.float=sign^-1*patterns.unsigned
-patterns.cunsigned=digit^0*comma*digit^1
-patterns.cfloat=sign^-1*patterns.cunsigned
-patterns.number=patterns.float+patterns.integer
-patterns.cnumber=patterns.cfloat+patterns.integer
-patterns.oct=zero*octdigit^1
-patterns.octal=patterns.oct
-patterns.HEX=zero*P("X")*(digit+uppercase)^1
-patterns.hex=zero*P("x")*(digit+lowercase)^1
-patterns.hexadecimal=zero*S("xX")*hexdigit^1
-patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1
-patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1
-patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring
-patterns.somecontent=(anything-newline-space)^1
-patterns.beginline=#(1-newline)
-patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0))
-local function anywhere(pattern)
- return P { P(pattern)+1*V(1) }
-end
-lpeg.anywhere=anywhere
-function lpeg.instringchecker(p)
- p=anywhere(p)
- return function(str)
- return lpegmatch(p,str) and true or false
- end
-end
-function lpeg.splitter(pattern,action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-function lpeg.tsplitter(pattern,action)
- return Ct((((1-P(pattern))^1)/action+1)^0)
-end
-local splitters_s,splitters_m,splitters_t={},{},{}
-local function splitat(separator,single)
- local splitter=(single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator=P(separator)
- local other=C((1-separator)^0)
- if single then
- local any=anything
- splitter=other*(separator*C(any^0)+"")
- splitters_s[separator]=splitter
- else
- splitter=other*(separator*other)^0
- splitters_m[separator]=splitter
- end
- end
- return splitter
-end
-local function tsplitat(separator)
- local splitter=splitters_t[separator]
- if not splitter then
- splitter=Ct(splitat(separator))
- splitters_t[separator]=splitter
- end
- return splitter
-end
-lpeg.splitat=splitat
-lpeg.tsplitat=tsplitat
-function string.splitup(str,separator)
- if not separator then
- separator=","
- end
- return lpegmatch(splitters_m[separator] or splitat(separator),str)
-end
-local cache={}
-function lpeg.split(separator,str)
- local c=cache[separator]
- if not c then
- c=tsplitat(separator)
- cache[separator]=c
- end
- return lpegmatch(c,str)
-end
-function string.split(str,separator)
- if separator then
- local c=cache[separator]
- if not c then
- c=tsplitat(separator)
- cache[separator]=c
- end
- return lpegmatch(c,str)
- else
- return { str }
- end
-end
-local spacing=patterns.spacer^0*newline
-local empty=spacing*Cc("")
-local nonempty=Cs((1-spacing)^1)*spacing^-1
-local content=(empty+nonempty)^1
-patterns.textline=content
-local linesplitter=tsplitat(newline)
-patterns.linesplitter=linesplitter
-function string.splitlines(str)
- return lpegmatch(linesplitter,str)
-end
-local cache={}
-function lpeg.checkedsplit(separator,str)
- local c=cache[separator]
- if not c then
- separator=P(separator)
- local other=C((1-separator)^1)
- c=Ct(separator^0*other*(separator^1*other)^0)
- cache[separator]=c
- end
- return lpegmatch(c,str)
-end
-function string.checkedsplit(str,separator)
- local c=cache[separator]
- if not c then
- separator=P(separator)
- local other=C((1-separator)^1)
- c=Ct(separator^0*other*(separator^1*other)^0)
- cache[separator]=c
- end
- return lpegmatch(c,str)
-end
-local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
-local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
-local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
-local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
-patterns.utf8byte=utf8byte
-local cache={}
-function lpeg.stripper(str)
- if type(str)=="string" then
- local s=cache[str]
- if not s then
- s=Cs(((S(str)^1)/""+1)^0)
- cache[str]=s
- end
- return s
- else
- return Cs(((str^1)/""+1)^0)
- end
-end
-local cache={}
-function lpeg.keeper(str)
- if type(str)=="string" then
- local s=cache[str]
- if not s then
- s=Cs((((1-S(str))^1)/""+1)^0)
- cache[str]=s
- end
- return s
- else
- return Cs((((1-str)^1)/""+1)^0)
- end
-end
-function lpeg.frontstripper(str)
- return (P(str)+P(true))*Cs(anything^0)
-end
-function lpeg.endstripper(str)
- return Cs((1-P(str)*endofstring)^0)
-end
-function lpeg.replacer(one,two,makefunction,isutf)
- local pattern
- local u=isutf and utf8char or 1
- if type(one)=="table" then
- local no=#one
- local p=P(false)
- if no==0 then
- for k,v in next,one do
- p=p+P(k)/v
- end
- pattern=Cs((p+u)^0)
- elseif no==1 then
- local o=one[1]
- one,two=P(o[1]),o[2]
- pattern=Cs((one/two+u)^0)
- else
- for i=1,no do
- local o=one[i]
- p=p+P(o[1])/o[2]
- end
- pattern=Cs((p+u)^0)
- end
- else
- pattern=Cs((P(one)/(two or "")+u)^0)
- end
- if makefunction then
- return function(str)
- return lpegmatch(pattern,str)
- end
- else
- return pattern
- end
-end
-function lpeg.finder(lst,makefunction,isutf)
- local pattern
- if type(lst)=="table" then
- pattern=P(false)
- if #lst==0 then
- for k,v in next,lst do
- pattern=pattern+P(k)
- end
- else
- for i=1,#lst do
- pattern=pattern+P(lst[i])
- end
- end
- else
- pattern=P(lst)
- end
- if isutf then
- pattern=((utf8char or 1)-pattern)^0*pattern
- else
- pattern=(1-pattern)^0*pattern
- end
- if makefunction then
- return function(str)
- return lpegmatch(pattern,str)
- end
- else
- return pattern
- end
-end
-local splitters_f,splitters_s={},{}
-function lpeg.firstofsplit(separator)
- local splitter=splitters_f[separator]
- if not splitter then
- local pattern=P(separator)
- splitter=C((1-pattern)^0)
- splitters_f[separator]=splitter
- end
- return splitter
-end
-function lpeg.secondofsplit(separator)
- local splitter=splitters_s[separator]
- if not splitter then
- local pattern=P(separator)
- splitter=(1-pattern)^0*pattern*C(anything^0)
- splitters_s[separator]=splitter
- end
- return splitter
-end
-local splitters_s,splitters_p={},{}
-function lpeg.beforesuffix(separator)
- local splitter=splitters_s[separator]
- if not splitter then
- local pattern=P(separator)
- splitter=C((1-pattern)^0)*pattern*endofstring
- splitters_s[separator]=splitter
- end
- return splitter
-end
-function lpeg.afterprefix(separator)
- local splitter=splitters_p[separator]
- if not splitter then
- local pattern=P(separator)
- splitter=pattern*C(anything^0)
- splitters_p[separator]=splitter
- end
- return splitter
-end
-function lpeg.balancer(left,right)
- left,right=P(left),P(right)
- return P { left*((1-left-right)+V(1))^0*right }
-end
-local nany=utf8char/""
-function lpeg.counter(pattern)
- pattern=Cs((P(pattern)/" "+nany)^0)
- return function(str)
- return #lpegmatch(pattern,str)
- end
-end
-utf=utf or (unicode and unicode.utf8) or {}
-local utfcharacters=utf and utf.characters or string.utfcharacters
-local utfgmatch=utf and utf.gmatch
-local utfchar=utf and utf.char
-lpeg.UP=lpeg.P
-if utfcharacters then
- function lpeg.US(str)
- local p=P(false)
- for uc in utfcharacters(str) do
- p=p+P(uc)
- end
- return p
- end
-elseif utfgmatch then
- function lpeg.US(str)
- local p=P(false)
- for uc in utfgmatch(str,".") do
- p=p+P(uc)
- end
- return p
- end
-else
- function lpeg.US(str)
- local p=P(false)
- local f=function(uc)
- p=p+P(uc)
- end
- lpegmatch((utf8char/f)^0,str)
- return p
- end
-end
-local range=utf8byte*utf8byte+Cc(false)
-function lpeg.UR(str,more)
- local first,last
- if type(str)=="number" then
- first=str
- last=more or first
- else
- first,last=lpegmatch(range,str)
- if not last then
- return P(str)
- end
- end
- if first==last then
- return P(str)
- elseif utfchar and (last-first<8) then
- local p=P(false)
- for i=first,last do
- p=p+P(utfchar(i))
- end
- return p
- else
- local f=function(b)
- return b>=first and b<=last
- end
- return utf8byte/f
- end
-end
-function lpeg.is_lpeg(p)
- return p and lpegtype(p)=="pattern"
-end
-function lpeg.oneof(list,...)
- if type(list)~="table" then
- list={ list,... }
- end
- local p=P(list[1])
- for l=2,#list do
- p=p+P(list[l])
- end
- return p
-end
-local sort=table.sort
-local function copyindexed(old)
- local new={}
- for i=1,#old do
-  new[i]=old[i]
- end
- return new
-end
-local function sortedkeys(tab)
- local keys,s={},0
- for key,_ in next,tab do
- s=s+1
- keys[s]=key
- end
- sort(keys)
- return keys
-end
-function lpeg.append(list,pp,delayed,checked)
- local p=pp
- if #list>0 then
- local keys=copyindexed(list)
- sort(keys)
- for i=#keys,1,-1 do
- local k=keys[i]
- if p then
- p=P(k)+p
- else
- p=P(k)
- end
- end
- elseif delayed then
- local keys=sortedkeys(list)
- if p then
-   for i=1,#keys do
- local k=keys[i]
- local v=list[k]
- p=P(k)/list+p
- end
- else
- for i=1,#keys do
- local k=keys[i]
- local v=list[k]
- if p then
- p=P(k)+p
- else
- p=P(k)
- end
- end
- if p then
- p=p/list
- end
- end
- elseif checked then
- local keys=sortedkeys(list)
- for i=1,#keys do
- local k=keys[i]
- local v=list[k]
- if p then
- if k==v then
- p=P(k)+p
- else
- p=P(k)/v+p
- end
- else
- if k==v then
- p=P(k)
- else
- p=P(k)/v
- end
- end
- end
- else
- local keys=sortedkeys(list)
- for i=1,#keys do
- local k=keys[i]
- local v=list[k]
- if p then
- p=P(k)/v+p
- else
- p=P(k)/v
- end
- end
- end
- return p
-end
-local function make(t)
- local p
- local keys=sortedkeys(t)
- for i=1,#keys do
- local k=keys[i]
- local v=t[k]
- if not p then
- if next(v) then
- p=P(k)*make(v)
- else
- p=P(k)
- end
- else
- if next(v) then
- p=p+P(k)*make(v)
- else
- p=p+P(k)
- end
- end
- end
- return p
-end
-function lpeg.utfchartabletopattern(list)
- local tree={}
- for i=1,#list do
- local t=tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c]={}
- end
- t=t[c]
- end
- end
- return make(tree)
-end
-patterns.containseol=lpeg.finder(eol)
-local function nextstep(n,step,result)
- local m=n%step
- local d=floor(n/step)
- if d>0 then
- local v=V(tostring(step))
- local s=result.start
- for i=1,d do
- if s then
- s=v*s
- else
- s=v
- end
- end
- result.start=s
- end
- if step>1 and result.start then
- local v=V(tostring(step/2))
- result[tostring(step)]=v*v
- end
- if step>0 then
- return nextstep(m,step/2,result)
- else
- return result
- end
-end
-function lpeg.times(pattern,n)
- return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
-end
-local trailingzeros=zero^0*-digit
-local case_1=period*trailingzeros/""
-local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
-local number=digit^1*(case_1+case_2)
-local stripper=Cs((number+1)^0)
-lpeg.patterns.stripzeros=stripper
-
-
-end -- of closure
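A minimal usage sketch for the lpeg helpers above (assuming these definitions are loaded, for instance by running this file under texlua, so that lpeg and lpeg.patterns are available):

-- exercises only functions defined in the closure above
local replace = lpeg.replacer("foo","bar",true)                  -- makefunction=true returns a function
print(replace("foo and foo"))                                    -- bar and bar
print(lpeg.match(lpeg.firstofsplit("="),"key=value"))            -- key
print(lpeg.match(lpeg.secondofsplit("="),"key=value"))           -- value
print(lpeg.match(lpeg.patterns.stripzeros,"1.2300 and 4.000"))   -- 1.23 and 4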
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-function"] = package.loaded["l-function"] or true
-
--- original size: 361, stripped down to: 322
-
-if not modules then modules={} end modules ['l-functions']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-functions=functions or {}
-function functions.dummy() end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-string"] = package.loaded["l-string"] or true
-
--- original size: 5671, stripped down to: 2827
-
-if not modules then modules={} end modules ['l-string']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local string=string
-local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
-local lpegmatch,patterns=lpeg.match,lpeg.patterns
-local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
-local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
-function string.unquoted(str)
- return lpegmatch(unquoted,str) or str
-end
-function string.quoted(str)
- return format("%q",str)
-end
-function string.count(str,pattern)
- local n=0
- for _ in gmatch(str,pattern) do
- n=n+1
- end
- return n
-end
-function string.limit(str,n,sentinel)
- if #str>n then
- sentinel=sentinel or "..."
- return sub(str,1,(n-#sentinel))..sentinel
- else
- return str
- end
-end
-local stripper=patterns.stripper
-local fullstripper=patterns.fullstripper
-local collapser=patterns.collapser
-local longtostring=patterns.longtostring
-function string.strip(str)
- return lpegmatch(stripper,str) or ""
-end
-function string.fullstrip(str)
- return lpegmatch(fullstripper,str) or ""
-end
-function string.collapsespaces(str)
- return lpegmatch(collapser,str) or ""
-end
-function string.longtostring(str)
- return lpegmatch(longtostring,str) or ""
-end
-local pattern=P(" ")^0*P(-1)
-function string.is_empty(str)
- if str=="" then
- return true
- else
- return lpegmatch(pattern,str) and true or false
- end
-end
-local anything=patterns.anything
-local allescapes=Cc("%")*S(".-+%?()[]*")
-local someescapes=Cc("%")*S(".-+%()[]")
-local matchescapes=Cc(".")*S("*?")
-local pattern_a=Cs ((allescapes+anything )^0 )
-local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
-local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
-function string.escapedpattern(str,simple)
- return lpegmatch(simple and pattern_b or pattern_a,str)
-end
-function string.topattern(str,lowercase,strict)
- if str=="" or type(str)~="string" then
- return ".*"
- elseif strict then
- str=lpegmatch(pattern_c,str)
- else
- str=lpegmatch(pattern_b,str)
- end
- if lowercase then
- return lower(str)
- else
- return str
- end
-end
-function string.valid(str,default)
- return (type(str)=="string" and str~="" and str) or default or nil
-end
-string.itself=function(s) return s end
-local pattern=Ct(C(1)^0)
-function string.totable(str)
- return lpegmatch(pattern,str)
-end
-local replacer=lpeg.replacer("@","%%")
-function string.tformat(fmt,...)
- return format(lpegmatch(replacer,fmt),...)
-end
-string.quote=string.quoted
-string.unquote=string.unquoted
-
-
-end -- of closure
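A small sketch of the string helpers above (same assumption: the definitions in this closure are loaded):

print(string.strip("  padded  "))               -- padded
print(string.limit("a rather long string",10))  -- a rathe...
print(string.topattern("foo.bar"))              -- foo%.bar (magic characters escaped)
print(string.unquoted([["quoted"]]))            -- quoted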
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-table"] = package.loaded["l-table"] or true
-
--- original size: 31142, stripped down to: 20283
-
-if not modules then modules={} end modules ['l-table']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
-local table,string=table,string
-local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
-local format,lower,dump=string.format,string.lower,string.dump
-local getmetatable,setmetatable=getmetatable,setmetatable
-local getinfo=debug.getinfo
-local lpegmatch,patterns=lpeg.match,lpeg.patterns
-local floor=math.floor
-local stripper=patterns.stripper
-function table.strip(tab)
- local lst,l={},0
- for i=1,#tab do
- local s=lpegmatch(stripper,tab[i]) or ""
- if s=="" then
- else
- l=l+1
- lst[l]=s
- end
- end
- return lst
-end
-function table.keys(t)
- if t then
- local keys,k={},0
- for key,_ in next,t do
- k=k+1
- keys[k]=key
- end
- return keys
- else
- return {}
- end
-end
-local function compare(a,b)
- local ta,tb=type(a),type(b)
- if ta==tb then
- return a<b
- else
- return tostring(a)<tostring(b)
- end
-end
-local function sortedkeys(tab)
- if tab then
- local srt,category,s={},0,0
- for key,_ in next,tab do
- s=s+1
- srt[s]=key
- if category==3 then
- else
- local tkey=type(key)
- if tkey=="string" then
- category=(category==2 and 3) or 1
- elseif tkey=="number" then
- category=(category==1 and 3) or 2
- else
- category=3
- end
- end
- end
- if category==0 or category==3 then
- sort(srt,compare)
- else
- sort(srt)
- end
- return srt
- else
- return {}
- end
-end
-local function sortedhashkeys(tab,cmp)
- if tab then
- local srt,s={},0
- for key,_ in next,tab do
- if key then
- s=s+1
- srt[s]=key
- end
- end
- sort(srt,cmp)
- return srt
- else
- return {}
- end
-end
-function table.allkeys(t)
- local keys={}
- for k,v in next,t do
- for k,v in next,v do
- keys[k]=true
- end
- end
- return sortedkeys(keys)
-end
-table.sortedkeys=sortedkeys
-table.sortedhashkeys=sortedhashkeys
-local function nothing() end
-local function sortedhash(t,cmp)
- if t then
- local s
- if cmp then
- s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
- else
- s=sortedkeys(t)
- end
- local n=0
- local m=#s
- local function kv(s)
- if n<m then
- n=n+1
- local k=s[n]
- return k,t[k]
- end
- end
- return kv,s
- else
- return nothing
- end
-end
-table.sortedhash=sortedhash
-table.sortedpairs=sortedhash
-function table.append(t,list)
- local n=#t
- for i=1,#list do
- n=n+1
- t[n]=list[i]
- end
- return t
-end
-function table.prepend(t,list)
- local nl=#list
- local nt=nl+#t
- for i=#t,1,-1 do
- t[nt]=t[i]
- nt=nt-1
- end
- for i=1,#list do
- t[i]=list[i]
- end
- return t
-end
-function table.merge(t,...)
- t=t or {}
- for i=1,select("#",...) do
- for k,v in next,(select(i,...)) do
- t[k]=v
- end
- end
- return t
-end
-function table.merged(...)
- local t={}
- for i=1,select("#",...) do
- for k,v in next,(select(i,...)) do
- t[k]=v
- end
- end
- return t
-end
-function table.imerge(t,...)
- local nt=#t
- for i=1,select("#",...) do
- local nst=select(i,...)
- for j=1,#nst do
- nt=nt+1
- t[nt]=nst[j]
- end
- end
- return t
-end
-function table.imerged(...)
- local tmp,ntmp={},0
- for i=1,select("#",...) do
- local nst=select(i,...)
- for j=1,#nst do
- ntmp=ntmp+1
- tmp[ntmp]=nst[j]
- end
- end
- return tmp
-end
-local function fastcopy(old,metatabletoo)
- if old then
- local new={}
- for k,v in next,old do
- if type(v)=="table" then
- new[k]=fastcopy(v,metatabletoo)
- else
- new[k]=v
- end
- end
- if metatabletoo then
- local mt=getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- end
- return new
- else
- return {}
- end
-end
-local function copy(t,tables)
- tables=tables or {}
- local tcopy={}
- if not tables[t] then
- tables[t]=tcopy
- end
- for i,v in next,t do
- if type(i)=="table" then
- if tables[i] then
- i=tables[i]
- else
- i=copy(i,tables)
- end
- end
- if type(v)~="table" then
- tcopy[i]=v
- elseif tables[v] then
- tcopy[i]=tables[v]
- else
- tcopy[i]=copy(v,tables)
- end
- end
- local mt=getmetatable(t)
- if mt then
- setmetatable(tcopy,mt)
- end
- return tcopy
-end
-table.fastcopy=fastcopy
-table.copy=copy
-function table.derive(parent)
- local child={}
- if parent then
- setmetatable(child,{ __index=parent })
- end
- return child
-end
-function table.tohash(t,value)
- local h={}
- if t then
- if value==nil then value=true end
- for _,v in next,t do
- h[v]=value
- end
- end
- return h
-end
-function table.fromhash(t)
- local hsh,h={},0
- for k,v in next,t do
- if v then
- h=h+1
- hsh[h]=k
- end
- end
- return hsh
-end
-local noquotes,hexify,handle,reduce,compact,inline,functions
-local reserved=table.tohash {
- 'and','break','do','else','elseif','end','false','for','function','if',
- 'in','local','nil','not','or','repeat','return','then','true','until','while',
- 'NaN','goto',
-}
-local function simple_table(t)
- if #t>0 then
- local n=0
- for _,v in next,t do
- n=n+1
- end
- if n==#t then
- local tt,nt={},0
- for i=1,#t do
- local v=t[i]
- local tv=type(v)
- if tv=="number" then
- nt=nt+1
- if hexify then
- tt[nt]=format("0x%04X",v)
- else
- tt[nt]=tostring(v)
- end
- elseif tv=="string" then
- nt=nt+1
- tt[nt]=format("%q",v)
- elseif tv=="boolean" then
- nt=nt+1
- tt[nt]=v and "true" or "false"
- else
- tt=nil
- break
- end
- end
- return tt
- end
- end
- return nil
-end
-local propername=patterns.propername
-local function dummy() end
-local function do_serialize(root,name,depth,level,indexed)
- if level>0 then
- depth=depth.." "
- if indexed then
- handle(format("%s{",depth))
- else
- local tn=type(name)
- if tn=="number" then
- if hexify then
- handle(format("%s[0x%04X]={",depth,name))
- else
- handle(format("%s[%s]={",depth,name))
- end
- elseif tn=="string" then
- if noquotes and not reserved[name] and lpegmatch(propername,name) then
- handle(format("%s%s={",depth,name))
- else
- handle(format("%s[%q]={",depth,name))
- end
- elseif tn=="boolean" then
- handle(format("%s[%s]={",depth,name and "true" or "false"))
- else
- handle(format("%s{",depth))
- end
- end
- end
- if root and next(root) then
- local first,last=nil,0
- if compact then
- last=#root
- for k=1,last do
- if root[k]==nil then
- last=k-1
- break
- end
- end
- if last>0 then
- first=1
- end
- end
- local sk=sortedkeys(root)
- for i=1,#sk do
- local k=sk[i]
- local v=root[k]
- local tv,tk=type(v),type(k)
- if compact and first and tk=="number" and k>=first and k<=last then
- if tv=="number" then
- if hexify then
- handle(format("%s 0x%04X,",depth,v))
- else
- handle(format("%s %s,",depth,v))
- end
- elseif tv=="string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
- elseif tv=="table" then
- if not next(v) then
- handle(format("%s {},",depth))
- elseif inline then
- local st=simple_table(v)
- if st then
- handle(format("%s { %s },",depth,concat(st,", ")))
- else
- do_serialize(v,k,depth,level+1,true)
- end
- else
- do_serialize(v,k,depth,level+1,true)
- end
- elseif tv=="boolean" then
- handle(format("%s %s,",depth,v and "true" or "false"))
- elseif tv=="function" then
- if functions then
- handle(format('%s load(%q),',depth,dump(v)))
- else
- handle(format('%s "function",',depth))
- end
- else
- handle(format("%s %q,",depth,tostring(v)))
- end
- elseif k=="__p__" then
- if false then
- handle(format("%s __p__=nil,",depth))
- end
- elseif tv=="number" then
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk=="boolean" then
- if hexify then
- handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v))
- else
- handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
- end
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
- else
- handle(format("%s %s=%s,",depth,k,v))
- end
- else
- if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v))
- end
- end
- elseif tv=="string" then
- if reduce and tonumber(v) then
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v))
- end
- else
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
- end
- elseif tv=="table" then
- if not next(v) then
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
- else
- handle(format("%s [%s]={},",depth,k))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]={},",depth,k and "true" or "false"))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s={},",depth,k))
- else
- handle(format("%s [%q]={},",depth,k))
- end
- elseif inline then
- local st=simple_table(v)
- if st then
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", ")))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- elseif tv=="boolean" then
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false"))
- else
- handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false"))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,v and "true" or "false"))
- else
- handle(format("%s [%q]=%s,",depth,k,v and "true" or "false"))
- end
- elseif tv=="function" then
- if functions then
- local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=load(%q),",depth,k,f))
- else
- handle(format("%s [%s]=load(%q),",depth,k,f))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=load(%q),",depth,k,f))
- else
- handle(format("%s [%q]=load(%q),",depth,k,f))
- end
- end
- else
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%q,",depth,k,tostring(v)))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v)))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%q,",depth,k,tostring(v)))
- end
- end
- end
- end
- if level>0 then
- handle(format("%s},",depth))
- end
-end
-local function serialize(_handle,root,name,specification)
- local tname=type(name)
- if type(specification)=="table" then
- noquotes=specification.noquotes
- hexify=specification.hexify
- handle=_handle or specification.handle or print
- reduce=specification.reduce or false
- functions=specification.functions
- compact=specification.compact
- inline=specification.inline and compact
- if functions==nil then
- functions=true
- end
- if compact==nil then
- compact=true
- end
- if inline==nil then
- inline=compact
- end
- else
- noquotes=false
- hexify=false
- handle=_handle or print
- reduce=false
- compact=true
- inline=true
- functions=true
- end
- if tname=="string" then
- if name=="return" then
- handle("return {")
- else
- handle(name.."={")
- end
- elseif tname=="number" then
- if hexify then
- handle(format("[0x%04X]={",name))
- else
- handle("["..name.."]={")
- end
- elseif tname=="boolean" then
- if name then
- handle("return {")
- else
- handle("{")
- end
- else
- handle("t={")
- end
- if root then
- if getmetatable(root) then
- local dummy=root._w_h_a_t_e_v_e_r_
- root._w_h_a_t_e_v_e_r_=nil
- end
- if next(root) then
- do_serialize(root,name,"",0)
- end
- end
- handle("}")
-end
-function table.serialize(root,name,specification)
- local t,n={},0
- local function flush(s)
- n=n+1
- t[n]=s
- end
- serialize(flush,root,name,specification)
- return concat(t,"\n")
-end
-table.tohandle=serialize
-local maxtab=2*1024
-function table.tofile(filename,root,name,specification)
- local f=io.open(filename,'w')
- if f then
- if maxtab>1 then
- local t,n={},0
- local function flush(s)
- n=n+1
- t[n]=s
- if n>maxtab then
- f:write(concat(t,"\n"),"\n")
- t,n={},0
- end
- end
- serialize(flush,root,name,specification)
- f:write(concat(t,"\n"),"\n")
- else
- local function flush(s)
- f:write(s,"\n")
- end
- serialize(flush,root,name,specification)
- end
- f:close()
- io.flush()
- end
-end
-local function flattened(t,f,depth)
- if f==nil then
- f={}
- depth=0xFFFF
- elseif tonumber(f) then
- depth=f
- f={}
- elseif not depth then
- depth=0xFFFF
- end
- for k,v in next,t do
- if type(k)~="number" then
- if depth>0 and type(v)=="table" then
- flattened(v,f,depth-1)
- else
- f[#f+1]=v
- end
- end
- end
- for k=1,#t do
- local v=t[k]
- if depth>0 and type(v)=="table" then
- flattened(v,f,depth-1)
- else
- f[#f+1]=v
- end
- end
- return f
-end
-table.flattened=flattened
-local function unnest(t,f)
- if not f then
- f={}
- end
- for i=1,#t do
- local v=t[i]
- if type(v)=="table" then
- if type(v[1])=="table" then
- unnest(v,f)
- else
- f[#f+1]=v
- end
- else
- f[#f+1]=v
- end
- end
- return f
-end
-function table.unnest(t)
- return unnest(t)
-end
-local function are_equal(a,b,n,m)
- if a and b and #a==#b then
- n=n or 1
- m=m or #a
- for i=n,m do
- local ai,bi=a[i],b[i]
- if ai==bi then
- elseif type(ai)=="table" and type(bi)=="table" then
- if not are_equal(ai,bi) then
- return false
- end
- else
- return false
- end
- end
- return true
- else
- return false
- end
-end
-local function identical(a,b)
- for ka,va in next,a do
- local vb=b[ka]
- if va==vb then
- elseif type(va)=="table" and type(vb)=="table" then
- if not identical(va,vb) then
- return false
- end
- else
- return false
- end
- end
- return true
-end
-table.identical=identical
-table.are_equal=are_equal
-function table.compact(t)
- if t then
- for k,v in next,t do
- if not next(v) then
- t[k]=nil
- end
- end
- end
-end
-function table.contains(t,v)
- if t then
- for i=1,#t do
- if t[i]==v then
- return i
- end
- end
- end
- return false
-end
-function table.count(t)
- local n=0
- for k,v in next,t do
- n=n+1
- end
- return n
-end
-function table.swapped(t,s)
- local n={}
- if s then
- for k,v in next,s do
- n[k]=v
- end
- end
- for k,v in next,t do
- n[v]=k
- end
- return n
-end
-function table.mirrored(t)
- local n={}
- for k,v in next,t do
- n[v]=k
- n[k]=v
- end
- return n
-end
-function table.reversed(t)
- if t then
- local tt,tn={},#t
- if tn>0 then
- local ttn=0
- for i=tn,1,-1 do
- ttn=ttn+1
- tt[ttn]=t[i]
- end
- end
- return tt
- end
-end
-function table.reverse(t)
- if t then
- local n=#t
- for i=1,floor(n/2) do
- local j=n-i+1
- t[i],t[j]=t[j],t[i]
- end
- return t
- end
-end
-function table.sequenced(t,sep,simple)
- if not t then
- return ""
- end
- local n=#t
- local s={}
- if n>0 then
- for i=1,n do
- s[i]=tostring(t[i])
- end
- else
- n=0
- for k,v in sortedhash(t) do
- if simple then
- if v==true then
- n=n+1
- s[n]=k
- elseif v and v~="" then
- n=n+1
- s[n]=k.."="..tostring(v)
- end
- else
- n=n+1
- s[n]=k.."="..tostring(v)
- end
- end
- end
- return concat(s,sep or " | ")
-end
-function table.print(t,...)
- if type(t)~="table" then
- print(tostring(t))
- else
- serialize(print,t,...)
- end
-end
-if setinspector then
- setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
-end
-function table.sub(t,i,j)
- return { unpack(t,i,j) }
-end
-function table.is_empty(t)
- return not t or not next(t)
-end
-function table.has_one_entry(t)
- return t and not next(t,next(t))
-end
-function table.loweredkeys(t)
- local l={}
- for k,v in next,t do
- l[lower(k)]=v
- end
- return l
-end
-function table.unique(old)
- local hash={}
- local new={}
- local n=0
- for i=1,#old do
- local oi=old[i]
- if not hash[oi] then
- n=n+1
- new[n]=oi
- hash[oi]=true
- end
- end
- return new
-end
-function table.sorted(t,...)
- sort(t,...)
- return t
-end
-function table.values(t,s)
- if t then
- local values,keys,v={},{},0
- for key,value in next,t do
- if not keys[value] then
- v=v+1
- values[v]=value
-    keys[value]=key
- end
- end
- if s then
- sort(values)
- end
- return values
- else
- return {}
- end
-end
-
-
-end -- of closure
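A brief sketch of the table helpers above:

local t = { beta=2, alpha=1, gamma={ 3, 4 } }
for k,v in table.sortedhash(t) do
 print(k,v)                                -- alpha, beta, gamma in sorted key order
end
print(table.serialize(t,"demo"))           -- the table rendered as a "demo={ ... }" string
print(table.contains({ "a","b" },"b"))     -- 2 (the index), false when absent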
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-io"] = package.loaded["l-io"] or true
-
--- original size: 8817, stripped down to: 6340
-
-if not modules then modules={} end modules ['l-io']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local io=io
-local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
-local concat=table.concat
-local floor=math.floor
-local type=type
-if string.find(os.getenv("PATH"),";") then
- io.fileseparator,io.pathseparator="\\",";"
-else
- io.fileseparator,io.pathseparator="/",":"
-end
-local function readall(f)
- return f:read("*all")
-end
-local function readall(f)
- local size=f:seek("end")
- if size==0 then
- return ""
- elseif size<1024*1024 then
- f:seek("set",0)
- return f:read('*all')
- else
- local done=f:seek("set",0)
- local step
- if size<1024*1024 then
- step=1024*1024
- elseif size>16*1024*1024 then
- step=16*1024*1024
- else
- step=floor(size/(1024*1024))*1024*1024/8
- end
- local data={}
- while true do
- local r=f:read(step)
- if not r then
- return concat(data)
- else
- data[#data+1]=r
- end
- end
- end
-end
-io.readall=readall
-function io.loaddata(filename,textmode)
- local f=io.open(filename,(textmode and 'r') or 'rb')
- if f then
- local data=readall(f)
- f:close()
- if #data>0 then
- return data
- end
- end
-end
-function io.savedata(filename,data,joiner)
- local f=io.open(filename,"wb")
- if f then
- if type(data)=="table" then
- f:write(concat(data,joiner or ""))
- elseif type(data)=="function" then
- data(f)
- else
- f:write(data or "")
- end
- f:close()
- io.flush()
- return true
- else
- return false
- end
-end
-function io.loadlines(filename,n)
- local f=io.open(filename,'r')
- if not f then
- elseif n then
- local lines={}
- for i=1,n do
-   local line=f:read("*line")
- if line then
- lines[#lines+1]=line
- else
- break
- end
- end
- f:close()
- lines=concat(lines,"\n")
- if #lines>0 then
- return lines
- end
- else
- local line=f:read("*line") or ""
- f:close()
- if #line>0 then
- return line
- end
- end
-end
-function io.loadchunk(filename,n)
- local f=io.open(filename,'rb')
- if f then
- local data=f:read(n or 1024)
- f:close()
- if #data>0 then
- return data
- end
- end
-end
-function io.exists(filename)
- local f=io.open(filename)
- if f==nil then
- return false
- else
- f:close()
- return true
- end
-end
-function io.size(filename)
- local f=io.open(filename)
- if f==nil then
- return 0
- else
- local s=f:seek("end")
- f:close()
- return s
- end
-end
-function io.noflines(f)
- if type(f)=="string" then
-  local f=io.open(f)
- if f then
- local n=f and io.noflines(f) or 0
- f:close()
- return n
- else
- return 0
- end
- else
- local n=0
- for _ in f:lines() do
- n=n+1
- end
- f:seek('set',0)
- return n
- end
-end
-local nextchar={
- [ 4]=function(f)
- return f:read(1,1,1,1)
- end,
- [ 2]=function(f)
- return f:read(1,1)
- end,
- [ 1]=function(f)
- return f:read(1)
- end,
- [-2]=function(f)
- local a,b=f:read(1,1)
- return b,a
- end,
- [-4]=function(f)
- local a,b,c,d=f:read(1,1,1,1)
- return d,c,b,a
- end
-}
-function io.characters(f,n)
- if f then
- return nextchar[n or 1],f
- end
-end
-local nextbyte={
- [4]=function(f)
- local a,b,c,d=f:read(1,1,1,1)
- if d then
- return byte(a),byte(b),byte(c),byte(d)
- end
- end,
- [3]=function(f)
- local a,b,c=f:read(1,1,1)
- if b then
- return byte(a),byte(b),byte(c)
- end
- end,
- [2]=function(f)
- local a,b=f:read(1,1)
- if b then
- return byte(a),byte(b)
- end
- end,
- [1]=function (f)
- local a=f:read(1)
- if a then
- return byte(a)
- end
- end,
- [-2]=function (f)
- local a,b=f:read(1,1)
- if b then
- return byte(b),byte(a)
- end
- end,
- [-3]=function(f)
- local a,b,c=f:read(1,1,1)
- if b then
- return byte(c),byte(b),byte(a)
- end
- end,
- [-4]=function(f)
- local a,b,c,d=f:read(1,1,1,1)
- if d then
- return byte(d),byte(c),byte(b),byte(a)
- end
- end
-}
-function io.bytes(f,n)
- if f then
- return nextbyte[n or 1],f
- else
- return nil,nil
- end
-end
-function io.ask(question,default,options)
- while true do
- io.write(question)
- if options then
- io.write(format(" [%s]",concat(options,"|")))
- end
- if default then
- io.write(format(" [%s]",default))
- end
- io.write(format(" "))
- io.flush()
- local answer=io.read()
- answer=gsub(answer,"^%s*(.*)%s*$","%1")
- if answer=="" and default then
- return default
- elseif not options then
- return answer
- else
- for k=1,#options do
- if options[k]==answer then
- return answer
- end
- end
- local pattern="^"..answer
- for k=1,#options do
- local v=options[k]
- if find(v,pattern) then
- return v
- end
- end
- end
- end
-end
-local function readnumber(f,n,m)
- if m then
- f:seek("set",n)
- n=m
- end
- if n==1 then
- return byte(f:read(1))
- elseif n==2 then
- local a,b=byte(f:read(2),1,2)
- return 256*a+b
- elseif n==3 then
- local a,b,c=byte(f:read(3),1,3)
- return 256*256*a+256*b+c
- elseif n==4 then
- local a,b,c,d=byte(f:read(4),1,4)
- return 256*256*256*a+256*256*b+256*c+d
- elseif n==8 then
- local a,b=readnumber(f,4),readnumber(f,4)
- return 256*a+b
- elseif n==12 then
- local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
- return 256*256*a+256*b+c
- elseif n==-2 then
- local b,a=byte(f:read(2),1,2)
- return 256*a+b
- elseif n==-3 then
- local c,b,a=byte(f:read(3),1,3)
- return 256*256*a+256*b+c
- elseif n==-4 then
- local d,c,b,a=byte(f:read(4),1,4)
- return 256*256*256*a+256*256*b+256*c+d
- elseif n==-8 then
- local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
- return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
- else
- return 0
- end
-end
-io.readnumber=readnumber
-function io.readstring(f,n,m)
- if m then
- f:seek("set",n)
- n=m
- end
- local str=gsub(f:read(n),"\000","")
- return str
-end
-if not io.i_limiter then function io.i_limiter() end end
-if not io.o_limiter then function io.o_limiter() end end
-
-
-end -- of closure
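A short sketch of the io helpers above ("demo.tmp" is just a placeholder filename):

io.savedata("demo.tmp",{ "one","two" },"\n")    -- a table is concatenated with the joiner
print(io.loaddata("demo.tmp",true))             -- the two lines, read back in text mode
print(io.size("demo.tmp"),io.exists("demo.tmp"))
local f = io.open("demo.tmp","rb")
print(io.readnumber(f,2))                       -- first two bytes as one big-endian number
f:close()
os.remove("demo.tmp")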
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-number"] = package.loaded["l-number"] or true
-
--- original size: 4939, stripped down to: 2830
-
-if not modules then modules={} end modules ['l-number']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local tostring,tonumber=tostring,tonumber
-local format,floor,match,rep=string.format,math.floor,string.match,string.rep
-local concat,insert=table.concat,table.insert
-local lpegmatch=lpeg.match
-number=number or {}
-local number=number
-if bit32 then
- local btest,bor=bit32.btest,bit32.bor
- function number.bit(p)
- return 2^(p-1)
- end
- number.hasbit=btest
- number.setbit=bor
- function number.setbit(x,p)
- return btest(x,p) and x or x+p
- end
- function number.clearbit(x,p)
- return btest(x,p) and x-p or x
- end
-else
- function number.bit(p)
- return 2^(p-1)
- end
- function number.hasbit(x,p)
- return x%(p+p)>=p
- end
- function number.setbit(x,p)
- return (x%(p+p)>=p) and x or x+p
- end
- function number.clearbit(x,p)
- return (x%(p+p)>=p) and x-p or x
- end
-end
-if bit32 then
- local bextract=bit32.extract
- local t={
- "0","0","0","0","0","0","0","0",
- "0","0","0","0","0","0","0","0",
- "0","0","0","0","0","0","0","0",
- "0","0","0","0","0","0","0","0",
- }
- function number.tobitstring(b,m)
- local n=32
- for i=0,31 do
- local v=bextract(b,i)
- local k=32-i
- if v==1 then
- n=k
- t[k]="1"
- else
- t[k]="0"
- end
- end
- if m then
- m=33-m*8
- if m<1 then
- m=1
- end
- return concat(t,"",m)
- elseif n<8 then
- return concat(t)
- elseif n<16 then
- return concat(t,"",9)
- elseif n<24 then
- return concat(t,"",17)
- else
- return concat(t,"",25)
- end
- end
-else
- function number.tobitstring(n,m)
- if n>0 then
- local t={}
- while n>0 do
- insert(t,1,n%2>0 and 1 or 0)
- n=floor(n/2)
- end
- local nn=8-#t%8
- if nn>0 and nn<8 then
- for i=1,nn do
- insert(t,1,0)
- end
- end
- if m then
- m=m*8-#t
- if m>0 then
- insert(t,1,rep("0",m))
- end
- end
- return concat(t)
- elseif m then
-   return rep("00000000",m)
- else
- return "00000000"
- end
- end
-end
-function number.valid(str,default)
- return tonumber(str) or default or nil
-end
-function number.toevenhex(n)
- local s=format("%X",n)
- if #s%2==0 then
- return s
- else
- return "0"..s
- end
-end
-local one=lpeg.C(1-lpeg.S('')/tonumber)^1
-function number.toset(n)
- return lpegmatch(one,tostring(n))
-end
-local function bits(n,i,...)
- if n>0 then
- local m=n%2
- local n=floor(n/2)
- if m>0 then
- return bits(n,i+1,i,...)
- else
- return bits(n,i+1,...)
- end
- else
- return...
- end
-end
-function number.bits(n)
- return { bits(n,1) }
-end
-
-
-end -- of closure
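A quick sketch of the number helpers above:

print(number.tobitstring(5))             -- 00000101
print(number.hasbit(5,number.bit(1)))    -- true (lowest bit set)
print(number.toevenhex(255))             -- FF
print(table.concat(number.bits(5)," "))  -- 3 1 (positions of the set bits, highest first)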
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-set"] = package.loaded["l-set"] or true
-
--- original size: 1923, stripped down to: 1133
-
-if not modules then modules={} end modules ['l-set']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-set=set or {}
-local nums={}
-local tabs={}
-local concat=table.concat
-local next,type=next,type
-set.create=table.tohash
-function set.tonumber(t)
- if next(t) then
- local s=""
- for k,v in next,t do
- if v then
- s=s.." "..k
- end
- end
- local n=nums[s]
- if not n then
- n=#tabs+1
- tabs[n]=t
- nums[s]=n
- end
- return n
- else
- return 0
- end
-end
-function set.totable(n)
- if n==0 then
- return {}
- else
- return tabs[n] or {}
- end
-end
-function set.tolist(n)
- if n==0 or not tabs[n] then
- return ""
- else
-  local t,m={},0
-  for k,v in next,tabs[n] do
-   if v then
-    m=m+1
-    t[m]=k
- end
- end
- return concat(t," ")
- end
-end
-function set.contains(n,s)
- if type(n)=="table" then
- return n[s]
- elseif n==0 then
- return false
- else
- local t=tabs[n]
- return t and t[s]
- end
-end
-
-
-end -- of closure
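A small sketch of the set helpers above:

local s = set.create { "bold", "italic" }   -- set.create is table.tohash
local n = set.tonumber(s)                   -- registers the set and returns a handle
print(set.contains(n,"bold"))               -- true
print(set.tolist(n))                        -- bold italic (hash order, not guaranteed)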
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-os"] = package.loaded["l-os"] or true
-
--- original size: 16023, stripped down to: 9634
-
-if not modules then modules={} end modules ['l-os']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local os=os
-local date,time=os.date,os.time
-local find,format,gsub,upper,gmatch=string.find,string.format,string.gsub,string.upper,string.gmatch
-local concat=table.concat
-local random,ceil,randomseed=math.random,math.ceil,math.randomseed
-local rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring=rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring
-math.initialseed=tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
-randomseed(math.initialseed)
-if not os.__getenv__ then
- os.__getenv__=os.getenv
- os.__setenv__=os.setenv
- if os.env then
- local osgetenv=os.getenv
- local ossetenv=os.setenv
- local osenv=os.env local _=osenv.PATH
- function os.setenv(k,v)
- if v==nil then
- v=""
- end
- local K=upper(k)
- osenv[K]=v
- if type(v)=="table" then
- v=concat(v,";")
- end
- ossetenv(K,v)
- end
- function os.getenv(k)
- local K=upper(k)
- local v=osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
- if v=="" then
- return nil
- else
- return v
- end
- end
- else
- local ossetenv=os.setenv
- local osgetenv=os.getenv
- local osenv={}
- function os.setenv(k,v)
- if v==nil then
- v=""
- end
- local K=upper(k)
- osenv[K]=v
- end
- function os.getenv(k)
- local K=upper(k)
- local v=osenv[K] or osgetenv(K) or osgetenv(k)
- if v=="" then
- return nil
- else
- return v
- end
- end
- local function __index(t,k)
- return os.getenv(k)
- end
- local function __newindex(t,k,v)
- os.setenv(k,v)
- end
- os.env={}
- setmetatable(os.env,{ __index=__index,__newindex=__newindex } )
- end
-end
-local execute,spawn,exec,iopopen,ioflush=os.execute,os.spawn or os.execute,os.exec or os.execute,io.popen,io.flush
-function os.execute(...) ioflush() return execute(...) end
-function os.spawn (...) ioflush() return spawn (...) end
-function os.exec (...) ioflush() return exec (...) end
-function io.popen (...) ioflush() return iopopen(...) end
-function os.resultof(command)
- local handle=io.popen(command,"r")
- if handle then
- local result=handle:read("*all") or ""
- handle:close()
- return result
- else
- return ""
- end
-end
-if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
- io.fileseparator,io.pathseparator,os.type="\\",";",os.type or "mswin"
- else
- io.fileseparator,io.pathseparator,os.type="/",":",os.type or "unix"
- end
-end
-os.type=os.type or (io.pathseparator==";" and "windows") or "unix"
-os.name=os.name or (os.type=="windows" and "mswin" ) or "linux"
-if os.type=="windows" then
- os.libsuffix,os.binsuffix,os.binsuffixes='dll','exe',{ 'exe','cmd','bat' }
-else
- os.libsuffix,os.binsuffix,os.binsuffixes='so','',{ '' }
-end
-local launchers={
- windows="start %s",
- macosx="open %s",
- unix="$BROWSER %s &> /dev/null &",
-}
-function os.launch(str)
- os.execute(format(launchers[os.name] or launchers.unix,str))
-end
-if not os.times then
- function os.times()
- return {
- utime=os.gettimeofday(),
- stime=0,
- cutime=0,
- cstime=0,
- }
- end
-end
-local gettimeofday=os.gettimeofday or os.clock
-os.gettimeofday=gettimeofday
-local startuptime=gettimeofday()
-function os.runtime()
- return gettimeofday()-startuptime
-end
-local resolvers=os.resolvers or {}
-os.resolvers=resolvers
-setmetatable(os,{ __index=function(t,k)
- local r=resolvers[k]
- return r and r(t,k) or nil
-end })
-local name,platform=os.name or "linux",os.getenv("MTX_PLATFORM") or ""
-local function guess()
- local architecture=os.resultof("uname -m") or ""
- if architecture~="" then
- return architecture
- end
- architecture=os.getenv("HOSTTYPE") or ""
- if architecture~="" then
- return architecture
- end
- return os.resultof("echo $HOSTTYPE") or ""
-end
-if platform~="" then
- os.platform=platform
-elseif os.type=="windows" then
- function resolvers.platform(t,k)
- local platform,architecture="",os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
- platform="win64"
- else
- platform="mswin"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform=platform
- return platform
- end
-elseif name=="linux" then
- function resolvers.platform(t,k)
- local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform="linux-64"
- elseif find(architecture,"ppc") then
- platform="linux-ppc"
- else
- platform="linux"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform=platform
- return platform
- end
-elseif name=="macosx" then
- function resolvers.platform(t,k)
- local platform,architecture="",os.resultof("echo $HOSTTYPE") or ""
- if architecture=="" then
- platform="osx-intel"
- elseif find(architecture,"i386") then
- platform="osx-intel"
- elseif find(architecture,"x86_64") then
- platform="osx-64"
- else
- platform="osx-ppc"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform=platform
- return platform
- end
-elseif name=="sunos" then
- function resolvers.platform(t,k)
- local platform,architecture="",os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
- platform="solaris-sparc"
- else
- platform="solaris-intel"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform=platform
- return platform
- end
-elseif name=="freebsd" then
- function resolvers.platform(t,k)
- local platform,architecture="",os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
- platform="freebsd-amd64"
- else
- platform="freebsd"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform=platform
- return platform
- end
-elseif name=="kfreebsd" then
- function resolvers.platform(t,k)
- local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform="kfreebsd-amd64"
- else
- platform="kfreebsd-i386"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform=platform
- return platform
- end
-else
- function resolvers.platform(t,k)
- local platform="linux"
- os.setenv("MTX_PLATFORM",platform)
- os.platform=platform
- return platform
- end
-end
-function resolvers.bits(t,k)
- local bits=find(os.platform,"64") and 64 or 32
- os.bits=bits
- return bits
-end
-local t={ 8,9,"a","b" }
-function os.uuid()
- return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
- random(0xFFFF),random(0xFFFF),
- random(0x0FFF),
- t[ceil(random(4))] or 8,random(0x0FFF),
- random(0xFFFF),
- random(0xFFFF),random(0xFFFF),random(0xFFFF)
- )
-end
-local d
-function os.timezone(delta)
- d=d or tonumber(date("%H")-date("!%H"))
- if delta then
- if d>0 then
- return format("+%02i:00",d)
- else
- return format("-%02i:00",-d)
- end
- else
- return 1
- end
-end
-local timeformat=format("%%s%s",os.timezone(true))
-local dateformat="!%Y-%m-%d %H:%M:%S"
-local lasttime=nil
-local lastdate=nil
-function os.fulltime(t,default)
- t=t and tonumber(t) or 0
- if t>0 then
- elseif default then
- return default
- else
- t=time()
- end
- if t~=lasttime then
- lasttime=t
- lastdate=format(timeformat,date(dateformat))
- end
- return lastdate
-end
-local dateformat="%Y-%m-%d %H:%M:%S"
-local lasttime=nil
-local lastdate=nil
-function os.localtime(t,default)
- t=t and tonumber(t) or 0
- if t>0 then
- elseif default then
- return default
- else
- t=time()
- end
- if t~=lasttime then
- lasttime=t
- lastdate=date(dateformat,t)
- end
- return lastdate
-end
-function os.converttime(t,default)
- local t=tonumber(t)
- if t and t>0 then
- return date(dateformat,t)
- else
- return default or "-"
- end
-end
-local memory={}
-local function which(filename)
- local fullname=memory[filename]
- if fullname==nil then
- local suffix=file.suffix(filename)
- local suffixes=suffix=="" and os.binsuffixes or { suffix }
- for directory in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
- local df=file.join(directory,filename)
- for i=1,#suffixes do
- local dfs=file.addsuffix(df,suffixes[i])
- if io.exists(dfs) then
- fullname=dfs
- break
- end
- end
- end
- if not fullname then
- fullname=false
- end
- memory[filename]=fullname
- end
- return fullname
-end
-os.which=which
-os.where=which
-function os.today()
- return date("!*t")
-end
-function os.now()
- return date("!%Y-%m-%d %H:%M:%S")
-end
-if not os.sleep then
- local socket=socket
- function os.sleep(n)
- if not socket then
- socket=require("socket")
- end
- socket.sleep(n)
- end
-end
-local function isleapyear(year)
- return (year%400==0) or ((year%100~=0) and (year%4==0))
-end
-os.isleapyear=isleapyear
-local days={ 31,28,31,30,31,30,31,31,30,31,30,31 }
-local function nofdays(year,month)
- if not month then
-  return isleapyear(year) and 366 or 365
- else
- return month==2 and isleapyear(year) and 29 or days[month]
- end
-end
-os.nofdays=nofdays
-function os.weekday(day,month,year)
- return date("%w",time { year=year,month=month,day=day })+1
-end
-function os.validdate(year,month,day)
- if month<1 then
- month=1
- elseif month>12 then
- month=12
- end
- if day<1 then
- day=1
- else
- local max=nofdays(year,month)
- if day>max then
- day=max
- end
- end
- return year,month,day
-end
-
-
-end -- of closure
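A short sketch of the os helpers above (os.platform is resolved lazily through the metatable):

print(os.type,os.name,os.platform)   -- e.g. unix  linux  linux-64
print(os.uuid())                     -- a random uuid-like string
print(os.fulltime())                 -- current time plus timezone offset
print(os.weekday(1,1,2014))          -- 4, i.e. Wednesday with Sunday counted as 1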
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-file"] = package.loaded["l-file"] or true
-
--- original size: 18308, stripped down to: 9948
-
-if not modules then modules={} end modules ['l-file']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-file=file or {}
-local file=file
-if not lfs then
- lfs=optionalrequire("lfs")
-end
-if not lfs then
- lfs={
- getcurrentdir=function()
- return "."
- end,
- attributes=function()
- return nil
- end,
- isfile=function(name)
- local f=io.open(name,'rb')
- if f then
- f:close()
- return true
- end
- end,
- isdir=function(name)
- print("you need to load lfs")
- return false
- end
- }
-elseif not lfs.isfile then
- local attributes=lfs.attributes
- function lfs.isdir(name)
- return attributes(name,"mode")=="directory"
- end
- function lfs.isfile(name)
- return attributes(name,"mode")=="file"
- end
-end
-local insert,concat=table.insert,table.concat
-local match,find,gmatch=string.match,string.find,string.gmatch
-local lpegmatch=lpeg.match
-local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
-local checkedsplit=string.checkedsplit
-local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
-local colon=P(":")
-local period=P(".")
-local periods=P("..")
-local fwslash=P("/")
-local bwslash=P("\\")
-local slashes=S("\\/")
-local noperiod=1-period
-local noslashes=1-slashes
-local name=noperiod^1
-local suffix=period/""*(1-period-slashes)^1*-1
-local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
-local function pathpart(name,default)
- return name and lpegmatch(pattern,name) or default or ""
-end
-local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
-local function basename(name)
- return name and lpegmatch(pattern,name) or name
-end
-local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
-local function nameonly(name)
- return name and lpegmatch(pattern,name) or name
-end
-local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
-local function suffixonly(name)
- return name and lpegmatch(pattern,name) or ""
-end
-local pattern=(noslashes^0*slashes)^0*noperiod^1*((period*C(noperiod^1))^1)*-1+Cc("")
-local function suffixesonly(name)
- if name then
- return lpegmatch(pattern,name)
- else
- return ""
- end
-end
-file.pathpart=pathpart
-file.basename=basename
-file.nameonly=nameonly
-file.suffixonly=suffixonly
-file.suffix=suffixonly
-file.suffixesonly=suffixesonly
-file.suffixes=suffixesonly
-file.dirname=pathpart
-file.extname=suffixonly
-local drive=C(R("az","AZ"))*colon
-local path=C((noslashes^0*slashes)^0)
-local suffix=period*C(P(1-period)^0*P(-1))
-local base=C((1-suffix)^0)
-local rest=C(P(1)^0)
-drive=drive+Cc("")
-path=path+Cc("")
-base=base+Cc("")
-suffix=suffix+Cc("")
-local pattern_a=drive*path*base*suffix
-local pattern_b=path*base*suffix
-local pattern_c=C(drive*path)*C(base*suffix)
-local pattern_d=path*rest
-function file.splitname(str,splitdrive)
- if not str then
- elseif splitdrive then
- return lpegmatch(pattern_a,str)
- else
- return lpegmatch(pattern_b,str)
- end
-end
-function file.splitbase(str)
- if str then
- return lpegmatch(pattern_d,str)
- else
- return "",str
- end
-end
-function file.nametotable(str,splitdrive)
- if str then
- local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
- if splitdrive then
- return {
- path=path,
- drive=drive,
- subpath=subpath,
- name=name,
- base=base,
- suffix=suffix,
- }
- else
- return {
- path=path,
- name=name,
- base=base,
- suffix=suffix,
- }
- end
- end
-end
-local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
-function file.removesuffix(name)
- return name and lpegmatch(pattern,name)
-end
-local suffix=period/""*(1-period-slashes)^1*-1
-local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
-function file.addsuffix(filename,suffix,criterium)
- if not filename or not suffix or suffix=="" then
- return filename
- elseif criterium==true then
- return filename.."."..suffix
- elseif not criterium then
- local n,s=lpegmatch(pattern,filename)
- if not s or s=="" then
- return filename.."."..suffix
- else
- return filename
- end
- else
- local n,s=lpegmatch(pattern,filename)
- if s and s~="" then
- local t=type(criterium)
- if t=="table" then
- for i=1,#criterium do
- if s==criterium[i] then
- return filename
- end
- end
- elseif t=="string" then
- if s==criterium then
- return filename
- end
- end
- end
- return (n or filename).."."..suffix
- end
-end
-local suffix=period*(1-period-slashes)^1*-1
-local pattern=Cs((1-suffix)^0)
-function file.replacesuffix(name,suffix)
- if name and suffix and suffix~="" then
- return lpegmatch(pattern,name).."."..suffix
- else
- return name
- end
-end
-local reslasher=lpeg.replacer(P("\\"),"/")
-function file.reslash(str)
- return str and lpegmatch(reslasher,str)
-end
-function file.is_writable(name)
- if not name then
- elseif lfs.isdir(name) then
- name=name.."/m_t_x_t_e_s_t.tmp"
- local f=io.open(name,"wb")
- if f then
- f:close()
- os.remove(name)
- return true
- end
- elseif lfs.isfile(name) then
- local f=io.open(name,"ab")
- if f then
- f:close()
- return true
- end
- else
- local f=io.open(name,"ab")
- if f then
- f:close()
- os.remove(name)
- return true
- end
- end
- return false
-end
-local readable=P("r")*Cc(true)
-function file.is_readable(name)
- if name then
- local a=attributes(name)
- return a and lpegmatch(readable,a.permissions) or false
- else
- return false
- end
-end
-file.isreadable=file.is_readable
-file.iswritable=file.is_writable
-function file.size(name)
- if name then
- local a=attributes(name)
- return a and a.size or 0
- else
- return 0
- end
-end
-function file.splitpath(str,separator)
- return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
-end
-function file.joinpath(tab,separator)
- return tab and concat(tab,separator or io.pathseparator)
-end
-local someslash=S("\\/")
-local stripper=Cs(P(fwslash)^0/""*reslasher)
-local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon
-local isroot=fwslash^1*-1
-local hasroot=fwslash^1
-local reslasher=lpeg.replacer(S("\\/"),"/")
-local deslasher=lpeg.replacer(S("\\/")^1,"/")
-function file.join(...)
- local lst={... }
- local one=lst[1]
- if lpegmatch(isnetwork,one) then
- local one=lpegmatch(reslasher,one)
- local two=lpegmatch(deslasher,concat(lst,"/",2))
- if lpegmatch(hasroot,two) then
- return one..two
- else
- return one.."/"..two
- end
- elseif lpegmatch(isroot,one) then
- local two=lpegmatch(deslasher,concat(lst,"/",2))
- if lpegmatch(hasroot,two) then
- return two
- else
- return "/"..two
- end
- elseif one=="" then
- return lpegmatch(stripper,concat(lst,"/",2))
- else
- return lpegmatch(deslasher,concat(lst,"/"))
- end
-end
-local drivespec=R("az","AZ")^1*colon
-local anchors=fwslash+drivespec
-local untouched=periods+(1-period)^1*P(-1)
-local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0)
-local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//")
-local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
-local absolute=fwslash
-function file.collapsepath(str,anchor)
- if not str then
- return
- end
- if anchor==true and not lpegmatch(anchors,str) then
- str=getcurrentdir().."/"..str
- end
- if str=="" or str=="." then
- return "."
- elseif lpegmatch(untouched,str) then
- return lpegmatch(reslasher,str)
- end
- local starter,oldelements=lpegmatch(splitstarter,str)
- local newelements={}
- local i=#oldelements
- while i>0 do
- local element=oldelements[i]
- if element=='.' then
- elseif element=='..' then
- local n=i-1
- while n>0 do
- local element=oldelements[n]
- if element~='..' and element~='.' then
- oldelements[n]='.'
- break
- else
- n=n-1
- end
- end
- if n<1 then
- insert(newelements,1,'..')
- end
- elseif element~="" then
- insert(newelements,1,element)
- end
- i=i-1
- end
- if #newelements==0 then
- return starter or "."
- elseif starter then
- return starter..concat(newelements,'/')
- elseif lpegmatch(absolute,str) then
- return "/"..concat(newelements,'/')
- else
- newelements=concat(newelements,'/')
- if anchor=="." and find(str,"^%./") then
- return "./"..newelements
- else
- return newelements
- end
- end
-end
-local validchars=R("az","09","AZ","--","..")
-local pattern_a=lpeg.replacer(1-validchars)
-local pattern_a=Cs((validchars+P(1)/"-")^1)
-local whatever=P("-")^0/""
-local pattern_b=Cs(whatever*(1-whatever*-1)^1)
-function file.robustname(str,strict)
- if str then
- str=lpegmatch(pattern_a,str) or str
- if strict then
- return lpegmatch(pattern_b,str) or str
- else
- return str
- end
- end
-end
-file.readdata=io.loaddata
-file.savedata=io.savedata
-function file.copy(oldname,newname)
- if oldname and newname then
- local data=io.loaddata(oldname)
- if data and data~="" then
- file.savedata(newname,data)
- end
- end
-end
-local letter=R("az","AZ")+S("_-+")
-local separator=P("://")
-local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
-local rootbased=fwslash+letter*colon
-lpeg.patterns.qualified=qualified
-lpeg.patterns.rootbased=rootbased
-function file.is_qualified_path(filename)
- return filename and lpegmatch(qualified,filename)~=nil
-end
-function file.is_rootbased_path(filename)
- return filename and lpegmatch(rootbased,filename)~=nil
-end
-function file.strip(name,dir)
- if name then
- local b,a=match(name,"^(.-)"..dir.."(.*)$")
- return a~="" and a or name
- end
-end
-function lfs.mkdirs(path)
- local full=""
- for sub in gmatch(path,"(/*[^\\/]+)") do
- full=full..sub
- lfs.mkdir(full)
- end
-end
-
-
-end -- of closure
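A brief sketch of the file name helpers above:

print(file.join("a","b","c.txt"))          -- a/b/c.txt
print(file.collapsepath("a/b/../c"))       -- a/c
print(file.addsuffix("readme","txt"))      -- readme.txt
print(file.nameonly("path/to/file.tex"))   -- file
print(file.suffixonly("path/to/file.tex")) -- tex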
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-gzip"] = package.loaded["l-gzip"] or true
-
--- original size: 1211, stripped down to: 1002
-
-if not modules then modules={} end modules ['l-gzip']={
- version=1.001,
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-if not gzip then
- return
-end
-local suffix,suffixes=file.suffix,file.suffixes
-function gzip.load(filename)
- local f=io.open(filename,"rb")
- if not f then
- elseif suffix(filename)=="gz" then
- f:close()
- local g=gzip.open(filename,"rb")
- if g then
- local str=g:read("*all")
- g:close()
- return str
- end
- else
- local str=f:read("*all")
- f:close()
- return str
- end
-end
-function gzip.save(filename,data)
- if suffix(filename)~="gz" then
- filename=filename..".gz"
- end
- local f=io.open(filename,"wb")
- if f then
- local s=zlib.compress(data or "",9,nil,15+16)
- f:write(s)
- f:close()
- return #s
- end
-end
-function gzip.suffix(filename)
- local suffix,extra=suffixes(filename)
- local gzipped=extra=="gz"
- return suffix,gzipped
-end
-
-
-end -- of closure
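The gzip helpers above only kick in when a gzip binding is present (the closure returns early otherwise), and gzip.save additionally needs zlib; a hedged sketch with a placeholder filename:

if gzip then
 gzip.save("demo.txt","hello")     -- writes demo.txt.gz via zlib.compress
 print(gzip.load("demo.txt.gz"))   -- hello
end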
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-md5"] = package.loaded["l-md5"] or true
-
--- original size: 3760, stripped down to: 2088
-
-if not modules then modules={} end modules ['l-md5']={
- version=1.001,
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-if not md5 then
- md5=optionalrequire("md5")
-end
-if not md5 then
- md5={
- sum=function(str) print("error: md5 is not loaded (sum ignored)") return str end,
- sumhexa=function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end,
- }
-end
-local md5,file=md5,file
-local gsub,format,byte=string.gsub,string.format,string.byte
-local md5sum=md5.sum
-local function convert(str,fmt)
- return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
-end
-if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
-if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
-if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
-function file.needsupdating(oldname,newname,threshold)
- local oldtime=lfs.attributes(oldname,"modification")
- if oldtime then
- local newtime=lfs.attributes(newname,"modification")
- if not newtime then
- return true
- elseif newtime>=oldtime then
- return false
- elseif oldtime-newtime<(threshold or 1) then
- return false
- else
- return true
- end
- else
- return false
- end
-end
-file.needs_updating=file.needsupdating
-function file.syncmtimes(oldname,newname)
- local oldtime=lfs.attributes(oldname,"modification")
- if oldtime and lfs.isfile(newname) then
- lfs.touch(newname,oldtime,oldtime)
- end
-end
-function file.checksum(name)
- if md5 then
- local data=io.loaddata(name)
- if data then
- return md5.HEX(data)
- end
- end
- return nil
-end
-function file.loadchecksum(name)
- if md5 then
- local data=io.loaddata(name..".md5")
- return data and (gsub(data,"%s",""))
- end
- return nil
-end
-function file.savechecksum(name,checksum)
- if not checksum then checksum=file.checksum(name) end
- if checksum then
- io.savedata(name..".md5",checksum)
- return checksum
- end
- return nil
-end
-
-
-end -- of closure
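A small sketch of the md5 and checksum helpers above (needs a real md5 library; "demo.txt" is a placeholder filename):

print(md5.HEX("context"))            -- uppercase hex digest
io.savedata("demo.txt","some data")
print(file.checksum("demo.txt"))     -- md5 of the file content
print(file.savechecksum("demo.txt")) -- also writes demo.txt.md5
os.remove("demo.txt") os.remove("demo.txt.md5")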
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-url"] = package.loaded["l-url"] or true
-
--- original size: 12292, stripped down to: 5585
-
-if not modules then modules={} end modules ['l-url']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local char,format,byte=string.char,string.format,string.byte
-local concat=table.concat
-local tonumber,type=tonumber,type
-local P,C,R,S,Cs,Cc,Ct,Cf,Cg,V=lpeg.P,lpeg.C,lpeg.R,lpeg.S,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.Cf,lpeg.Cg,lpeg.V
-local lpegmatch,lpegpatterns,replacer=lpeg.match,lpeg.patterns,lpeg.replacer
-url=url or {}
-local url=url
-local tochar=function(s) return char(tonumber(s,16)) end
-local colon=P(":")
-local qmark=P("?")
-local hash=P("#")
-local slash=P("/")
-local percent=P("%")
-local endofstring=P(-1)
-local hexdigit=R("09","AF","af")
-local plus=P("+")
-local nothing=Cc("")
-local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
-local escaped=(plus/" ")+escapedchar
-local noslash=P("/")/""
-local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
-local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
-local pathstr=Cs((escaped+(1- qmark-hash))^0)
-local querystr=Cs(((1- hash))^0)
-local fragmentstr=Cs((escaped+(1- endofstring))^0)
-local scheme=schemestr*colon+nothing
-local authority=slash*slash*authoritystr+nothing
-local path=slash*pathstr+nothing
-local query=qmark*querystr+nothing
-local fragment=hash*fragmentstr+nothing
-local validurl=scheme*authority*path*query*fragment
-local parser=Ct(validurl)
-lpegpatterns.url=validurl
-lpegpatterns.urlsplitter=parser
-local escapes={}
-setmetatable(escapes,{ __index=function(t,k)
- local v=format("%%%02X",byte(k))
- t[k]=v
- return v
-end })
-local escaper=Cs((R("09","AZ","az")^1+P(" ")/"%%20"+S("-./_")^1+P(1)/escapes)^0)
-local unescaper=Cs((escapedchar+1)^0)
-local getcleaner=Cs((P("+++")/"%%2B"+P("+")/"%%20"+P(1))^1)
-lpegpatterns.urlunescaped=escapedchar
-lpegpatterns.urlescaper=escaper
-lpegpatterns.urlunescaper=unescaper
-lpegpatterns.urlgetcleaner=getcleaner
-function url.unescapeget(str)
- return lpegmatch(getcleaner,str)
-end
-local function split(str)
- return (type(str)=="string" and lpegmatch(parser,str)) or str
-end
-local isscheme=schemestr*colon*slash*slash
-local function hasscheme(str)
- if str then
- local scheme=lpegmatch(isscheme,str)
- return scheme~="" and scheme or false
- else
- return false
- end
-end
-local rootletter=R("az","AZ")+S("_-+")
-local separator=P("://")
-local qualified=P(".")^0*P("/")+rootletter*P(":")+rootletter^1*separator+rootletter^1*P("/")
-local rootbased=P("/")+rootletter*P(":")
-local barswapper=replacer("|",":")
-local backslashswapper=replacer("\\","/")
-local equal=P("=")
-local amp=P("&")
-local key=Cs(((escapedchar+1)-equal )^0)
-local value=Cs(((escapedchar+1)-amp -endofstring)^0)
-local splitquery=Cf (Ct("")*P { "sequence",
- sequence=V("pair")*(amp*V("pair"))^0,
- pair=Cg(key*equal*value),
-},rawset)
-local function hashed(str)
- if str=="" then
- return {
- scheme="invalid",
- original=str,
- }
- end
- local s=split(str)
- local rawscheme=s[1]
- local rawquery=s[4]
- local somescheme=rawscheme~=""
- local somequery=rawquery~=""
- if not somescheme and not somequery then
- s={
- scheme="file",
- authority="",
- path=str,
- query="",
- fragment="",
- original=str,
- noscheme=true,
- filename=str,
- }
- else
- local authority,path,filename=s[2],s[3]
- if authority=="" then
- filename=path
- elseif path=="" then
- filename=""
- else
- filename=authority.."/"..path
- end
- s={
- scheme=rawscheme,
- authority=authority,
- path=path,
- query=lpegmatch(unescaper,rawquery),
- queries=lpegmatch(splitquery,rawquery),
- fragment=s[5],
- original=str,
- noscheme=false,
- filename=filename,
- }
- end
- return s
-end
-url.split=split
-url.hasscheme=hasscheme
-url.hashed=hashed
-function url.addscheme(str,scheme)
- if hasscheme(str) then
- return str
- elseif not scheme then
- return "file:///"..str
- else
- return scheme..":///"..str
- end
-end
-function url.construct(hash)
- local fullurl,f={},0
- local scheme,authority,path,query,fragment=hash.scheme,hash.authority,hash.path,hash.query,hash.fragment
- if scheme and scheme~="" then
- f=f+1;fullurl[f]=scheme.."://"
- end
- if authority and authority~="" then
- f=f+1;fullurl[f]=authority
- end
- if path and path~="" then
- f=f+1;fullurl[f]="/"..path
- end
- if query and query~="" then
- f=f+1;fullurl[f]="?"..query
- end
- if fragment and fragment~="" then
- f=f+1;fullurl[f]="#"..fragment
- end
- return lpegmatch(escaper,concat(fullurl))
-end
-local pattern=Cs(noslash*R("az","AZ")*(S(":|")/":")*noslash*P(1)^0)
-function url.filename(filename)
- local spec=hashed(filename)
- local path=spec.path
- return (spec.scheme=="file" and path and lpegmatch(pattern,path)) or filename
-end
-local function escapestring(str)
- return lpegmatch(escaper,str)
-end
-url.escape=escapestring
-function url.query(str)
- if type(str)=="string" then
- return lpegmatch(splitquery,str) or ""
- else
- return str
- end
-end
-function url.toquery(data)
- local td=type(data)
- if td=="string" then
- return #data>0 and escapestring(data) or nil
- elseif td=="table" then
- if next(data) then
- local t={}
- for k,v in next,data do
- t[#t+1]=format("%s=%s",k,escapestring(v))
- end
- return concat(t,"&")
- end
- else
- end
-end
-local pattern=Cs(noslash^0*(1-noslash*P(-1))^0)
-function url.barepath(path)
- if not path or path=="" then
- return ""
- else
- return lpegmatch(pattern,path)
- end
-end
-
-
-end -- of closure
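A sketch of what the url splitter above produces for a typical url (the address is only an example):

local u=url.hashed("http://www.pragma-ade.com/show?name=foo#top")
-- u.scheme    : "http"
-- u.authority : "www.pragma-ade.com"
-- u.path      : "show"
-- u.queries   : { name = "foo" }
-- u.fragment  : "top"
-- u.filename  : "www.pragma-ade.com/show"
print(url.addscheme("oeps.pdf"))        -- file:///oeps.pdf (no scheme present, so file is assumed)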
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-dir"] = package.loaded["l-dir"] or true
-
--- original size: 14768, stripped down to: 9107
-
-if not modules then modules={} end modules ['l-dir']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local type,select=type,select
-local find,gmatch,match,gsub=string.find,string.gmatch,string.match,string.gsub
-local concat,insert,remove,unpack=table.concat,table.insert,table.remove,table.unpack
-local lpegmatch=lpeg.match
-local P,S,R,C,Cc,Cs,Ct,Cv,V=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Cv,lpeg.V
-dir=dir or {}
-local dir=dir
-local lfs=lfs
-local attributes=lfs.attributes
-local walkdir=lfs.dir
-local isdir=lfs.isdir
-local isfile=lfs.isfile
-local currentdir=lfs.currentdir
-local chdir=lfs.chdir
-local mkdir=lfs.mkdir
-local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
-if not isdir then
- function isdir(name)
- local a=attributes(name)
- return a and a.mode=="directory"
- end
- lfs.isdir=isdir
-end
-if not isfile then
- function isfile(name)
- local a=attributes(name)
- return a and a.mode=="file"
- end
- lfs.isfile=isfile
-end
-function dir.current()
- return (gsub(currentdir(),"\\","/"))
-end
-local lfsisdir=isdir
-local function isdir(path)
- path=gsub(path,"[/\\]+$","")
- return lfsisdir(path)
-end
-lfs.isdir=isdir
-local function globpattern(path,patt,recurse,action)
- if path=="/" then
- path=path.."."
- elseif not find(path,"/$") then
- path=path..'/'
- end
- if isdir(path) then
- for name in walkdir(path) do
- local full=path..name
- local mode=attributes(full,'mode')
- if mode=='file' then
- if find(full,patt) then
- action(full)
- end
- elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
- globpattern(full,patt,recurse,action)
- end
- end
- end
-end
-dir.globpattern=globpattern
-local function collectpattern(path,patt,recurse,result)
- local ok,scanner,first
- result=result or {}
- if path=="/" then
- ok,scanner,first=xpcall(function() return walkdir(path..".") end,function() end)
- else
- ok,scanner,first=xpcall(function() return walkdir(path) end,function() end)
- end
- if ok and type(scanner)=="function" then
- if not find(path,"/$") then path=path..'/' end
- for name in scanner,first do
- local full=path..name
- local attr=attributes(full)
- local mode=attr.mode
- if mode=='file' then
- if find(full,patt) then
- result[name]=attr
- end
- elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
- attr.list=collectpattern(full,patt,recurse)
- result[name]=attr
- end
- end
- end
- return result
-end
-dir.collectpattern=collectpattern
-local separator
-if onwindows then
- local slash=S("/\\")/"/"
- pattern=Ct {
- [1]=(Cs(P(".")+slash^1)+Cs(R("az","AZ")*P(":")*slash^0)+Cc("./"))*V(2)*V(3),
- [2]=Cs(((1-S("*?/\\"))^0*slash)^0),
- [3]=Cs(P(1)^0)
- }
-else
- pattern=Ct {
- [1]=(C(P(".")+P("/")^1)+Cc("./"))*V(2)*V(3),
- [2]=C(((1-S("*?/"))^0*P("/"))^0),
- [3]=C(P(1)^0)
- }
-end
-local filter=Cs ((
- P("**")/".*"+P("*")/"[^/]*"+P("?")/"[^/]"+P(".")/"%%."+P("+")/"%%+"+P("-")/"%%-"+P(1)
-)^0 )
-local function glob(str,t)
- if type(t)=="function" then
- if type(str)=="table" then
- for s=1,#str do
- glob(str[s],t)
- end
- elseif isfile(str) then
- t(str)
- else
- local split=lpegmatch(pattern,str)
- if split then
- local root,path,base=split[1],split[2],split[3]
- local recurse=find(base,"%*%*")
- local start=root..path
- local result=lpegmatch(filter,start..base)
- globpattern(start,result,recurse,t)
- end
- end
- else
- if type(str)=="table" then
- local t=t or {}
- for s=1,#str do
- glob(str[s],t)
- end
- return t
- elseif isfile(str) then
- if t then
- t[#t+1]=str
- return t
- else
- return { str }
- end
- else
- local split=lpegmatch(pattern,str)
- if split then
- local t=t or {}
- local action=action or function(name) t[#t+1]=name end
- local root,path,base=split[1],split[2],split[3]
- local recurse=find(base,"%*%*")
- local start=root..path
- local result=lpegmatch(filter,start..base)
- globpattern(start,result,recurse,action)
- return t
- else
- return {}
- end
- end
- end
-end
-dir.glob=glob
-local function globfiles(path,recurse,func,files)
- if type(func)=="string" then
- local s=func
- func=function(name) return find(name,s) end
- end
- files=files or {}
- local noffiles=#files
- for name in walkdir(path) do
- if find(name,"^%.") then
- else
- local mode=attributes(name,'mode')
- if mode=="directory" then
- if recurse then
- globfiles(path.."/"..name,recurse,func,files)
- end
- elseif mode=="file" then
- if not func or func(name) then
- noffiles=noffiles+1
- files[noffiles]=path.."/"..name
- end
- end
- end
- end
- return files
-end
-dir.globfiles=globfiles
-function dir.ls(pattern)
- return concat(glob(pattern),"\n")
-end
-local make_indeed=true
-if onwindows then
- function dir.mkdirs(...)
- local n=select("#",...)
- local str
- if n==1 then
- str=select(1,...)
- if isdir(str) then
- return str,true
- end
- else
- str=""
- for i=1,n do
- local s=select(i,...)
- if s=="" then
- elseif str=="" then
- str=s
- else
- str=str.."/"..s
- end
- end
- end
- local pth=""
- local drive=false
- local first,middle,last=match(str,"^(//)(//*)(.*)$")
- if first then
- else
- first,last=match(str,"^(//)/*(.-)$")
- if first then
- middle,last=match(str,"([^/]+)/+(.-)$")
- if middle then
- pth="//"..middle
- else
- pth="//"..last
- last=""
- end
- else
- first,middle,last=match(str,"^([a-zA-Z]:)(/*)(.-)$")
- if first then
- pth,drive=first..middle,true
- else
- middle,last=match(str,"^(/*)(.-)$")
- if not middle then
- last=str
- end
- end
- end
- end
- for s in gmatch(last,"[^/]+") do
- if pth=="" then
- pth=s
- elseif drive then
- pth,drive=pth..s,false
- else
- pth=pth.."/"..s
- end
- if make_indeed and not isdir(pth) then
- mkdir(pth)
- end
- end
- return pth,(isdir(pth)==true)
- end
-else
- function dir.mkdirs(...)
- local n=select("#",...)
- local str,pth
- if n==1 then
- str=select(1,...)
- if isdir(str) then
- return str,true
- end
- else
- str=""
- for i=1,n do
- local s=select(i,...)
- if s and s~="" then
- if str~="" then
- str=str.."/"..s
- else
- str=s
- end
- end
- end
- end
- str=gsub(str,"/+","/")
- if find(str,"^/") then
- pth="/"
- for s in gmatch(str,"[^/]+") do
- local first=(pth=="/")
- if first then
- pth=pth..s
- else
- pth=pth.."/"..s
- end
- if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
- end
- end
- else
- pth="."
- for s in gmatch(str,"[^/]+") do
- pth=pth.."/"..s
- if make_indeed and not isdir(pth) then
- mkdir(pth)
- end
- end
- end
- return pth,(isdir(pth)==true)
- end
-end
-dir.makedirs=dir.mkdirs
-if onwindows then
- function dir.expandname(str)
- local first,nothing,last=match(str,"^(//)(//*)(.*)$")
- if first then
- first=dir.current().."/"
- end
- if not first then
- first,last=match(str,"^(//)/*(.*)$")
- end
- if not first then
- first,last=match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d=currentdir()
- if chdir(first) then
- first=dir.current()
- end
- chdir(d)
- end
- end
- if not first then
- first,last=dir.current(),str
- end
- last=gsub(last,"//","/")
- last=gsub(last,"/%./","/")
- last=gsub(last,"^/*","")
- first=gsub(first,"/*$","")
- if last=="" or last=="." then
- return first
- else
- return first.."/"..last
- end
- end
-else
- function dir.expandname(str)
- if not find(str,"^/") then
- str=currentdir().."/"..str
- end
- str=gsub(str,"//","/")
- str=gsub(str,"/%./","/")
- str=gsub(str,"(.)/%.$","%1")
- return str
- end
-end
-file.expandname=dir.expandname
-local stack={}
-function dir.push(newdir)
- insert(stack,currentdir())
- if newdir and newdir~="" then
- chdir(newdir)
- end
-end
-function dir.pop()
- local d=remove(stack)
- if d then
- chdir(d)
- end
- return d
-end
-local function found(...)
- for i=1,select("#",...) do
- local path=select(i,...)
- local kind=type(path)
- if kind=="string" then
- if isdir(path) then
- return path
- end
- elseif kind=="table" then
- local path=found(unpack(path))
- if path then
- return path
- end
- end
- end
-end
-dir.found=found
-
-
-end -- of closure
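A usage sketch of the directory helpers above (the paths are made up):

local luafiles=dir.glob("**/*.lua") -- recursive wildcard glob, returns a table of matching file names
local path=dir.mkdirs("tmp","demo") -- creates tmp/demo when missing, returns the path plus a success boolean
dir.push("tmp")                     -- remember the current directory and change to tmp
print(dir.current())
dir.pop()                           -- return to the remembered directory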
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-boolean"] = package.loaded["l-boolean"] or true
-
--- original size: 1809, stripped down to: 1527
-
-if not modules then modules={} end modules ['l-boolean']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local type,tonumber=type,tonumber
-boolean=boolean or {}
-local boolean=boolean
-function boolean.tonumber(b)
- if b then return 1 else return 0 end
-end
-function toboolean(str,tolerant)
- if str==nil then
- return false
- elseif str==false then
- return false
- elseif str==true then
- return true
- elseif str=="true" then
- return true
- elseif str=="false" then
- return false
- elseif not tolerant then
- return false
- elseif str==0 then
- return false
- elseif (tonumber(str) or 0)>0 then
- return true
- else
- return str=="yes" or str=="on" or str=="t"
- end
-end
-string.toboolean=toboolean
-function string.booleanstring(str)
- if str=="0" then
- return false
- elseif str=="1" then
- return true
- elseif str=="" then
- return false
- elseif str=="false" then
- return false
- elseif str=="true" then
- return true
- elseif (tonumber(str) or 0)>0 then
- return true
- else
- return str=="yes" or str=="on" or str=="t"
- end
-end
-function string.is_boolean(str,default)
- if type(str)=="string" then
- if str=="true" or str=="yes" or str=="on" or str=="t" or str=="1" then
- return true
- elseif str=="false" or str=="no" or str=="off" or str=="f" or str=="0" then
- return false
- end
- end
- return default
-end
-
-
-end -- of closure
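The strict and tolerant variants above differ as follows:

print(toboolean("yes"))                     -- false (only true and "true" qualify by default)
print(toboolean("yes",true))                -- true  (tolerant mode also accepts yes, on, t and numbers > 0)
print(string.is_boolean("off"))             -- false
print(string.is_boolean("maybe","unknown")) -- unknown (the given default is returned)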
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
-
--- original size: 33473, stripped down to: 14938
-
-if not modules then modules={} end modules ['l-unicode']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-utf=utf or (unicode and unicode.utf8) or {}
-utf.characters=utf.characters or string.utfcharacters
-utf.values=utf.values or string.utfvalues
-local type=type
-local char,byte,format,sub,gmatch=string.char,string.byte,string.format,string.sub,string.gmatch
-local concat=table.concat
-local P,C,R,Cs,Ct,Cmt,Cc,Carg,Cp=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Ct,lpeg.Cmt,lpeg.Cc,lpeg.Carg,lpeg.Cp
-local lpegmatch,patterns=lpeg.match,lpeg.patterns
-local bytepairs=string.bytepairs
-local finder=lpeg.finder
-local replacer=lpeg.replacer
-local utfvalues=utf.values
-local utfgmatch=utf.gmatch
-local p_utftype=patterns.utftype
-local p_utfstricttype=patterns.utfstricttype
-local p_utfoffset=patterns.utfoffset
-local p_utf8char=patterns.utf8char
-local p_utf8byte=patterns.utf8byte
-local p_utfbom=patterns.utfbom
-local p_newline=patterns.newline
-local p_whitespace=patterns.whitespace
-if not unicode then
- unicode={ utf=utf }
-end
-if not utf.char then
- local floor,char=math.floor,string.char
- function utf.char(n)
- if n<0x80 then
- return char(n)
- elseif n<0x800 then
- return char(
- 0xC0+floor(n/0x40),
- 0x80+(n%0x40)
- )
- elseif n<0x10000 then
- return char(
- 0xE0+floor(n/0x1000),
- 0x80+(floor(n/0x40)%0x40),
- 0x80+(n%0x40)
- )
- elseif n<0x200000 then
- return char(
- 0xF0+floor(n/0x40000),
- 0x80+(floor(n/0x1000)%0x40),
- 0x80+(floor(n/0x40)%0x40),
- 0x80+(n%0x40)
- )
- else
- return ""
- end
- end
-end
-if not utf.byte then
- local utf8byte=patterns.utf8byte
- function utf.byte(c)
- return lpegmatch(utf8byte,c)
- end
-end
-local utfchar,utfbyte=utf.char,utf.byte
-function utf.filetype(data)
- return data and lpegmatch(p_utftype,data) or "unknown"
-end
-local toentities=Cs (
- (
- patterns.utf8one+(
- patterns.utf8two+patterns.utf8three+patterns.utf8four
- )/function(s) local b=utfbyte(s) if b<127 then return s else return format("&#%X;",b) end end
- )^0
-)
-patterns.toentities=toentities
-function utf.toentities(str)
- return lpegmatch(toentities,str)
-end
-local one=P(1)
-local two=C(1)*C(1)
-local four=C(R(utfchar(0xD8),utfchar(0xFF)))*C(1)*C(1)*C(1)
-local pattern=P("\254\255")*Cs((
- four/function(a,b,c,d)
- local ab=0x100*byte(a)+byte(b)
- local cd=0x100*byte(c)+byte(d)
- return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
- end+two/function(a,b)
- return utfchar(byte(a)*256+byte(b))
- end+one
- )^1 )+P("\255\254")*Cs((
- four/function(b,a,d,c)
- local ab=0x100*byte(a)+byte(b)
- local cd=0x100*byte(c)+byte(d)
- return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
- end+two/function(b,a)
- return utfchar(byte(a)*256+byte(b))
- end+one
- )^1 )
-function string.toutf(s)
- return lpegmatch(pattern,s) or s
-end
-local validatedutf=Cs (
- (
- patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four+P(1)/"�"
- )^0
-)
-patterns.validatedutf=validatedutf
-function utf.is_valid(str)
- return type(str)=="string" and lpegmatch(validatedutf,str) or false
-end
-if not utf.len then
- local n,f=0,1
- local utfcharcounter=patterns.utfbom^-1*Cmt (
- Cc(1)*patterns.utf8one^1+Cc(2)*patterns.utf8two^1+Cc(3)*patterns.utf8three^1+Cc(4)*patterns.utf8four^1,
- function(_,t,d)
- n=n+(t-f)/d
- f=t
- return true
- end
- )^0
- function utf.len(str)
- n,f=0,1
- lpegmatch(utfcharcounter,str or "")
- return n
- end
-end
-utf.length=utf.len
-if not utf.sub then
- local utflength=utf.length
- local b,e,n,first,last=0,0,0,0,0
- local function slide_zero(s,p)
- n=n+1
- if n>=last then
- e=p-1
- else
- return p
- end
- end
- local function slide_one(s,p)
- n=n+1
- if n==first then
- b=p
- end
- if n>=last then
- e=p-1
- else
- return p
- end
- end
- local function slide_two(s,p)
- n=n+1
- if n==first then
- b=p
- else
- return true
- end
- end
- local pattern_zero=Cmt(p_utf8char,slide_zero)^0
- local pattern_one=Cmt(p_utf8char,slide_one )^0
- local pattern_two=Cmt(p_utf8char,slide_two )^0
- function utf.sub(str,start,stop)
- if not start then
- return str
- end
- if start==0 then
- start=1
- end
- if not stop then
- if start<0 then
- local l=utflength(str)
- start=l+start
- else
- start=start-1
- end
- b,n,first=0,0,start
- lpegmatch(pattern_two,str)
- if n>=first then
- return sub(str,b)
- else
- return ""
- end
- end
- if start<0 or stop<0 then
- local l=utf.length(str)
- if start<0 then
- start=l+start
- if start<=0 then
- start=1
- else
- start=start+1
- end
- end
- if stop<0 then
- stop=l+stop
- if stop==0 then
- stop=1
- else
- stop=stop+1
- end
- end
- end
- if start>stop then
- return ""
- elseif start>1 then
- b,e,n,first,last=0,0,0,start-1,stop
- lpegmatch(pattern_one,str)
- if n>=first and e==0 then
- e=#str
- end
- return sub(str,b,e)
- else
- b,e,n,last=1,0,0,stop
- lpegmatch(pattern_zero,str)
- if e==0 then
- e=#str
- end
- return sub(str,b,e)
- end
- end
-end
-function utf.remapper(mapping)
- local pattern=Cs((p_utf8char/mapping)^0)
- return function(str)
- if not str or str=="" then
- return ""
- else
- return lpegmatch(pattern,str)
- end
- end,pattern
-end
-function utf.replacer(t)
- local r=replacer(t,false,false,true)
- return function(str)
- return lpegmatch(r,str)
- end
-end
-function utf.subtituter(t)
- local f=finder (t)
- local r=replacer(t,false,false,true)
- return function(str)
- local i=lpegmatch(f,str)
- if not i then
- return str
- elseif i>#str then
- return str
- else
- return lpegmatch(r,str)
- end
- end
-end
-local utflinesplitter=p_utfbom^-1*lpeg.tsplitat(p_newline)
-local utfcharsplitter_ows=p_utfbom^-1*Ct(C(p_utf8char)^0)
-local utfcharsplitter_iws=p_utfbom^-1*Ct((p_whitespace^1+C(p_utf8char))^0)
-local utfcharsplitter_raw=Ct(C(p_utf8char)^0)
-patterns.utflinesplitter=utflinesplitter
-function utf.splitlines(str)
- return lpegmatch(utflinesplitter,str or "")
-end
-function utf.split(str,ignorewhitespace)
- if ignorewhitespace then
- return lpegmatch(utfcharsplitter_iws,str or "")
- else
- return lpegmatch(utfcharsplitter_ows,str or "")
- end
-end
-function utf.totable(str)
- return lpegmatch(utfcharsplitter_raw,str)
-end
-function utf.magic(f)
- local str=f:read(4) or ""
- local off=lpegmatch(p_utfoffset,str)
- if off<4 then
- f:seek('set',off)
- end
- return lpegmatch(p_utftype,str)
-end
-local utf16_to_utf8_be,utf16_to_utf8_le
-local utf32_to_utf8_be,utf32_to_utf8_le
-local utf_16_be_linesplitter=patterns.utfbom_16_be^-1*lpeg.tsplitat(patterns.utf_16_be_nl)
-local utf_16_le_linesplitter=patterns.utfbom_16_le^-1*lpeg.tsplitat(patterns.utf_16_le_nl)
-if bytepairs then
- utf16_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_be_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in bytepairs(t[i]) do
- if right then
- local now=256*left+right
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
- end
- utf16_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_le_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in bytepairs(t[i]) do
- if right then
- local now=256*right+left
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
- end
- utf32_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utflinesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,-1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more<0 then
- more=256*256*256*a+256*256*b
- else
- r=r+1
- result[r]=utfchar(more+256*a+b)
- more=-1
- end
- else
- break
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
- end
- utf32_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utflinesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,-1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more<0 then
- more=256*b+a
- else
- r=r+1
- result[r]=utfchar(more+256*256*256*b+256*256*a)
- more=-1
- end
- else
- break
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
- end
-else
- utf16_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_be_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in gmatch(t[i],"(.)(.)") do
- if left=="\000" then
- r=r+1
- result[r]=utfchar(byte(right))
- elseif right then
- local now=256*byte(left)+byte(right)
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
- end
- utf16_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_le_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in gmatch(t[i],"(.)(.)") do
- if right=="\000" then
- r=r+1
- result[r]=utfchar(byte(left))
- elseif right then
- local now=256*byte(right)+byte(left)
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
- end
- utf32_to_utf8_le=function() return {} end
- utf32_to_utf8_be=function() return {} end
-end
-utf.utf16_to_utf8_le=utf16_to_utf8_le
-utf.utf16_to_utf8_be=utf16_to_utf8_be
-utf.utf32_to_utf8_le=utf32_to_utf8_le
-utf.utf32_to_utf8_be=utf32_to_utf8_be
-function utf.utf8_to_utf8(t)
- return type(t)=="string" and lpegmatch(utflinesplitter,t) or t
-end
-function utf.utf16_to_utf8(t,endian)
- return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
-end
-function utf.utf32_to_utf8(t,endian)
- return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
-end
-local function little(c)
- local b=byte(c)
- if b<0x10000 then
- return char(b%256,b/256)
- else
- b=b-0x10000
- local b1,b2=b/1024+0xD800,b%1024+0xDC00
- return char(b1%256,b1/256,b2%256,b2/256)
- end
-end
-local function big(c)
- local b=byte(c)
- if b<0x10000 then
- return char(b/256,b%256)
- else
- b=b-0x10000
- local b1,b2=b/1024+0xD800,b%1024+0xDC00
- return char(b1/256,b1%256,b2/256,b2%256)
- end
-end
-local _,l_remap=utf.remapper(little)
-local _,b_remap=utf.remapper(big)
-function utf.utf8_to_utf16_be(str,nobom)
- if nobom then
- return lpegmatch(b_remap,str)
- else
- return char(254,255)..lpegmatch(b_remap,str)
- end
-end
-function utf.utf8_to_utf16_le(str,nobom)
- if nobom then
- return lpegmatch(l_remap,str)
- else
- return char(255,254)..lpegmatch(l_remap,str)
- end
-end
-function utf.utf8_to_utf16(str,littleendian,nobom)
- if littleendian then
- return utf.utf8_to_utf16_le(str,nobom)
- else
- return utf.utf8_to_utf16_be(str,nobom)
- end
-end
-local pattern=Cs (
- (p_utf8byte/function(unicode ) return format("0x%04X",unicode) end)*(p_utf8byte*Carg(1)/function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0
-)
-function utf.tocodes(str,separator)
- return lpegmatch(pattern,str,1,separator or " ")
-end
-function utf.ustring(s)
- return format("U+%05X",type(s)=="number" and s or utfbyte(s))
-end
-function utf.xstring(s)
- return format("0x%05X",type(s)=="number" and s or utfbyte(s))
-end
-function utf.toeight(str)
- if not str then
- return nil
- end
- local utftype=lpegmatch(p_utfstricttype,str)
- if utftype=="utf-8" then
- return sub(str,4)
- elseif utftype=="utf-16-le" then
- return utf16_to_utf8_le(str)
- elseif utftype=="utf-16-be" then
- return utf16_to_utf8_be(str)
- else
- return str
- end
-end
-local p_nany=p_utf8char/""
-if utfgmatch then
- function utf.count(str,what)
- if type(what)=="string" then
- local n=0
- for _ in utfgmatch(str,what) do
- n=n+1
- end
- return n
- else
- return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
- end
- end
-else
- local cache={}
- function utf.count(str,what)
- if type(what)=="string" then
- local p=cache[what]
- if not p then
- p=Cs((P(what)/" "+p_nany)^0)
- cache[what]=p
- end
- return #lpegmatch(p,str)
- else
- return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
- end
- end
-end
-if not utf.characters then
- function utf.characters(str)
- return gmatch(str,".[\128-\191]*")
- end
- string.utfcharacters=utf.characters
-end
-if not utf.values then
- local find=string.find
- local dummy=function()
- end
- function utf.values(str)
- local n=#str
- if n==0 then
- return dummy
- elseif n==1 then
- return function() return utfbyte(str) end
- else
- local p=1
- return function()
- local b,e=find(str,".[\128-\191]*",p)
- if b then
- p=e+1
- return utfbyte(sub(str,b,e))
- end
- end
- end
- end
- string.utfvalues=utf.values
-end
-function utf.chrlen(u)
- return
- (u<0x80 and 1) or
- (u<0xE0 and 2) or
- (u<0xF0 and 3) or
- (u<0xF8 and 4) or
- (u<0xFC and 5) or
- (u<0xFE and 6) or 0
-end
-
-
-end -- of closure
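A few of the utf helpers above in action:

print(utf.char(0x00E9))      -- é (encoded as the two bytes C3 A9)
print(utf.byte("é"))         -- 233
print(utf.len("déjà vu"))    -- 7
print(utf.tocodes("déjà"))   -- 0x0064 0x00E9 0x006A 0x00E0
local t=utf.split("déjà")    -- { "d", "é", "j", "à" }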
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["l-math"] = package.loaded["l-math"] or true
-
--- original size: 915, stripped down to: 836
-
-if not modules then modules={} end modules ['l-math']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
-if not math.round then
- function math.round(x) return floor(x+0.5) end
-end
-if not math.div then
- function math.div(n,m) return floor(n/m) end
-end
-if not math.mod then
- function math.mod(n,m) return n%m end
-end
-local pipi=2*math.pi/360
-if not math.sind then
- function math.sind(d) return sin(d*pipi) end
- function math.cosd(d) return cos(d*pipi) end
- function math.tand(d) return tan(d*pipi) end
-end
-if not math.odd then
- function math.odd (n) return n%2~=0 end
- function math.even(n) return n%2==0 end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-str"] = package.loaded["util-str"] or true
-
--- original size: 29502, stripped down to: 16632
-
-if not modules then modules={} end modules ['util-str']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-utilities=utilities or {}
-utilities.strings=utilities.strings or {}
-local strings=utilities.strings
-local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
-local load,dump=load,string.dump
-local tonumber,type,tostring=tonumber,type,tostring
-local unpack,concat=table.unpack,table.concat
-local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
-local patterns,lpegmatch=lpeg.patterns,lpeg.match
-local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=nil
-if _LUAVERSION<5.2 then
- loadstripped=function(str,shortcuts)
- return load(str)
- end
-else
- loadstripped=function(str,shortcuts)
- if shortcuts then
- return load(dump(load(str),true),nil,nil,shortcuts)
- else
- return load(dump(load(str),true))
- end
- end
-end
-if not number then number={} end
-local stripper=patterns.stripzeros
-local function points(n)
- return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
-end
-local function basepoints(n)
- return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
-end
-number.points=points
-number.basepoints=basepoints
-local rubish=patterns.spaceortab^0*patterns.newline
-local anyrubish=patterns.spaceortab+patterns.newline
-local anything=patterns.anything
-local stripped=(patterns.spaceortab^1/"")*patterns.newline
-local leading=rubish^0/""
-local trailing=(anyrubish^1*patterns.endofstring)/""
-local redundant=rubish^3/"\n"
-local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
-function strings.collapsecrlf(str)
- return lpegmatch(pattern,str)
-end
-local repeaters={}
-function strings.newrepeater(str,offset)
- offset=offset or 0
- local s=repeaters[str]
- if not s then
- s={}
- repeaters[str]=s
- end
- local t=s[offset]
- if t then
- return t
- end
- t={}
- setmetatable(t,{ __index=function(t,k)
- if not k then
- return ""
- end
- local n=k+offset
- local s=n>0 and rep(str,n) or ""
- t[k]=s
- return s
- end })
- s[offset]=t
- return t
-end
-local extra,tab,start=0,0,4,0
-local nspaces=strings.newrepeater(" ")
-string.nspaces=nspaces
-local pattern=Carg(1)/function(t)
- extra,tab,start=0,t or 7,1
- end*Cs((
- Cp()*patterns.tab/function(position)
- local current=(position-start+1)+extra
- local spaces=tab-(current-1)%tab
- if spaces>0 then
- extra=extra+spaces-1
- return nspaces[spaces]
- else
- return ""
- end
- end+patterns.newline*Cp()/function(position)
- extra,start=0,position
- end+patterns.anything
- )^1)
-function strings.tabtospace(str,tab)
- return lpegmatch(pattern,str,1,tab or 7)
-end
-function strings.striplong(str)
- str=gsub(str,"^%s*","")
- str=gsub(str,"[\n\r]+ *","\n")
- return str
-end
-function strings.nice(str)
- str=gsub(str,"[:%-+_]+"," ")
- return str
-end
-local n=0
-local sequenced=table.sequenced
-function string.autodouble(s,sep)
- if s==nil then
- return '""'
- end
- local t=type(s)
- if t=="number" then
- return tostring(s)
- end
- if t=="table" then
- return ('"'..sequenced(s,sep or ",")..'"')
- end
- return ('"'..tostring(s)..'"')
-end
-function string.autosingle(s,sep)
- if s==nil then
- return "''"
- end
- local t=type(s)
- if t=="number" then
- return tostring(s)
- end
- if t=="table" then
- return ("'"..sequenced(s,sep or ",").."'")
- end
- return ("'"..tostring(s).."'")
-end
-local tracedchars={}
-string.tracedchars=tracedchars
-strings.tracers=tracedchars
-function string.tracedchar(b)
- if type(b)=="number" then
- return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
- else
- local c=utfbyte(b)
- return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
- end
-end
-function number.signed(i)
- if i>0 then
- return "+",i
- else
- return "-",-i
- end
-end
-local zero=P("0")^1/""
-local plus=P("+")/""
-local minus=P("-")
-local separator=S(".")
-local digit=R("09")
-local trailing=zero^1*#S("eE")
-local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1))
-local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent)
-local pattern_b=Cs((exponent+P(1))^0)
-function number.sparseexponent(f,n)
- if not n then
- n=f
- f="%e"
- end
- local tn=type(n)
- if tn=="string" then
- local m=tonumber(n)
- if m then
- return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m))
- end
- elseif tn=="number" then
- return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n))
- end
- return tostring(n)
-end
-local template=[[
-%s
-%s
-return function(%s) return %s end
-]]
-local preamble,environment="",{}
-if _LUAVERSION<5.2 then
- preamble=[[
-local lpeg=lpeg
-local type=type
-local tostring=tostring
-local tonumber=tonumber
-local format=string.format
-local concat=table.concat
-local signed=number.signed
-local points=number.points
-local basepoints= number.basepoints
-local utfchar=utf.char
-local utfbyte=utf.byte
-local lpegmatch=lpeg.match
-local nspaces=string.nspaces
-local tracedchar=string.tracedchar
-local autosingle=string.autosingle
-local autodouble=string.autodouble
-local sequenced=table.sequenced
-local formattednumber=number.formatted
-local sparseexponent=number.sparseexponent
- ]]
-else
- environment={
- global=global or _G,
- lpeg=lpeg,
- type=type,
- tostring=tostring,
- tonumber=tonumber,
- format=string.format,
- concat=table.concat,
- signed=number.signed,
- points=number.points,
- basepoints=number.basepoints,
- utfchar=utf.char,
- utfbyte=utf.byte,
- lpegmatch=lpeg.match,
- nspaces=string.nspaces,
- tracedchar=string.tracedchar,
- autosingle=string.autosingle,
- autodouble=string.autodouble,
- sequenced=table.sequenced,
- formattednumber=number.formatted,
- sparseexponent=number.sparseexponent,
- }
-end
-local arguments={ "a1" }
-setmetatable(arguments,{ __index=function(t,k)
- local v=t[k-1]..",a"..k
- t[k]=v
- return v
- end
-})
-local prefix_any=C((S("+- .")+R("09"))^0)
-local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0)
-local format_s=function(f)
- n=n+1
- if f and f~="" then
- return format("format('%%%ss',a%s)",f,n)
- else
- return format("(a%s or '')",n)
- end
-end
-local format_S=function(f)
- n=n+1
- if f and f~="" then
- return format("format('%%%ss',tostring(a%s))",f,n)
- else
- return format("tostring(a%s)",n)
- end
-end
-local format_q=function()
- n=n+1
- return format("(a%s and format('%%q',a%s) or '')",n,n)
-end
-local format_Q=function()
- n=n+1
- return format("format('%%q',tostring(a%s))",n)
-end
-local format_i=function(f)
- n=n+1
- if f and f~="" then
- return format("format('%%%si',a%s)",f,n)
- else
- return format("format('%%i',a%s)",n)
- end
-end
-local format_d=format_i
-local format_I=function(f)
- n=n+1
- return format("format('%%s%%%si',signed(a%s))",f,n)
-end
-local format_f=function(f)
- n=n+1
- return format("format('%%%sf',a%s)",f,n)
-end
-local format_g=function(f)
- n=n+1
- return format("format('%%%sg',a%s)",f,n)
-end
-local format_G=function(f)
- n=n+1
- return format("format('%%%sG',a%s)",f,n)
-end
-local format_e=function(f)
- n=n+1
- return format("format('%%%se',a%s)",f,n)
-end
-local format_E=function(f)
- n=n+1
- return format("format('%%%sE',a%s)",f,n)
-end
-local format_j=function(f)
- n=n+1
- return format("sparseexponent('%%%se',a%s)",f,n)
-end
-local format_J=function(f)
- n=n+1
- return format("sparseexponent('%%%sE',a%s)",f,n)
-end
-local format_x=function(f)
- n=n+1
- return format("format('%%%sx',a%s)",f,n)
-end
-local format_X=function(f)
- n=n+1
- return format("format('%%%sX',a%s)",f,n)
-end
-local format_o=function(f)
- n=n+1
- return format("format('%%%so',a%s)",f,n)
-end
-local format_c=function()
- n=n+1
- return format("utfchar(a%s)",n)
-end
-local format_C=function()
- n=n+1
- return format("tracedchar(a%s)",n)
-end
-local format_r=function(f)
- n=n+1
- return format("format('%%%s.0f',a%s)",f,n)
-end
-local format_h=function(f)
- n=n+1
- if f=="-" then
- f=sub(f,2)
- return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
- else
- return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
- end
-end
-local format_H=function(f)
- n=n+1
- if f=="-" then
- f=sub(f,2)
- return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
- else
- return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
- end
-end
-local format_u=function(f)
- n=n+1
- if f=="-" then
- f=sub(f,2)
- return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
- else
- return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
- end
-end
-local format_U=function(f)
- n=n+1
- if f=="-" then
- f=sub(f,2)
- return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
- else
- return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
- end
-end
-local format_p=function()
- n=n+1
- return format("points(a%s)",n)
-end
-local format_b=function()
- n=n+1
- return format("basepoints(a%s)",n)
-end
-local format_t=function(f)
- n=n+1
- if f and f~="" then
- return format("concat(a%s,%q)",n,f)
- else
- return format("concat(a%s)",n)
- end
-end
-local format_T=function(f)
- n=n+1
- if f and f~="" then
- return format("sequenced(a%s,%q)",n,f)
- else
- return format("sequenced(a%s)",n)
- end
-end
-local format_l=function()
- n=n+1
- return format("(a%s and 'true' or 'false')",n)
-end
-local format_L=function()
- n=n+1
- return format("(a%s and 'TRUE' or 'FALSE')",n)
-end
-local format_N=function()
- n=n+1
- return format("tostring(tonumber(a%s) or a%s)",n,n)
-end
-local format_a=function(f)
- n=n+1
- if f and f~="" then
- return format("autosingle(a%s,%q)",n,f)
- else
- return format("autosingle(a%s)",n)
- end
-end
-local format_A=function(f)
- n=n+1
- if f and f~="" then
- return format("autodouble(a%s,%q)",n,f)
- else
- return format("autodouble(a%s)",n)
- end
-end
-local format_w=function(f)
- n=n+1
- f=tonumber(f)
- if f then
- return format("nspaces[%s+a%s]",f,n)
- else
- return format("nspaces[a%s]",n)
- end
-end
-local format_W=function(f)
- return format("nspaces[%s]",tonumber(f) or 0)
-end
-local digit=patterns.digit
-local period=patterns.period
-local three=digit*digit*digit
-local splitter=Cs (
- (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2)
-)
-patterns.formattednumber=splitter
-function number.formatted(n,sep1,sep2)
- local s=type(n)=="string" and n or format("%0.2f",n)
- if sep1==true then
- return lpegmatch(splitter,s,1,".",",")
- elseif sep1=="." then
- return lpegmatch(splitter,s,1,sep1,sep2 or ",")
- elseif sep1=="," then
- return lpegmatch(splitter,s,1,sep1,sep2 or ".")
- else
- return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".")
- end
-end
-local format_m=function(f)
- n=n+1
- if not f or f=="" then
- f=","
- end
- return format([[formattednumber(a%s,%q,".")]],n,f)
-end
-local format_M=function(f)
- n=n+1
- if not f or f=="" then
- f="."
- end
- return format([[formattednumber(a%s,%q,",")]],n,f)
-end
-local format_z=function(f)
- n=n+(tonumber(f) or 1)
- return "''"
-end
-local format_rest=function(s)
- return format("%q",s)
-end
-local format_extension=function(extensions,f,name)
- local extension=extensions[name] or "tostring(%s)"
- local f=tonumber(f) or 1
- if f==0 then
- return extension
- elseif f==1 then
- n=n+1
- local a="a"..n
- return format(extension,a,a)
- elseif f<0 then
- local a="a"..(n+f+1)
- return format(extension,a,a)
- else
- local t={}
- for i=1,f do
- n=n+1
- t[#t+1]="a"..n
- end
- return format(extension,unpack(t))
- end
-end
-local builder=Cs { "start",
- start=(
- (
- P("%")/""*(
- V("!")
-+V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
-+V("c")+V("C")+V("S")
-+V("Q")
-+V("N")
-+V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w")
-+V("W")
-+V("a")
-+V("A")
-+V("j")+V("J")
-+V("m")+V("M")
-+V("z")
-+V("*")
- )+V("*")
- )*(P(-1)+Carg(1))
- )^0,
- ["s"]=(prefix_any*P("s"))/format_s,
- ["q"]=(prefix_any*P("q"))/format_q,
- ["i"]=(prefix_any*P("i"))/format_i,
- ["d"]=(prefix_any*P("d"))/format_d,
- ["f"]=(prefix_any*P("f"))/format_f,
- ["g"]=(prefix_any*P("g"))/format_g,
- ["G"]=(prefix_any*P("G"))/format_G,
- ["e"]=(prefix_any*P("e"))/format_e,
- ["E"]=(prefix_any*P("E"))/format_E,
- ["x"]=(prefix_any*P("x"))/format_x,
- ["X"]=(prefix_any*P("X"))/format_X,
- ["o"]=(prefix_any*P("o"))/format_o,
- ["S"]=(prefix_any*P("S"))/format_S,
- ["Q"]=(prefix_any*P("Q"))/format_S,
- ["N"]=(prefix_any*P("N"))/format_N,
- ["c"]=(prefix_any*P("c"))/format_c,
- ["C"]=(prefix_any*P("C"))/format_C,
- ["r"]=(prefix_any*P("r"))/format_r,
- ["h"]=(prefix_any*P("h"))/format_h,
- ["H"]=(prefix_any*P("H"))/format_H,
- ["u"]=(prefix_any*P("u"))/format_u,
- ["U"]=(prefix_any*P("U"))/format_U,
- ["p"]=(prefix_any*P("p"))/format_p,
- ["b"]=(prefix_any*P("b"))/format_b,
- ["t"]=(prefix_tab*P("t"))/format_t,
- ["T"]=(prefix_tab*P("T"))/format_T,
- ["l"]=(prefix_any*P("l"))/format_l,
- ["L"]=(prefix_any*P("L"))/format_L,
- ["I"]=(prefix_any*P("I"))/format_I,
- ["w"]=(prefix_any*P("w"))/format_w,
- ["W"]=(prefix_any*P("W"))/format_W,
- ["j"]=(prefix_any*P("j"))/format_j,
- ["J"]=(prefix_any*P("J"))/format_J,
- ["m"]=(prefix_tab*P("m"))/format_m,
- ["M"]=(prefix_tab*P("M"))/format_M,
- ["z"]=(prefix_any*P("z"))/format_z,
- ["a"]=(prefix_any*P("a"))/format_a,
- ["A"]=(prefix_any*P("A"))/format_A,
- ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
- ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
-}
-local direct=Cs (
- P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
-)
-local function make(t,str)
- local f
- local p
- local p=lpegmatch(direct,str)
- if p then
- f=loadstripped(p)()
- else
- n=0
- p=lpegmatch(builder,str,1,"..",t._extensions_)
- if n>0 then
- p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p,t._environment_)()
- else
- f=function() return str end
- end
- end
- t[str]=f
- return f
-end
-local function use(t,fmt,...)
- return t[fmt](...)
-end
-strings.formatters={}
-if _LUAVERSION<5.2 then
- function strings.formatters.new()
- local t={ _extensions_={},_preamble_=preamble,_environment_={},_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
-else
- function strings.formatters.new()
- local e={}
- for k,v in next,environment do
- e[k]=v
- end
- local t={ _extensions_={},_preamble_="",_environment_=e,_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
-end
-local formatters=strings.formatters.new()
-string.formatters=formatters
-string.formatter=function(str,...) return formatters[str](...) end
-local function add(t,name,template,preamble)
- if type(t)=="table" and t._type_=="formatter" then
- t._extensions_[name]=template or "%s"
- if type(preamble)=="string" then
- t._preamble_=preamble.."\n"..t._preamble_
- elseif type(preamble)=="table" then
- for k,v in next,preamble do
- t._environment_[k]=v
- end
- end
- end
-end
-strings.formatters.add=add
-patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
-patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
-patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
-patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-if _LUAVERSION<5.2 then
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
-else
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
-end
-
-
-end -- of closure
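The formatters defined above accept the usual directives plus the extra ones (%p, %l and friends); a small sketch:

local f=string.formatters["%s is %p and takes %0.3f sec"]
print(f("width",10*65536,1/3))                  -- width is 10pt and takes 0.333 sec
print(string.formatter("%l and %L",true,false)) -- true and FALSE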
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-tab"] = package.loaded["util-tab"] or true
-
--- original size: 23980, stripped down to: 16119
-
-if not modules then modules={} end modules ['util-tab']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-utilities=utilities or {}
-utilities.tables=utilities.tables or {}
-local tables=utilities.tables
-local format,gmatch,gsub,sub=string.format,string.gmatch,string.gsub,string.sub
-local concat,insert,remove=table.concat,table.insert,table.remove
-local setmetatable,getmetatable,tonumber,tostring=setmetatable,getmetatable,tonumber,tostring
-local type,next,rawset,tonumber,tostring,load,select=type,next,rawset,tonumber,tostring,load,select
-local lpegmatch,P,Cs,Cc=lpeg.match,lpeg.P,lpeg.Cs,lpeg.Cc
-local sortedkeys,sortedpairs=table.sortedkeys,table.sortedpairs
-local formatters=string.formatters
-local utftoeight=utf.toeight
-local splitter=lpeg.tsplitat(".")
-function tables.definetable(target,nofirst,nolast)
- local composed,shortcut,t=nil,nil,{}
- local snippets=lpegmatch(splitter,target)
- for i=1,#snippets-(nolast and 1 or 0) do
- local name=snippets[i]
- if composed then
- composed=shortcut.."."..name
- shortcut=shortcut.."_"..name
- t[#t+1]=formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
- else
- composed=name
- shortcut=name
- if not nofirst then
- t[#t+1]=formatters["%s = %s or { }"](composed,composed)
- end
- end
- end
- if nolast then
- composed=shortcut.."."..snippets[#snippets]
- end
- return concat(t,"\n"),composed
-end
-function tables.definedtable(...)
- local t=_G
- for i=1,select("#",...) do
- local li=select(i,...)
- local tl=t[li]
- if not tl then
- tl={}
- t[li]=tl
- end
- t=tl
- end
- return t
-end
-function tables.accesstable(target,root)
- local t=root or _G
- for name in gmatch(target,"([^%.]+)") do
- t=t[name]
- if not t then
- return
- end
- end
- return t
-end
-function tables.migratetable(target,v,root)
- local t=root or _G
- local names=string.split(target,".")
- for i=1,#names-1 do
- local name=names[i]
- t[name]=t[name] or {}
- t=t[name]
- if not t then
- return
- end
- end
- t[names[#names]]=v
-end
-function tables.removevalue(t,value)
- if value then
- for i=1,#t do
- if t[i]==value then
- remove(t,i)
- end
- end
- end
-end
-function tables.insertbeforevalue(t,value,extra)
- for i=1,#t do
- if t[i]==extra then
- remove(t,i)
- end
- end
- for i=1,#t do
- if t[i]==value then
- insert(t,i,extra)
- return
- end
- end
- insert(t,1,extra)
-end
-function tables.insertaftervalue(t,value,extra)
- for i=1,#t do
- if t[i]==extra then
- remove(t,i)
- end
- end
- for i=1,#t do
- if t[i]==value then
- insert(t,i+1,extra)
- return
- end
- end
- insert(t,#t+1,extra)
-end
-local escape=Cs(Cc('"')*((P('"')/'""'+P(1))^0)*Cc('"'))
-function table.tocsv(t,specification)
- if t and #t>0 then
- local result={}
- local r={}
- specification=specification or {}
- local fields=specification.fields
- if type(fields)~="table" then
- fields=sortedkeys(t[1])
- end
- local separator=specification.separator or ","
- if specification.preamble==true then
- for f=1,#fields do
- r[f]=lpegmatch(escape,tostring(fields[f]))
- end
- result[1]=concat(r,separator)
- end
- for i=1,#t do
- local ti=t[i]
- for f=1,#fields do
- local field=ti[fields[f]]
- if type(field)=="string" then
- r[f]=lpegmatch(escape,field)
- else
- r[f]=tostring(field)
- end
- end
- result[#result+1]=concat(r,separator)
- end
- return concat(result,"\n")
- else
- return ""
- end
-end
-local nspaces=utilities.strings.newrepeater(" ")
-local function toxml(t,d,result,step)
- for k,v in sortedpairs(t) do
- local s=nspaces[d]
- local tk=type(k)
- local tv=type(v)
- if tv=="table" then
- if tk=="number" then
- result[#result+1]=formatters["%s<entry n='%s'>"](s,k)
- toxml(v,d+step,result,step)
- result[#result+1]=formatters["%s</entry>"](s,k)
- else
- result[#result+1]=formatters["%s<%s>"](s,k)
- toxml(v,d+step,result,step)
- result[#result+1]=formatters["%s</%s>"](s,k)
- end
- elseif tv=="string" then
- if tk=="number" then
- result[#result+1]=formatters["%s<entry n='%s'>%!xml!</entry>"](s,k,v,k)
- else
- result[#result+1]=formatters["%s<%s>%!xml!</%s>"](s,k,v,k)
- end
- elseif tk=="number" then
- result[#result+1]=formatters["%s<entry n='%s'>%S</entry>"](s,k,v,k)
- else
- result[#result+1]=formatters["%s<%s>%S</%s>"](s,k,v,k)
- end
- end
-end
-function table.toxml(t,specification)
- specification=specification or {}
- local name=specification.name
- local noroot=name==false
- local result=(specification.nobanner or noroot) and {} or { "<?xml version='1.0' standalone='yes' ?>" }
- local indent=specification.indent or 0
- local spaces=specification.spaces or 1
- if noroot then
- toxml(t,indent,result,spaces)
- else
- toxml({ [name or "data"]=t },indent,result,spaces)
- end
- return concat(result,"\n")
-end
-function tables.encapsulate(core,capsule,protect)
- if type(capsule)~="table" then
- protect=true
- capsule={}
- end
- for key,value in next,core do
- if capsule[key] then
- print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core))
- os.exit()
- else
- capsule[key]=value
- end
- end
- if protect then
- for key,value in next,core do
- core[key]=nil
- end
- setmetatable(core,{
- __index=capsule,
- __newindex=function(t,key,value)
- if capsule[key] then
- print(formatters["\ninvalid %s %a' in %a"]("overload",key,core))
- os.exit()
- else
- rawset(t,key,value)
- end
- end
- } )
- end
-end
-local f_hashed_string=formatters["[%q]=%q,"]
-local f_hashed_number=formatters["[%q]=%s,"]
-local f_hashed_boolean=formatters["[%q]=%l,"]
-local f_hashed_table=formatters["[%q]="]
-local f_indexed_string=formatters["[%s]=%q,"]
-local f_indexed_number=formatters["[%s]=%s,"]
-local f_indexed_boolean=formatters["[%s]=%l,"]
-local f_indexed_table=formatters["[%s]="]
-local f_ordered_string=formatters["%q,"]
-local f_ordered_number=formatters["%s,"]
-local f_ordered_boolean=formatters["%l,"]
-function table.fastserialize(t,prefix)
- local r={ type(prefix)=="string" and prefix or "return" }
- local m=1
- local function fastserialize(t,outer)
- local n=#t
- m=m+1
- r[m]="{"
- if n>0 then
- for i=0,n do
- local v=t[i]
- local tv=type(v)
- if tv=="string" then
- m=m+1 r[m]=f_ordered_string(v)
- elseif tv=="number" then
- m=m+1 r[m]=f_ordered_number(v)
- elseif tv=="table" then
- fastserialize(v)
- elseif tv=="boolean" then
- m=m+1 r[m]=f_ordered_boolean(v)
- end
- end
- end
- for k,v in next,t do
- local tk=type(k)
- if tk=="number" then
- if k>n or k<0 then
- local tv=type(v)
- if tv=="string" then
- m=m+1 r[m]=f_indexed_string(k,v)
- elseif tv=="number" then
- m=m+1 r[m]=f_indexed_number(k,v)
- elseif tv=="table" then
- m=m+1 r[m]=f_indexed_table(k)
- fastserialize(v)
- elseif tv=="boolean" then
- m=m+1 r[m]=f_indexed_boolean(k,v)
- end
- end
- else
- local tv=type(v)
- if tv=="string" then
- m=m+1 r[m]=f_hashed_string(k,v)
- elseif tv=="number" then
- m=m+1 r[m]=f_hashed_number(k,v)
- elseif tv=="table" then
- m=m+1 r[m]=f_hashed_table(k)
- fastserialize(v)
- elseif tv=="boolean" then
- m=m+1 r[m]=f_hashed_boolean(k,v)
- end
- end
- end
- m=m+1
- if outer then
- r[m]="}"
- else
- r[m]="},"
- end
- return r
- end
- return concat(fastserialize(t,true))
-end
-function table.deserialize(str)
- if not str or str=="" then
- return
- end
- local code=load(str)
- if not code then
- return
- end
- code=code()
- if not code then
- return
- end
- return code
-end
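The fast serializer above produces a loadable chunk, so it round trips through table.deserialize (the order of the hashed entries depends on next):

local t={ "a", "b", n=123, sub={ true } }
local s=table.fastserialize(t) -- something like: return{"a","b",["n"]=123,["sub"]={true,},}
local u=table.deserialize(s)
print(u[1],u.n,u.sub[1])       -- a 123 true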
-function table.load(filename,loader)
- if filename then
- local t=(loader or io.loaddata)(filename)
- if t and t~="" then
- local t=utftoeight(t)
- t=load(t)
- if type(t)=="function" then
- t=t()
- if type(t)=="table" then
- return t
- end
- end
- end
- end
-end
-function table.save(filename,t,n,...)
- io.savedata(filename,table.serialize(t,n==nil and true or n,...))
-end
-local f_key_value=formatters["%s=%q"]
-local f_add_table=formatters[" {%t},\n"]
-local f_return_table=formatters["return {\n%t}"]
-local function slowdrop(t)
- local r={}
- local l={}
- for i=1,#t do
- local ti=t[i]
- local j=0
- for k,v in next,ti do
- j=j+1
- l[j]=f_key_value(k,v)
- end
- r[i]=f_add_table(l)
- end
- return f_return_table(r)
-end
-local function fastdrop(t)
- local r={ "return {\n" }
- local m=1
- for i=1,#t do
- local ti=t[i]
- m=m+1 r[m]=" {"
- for k,v in next,ti do
- m=m+1 r[m]=f_key_value(k,v)
- end
- m=m+1 r[m]="},\n"
- end
- m=m+1
- r[m]="}"
- return concat(r)
-end
-function table.drop(t,slow)
- if #t==0 then
- return "return { }"
- elseif slow==true then
- return slowdrop(t)
- else
- return fastdrop(t)
- end
-end
-function table.autokey(t,k)
- local v={}
- t[k]=v
- return v
-end
-local selfmapper={ __index=function(t,k) t[k]=k return k end }
-function table.twowaymapper(t)
- if not t then
- t={}
- else
- for i=0,#t do
- local ti=t[i]
- if ti then
- local i=tostring(i)
- t[i]=ti
- t[ti]=i
- end
- end
- t[""]=t[0] or ""
- end
- setmetatable(t,selfmapper)
- return t
-end
-local f_start_key_idx=formatters["%w{"]
-local f_start_key_num=formatters["%w[%s]={"]
-local f_start_key_str=formatters["%w[%q]={"]
-local f_start_key_boo=formatters["%w[%l]={"]
-local f_start_key_nop=formatters["%w{"]
-local f_stop=formatters["%w},"]
-local f_key_num_value_num=formatters["%w[%s]=%s,"]
-local f_key_str_value_num=formatters["%w[%q]=%s,"]
-local f_key_boo_value_num=formatters["%w[%l]=%s,"]
-local f_key_num_value_str=formatters["%w[%s]=%q,"]
-local f_key_str_value_str=formatters["%w[%q]=%q,"]
-local f_key_boo_value_str=formatters["%w[%l]=%q,"]
-local f_key_num_value_boo=formatters["%w[%s]=%l,"]
-local f_key_str_value_boo=formatters["%w[%q]=%l,"]
-local f_key_boo_value_boo=formatters["%w[%l]=%l,"]
-local f_key_num_value_not=formatters["%w[%s]={},"]
-local f_key_str_value_not=formatters["%w[%q]={},"]
-local f_key_boo_value_not=formatters["%w[%l]={},"]
-local f_key_num_value_seq=formatters["%w[%s]={ %, t },"]
-local f_key_str_value_seq=formatters["%w[%q]={ %, t },"]
-local f_key_boo_value_seq=formatters["%w[%l]={ %, t },"]
-local f_val_num=formatters["%w%s,"]
-local f_val_str=formatters["%w%q,"]
-local f_val_boo=formatters["%w%l,"]
-local f_val_not=formatters["%w{},"]
-local f_val_seq=formatters["%w{ %, t },"]
-local f_table_return=formatters["return {"]
-local f_table_name=formatters["%s={"]
-local f_table_direct=formatters["{"]
-local f_table_entry=formatters["[%q]={"]
-local f_table_finish=formatters["}"]
-local spaces=utilities.strings.newrepeater(" ")
-local serialize=table.serialize
-function table.serialize(root,name,specification)
- if type(specification)=="table" then
- return serialize(root,name,specification)
- end
- local t
- local n=1
- local function simple_table(t)
- if #t>0 then
- local n=0
- for _,v in next,t do
- n=n+1
- if type(v)=="table" then
- return nil
- end
- end
- if n==#t then
- local tt={}
- local nt=0
- for i=1,#t do
- local v=t[i]
- local tv=type(v)
- nt=nt+1
- if tv=="number" then
- tt[nt]=v
- elseif tv=="string" then
- tt[nt]=format("%q",v)
- elseif tv=="boolean" then
- tt[nt]=v and "true" or "false"
- else
- return nil
- end
- end
- return tt
- end
- end
- return nil
- end
- local function do_serialize(root,name,depth,level,indexed)
- if level>0 then
- n=n+1
- if indexed then
- t[n]=f_start_key_idx(depth)
- else
- local tn=type(name)
- if tn=="number" then
- t[n]=f_start_key_num(depth,name)
- elseif tn=="string" then
- t[n]=f_start_key_str(depth,name)
- elseif tn=="boolean" then
- t[n]=f_start_key_boo(depth,name)
- else
- t[n]=f_start_key_nop(depth)
- end
- end
- depth=depth+1
- end
- if root and next(root) then
- local first=nil
- local last=0
- last=#root
- for k=1,last do
- if root[k]==nil then
- last=k-1
- break
- end
- end
- if last>0 then
- first=1
- end
- local sk=sortedkeys(root)
- for i=1,#sk do
- local k=sk[i]
- local v=root[k]
- local tv=type(v)
- local tk=type(k)
- if first and tk=="number" and k>=first and k<=last then
- if tv=="number" then
- n=n+1 t[n]=f_val_num(depth,v)
- elseif tv=="string" then
- n=n+1 t[n]=f_val_str(depth,v)
- elseif tv=="table" then
- if not next(v) then
- n=n+1 t[n]=f_val_not(depth)
- else
- local st=simple_table(v)
- if st then
- n=n+1 t[n]=f_val_seq(depth,st)
- else
- do_serialize(v,k,depth,level+1,true)
- end
- end
- elseif tv=="boolean" then
- n=n+1 t[n]=f_val_boo(depth,v)
- end
- elseif tv=="number" then
- if tk=="number" then
- n=n+1 t[n]=f_key_num_value_num(depth,k,v)
- elseif tk=="string" then
- n=n+1 t[n]=f_key_str_value_num(depth,k,v)
- elseif tk=="boolean" then
- n=n+1 t[n]=f_key_boo_value_num(depth,k,v)
- end
- elseif tv=="string" then
- if tk=="number" then
- n=n+1 t[n]=f_key_num_value_str(depth,k,v)
- elseif tk=="string" then
- n=n+1 t[n]=f_key_str_value_str(depth,k,v)
- elseif tk=="boolean" then
- n=n+1 t[n]=f_key_boo_value_str(depth,k,v)
- end
- elseif tv=="table" then
- if not next(v) then
- if tk=="number" then
- n=n+1 t[n]=f_key_num_value_not(depth,k,v)
- elseif tk=="string" then
- n=n+1 t[n]=f_key_str_value_not(depth,k,v)
- elseif tk=="boolean" then
- n=n+1 t[n]=f_key_boo_value_not(depth,k,v)
- end
- else
- local st=simple_table(v)
- if not st then
- do_serialize(v,k,depth,level+1)
- elseif tk=="number" then
- n=n+1 t[n]=f_key_num_value_seq(depth,k,st)
- elseif tk=="string" then
- n=n+1 t[n]=f_key_str_value_seq(depth,k,st)
- elseif tk=="boolean" then
- n=n+1 t[n]=f_key_boo_value_seq(depth,k,st)
- end
- end
- elseif tv=="boolean" then
- if tk=="number" then
- n=n+1 t[n]=f_key_num_value_boo(depth,k,v)
- elseif tk=="string" then
- n=n+1 t[n]=f_key_str_value_boo(depth,k,v)
- elseif tk=="boolean" then
- n=n+1 t[n]=f_key_boo_value_boo(depth,k,v)
- end
- end
- end
- end
- if level>0 then
- n=n+1 t[n]=f_stop(depth-1)
- end
- end
- local tname=type(name)
- if tname=="string" then
- if name=="return" then
- t={ f_table_return() }
- else
- t={ f_table_name(name) }
- end
- elseif tname=="number" then
- t={ f_table_entry(name) }
- elseif tname=="boolean" then
- if name then
- t={ f_table_return() }
- else
- t={ f_table_direct() }
- end
- else
- t={ f_table_name("t") }
- end
- if root then
- if getmetatable(root) then
- local dummy=root._w_h_a_t_e_v_e_r_
- root._w_h_a_t_e_v_e_r_=nil
- end
- if next(root) then
- do_serialize(root,name,1,0)
- end
- end
- n=n+1
- t[n]=f_table_finish()
- return concat(t,"\n")
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-sto"] = package.loaded["util-sto"] or true
-
--- original size: 4172, stripped down to: 2953
-
-if not modules then modules={} end modules ['util-sto']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local setmetatable,getmetatable,type=setmetatable,getmetatable,type
-utilities=utilities or {}
-utilities.storage=utilities.storage or {}
-local storage=utilities.storage
-function storage.mark(t)
- if not t then
- print("\nfatal error: storage cannot be marked\n")
- os.exit()
- return
- end
- local m=getmetatable(t)
- if not m then
- m={}
- setmetatable(t,m)
- end
- m.__storage__=true
- return t
-end
-function storage.allocate(t)
- t=t or {}
- local m=getmetatable(t)
- if not m then
- m={}
- setmetatable(t,m)
- end
- m.__storage__=true
- return t
-end
-function storage.marked(t)
- local m=getmetatable(t)
- return m and m.__storage__
-end
-function storage.checked(t)
- if not t then
- report("\nfatal error: storage has not been allocated\n")
- os.exit()
- return
- end
- return t
-end
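--- setinitializer: delay filling a table until its first missing key is accessed; the __index hook then runs initialize() once and retries the lookup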
-function storage.setinitializer(data,initialize)
- local m=getmetatable(data) or {}
- m.__index=function(data,k)
- m.__index=nil
- initialize()
- return data[k]
- end
- setmetatable(data,m)
-end
-local keyisvalue={ __index=function(t,k)
- t[k]=k
- return k
-end }
-function storage.sparse(t)
- t=t or {}
- setmetatable(t,keyisvalue)
- return t
-end
-local function f_empty () return "" end
-local function f_self (t,k) t[k]=k return k end
-local function f_table (t,k) local v={} t[k]=v return v end
-local function f_number(t,k) t[k]=0 return 0 end
-local function f_ignore() end
-local f_index={
- ["empty"]=f_empty,
- ["self"]=f_self,
- ["table"]=f_table,
- ["number"]=f_number,
-}
-local t_index={
- ["empty"]={ __index=f_empty },
- ["self"]={ __index=f_self },
- ["table"]={ __index=f_table },
- ["number"]={ __index=f_number },
-}
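--- setmetatableindex(t,f): install f as the __index handler; f can be a function or one of the presets "empty", "self", "table" and "number"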
-function table.setmetatableindex(t,f)
- if type(t)~="table" then
- f,t=t,{}
- end
- local m=getmetatable(t)
- if m then
- m.__index=f_index[f] or f
- else
- setmetatable(t,t_index[f] or { __index=f })
- end
- return t
-end
-local f_index={
- ["ignore"]=f_ignore,
-}
-local t_index={
- ["ignore"]={ __newindex=f_ignore },
-}
-function table.setmetatablenewindex(t,f)
- if type(t)~="table" then
- f,t=t,{}
- end
- local m=getmetatable(t)
- if m then
- m.__newindex=f_index[f] or f
- else
- setmetatable(t,t_index[f] or { __newindex=f })
- end
- return t
-end
-function table.setmetatablecall(t,f)
- if type(t)~="table" then
- f,t=t,{}
- end
- local m=getmetatable(t)
- if m then
- m.__call=f
- else
- setmetatable(t,{ __call=f })
- end
- return t
-end
-function table.setmetatablekey(t,key,value)
- local m=getmetatable(t)
- if not m then
- m={}
- setmetatable(t,m)
- end
- m[key]=value
- return t
-end
-function table.getmetatablekey(t,key,value)
- local m=getmetatable(t)
- return m and m[key]
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-prs"] = package.loaded["util-prs"] or true
-
--- original size: 19604, stripped down to: 13998
-
-if not modules then modules={} end modules ['util-prs']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local lpeg,table,string=lpeg,table,string
-local P,R,V,S,C,Ct,Cs,Carg,Cc,Cg,Cf,Cp=lpeg.P,lpeg.R,lpeg.V,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cs,lpeg.Carg,lpeg.Cc,lpeg.Cg,lpeg.Cf,lpeg.Cp
-local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
-local concat,gmatch,find=table.concat,string.gmatch,string.find
-local tostring,type,next,rawset=tostring,type,next,rawset
-local mod,div=math.mod,math.div
-utilities=utilities or {}
-local parsers=utilities.parsers or {}
-utilities.parsers=parsers
-local patterns=parsers.patterns or {}
-parsers.patterns=patterns
-local setmetatableindex=table.setmetatableindex
-local sortedhash=table.sortedhash
-local digit=R("09")
-local space=P(' ')
-local equal=P("=")
-local comma=P(",")
-local lbrace=P("{")
-local rbrace=P("}")
-local lparent=P("(")
-local rparent=P(")")
-local period=S(".")
-local punctuation=S(".,:;")
-local spacer=lpegpatterns.spacer
-local whitespace=lpegpatterns.whitespace
-local newline=lpegpatterns.newline
-local anything=lpegpatterns.anything
-local endofstring=lpegpatterns.endofstring
-local nobrace=1-(lbrace+rbrace )
-local noparent=1-(lparent+rparent)
-local escape,left,right=P("\\"),P('{'),P('}')
-lpegpatterns.balanced=P {
- [1]=((escape*(left+right))+(1-(left+right))+V(2))^0,
- [2]=left*V(1)*right
-}
-local nestedbraces=P { lbrace*(nobrace+V(1))^0*rbrace }
-local nestedparents=P { lparent*(noparent+V(1))^0*rparent }
-local spaces=space^0
-local argument=Cs((lbrace/"")*((nobrace+nestedbraces)^0)*(rbrace/""))
-local content=(1-endofstring)^0
-lpegpatterns.nestedbraces=nestedbraces
-lpegpatterns.nestedparents=nestedparents
-lpegpatterns.nested=nestedbraces
-lpegpatterns.argument=argument
-lpegpatterns.content=content
-local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
-local key=C((1-equal-comma)^1)
-local pattern_a=(space+comma)^0*(key*equal*value+key*C(""))
-local pattern_c=(space+comma)^0*(key*equal*value)
-local key=C((1-space-equal-comma)^1)
-local pattern_b=spaces*comma^0*spaces*(key*((spaces*equal*spaces*value)+C("")))
-local hash={}
-local function set(key,value)
- hash[key]=value
-end
-local pattern_a_s=(pattern_a/set)^1
-local pattern_b_s=(pattern_b/set)^1
-local pattern_c_s=(pattern_c/set)^1
-patterns.settings_to_hash_a=pattern_a_s
-patterns.settings_to_hash_b=pattern_b_s
-patterns.settings_to_hash_c=pattern_c_s
-function parsers.make_settings_to_hash_pattern(set,how)
- if type(set)=="table" then
- return set
- elseif how=="strict" then
- return (pattern_c/set)^1
- elseif how=="tolerant" then
- return (pattern_b/set)^1
- else
- return (pattern_a/set)^1
- end
-end
-function parsers.settings_to_hash(str,existing)
- if type(str)=="table" then
- if existing then
- for k,v in next,str do
- existing[k]=v
- end
- return existing
- else
- return str
- end
- elseif str and str~="" then
- hash=existing or {}
- lpegmatch(pattern_a_s,str)
- return hash
- else
- return {}
- end
-end
-function parsers.settings_to_hash_tolerant(str,existing)
- if type(str)=="table" then
- if existing then
- for k,v in next,str do
- existing[k]=v
- end
- return existing
- else
- return str
- end
- elseif str and str~="" then
- hash=existing or {}
- lpegmatch(pattern_b_s,str)
- return hash
- else
- return {}
- end
-end
-function parsers.settings_to_hash_strict(str,existing)
- if type(str)=="table" then
- if existing then
- for k,v in next,str do
- existing[k]=v
- end
- return existing
- else
- return str
- end
- elseif str and str~="" then
- hash=existing or {}
- lpegmatch(pattern_c_s,str)
- return next(hash) and hash
- else
- return nil
- end
-end
-local separator=comma*space^0
-local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
-local pattern=spaces*Ct(value*(separator*value)^0)
-patterns.settings_to_array=pattern
-function parsers.settings_to_array(str,strict)
- if type(str)=="table" then
- return str
- elseif not str or str=="" then
- return {}
- elseif strict then
- if find(str,"{") then
- return lpegmatch(pattern,str)
- else
- return { str }
- end
- elseif find(str,",") then
- return lpegmatch(pattern,str)
- else
- return { str }
- end
-end
-local separator=space^0*comma*space^0
-local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-(space^0*(comma+P(-1)))))^0)
-local withvalue=Carg(1)*value/function(f,s) return f(s) end
-local pattern_a=spaces*Ct(value*(separator*value)^0)
-local pattern_b=spaces*withvalue*(separator*withvalue)^0
-function parsers.stripped_settings_to_array(str)
- if not str or str=="" then
- return {}
- else
- return lpegmatch(pattern_a,str)
- end
-end
-function parsers.process_stripped_settings(str,action)
- if not str or str=="" then
- return {}
- else
- return lpegmatch(pattern_b,str,1,action)
- end
-end
-local function set(t,v)
- t[#t+1]=v
-end
-local value=P(Carg(1)*value)/set
-local pattern=value*(separator*value)^0*Carg(1)
-function parsers.add_settings_to_array(t,str)
- return lpegmatch(pattern,str,nil,t)
-end
-function parsers.hash_to_string(h,separator,yes,no,strict,omit)
- if h then
- local t,tn,s={},0,table.sortedkeys(h)
- omit=omit and table.tohash(omit)
- for i=1,#s do
- local key=s[i]
- if not omit or not omit[key] then
- local value=h[key]
- if type(value)=="boolean" then
- if yes and no then
- if value then
- tn=tn+1
- t[tn]=key..'='..yes
- elseif not strict then
- tn=tn+1
- t[tn]=key..'='..no
- end
- elseif value or not strict then
- tn=tn+1
- t[tn]=key..'='..tostring(value)
- end
- else
- tn=tn+1
- t[tn]=key..'='..value
- end
- end
- end
- return concat(t,separator or ",")
- else
- return ""
- end
-end
-function parsers.array_to_string(a,separator)
- if a then
- return concat(a,separator or ",")
- else
- return ""
- end
-end
-function parsers.settings_to_set(str,t)
- t=t or {}
- for s in gmatch(str,"[^, ]+") do
- t[s]=true
- end
- return t
-end
-function parsers.simple_hash_to_string(h,separator)
- local t,tn={},0
- for k,v in sortedhash(h) do
- if v then
- tn=tn+1
- t[tn]=k
- end
- end
- return concat(t,separator or ",")
-end
-local str=C((1-whitespace-equal)^1)
-local setting=Cf(Carg(1)*(whitespace^0*Cg(str*whitespace^0*(equal*whitespace^0*str+Cc(""))))^1,rawset)
-local splitter=setting^1
-function utilities.parsers.options_to_hash(str,target)
- return str and lpegmatch(splitter,str,1,target or {}) or {}
-end
-local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C(digit^1*lparent*(noparent+nestedparents)^1*rparent)+C((nestedbraces+(1-comma))^1)
-local pattern_a=spaces*Ct(value*(separator*value)^0)
-local function repeater(n,str)
- if not n then
- return str
- else
- local s=lpegmatch(pattern_a,str)
- if n==1 then
- return unpack(s)
- else
- local t,tn={},0
- for i=1,n do
- for j=1,#s do
- tn=tn+1
- t[tn]=s[j]
- end
- end
- return unpack(t)
- end
- end
-end
-local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+(C(digit^1)/tonumber*lparent*Cs((noparent+nestedparents)^1)*rparent)/repeater+C((nestedbraces+(1-comma))^1)
-local pattern_b=spaces*Ct(value*(separator*value)^0)
-function parsers.settings_to_array_with_repeat(str,expand)
- if expand then
- return lpegmatch(pattern_b,str) or {}
- else
- return lpegmatch(pattern_a,str) or {}
- end
-end
-local value=lbrace*C((nobrace+nestedbraces)^0)*rbrace
-local pattern=Ct((space+value)^0)
-function parsers.arguments_to_table(str)
- return lpegmatch(pattern,str)
-end
-function parsers.getparameters(self,class,parentclass,settings)
- local sc=self[class]
- if not sc then
- sc={}
- self[class]=sc
- if parentclass then
- local sp=self[parentclass]
- if not sp then
- sp={}
- self[parentclass]=sp
- end
- setmetatableindex(sc,sp)
- end
- end
- parsers.settings_to_hash(settings,sc)
-end
-function parsers.listitem(str)
- return gmatch(str,"[^, ]+")
-end
-local pattern=Cs { "start",
- start=V("one")+V("two")+V("three"),
- rest=(Cc(",")*V("thousand"))^0*(P(".")+endofstring)*anything^0,
- thousand=digit*digit*digit,
- one=digit*V("rest"),
- two=digit*digit*V("rest"),
- three=V("thousand")*V("rest"),
-}
-lpegpatterns.splitthousands=pattern
-function parsers.splitthousands(str)
- return lpegmatch(pattern,str) or str
-end
-local optionalwhitespace=whitespace^0
-lpegpatterns.words=Ct((Cs((1-punctuation-whitespace)^1)+anything)^1)
-lpegpatterns.sentences=Ct((optionalwhitespace*Cs((1-period)^0*period))^1)
-lpegpatterns.paragraphs=Ct((optionalwhitespace*Cs((whitespace^1*endofstring/""+1-(spacer^0*newline*newline))^1))^1)
-local dquote=P('"')
-local equal=P('=')
-local escape=P('\\')
-local separator=S(' ,')
-local key=C((1-equal)^1)
-local value=dquote*C((1-dquote-escape*dquote)^0)*dquote
-local pattern=Cf(Ct("")*(Cg(key*equal*value)*separator^0)^1,rawset)^0*P(-1)
-function parsers.keq_to_hash(str)
- if str and str~="" then
- return lpegmatch(pattern,str)
- else
- return {}
- end
-end
-local defaultspecification={ separator=",",quote='"' }
-function parsers.csvsplitter(specification)
- specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
- local separator=specification.separator
- local quotechar=specification.quote
- local separator=S(separator~="" and separator or ",")
- local whatever=C((1-separator-newline)^0)
- if quotechar and quotechar~="" then
- local quotedata=nil
- for chr in gmatch(quotechar,".") do
- local quotechar=P(chr)
- local quoteword=quotechar*C((1-quotechar)^0)*quotechar
- if quotedata then
- quotedata=quotedata+quoteword
- else
- quotedata=quoteword
- end
- end
- whatever=quotedata+whatever
- end
- local parser=Ct((Ct(whatever*(separator*whatever)^0)*S("\n\r")^1)^0 )
- return function(data)
- return lpegmatch(parser,data)
- end
-end
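--- rfc4180splitter: csv splitting per rfc 4180, where fields may be quoted and embedded quotes are doubled; can return the header record separately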
-function parsers.rfc4180splitter(specification)
- specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
- local separator=specification.separator
- local quotechar=P(specification.quote)
- local dquotechar=quotechar*quotechar/specification.quote
- local separator=S(separator~="" and separator or ",")
- local escaped=quotechar*Cs((dquotechar+(1-quotechar))^0)*quotechar
- local non_escaped=C((1-quotechar-newline-separator)^1)
- local field=escaped+non_escaped+Cc("")
- local record=Ct(field*(separator*field)^1)
- local headerline=record*Cp()
- local wholeblob=Ct((newline^-1*record)^0)
- return function(data,getheader)
- if getheader then
- local header,position=lpegmatch(headerline,data)
- local data=lpegmatch(wholeblob,data,position)
- return data,header
- else
- return lpegmatch(wholeblob,data)
- end
- end
-end
-local function ranger(first,last,n,action)
- if not first then
- elseif last==true then
- for i=first,n or first do
- action(i)
- end
- elseif last then
- for i=first,last do
- action(i)
- end
- else
- action(first)
- end
-end
-local cardinal=lpegpatterns.cardinal/tonumber
-local spacers=lpegpatterns.spacer^0
-local endofstring=lpegpatterns.endofstring
-local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1
-local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+(P("*")+endofstring)*Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1*endofstring
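--- stepper: expand a specification like "1,3:6,8-*" and call action for every number; ranges use : or - and an open end runs up to n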
-function parsers.stepper(str,n,action)
- if type(n)=="function" then
- lpegmatch(stepper,str,1,false,n or print)
- else
- lpegmatch(stepper,str,1,n,action or print)
- end
-end
-local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
-local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
-patterns.unittotex=pattern
-function parsers.unittotex(str,textmode)
- return lpegmatch(textmode and pattern_text or pattern_math,str)
-end
-local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+P(1))^0)
-function parsers.unittoxml(str)
- return lpegmatch(pattern,str)
-end
-local cache={}
-local spaces=lpeg.patterns.space^0
-local dummy=function() end
-table.setmetatableindex(cache,function(t,k)
- local separator=P(k)
- local value=(1-separator)^0
- local pattern=spaces*C(value)*separator^0*Cp()
- t[k]=pattern
- return pattern
-end)
-local commalistiterator=cache[","]
-function utilities.parsers.iterator(str,separator)
- local n=#str
- if n==0 then
- return dummy
- else
- local pattern=separator and cache[separator] or commalistiterator
- local p=1
- return function()
- if p<=n then
- local s,e=lpegmatch(pattern,str,p)
- if e then
- p=e
- return s
- end
- end
- end
- end
-end
-local function initialize(t,name)
- local source=t[name]
- if source then
- local result={}
- for k,v in next,t[name] do
- result[k]=v
- end
- return result
- else
- return {}
- end
-end
-local function fetch(t,name)
- return t[name] or {}
-end
-local function process(result,more)
- for k,v in next,more do
- result[k]=v
- end
- return result
-end
-local name=C((1-S(", "))^1)
-local parser=(Carg(1)*name/initialize)*(S(", ")^1*(Carg(1)*name/fetch))^0
-local merge=Cf(parser,process)
-function utilities.parsers.mergehashes(hash,list)
- return lpegmatch(merge,list,1,hash)
-end
-function utilities.parsers.runtime(time)
- if not time then
- time=os.runtime()
- end
- local days=div(time,24*60*60)
- time=mod(time,24*60*60)
- local hours=div(time,60*60)
- time=mod(time,60*60)
- local minutes=div(time,60)
- local seconds=mod(time,60)
- return days,hours,minutes,seconds
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-fmt"] = package.loaded["util-fmt"] or true
-
--- original size: 2274, stripped down to: 1781
-
-if not modules then modules={} end modules ['util-fmt']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-utilities=utilities or {}
-utilities.formatters=utilities.formatters or {}
-local formatters=utilities.formatters
-local concat,format=table.concat,string.format
-local tostring,type=tostring,type
-local strip=string.strip
-local lpegmatch=lpeg.match
-local stripper=lpeg.patterns.stripzeros
-function formatters.stripzeros(str)
- return lpegmatch(stripper,str)
-end
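--- formatcolumns: pad every column to the width of its longest entry; numeric columns get zero padding, very wide ones fall back to plain %s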
-function formatters.formatcolumns(result,between)
- if result and #result>0 then
- between=between or " "
- local widths,numbers={},{}
- local first=result[1]
- local n=#first
- for i=1,n do
- widths[i]=0
- end
- for i=1,#result do
- local r=result[i]
- for j=1,n do
- local rj=r[j]
- local tj=type(rj)
- if tj=="number" then
- numbers[j]=true
- end
- if tj~="string" then
- rj=tostring(rj)
- r[j]=rj
- end
- local w=#rj
- if w>widths[j] then
- widths[j]=w
- end
- end
- end
- for i=1,n do
- local w=widths[i]
- if numbers[i] then
- if w>80 then
- widths[i]="%s"..between
- else
- widths[i]="%0"..w.."i"..between
- end
- else
- if w>80 then
- widths[i]="%s"..between
- elseif w>0 then
- widths[i]="%-"..w.."s"..between
- else
- widths[i]="%s"
- end
- end
- end
- local template=strip(concat(widths))
- for i=1,#result do
- local str=format(template,unpack(result[i]))
- result[i]=strip(str)
- end
- end
- return result
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["trac-set"] = package.loaded["trac-set"] or true
-
--- original size: 12365, stripped down to: 8799
-
-if not modules then modules={} end modules ['trac-set']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local type,next,tostring=type,next,tostring
-local concat=table.concat
-local format,find,lower,gsub,topattern=string.format,string.find,string.lower,string.gsub,string.topattern
-local is_boolean=string.is_boolean
-local settings_to_hash=utilities.parsers.settings_to_hash
-local allocate=utilities.storage.allocate
-utilities=utilities or {}
-local utilities=utilities
-local setters=utilities.setters or {}
-utilities.setters=setters
-local data={}
-local trace_initialize=false
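--- initialize: feed values from a configuration file or the command line into a setter; frozen keys keep their value, pending keys get the new value, unknown keys become defaults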
-function setters.initialize(filename,name,values)
- local setter=data[name]
- if setter then
- frozen=true
- local data=setter.data
- if data then
- for key,newvalue in next,values do
- local newvalue=is_boolean(newvalue,newvalue)
- local functions=data[key]
- if functions then
- local oldvalue=functions.value
- if functions.frozen then
- if trace_initialize then
- setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue)
- end
- elseif #functions>0 and not oldvalue then
- if trace_initialize then
- setter.report("%s: %a is %s to %a",filename,key,"set",newvalue)
- end
- for i=1,#functions do
- functions[i](newvalue)
- end
- functions.value=newvalue
- functions.frozen=functions.frozen or frozen
- else
- if trace_initialize then
- setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue)
- end
- end
- else
- functions={ default=newvalue,frozen=frozen }
- data[key]=functions
- if trace_initialize then
- setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue)
- end
- end
- end
- return true
- end
- end
-end
-local function set(t,what,newvalue)
- local data=t.data
- if not data.frozen then
- local done=t.done
- if type(what)=="string" then
- what=settings_to_hash(what)
- end
- if type(what)~="table" then
- return
- end
- if not done then
- done={}
- t.done=done
- end
- for w,value in next,what do
- if value=="" then
- value=newvalue
- elseif not value then
- value=false
- else
- value=is_boolean(value,value)
- end
- w=topattern(w,true,true)
- for name,functions in next,data do
- if done[name] then
- elseif find(name,w) then
- done[name]=true
- for i=1,#functions do
- functions[i](value)
- end
- functions.value=value
- end
- end
- end
- end
-end
-local function reset(t)
- local data=t.data
- if not data.frozen then
- for name,functions in next,data do
- for i=1,#functions do
- functions[i](false)
- end
- functions.value=false
- end
- end
-end
-local function enable(t,what)
- set(t,what,true)
-end
-local function disable(t,what)
- local data=t.data
- if not what or what=="" then
- t.done={}
- reset(t)
- else
- set(t,what,false)
- end
-end
-function setters.register(t,what,...)
- local data=t.data
- what=lower(what)
- local functions=data[what]
- if not functions then
- functions={}
- data[what]=functions
- if trace_initialize then
- t.report("defining %a",what)
- end
- end
- local default=functions.default
- for i=1,select("#",...) do
- local fnc=select(i,...)
- local typ=type(fnc)
- if typ=="string" then
- if trace_initialize then
- t.report("coupling %a to %a",what,fnc)
- end
- local s=fnc
- fnc=function(value) set(t,s,value) end
- elseif typ~="function" then
- fnc=nil
- end
- if fnc then
- functions[#functions+1]=fnc
- local value=functions.value or default
- if value~=nil then
- fnc(value)
- functions.value=value
- end
- end
- end
- return false
-end
-function setters.enable(t,what)
- local e=t.enable
- t.enable,t.done=enable,{}
- enable(t,what)
- t.enable,t.done=e,{}
-end
-function setters.disable(t,what)
- local e=t.disable
- t.disable,t.done=disable,{}
- disable(t,what)
- t.disable,t.done=e,{}
-end
-function setters.reset(t)
- t.done={}
- reset(t)
-end
-function setters.list(t)
- local list=table.sortedkeys(t.data)
- local user,system={},{}
- for l=1,#list do
- local what=list[l]
- if find(what,"^%*") then
- system[#system+1]=what
- else
- user[#user+1]=what
- end
- end
- return user,system
-end
-function setters.show(t)
- local category=t.name
- local list=setters.list(t)
- t.report()
- for k=1,#list do
- local name=list[k]
- local functions=t.data[name]
- if functions then
- local value,default,modules=functions.value,functions.default,#functions
- value=value==nil and "unset" or tostring(value)
- default=default==nil and "unset" or tostring(default)
- t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
- end
- end
- t.report()
-end
-local enable,disable,register,list,show=setters.enable,setters.disable,setters.register,setters.list,setters.show
-function setters.report(setter,...)
- print(format("%-15s : %s\n",setter.name,format(...)))
-end
-local function default(setter,name)
- local d=setter.data[name]
- return d and d.default
-end
-local function value(setter,name)
- local d=setter.data[name]
- return d and (d.value or d.default)
-end
-function setters.new(name)
- local setter
- setter={
- data=allocate(),
- name=name,
- report=function(...) setters.report (setter,...) end,
- enable=function(...) enable (setter,...) end,
- disable=function(...) disable (setter,...) end,
- register=function(...) register(setter,...) end,
- list=function(...) list (setter,...) end,
- show=function(...) show (setter,...) end,
- default=function(...) return default (setter,...) end,
- value=function(...) return value (setter,...) end,
- }
- data[name]=setter
- return setter
-end
-trackers=setters.new("trackers")
-directives=setters.new("directives")
-experiments=setters.new("experiments")
-local t_enable,t_disable=trackers .enable,trackers .disable
-local d_enable,d_disable=directives .enable,directives .disable
-local e_enable,e_disable=experiments.enable,experiments.disable
-local trace_directives=false trackers.register("system.directives",function(v) trace_directives=v end)
-local trace_experiments=false trackers.register("system.experiments",function(v) trace_experiments=v end)
-function directives.enable(...)
- if trace_directives then
- directives.report("enabling: % t",{...})
- end
- d_enable(...)
-end
-function directives.disable(...)
- if trace_directives then
- directives.report("disabling: % t",{...})
- end
- d_disable(...)
-end
-function experiments.enable(...)
- if trace_experiments then
- experiments.report("enabling: % t",{...})
- end
- e_enable(...)
-end
-function experiments.disable(...)
- if trace_experiments then
- experiments.report("disabling: % t",{...})
- end
- e_disable(...)
-end
-directives.register("system.nostatistics",function(v)
- if statistics then
- statistics.enable=not v
- else
- end
-end)
-directives.register("system.nolibraries",function(v)
- if libraries then
- libraries=nil
- else
- end
-end)
-if environment then
- local engineflags=environment.engineflags
- if engineflags then
- local list=engineflags["c:trackers"] or engineflags["trackers"]
- if type(list)=="string" then
- setters.initialize("commandline flags","trackers",settings_to_hash(list))
- end
- local list=engineflags["c:directives"] or engineflags["directives"]
- if type(list)=="string" then
- setters.initialize("commandline flags","directives",settings_to_hash(list))
- end
- end
-end
-if texconfig then
- local function set(k,v)
- v=tonumber(v)
- if v then
- texconfig[k]=v
- end
- end
- directives.register("luatex.expanddepth",function(v) set("expand_depth",v) end)
- directives.register("luatex.hashextra",function(v) set("hash_extra",v) end)
- directives.register("luatex.nestsize",function(v) set("nest_size",v) end)
- directives.register("luatex.maxinopen",function(v) set("max_in_open",v) end)
- directives.register("luatex.maxprintline",function(v) set("max_print_line",v) end)
- directives.register("luatex.maxstrings",function(v) set("max_strings",v) end)
- directives.register("luatex.paramsize",function(v) set("param_size",v) end)
- directives.register("luatex.savesize",function(v) set("save_size",v) end)
- directives.register("luatex.stacksize",function(v) set("stack_size",v) end)
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["trac-log"] = package.loaded["trac-log"] or true
-
--- original size: 25391, stripped down to: 16561
-
-if not modules then modules={} end modules ['trac-log']={
- version=1.001,
- comment="companion to trac-log.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local write_nl,write=texio and texio.write_nl or print,texio and texio.write or io.write
-local format,gmatch,find=string.format,string.gmatch,string.find
-local concat,insert,remove=table.concat,table.insert,table.remove
-local topattern=string.topattern
-local next,type,select=next,type,select
-local utfchar=utf.char
-local setmetatableindex=table.setmetatableindex
-local formatters=string.formatters
-local texgetcount=tex and tex.getcount
-logs=logs or {}
-local logs=logs
-local moreinfo=[[
-More information about ConTeXt and the tools that come with it can be found at:
-]].."\n"..[[
-maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
-wiki : http://contextgarden.net
-]]
-utilities.strings.formatters.add (
- formatters,"unichr",
- [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
-)
-utilities.strings.formatters.add (
- formatters,"chruni",
- [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
-)
-local function ignore() end
-setmetatableindex(logs,function(t,k) t[k]=ignore;return ignore end)
-local report,subreport,status,settarget,setformats,settranslations
-local direct,subdirect,writer,pushtarget,poptarget,setlogfile,settimedlog,setprocessor,setformatters
-if tex and (tex.jobname or tex.formatname) then
- local valueiskey={ __index=function(t,k) t[k]=k return k end }
- local target="term and log"
- logs.flush=io.flush
- local formats={} setmetatable(formats,valueiskey)
- local translations={} setmetatable(translations,valueiskey)
- writer=function(...)
- write_nl(target,...)
- end
- newline=function()
- write_nl(target,"\n")
- end
- local report_yes=formatters["%-15s > %s\n"]
- local report_nop=formatters["%-15s >\n"]
- report=function(a,b,c,...)
- if c then
- write_nl(target,report_yes(translations[a],formatters[formats[b]](c,...)))
- elseif b then
- write_nl(target,report_yes(translations[a],formats[b]))
- elseif a then
- write_nl(target,report_nop(translations[a]))
- else
- write_nl(target,"\n")
- end
- end
- local direct_yes=formatters["%-15s > %s"]
- local direct_nop=formatters["%-15s >"]
- direct=function(a,b,c,...)
- if c then
- return direct_yes(translations[a],formatters[formats[b]](c,...))
- elseif b then
- return direct_yes(translations[a],formats[b])
- elseif a then
- return direct_nop(translations[a])
- else
- return ""
- end
- end
- local subreport_yes=formatters["%-15s > %s > %s\n"]
- local subreport_nop=formatters["%-15s > %s >\n"]
- subreport=function(a,s,b,c,...)
- if c then
- write_nl(target,subreport_yes(translations[a],translations[s],formatters[formats[b]](c,...)))
- elseif b then
- write_nl(target,subreport_yes(translations[a],translations[s],formats[b]))
- elseif a then
- write_nl(target,subreport_nop(translations[a],translations[s]))
- else
- write_nl(target,"\n")
- end
- end
- local subdirect_yes=formatters["%-15s > %s > %s"]
- local subdirect_nop=formatters["%-15s > %s >"]
- subdirect=function(a,s,b,c,...)
- if c then
- return subdirect_yes(translations[a],translations[s],formatters[formats[b]](c,...))
- elseif b then
- return subdirect_yes(translations[a],translations[s],formats[b])
- elseif a then
- return subdirect_nop(translations[a],translations[s])
- else
- return ""
- end
- end
- local status_yes=formatters["%-15s : %s\n"]
- local status_nop=formatters["%-15s :\n"]
- status=function(a,b,c,...)
- if c then
- write_nl(target,status_yes(translations[a],formatters[formats[b]](c,...)))
- elseif b then
- write_nl(target,status_yes(translations[a],formats[b]))
- elseif a then
- write_nl(target,status_nop(translations[a]))
- else
- write_nl(target,"\n")
- end
- end
- local targets={
- logfile="log",
- log="log",
- file="log",
- console="term",
- terminal="term",
- both="term and log",
- }
- settarget=function(whereto)
- target=targets[whereto or "both"] or targets.both
- if target=="term" or target=="term and log" then
- logs.flush=io.flush
- else
- logs.flush=ignore
- end
- end
- local stack={}
- pushtarget=function(newtarget)
- insert(stack,target)
- settarget(newtarget)
- end
- poptarget=function()
- if #stack>0 then
- settarget(remove(stack))
- end
- end
- setformats=function(f)
- formats=f
- end
- settranslations=function(t)
- translations=t
- end
- setprocessor=function(f)
- local writeline=write_nl
- write_nl=function(target,...)
- writeline(target,f(...))
- end
- end
- setformatters=function(f)
- report_yes=f.report_yes or report_yes
- report_nop=f.report_nop or report_nop
- subreport_yes=f.subreport_yes or subreport_yes
- subreport_nop=f.subreport_nop or subreport_nop
- direct_yes=f.direct_yes or direct_yes
- direct_nop=f.direct_nop or direct_nop
- subdirect_yes=f.subdirect_yes or subdirect_yes
- subdirect_nop=f.subdirect_nop or subdirect_nop
- status_yes=f.status_yes or status_yes
- status_nop=f.status_nop or status_nop
- end
- setlogfile=ignore
- settimedlog=ignore
-else
- logs.flush=ignore
- writer=function(s)
- write_nl(s)
- end
- newline=function()
- write_nl("\n")
- end
- local report_yes=formatters["%-15s | %s"]
- local report_nop=formatters["%-15s |"]
- report=function(a,b,c,...)
- if c then
- write_nl(report_yes(a,formatters[b](c,...)))
- elseif b then
- write_nl(report_yes(a,b))
- elseif a then
- write_nl(report_nop(a))
- else
- write_nl("")
- end
- end
- local subreport_yes=formatters["%-15s | %s | %s"]
- local subreport_nop=formatters["%-15s | %s |"]
- subreport=function(a,sub,b,c,...)
- if c then
- write_nl(subreport_yes(a,sub,formatters[b](c,...)))
- elseif b then
- write_nl(subreport_yes(a,sub,b))
- elseif a then
- write_nl(subreport_nop(a,sub))
- else
- write_nl("")
- end
- end
- local status_yes=formatters["%-15s : %s\n"]
- local status_nop=formatters["%-15s :\n"]
- status=function(a,b,c,...)
- if c then
- write_nl(status_yes(a,formatters[b](c,...)))
- elseif b then
- write_nl(status_yes(a,b))
- elseif a then
- write_nl(status_nop(a))
- else
- write_nl("\n")
- end
- end
- direct=ignore
- subdirect=ignore
- settarget=ignore
- pushtarget=ignore
- poptarget=ignore
- setformats=ignore
- settranslations=ignore
- setprocessor=function(f)
- local writeline=write_nl
- write_nl=function(s)
- writeline(f(s))
- end
- end
- setformatters=function(f)
- report_yes=f.report_yes or report_yes
- report_nop=f.report_nop or report_nop
- subreport_yes=f.subreport_yes or subreport_yes
- subreport_nop=f.subreport_nop or subreport_nop
- status_yes=f.status_yes or status_yes
- status_nop=f.status_nop or status_nop
- end
- setlogfile=function(name,keepopen)
- if name and name~="" then
- local localtime=os.localtime
- local writeline=write_nl
- if keepopen then
- local f=io.open(name,"ab")
- write_nl=function(s)
- writeline(s)
- f:write(localtime()," | ",s,"\n")
- end
- else
- write_nl=function(s)
- writeline(s)
- local f=io.open(name,"ab")
- f:write(localtime()," | ",s,"\n")
- f:close()
- end
- end
- end
- setlogfile=ignore
- end
- settimedlog=function()
- local localtime=os.localtime
- local writeline=write_nl
- write_nl=function(s)
- writeline(localtime().." | "..s)
- end
- settimedlog=ignore
- end
-end
-logs.report=report
-logs.subreport=subreport
-logs.status=status
-logs.settarget=settarget
-logs.pushtarget=pushtarget
-logs.poptarget=poptarget
-logs.setformats=setformats
-logs.settranslations=settranslations
-logs.setlogfile=setlogfile
-logs.settimedlog=settimedlog
-logs.setprocessor=setprocessor
-logs.setformatters=setformatters
-logs.direct=direct
-logs.subdirect=subdirect
-logs.writer=writer
-logs.newline=newline
-local data,states={},nil
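--- reporter: return a cached report function for a category (and optional subcategory); a blocked state silences it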
-function logs.reporter(category,subcategory)
- local logger=data[category]
- if not logger then
- local state=false
- if states==true then
- state=true
- elseif type(states)=="table" then
- for c,_ in next,states do
- if find(category,c) then
- state=true
- break
- end
- end
- end
- logger={
- reporters={},
- state=state,
- }
- data[category]=logger
- end
- local reporter=logger.reporters[subcategory or "default"]
- if not reporter then
- if subcategory then
- reporter=function(...)
- if not logger.state then
- subreport(category,subcategory,...)
- end
- end
- logger.reporters[subcategory]=reporter
- else
- local tag=category
- reporter=function(...)
- if not logger.state then
- report(category,...)
- end
- end
- logger.reporters.default=reporter
- end
- end
- return reporter
-end
-logs.new=logs.reporter
-local ctxreport=logs.writer
-function logs.setmessenger(m)
- ctxreport=m
-end
-function logs.messenger(category,subcategory)
- if subcategory then
- return function(...)
- ctxreport(subdirect(category,subcategory,...))
- end
- else
- return function(...)
- ctxreport(direct(category,...))
- end
- end
-end
-local function setblocked(category,value)
- if category==true then
- category,value="*",true
- elseif category==false then
- category,value="*",false
- elseif value==nil then
- value=true
- end
- if category=="*" then
- states=value
- for k,v in next,data do
- v.state=value
- end
- else
- states=utilities.parsers.settings_to_hash(category)
- for c,_ in next,states do
- if data[c] then
- data[c].state=value
- else
- c=topattern(c,true,true)
- for k,v in next,data do
- if find(k,c) then
- v.state=value
- end
- end
- end
- end
- end
-end
-function logs.disable(category,value)
- setblocked(category,value==nil and true or value)
-end
-function logs.enable(category)
- setblocked(category,false)
-end
-function logs.categories()
- return table.sortedkeys(data)
-end
-function logs.show()
- local n,c,s,max=0,0,0,0
- for category,v in table.sortedpairs(data) do
- n=n+1
- local state=v.state
- local reporters=v.reporters
- local nc=#category
- if nc>c then
- c=nc
- end
- for subcategory,_ in next,reporters do
- local ns=#subcategory
- if ns>s then
- s=ns
- end
- local m=nc+ns
- if m>max then
- max=m
- end
- end
- local subcategories=concat(table.sortedkeys(reporters),", ")
- if state==true then
- state="disabled"
- elseif state==false then
- state="enabled"
- else
- state="unknown"
- end
- report("logging","category %a, subcategories %a, state %a",category,subcategories,state)
- end
- report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
-end
-local delayed_reporters={}
-setmetatableindex(delayed_reporters,function(t,k)
- local v=logs.reporter(k.name)
- t[k]=v
- return v
-end)
-function utilities.setters.report(setter,...)
- delayed_reporters[setter](...)
-end
-directives.register("logs.blocked",function(v)
- setblocked(v,true)
-end)
-directives.register("logs.target",function(v)
- settarget(v)
-end)
-local report_pages=logs.reporter("pages")
-local real,user,sub
-function logs.start_page_number()
- real=texgetcount("realpageno")
- user=texgetcount("userpageno")
- sub=texgetcount("subpageno")
-end
-local timing=false
-local starttime=nil
-local lasttime=nil
-trackers.register("pages.timing",function(v)
- starttime=os.clock()
- timing=true
-end)
-function logs.stop_page_number()
- if timing then
- local elapsed,average
- local stoptime=os.clock()
- if not lasttime or real<2 then
- elapsed=stoptime
- average=stoptime
- starttime=stoptime
- else
- elapsed=stoptime-lasttime
- average=(stoptime-starttime)/(real-1)
- end
- lasttime=stoptime
- if real<=0 then
- report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
- elseif user<=0 then
- report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
- elseif sub<=0 then
- report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
- else
- report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
- end
- else
- if real<=0 then
- report_pages("flushing page")
- elseif user<=0 then
- report_pages("flushing realpage %s",real)
- elseif sub<=0 then
- report_pages("flushing realpage %s, userpage %s",real,user)
- else
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- end
- end
- logs.flush()
-end
-local report_files=logs.reporter("files")
-local nesting=0
-local verbose=false
-local hasscheme=url.hasscheme
-function logs.show_open(name)
-end
-function logs.show_close(name)
-end
-function logs.show_load(name)
-end
-local simple=logs.reporter("comment")
-logs.simple=simple
-logs.simpleline=simple
-function logs.setprogram () end
-function logs.extendbanner() end
-function logs.reportlines () end
-function logs.reportbanner() end
-function logs.reportline () end
-function logs.simplelines () end
-function logs.help () end
-local Carg,C,lpegmatch=lpeg.Carg,lpeg.C,lpeg.match
-local p_newline=lpeg.patterns.newline
-local linewise=(
- Carg(1)*C((1-p_newline)^1)/function(t,s) t.report(s) end+Carg(1)*p_newline^2/function(t) t.report() end+p_newline
-)^1
-local function reportlines(t,str)
- if str then
- lpegmatch(linewise,str,1,t)
- end
-end
-local function reportbanner(t)
- local banner=t.banner
- if banner then
- t.report(banner)
- t.report()
- end
-end
-local function reportversion(t)
- local banner=t.banner
- if banner then
- t.report(banner)
- end
-end
-local function reporthelp(t,...)
- local helpinfo=t.helpinfo
- if type(helpinfo)=="string" then
- reportlines(t,helpinfo)
- elseif type(helpinfo)=="table" then
- for i=1,select("#",...) do
- reportlines(t,t.helpinfo[select(i,...)])
- if i<select("#",...) then
- t.report()
- end
- end
- end
-end
-local function reportinfo(t)
- t.report()
- reportlines(t,t.moreinfo)
-end
-local function reportexport(t,method)
- report(t.helpinfo)
-end
-local reporters={
- lines=reportlines,
- banner=reportbanner,
- version=reportversion,
- help=reporthelp,
- info=reportinfo,
- export=reportexport,
-}
-local exporters={
-}
-logs.reporters=reporters
-logs.exporters=exporters
-function logs.application(t)
- t.name=t.name or "unknown"
- t.banner=t.banner
- t.moreinfo=moreinfo
- t.report=logs.reporter(t.name)
- t.help=function(...)
- reporters.banner(t)
- reporters.help(t,...)
- reporters.info(t)
- end
- t.export=function(...)
- reporters.export(t,...)
- end
- t.identify=function()
- reporters.banner(t)
- end
- t.version=function()
- reporters.version(t)
- end
- return t
-end
-function logs.system(whereto,process,jobname,category,...)
- local message=formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%M:%S"),process,jobname,category,format(...))
- for i=1,10 do
- local f=io.open(whereto,"a")
- if f then
- f:write(message)
- f:close()
- break
- else
- sleep(0.1)
- end
- end
-end
-local report_system=logs.reporter("system","logs")
-function logs.obsolete(old,new)
- local o=loadstring("return "..new)()
- if type(o)=="function" then
- return function(...)
- report_system("function %a is obsolete, use %a",old,new)
- loadstring(old.."="..new.." return "..old)()(...)
- end
- elseif type(o)=="table" then
- local t,m={},{}
- m.__index=function(t,k)
- report_system("table %a is obsolete, use %a",old,new)
- m.__index,m.__newindex=o,o
- return o[k]
- end
- m.__newindex=function(t,k,v)
- report_system("table %a is obsolete, use %a",old,new)
- m.__index,m.__newindex=o,o
- o[k]=v
- end
- if libraries then
- libraries.obsolete[old]=t
- end
- setmetatable(t,m)
- return t
- end
-end
-if utilities then
- utilities.report=report_system
-end
-if tex and tex.error then
- function logs.texerrormessage(...)
- tex.error(format(...),{})
- end
-else
- function logs.texerrormessage(...)
- print(format(...))
- end
-end
-io.stdout:setvbuf('no')
-io.stderr:setvbuf('no')
-if package.helpers.report then
- package.helpers.report=logs.reporter("package loader")
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
-
--- original size: 6643, stripped down to: 5272
-
-if not modules then modules={} end modules ['trac-inf']={
- version=1.001,
- comment="companion to trac-inf.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local type,tonumber,select=type,tonumber,select
-local format,lower=string.format,string.lower
-local concat=table.concat
-local clock=os.gettimeofday or os.clock
-local setmetatableindex=table.setmetatableindex
-local serialize=table.serialize
-local formatters=string.formatters
-statistics=statistics or {}
-local statistics=statistics
-statistics.enable=true
-statistics.threshold=0.01
-local statusinfo,n,registered,timers={},0,{},{}
-setmetatableindex(timers,function(t,k)
- local v={ timing=0,loadtime=0 }
- t[k]=v
- return v
-end)
-local function hastiming(instance)
- return instance and timers[instance]
-end
-local function resettiming(instance)
- timers[instance or "notimer"]={ timing=0,loadtime=0 }
-end
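--- start/stoptiming nest by counting; only the outermost stoptiming adds the elapsed time to the timer's loadtime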
-local function starttiming(instance)
- local timer=timers[instance or "notimer"]
- local it=timer.timing or 0
- if it==0 then
- timer.starttime=clock()
- if not timer.loadtime then
- timer.loadtime=0
- end
- end
- timer.timing=it+1
-end
-local function stoptiming(instance)
- local timer=timers[instance or "notimer"]
- local it=timer.timing
- if it>1 then
- timer.timing=it-1
- else
- local starttime=timer.starttime
- if starttime then
- local stoptime=clock()
- local loadtime=stoptime-starttime
- timer.stoptime=stoptime
- timer.loadtime=timer.loadtime+loadtime
- timer.timing=0
- return loadtime
- end
- end
- return 0
-end
-local function elapsed(instance)
- if type(instance)=="number" then
- return instance or 0
- else
- local timer=timers[instance or "notimer"]
- return timer and timer.loadtime or 0
- end
-end
-local function elapsedtime(instance)
- return format("%0.3f",elapsed(instance))
-end
-local function elapsedindeed(instance)
- return elapsed(instance)>statistics.threshold
-end
-local function elapsedseconds(instance,rest)
- if elapsedindeed(instance) then
- return format("%0.3f seconds %s",elapsed(instance),rest or "")
- end
-end
-statistics.hastiming=hastiming
-statistics.resettiming=resettiming
-statistics.starttiming=starttiming
-statistics.stoptiming=stoptiming
-statistics.elapsed=elapsed
-statistics.elapsedtime=elapsedtime
-statistics.elapsedindeed=elapsedindeed
-statistics.elapsedseconds=elapsedseconds
-function statistics.register(tag,fnc)
- if statistics.enable and type(fnc)=="function" then
- local rt=registered[tag] or (#statusinfo+1)
- statusinfo[rt]={ tag,fnc }
- registered[tag]=rt
- if #tag>n then n=#tag end
- end
-end
-local report=logs.reporter("mkiv lua stats")
-function statistics.show()
- if statistics.enable then
- local register=statistics.register
- register("used platform",function()
- local mask=lua.mask or "ascii"
- return format("%s, type: %s, binary subtree: %s, symbol mask: %s (%s)",
- os.platform or "unknown",os.type or "unknown",environment.texos or "unknown",
- mask,mask=="utf" and "τεχ" or "tex")
- end)
- register("luatex banner",function()
- return lower(status.banner)
- end)
- register("control sequences",function()
- return format("%s of %s + %s",status.cs_count,status.hash_size,status.hash_extra)
- end)
- register("callbacks",function()
- local total,indirect=status.callbacks or 0,status.indirect_callbacks or 0
- return format("%s direct, %s indirect, %s total",total-indirect,indirect,total)
- end)
- if jit then
- local status={ jit.status() }
- if status[1] then
- register("luajit status",function()
- return concat(status," ",2)
- end)
- end
- end
- register("current memory usage",statistics.memused)
- register("runtime",statistics.runtime)
- logs.newline()
- for i=1,#statusinfo do
- local s=statusinfo[i]
- local r=s[2]()
- if r then
- report("%s: %s",s[1],r)
- end
- end
- statistics.enable=false
- end
-end
-function statistics.memused()
- local round=math.round or math.floor
- return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000),round(status.luastate_bytes/1000000))
-end
-starttiming(statistics)
-function statistics.formatruntime(runtime)
- return format("%s seconds",runtime)
-end
-function statistics.runtime()
- stoptiming(statistics)
- return statistics.formatruntime(elapsedtime(statistics))
-end
-local report=logs.reporter("system")
-function statistics.timed(action)
- starttiming("run")
- action()
- stoptiming("run")
- report("total runtime: %s seconds",elapsedtime("run"))
-end
-function statistics.tracefunction(base,tag,...)
- for i=1,select("#",...) do
- local name=select(i,...)
- local stat={}
- local func=base[name]
- setmetatableindex(stat,function(t,k) t[k]=0 return 0 end)
- base[name]=function(n,k,v) stat[k]=stat[k]+1 return func(n,k,v) end
- statistics.register(formatters["%s.%s"](tag,name),function() return serialize(stat,"calls") end)
- end
-end
-commands=commands or {}
-function commands.resettimer(name)
- resettiming(name or "whatever")
- starttiming(name or "whatever")
-end
-function commands.elapsedtime(name)
- stoptiming(name or "whatever")
- context(elapsedtime(name or "whatever"))
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
-
--- original size: 5829, stripped down to: 3501
-
-if not modules then modules={} end modules ['trac-pro']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local getmetatable,setmetatable,rawset,type=getmetatable,setmetatable,rawset,type
-local trace_namespaces=false trackers.register("system.namespaces",function(v) trace_namespaces=v end)
-local report_system=logs.reporter("system","protection")
-namespaces=namespaces or {}
-local namespaces=namespaces
-local registered={}
-local function report_index(k,name)
- if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
- else
- report_system("reference to %a in protected namespace %a",k,name)
- end
-end
-local function report_newindex(k,name)
- if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
- else
- report_system("assignment to %a in protected namespace %a",k,name)
- end
-end
-local function register(name)
- local data=name=="global" and _G or _G[name]
- if not data then
- return
- end
- registered[name]=data
- local m=getmetatable(data)
- if not m then
- m={}
- setmetatable(data,m)
- end
- local index,newindex={},{}
- m.__saved__index=m.__index
- m.__no__index=function(t,k)
- if not index[k] then
- index[k]=true
- report_index(k,name)
- end
- return nil
- end
- m.__saved__newindex=m.__newindex
- m.__no__newindex=function(t,k,v)
- if not newindex[k] then
- newindex[k]=true
- report_newindex(k,name)
- end
- rawset(t,k,v)
- end
- m.__protection__depth=0
-end
-local function private(name)
- local data=registered[name]
- if not data then
- data=_G[name]
- if not data then
- data={}
- _G[name]=data
- end
- register(name)
- end
- return data
-end
-local function protect(name)
- local data=registered[name]
- if not data then
- return
- end
- local m=getmetatable(data)
- local pd=m.__protection__depth
- if pd>0 then
- m.__protection__depth=pd+1
- else
- m.__save_d_index,m.__saved__newindex=m.__index,m.__newindex
- m.__index,m.__newindex=m.__no__index,m.__no__newindex
- m.__protection__depth=1
- end
-end
-local function unprotect(name)
- local data=registered[name]
- if not data then
- return
- end
- local m=getmetatable(data)
- local pd=m.__protection__depth
- if pd>1 then
- m.__protection__depth=pd-1
- else
- m.__index,m.__newindex=m.__saved__index,m.__saved__newindex
- m.__protection__depth=0
- end
-end
-local function protectall()
- for name,_ in next,registered do
- if name~="global" then
- protect(name)
- end
- end
-end
-local function unprotectall()
- for name,_ in next,registered do
- if name~="global" then
- unprotect(name)
- end
- end
-end
-namespaces.register=register
-namespaces.private=private
-namespaces.protect=protect
-namespaces.unprotect=unprotect
-namespaces.protectall=protectall
-namespaces.unprotectall=unprotectall
-namespaces.private("namespaces") registered={} register("global")
-directives.register("system.protect",function(v)
- if v then
- protectall()
- else
- unprotectall()
- end
-end)
-directives.register("system.checkglobals",function(v)
- if v then
- report_system("enabling global namespace guard")
- protect("global")
- else
- report_system("disabling global namespace guard")
- unprotect("global")
- end
-end)
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-lua"] = package.loaded["util-lua"] or true
-
--- original size: 4982, stripped down to: 3511
-
-if not modules then modules={} end modules ['util-lua']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- comment="the strip code is written by Peter Cawley",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local rep,sub,byte,dump,format=string.rep,string.sub,string.byte,string.dump,string.format
-local load,loadfile,type=load,loadfile,type
-utilities=utilities or {}
-utilities.lua=utilities.lua or {}
-local luautilities=utilities.lua
-local report_lua=logs.reporter("system","lua")
-local tracestripping=false
-local forcestupidcompile=true
-luautilities.stripcode=true
-luautilities.alwaysstripcode=false
-luautilities.nofstrippedchunks=0
-luautilities.nofstrippedbytes=0
-local strippedchunks={}
-luautilities.strippedchunks=strippedchunks
-luautilities.suffixes={
- tma="tma",
- tmc=jit and "tmb" or "tmc",
- lua="lua",
- luc=jit and "lub" or "luc",
- lui="lui",
- luv="luv",
- luj="luj",
- tua="tua",
- tuc="tuc",
-}
-local function register(name)
- if tracestripping then
- report_lua("stripped bytecode from %a",name or "unknown")
- end
- strippedchunks[#strippedchunks+1]=name
- luautilities.nofstrippedchunks=luautilities.nofstrippedchunks+1
-end
-local function stupidcompile(luafile,lucfile,strip)
- local code=io.loaddata(luafile)
- if code and code~="" then
- code=load(code)
- if code then
- code=dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode)
- if code and code~="" then
-    register(luafile)
- io.savedata(lucfile,code)
- return true,0
- end
- else
- report_lua("fatal error %a in file %a",1,luafile)
- end
- else
- report_lua("fatal error %a in file %a",2,luafile)
- end
- return false,0
-end
-function luautilities.loadedluacode(fullname,forcestrip,name)
- name=name or fullname
- local code,message
- if environment.loadpreprocessedfile then
- code,message=environment.loadpreprocessedfile(fullname)
- else
- code,message=loadfile(fullname)
- end
- if code then
- code()
- else
- report_lua("loading of file %a failed:\n\t%s",fullname,message or "no message")
- end
- if forcestrip and luautilities.stripcode then
- if type(forcestrip)=="function" then
- forcestrip=forcestrip(fullname)
- end
- if forcestrip or luautilities.alwaysstripcode then
- register(name)
- return load(dump(code,true)),0
- else
- return code,0
- end
- elseif luautilities.alwaysstripcode then
- register(name)
- return load(dump(code,true)),0
- else
- return code,0
- end
-end
-function luautilities.strippedloadstring(code,forcestrip,name)
- local code,message=load(code)
- if not code then
- report_lua("loading of file %a failed:\n\t%s",name,message or "no message")
- end
- if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then
- register(name)
- return load(dump(code,true)),0
- else
- return code,0
- end
-end
-function luautilities.compile(luafile,lucfile,cleanup,strip,fallback)
- report_lua("compiling %a into %a",luafile,lucfile)
- os.remove(lucfile)
- local done=stupidcompile(luafile,lucfile,strip~=false)
- if done then
- report_lua("dumping %a into %a stripped",luafile,lucfile)
- if cleanup==true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- report_lua("removing %a",luafile)
- os.remove(luafile)
- end
- end
- return done
-end
-function luautilities.loadstripped(...)
- local l=load(...)
- if l then
- return load(dump(l,true))
- end
-end
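--- usage sketch: typical calls into the helpers above; the file names are
--- hypothetical placeholders.
---
---   luautilities.compile("whatever.lua","whatever.luc",false,true)
---     -- byte compiles whatever.lua into whatever.luc, stripping debug info
---   local chunk = luautilities.loadstripped([[return function(a,b) return a+b end]])
---   local add   = chunk and chunk()  -- add(1,2) == 3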
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-deb"] = package.loaded["util-deb"] or true
-
--- original size: 3898, stripped down to: 2644
-
-if not modules then modules={} end modules ['util-deb']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local debug=require "debug"
-local getinfo=debug.getinfo
-local type,next,tostring=type,next,tostring
-local format,find=string.format,string.find
-local is_boolean=string.is_boolean
-utilities=utilities or {}
-local debugger=utilities.debugger or {}
-utilities.debugger=debugger
-local counters={}
-local names={}
-local report=logs.reporter("debugger")
-local function hook()
- local f=getinfo(2)
- if f then
- local n="unknown"
- if f.what=="C" then
- n=f.name or '<anonymous>'
- if not names[n] then
- names[n]=format("%42s",n)
- end
- else
- n=f.name or f.namewhat or f.what
- if not n or n=="" then
- n="?"
- end
- if not names[n] then
- names[n]=format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
- end
- end
- counters[n]=(counters[n] or 0)+1
- end
-end
-function debugger.showstats(printer,threshold)
- printer=printer or report
- threshold=threshold or 0
- local total,grandtotal,functions=0,0,0
- local dataset={}
- for name,count in next,counters do
- dataset[#dataset+1]={ name,count }
- end
- table.sort(dataset,function(a,b) return a[2]==b[2] and b[1]>a[1] or a[2]>b[2] end)
- for i=1,#dataset do
- local d=dataset[i]
- local name=d[1]
- local count=d[2]
- if count>threshold and not find(name,"for generator") then
- printer(format("%8i %s\n",count,names[name]))
- total=total+count
- end
- grandtotal=grandtotal+count
- functions=functions+1
- end
- printer("\n")
- printer(format("functions : % 10i\n",functions))
- printer(format("total : % 10i\n",total))
- printer(format("grand total: % 10i\n",grandtotal))
- printer(format("threshold : % 10i\n",threshold))
-end
-function debugger.savestats(filename,threshold)
- local f=io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
- end
-end
-function debugger.enable()
- debug.sethook(hook,"c")
-end
-function debugger.disable()
- debug.sethook()
-end
-local function showtraceback(rep)
- local level=2
- local reporter=rep or report
- while true do
- local info=getinfo(level,"Sl")
- if not info then
- break
- elseif info.what=="C" then
- reporter("%2i : %s",level-1,"C function")
- else
- reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
- end
- level=level+1
- end
-end
-debugger.showtraceback=showtraceback
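--- usage sketch: the call counting hook above is used as follows; the file
--- name is a placeholder.
---
---   debugger.enable()                 -- installs the "c" (call event) debug hook
---   for i=1,10000 do tostring(i) end  -- some work to be profiled
---   debugger.disable()
---   debugger.showstats(print,10)      -- report functions called more than 10 times
---   debugger.savestats("calls.log")   -- or write the same report to a file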
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-mrg"] = package.loaded["util-mrg"] or true
-
--- original size: 7757, stripped down to: 6015
-
-if not modules then modules={} end modules ['util-mrg']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local gsub,format=string.gsub,string.format
-local concat=table.concat
-local type,next=type,next
-local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt,Cb,Cg=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt,lpeg.Cb,lpeg.Cg
-local lpegmatch,patterns=lpeg.match,lpeg.patterns
-utilities=utilities or {}
-local merger=utilities.merger or {}
-utilities.merger=merger
-merger.strip_comment=true
-local report=logs.reporter("system","merge")
-utilities.report=report
-local m_begin_merge="begin library merge"
-local m_end_merge="end library merge"
-local m_begin_closure="do -- create closure to overcome 200 locals limit"
-local m_end_closure="end -- of closure"
-local m_pattern="%c+".."%-%-%s+"..m_begin_merge.."%c+(.-)%c+".."%-%-%s+"..m_end_merge.."%c+"
-local m_format="\n\n-- "..m_begin_merge.."\n%s\n".."-- "..m_end_merge.."\n\n"
-local m_faked="-- ".."created merged file".."\n\n".."-- "..m_begin_merge.."\n\n".."-- "..m_end_merge.."\n\n"
-local m_report=[[
--- used libraries : %s
--- skipped libraries : %s
--- original bytes : %s
--- stripped bytes : %s
-]]
-local m_preloaded=[[package.loaded[%q] = package.loaded[%q] or true]]
-local function self_fake()
- return m_faked
-end
-local function self_nothing()
- return ""
-end
-local function self_load(name)
- local data=io.loaddata(name) or ""
- if data=="" then
- report("unknown file %a",name)
- else
- report("inserting file %a",name)
- end
- return data or ""
-end
-local space=patterns.space
-local eol=patterns.newline
-local equals=P("=")^0
-local open=P("[")*Cg(equals,"init")*P("[")*P("\n")^-1
-local close=P("]")*C(equals)*P("]")
-local closeeq=Cmt(close*Cb("init"),function(s,i,a,b) return a==b end)
-local longstring=open*(1-closeeq)^0*close
-local quoted=patterns.quoted
-local digit=patterns.digit
-local emptyline=space^0*eol
-local operator1=P("<=")+P(">=")+P("~=")+P("..")+S("/^<>=*+%%")
-local operator2=S("*+/")
-local operator3=S("-")
-local operator4=P("..")
-local separator=S(",;")
-local ignore=(P("]")*space^1*P("=")*space^1*P("]"))/"]=["+(P("=")*space^1*P("{"))/"={"+(P("(")*space^1)/"("+(P("{")*(space+eol)^1*P("}"))/"{}"
-local strings=quoted
-local longcmt=(emptyline^0*P("--")*longstring*emptyline^0)/""
-local longstr=longstring
-local comment=emptyline^0*P("--")*P("-")^0*(1-eol)^0*emptyline^1/"\n"
-local optionalspaces=space^0/""
-local mandatespaces=space^1/""
-local optionalspacing=(eol+space)^0/""
-local mandatespacing=(eol+space)^1/""
-local pack=digit*space^1*operator4*optionalspacing+optionalspacing*operator1*optionalspacing+optionalspacing*operator2*optionalspaces+mandatespacing*operator3*mandatespaces+optionalspaces*separator*optionalspaces
-local lines=emptyline^2/"\n"
-local spaces=(space*space)/" "
-local compact=Cs ((
- ignore+strings+longcmt+longstr+comment+pack+lines+spaces+1
-)^1 )
-local strip=Cs((emptyline^2/"\n"+1)^0)
-local stripreturn=Cs((1-P("return")*space^1*P(1-space-eol)^1*(space+eol)^0*P(-1))^1)
-function merger.compact(data)
- return lpegmatch(strip,lpegmatch(compact,data))
-end
-local function self_compact(data)
- local delta=0
- if merger.strip_comment then
- local before=#data
- data=lpegmatch(compact,data)
- data=lpegmatch(strip,data)
- local after=#data
- delta=before-after
- report("original size %s, compacted to %s, stripped %s",before,after,delta)
- data=format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data)
- end
- return lpegmatch(stripreturn,data) or data,delta
-end
-local function self_save(name,data)
- if data~="" then
- io.savedata(name,data)
- report("saving %s with size %s",name,#data)
- end
-end
-local function self_swap(data,code)
- return data~="" and (gsub(data,m_pattern,function() return format(m_format,code) end,1)) or ""
-end
-local function self_libs(libs,list)
- local result,f,frozen,foundpath={},nil,false,nil
- result[#result+1]="\n"
- if type(libs)=='string' then libs={ libs } end
- if type(list)=='string' then list={ list } end
- for i=1,#libs do
- local lib=libs[i]
- for j=1,#list do
- local pth=gsub(list[j],"\\","/")
- report("checking library path %a",pth)
- local name=pth.."/"..lib
- if lfs.isfile(name) then
- foundpath=pth
- end
- end
- if foundpath then break end
- end
- if foundpath then
- report("using library path %a",foundpath)
- local right,wrong,original,stripped={},{},0,0
- for i=1,#libs do
- local lib=libs[i]
- local fullname=foundpath.."/"..lib
- if lfs.isfile(fullname) then
- report("using library %a",fullname)
- local preloaded=file.nameonly(lib)
- local data=io.loaddata(fullname,true)
- original=original+#data
- local data,delta=self_compact(data)
- right[#right+1]=lib
- result[#result+1]=m_begin_closure
- result[#result+1]=format(m_preloaded,preloaded,preloaded)
- result[#result+1]=data
- result[#result+1]=m_end_closure
- stripped=stripped+delta
- else
- report("skipping library %a",fullname)
- wrong[#wrong+1]=lib
- end
- end
- right=#right>0 and concat(right," ") or "-"
- wrong=#wrong>0 and concat(wrong," ") or "-"
- report("used libraries: %a",right)
- report("skipped libraries: %a",wrong)
- report("original bytes: %a",original)
- report("stripped bytes: %a",stripped)
- result[#result+1]=format(m_report,right,wrong,original,stripped)
- else
- report("no valid library path found")
- end
- return concat(result,"\n\n")
-end
-function merger.selfcreate(libs,list,target)
- if target then
- self_save(target,self_swap(self_fake(),self_libs(libs,list)))
- end
-end
-function merger.selfmerge(name,libs,list,target)
- self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
-end
-function merger.selfclean(name)
- self_save(name,self_swap(self_load(name),self_nothing()))
-end
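--- usage sketch: this is the merging machinery behind self contained scripts
--- such as mtxrun; the library and path names below are placeholders.
---
---   merger.selfmerge("myscript.lua", { "l-one.lua", "l-two.lua" }, { "lualibs" })
---     -- replaces the "begin/end library merge" section of myscript.lua with the
---     -- compacted content of the listed libraries found on the given path
---   merger.selfclean("myscript.lua")  -- empties that section again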
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-tpl"] = package.loaded["util-tpl"] or true
-
--- original size: 6251, stripped down to: 3488
-
-if not modules then modules={} end modules ['util-tpl']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-utilities.templates=utilities.templates or {}
-local templates=utilities.templates
-local trace_template=false trackers.register("templates.trace",function(v) trace_template=v end)
-local report_template=logs.reporter("template")
-local tostring=tostring
-local format,sub,byte=string.format,string.sub,string.byte
-local P,C,R,Cs,Cc,Carg,lpegmatch,lpegpatterns=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Cc,lpeg.Carg,lpeg.match,lpeg.patterns
-local replacer
-local function replacekey(k,t,how,recursive)
- local v=t[k]
- if not v then
- if trace_template then
- report_template("unknown key %a",k)
- end
- return ""
- else
- v=tostring(v)
- if trace_template then
- report_template("setting key %a to value %a",k,v)
- end
- if recursive then
- return lpegmatch(replacer,v,1,t,how,recursive)
- else
- return v
- end
- end
-end
-local sqlescape=lpeg.replacer {
- { "'","''" },
- { "\\","\\\\" },
- { "\r\n","\\n" },
- { "\r","\\n" },
-}
-local sqlquoted=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'"))
-lpegpatterns.sqlescape=sqlescape
-lpegpatterns.sqlquoted=sqlquoted
-local luaescape=lpegpatterns.luaescape
-local escapers={
- lua=function(s)
- return lpegmatch(luaescape,s)
- end,
- sql=function(s)
- return lpegmatch(sqlescape,s)
- end,
-}
-local quotedescapers={
- lua=function(s)
- return format("%q",s)
- end,
- sql=function(s)
- return lpegmatch(sqlquoted,s)
- end,
-}
-local luaescaper=escapers.lua
-local quotedluaescaper=quotedescapers.lua
-local function replacekeyunquoted(s,t,how,recurse)
- local escaper=how and escapers[how] or luaescaper
- return escaper(replacekey(s,t,how,recurse))
-end
-local function replacekeyquoted(s,t,how,recurse)
- local escaper=how and quotedescapers[how] or quotedluaescaper
- return escaper(replacekey(s,t,how,recurse))
-end
-local single=P("%")
-local double=P("%%")
-local lquoted=P("%[")
-local rquoted=P("]%")
-local lquotedq=P("%(")
-local rquotedq=P(")%")
-local escape=double/'%%'
-local nosingle=single/''
-local nodouble=double/''
-local nolquoted=lquoted/''
-local norquoted=rquoted/''
-local nolquotedq=lquotedq/''
-local norquotedq=rquotedq/''
-local key=nosingle*((C((1-nosingle )^1)*Carg(1)*Carg(2)*Carg(3))/replacekey )*nosingle
-local quoted=nolquotedq*((C((1-norquotedq)^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyquoted )*norquotedq
-local unquoted=nolquoted*((C((1-norquoted )^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyunquoted)*norquoted
-local any=P(1)
- replacer=Cs((unquoted+quoted+escape+key+any)^0)
-local function replace(str,mapping,how,recurse)
- if mapping and str then
- return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
- else
- return str
- end
-end
-templates.replace=replace
-function templates.replacer(str,how,recurse)
- return function(mapping)
- return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
- end
-end
-function templates.load(filename,mapping,how,recurse)
- local data=io.loaddata(filename) or ""
- if mapping and next(mapping) then
- return replace(data,mapping,how,recurse)
- else
- return data
- end
-end
-function templates.resolve(t,mapping,how,recurse)
- if not mapping then
- mapping=t
- end
- for k,v in next,t do
- t[k]=replace(v,mapping,how,recurse)
- end
- return t
-end
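--- usage sketch: the three delimiters recognized by the replacer above.
---
---   templates.replace("hello %name%",       { name = "world" })        -- raw:     hello world
---   templates.replace("local s = %(name)%", { name = "world" })        -- quoted:  local s = "world"
---   templates.replace("select '%[name]%'",  { name = "world" }, "sql") -- escaped: select 'world'
---
--- unknown keys expand to an empty string and are reported when the
--- "templates.trace" tracker is enabled.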
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-env"] = package.loaded["util-env"] or true
-
--- original size: 8807, stripped down to: 5085
-
-if not modules then modules={} end modules ['util-env']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local allocate,mark=utilities.storage.allocate,utilities.storage.mark
-local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
-local unquoted,quoted=string.unquoted,string.quoted
-local concat,insert,remove=table.concat,table.insert,table.remove
-environment=environment or {}
-local environment=environment
-os.setlocale(nil,nil)
-function os.setlocale()
-end
-local validengines=allocate {
- ["luatex"]=true,
- ["luajittex"]=true,
-}
-local basicengines=allocate {
- ["luatex"]="luatex",
- ["texlua"]="luatex",
- ["texluac"]="luatex",
- ["luajittex"]="luajittex",
- ["texluajit"]="luajittex",
-}
-local luaengines=allocate {
- ["lua"]=true,
- ["luajit"]=true,
-}
-environment.validengines=validengines
-environment.basicengines=basicengines
-if not arg then
- environment.used_as_library=true
-elseif luaengines[file.removesuffix(arg[-1])] then
-elseif validengines[file.removesuffix(arg[0])] then
- if arg[1]=="--luaonly" then
- arg[-1]=arg[0]
- arg[ 0]=arg[2]
- for k=3,#arg do
- arg[k-2]=arg[k]
- end
- remove(arg)
- remove(arg)
- else
- end
- local originalzero=file.basename(arg[0])
- local specialmapping={ luatools="base" }
- if originalzero~="mtxrun" and originalzero~="mtxrun.lua" then
- arg[0]=specialmapping[originalzero] or originalzero
- insert(arg,0,"--script")
- insert(arg,0,"mtxrun")
- end
-end
-environment.arguments=allocate()
-environment.files=allocate()
-environment.sortedflags=nil
-function environment.initializearguments(arg)
- local arguments,files={},{}
- environment.arguments,environment.files,environment.sortedflags=arguments,files,nil
- for index=1,#arg do
- local argument=arg[index]
- if index>0 then
- local flag,value=match(argument,"^%-+(.-)=(.-)$")
- if flag then
- flag=gsub(flag,"^c:","")
- arguments[flag]=unquoted(value or "")
- else
- flag=match(argument,"^%-+(.+)")
- if flag then
- flag=gsub(flag,"^c:","")
- arguments[flag]=true
- else
- files[#files+1]=argument
- end
- end
- end
- end
- environment.ownname=file.reslash(environment.ownname or arg[0] or 'unknown.lua')
-end
-function environment.setargument(name,value)
- environment.arguments[name]=value
-end
-function environment.getargument(name,partial)
- local arguments,sortedflags=environment.arguments,environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags=allocate(table.sortedkeys(arguments))
- for k=1,#sortedflags do
- sortedflags[k]="^"..sortedflags[k]
- end
- environment.sortedflags=sortedflags
- end
- for k=1,#sortedflags do
- local v=sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
- end
- end
- end
- return nil
-end
-environment.argument=environment.getargument
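--- usage sketch: how a command line ends up in the tables above; the
--- arguments shown are hypothetical.
---
---   environment.initializearguments { "--verbose", "--pat=*.tex", "somefile" }
---   environment.getargument("verbose")       -- true
---   environment.getargument("pat")           -- "*.tex"
---   environment.getargument("pattern",true)  -- "*.tex" (abbreviated flag accepted)
---   environment.files[1]                     -- "somefile"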
-function environment.splitarguments(separator)
- local done,before,after=false,{},{}
- local originalarguments=environment.originalarguments
- for k=1,#originalarguments do
- local v=originalarguments[k]
- if not done and v==separator then
- done=true
- elseif done then
- after[#after+1]=v
- else
- before[#before+1]=v
- end
- end
- return before,after
-end
-function environment.reconstructcommandline(arg,noquote)
- arg=arg or environment.originalarguments
- if noquote and #arg==1 then
- local a=arg[1]
- a=resolvers.resolve(a)
- a=unquoted(a)
- return a
- elseif #arg>0 then
- local result={}
- for i=1,#arg do
- local a=arg[i]
- a=resolvers.resolve(a)
- a=unquoted(a)
- a=gsub(a,'"','\\"')
- if find(a," ") then
- result[#result+1]=quoted(a)
- else
- result[#result+1]=a
- end
- end
- return concat(result," ")
- else
- return ""
- end
-end
-function environment.relativepath(path,root)
- if not path then
- path=""
- end
- if not file.is_rootbased_path(path) then
- if not root then
- root=file.pathpart(environment.ownscript or environment.ownname or ".")
- end
- if root=="" then
- root="."
- end
- path=root.."/"..path
- end
- return file.collapsepath(path,true)
-end
-if arg then
- local newarg,instring={},false
- for index=1,#arg do
- local argument=arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1]=gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring=true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg]=newarg[#newarg].." "..gsub(argument,"\"$","")
- instring=false
- elseif instring then
- newarg[#newarg]=newarg[#newarg].." "..argument
- else
- newarg[#newarg+1]=argument
- end
- end
- for i=0,-5,-1 do
- newarg[i]=arg[i]
- end
- environment.initializearguments(newarg)
- environment.originalarguments=mark(newarg)
- environment.rawarguments=mark(arg)
- arg={}
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["luat-env"] = package.loaded["luat-env"] or true
-
--- original size: 5930, stripped down to: 4235
-
- if not modules then modules={} end modules ['luat-env']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local rawset,rawget,loadfile,assert=rawset,rawget,loadfile,assert
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local report_lua=logs.reporter("resolvers","lua")
-local luautilities=utilities.lua
-local luasuffixes=luautilities.suffixes
-local texgettoks=tex and tex.gettoks
-environment=environment or {}
-local environment=environment
-local mt={
- __index=function(_,k)
- if k=="version" then
- local version=texgettoks and texgettoks("contextversiontoks")
- if version and version~="" then
- rawset(environment,"version",version)
- return version
- else
- return "unknown"
- end
- elseif k=="kind" then
- local kind=texgettoks and texgettoks("contextkindtoks")
- if kind and kind~="" then
- rawset(environment,"kind",kind)
- return kind
- else
- return "unknown"
- end
- elseif k=="jobname" or k=="formatname" then
- local name=tex and tex[k]
- if name or name=="" then
- rawset(environment,k,name)
- return name
- else
- return "unknown"
- end
- elseif k=="outputfilename" then
- local name=environment.jobname
- rawset(environment,k,name)
- return name
- end
- end
-}
-setmetatable(environment,mt)
-function environment.texfile(filename)
- return resolvers.findfile(filename,'tex')
-end
-function environment.luafile(filename)
- local resolved=resolvers.findfile(filename,'tex') or ""
- if resolved~="" then
- return resolved
- end
- resolved=resolvers.findfile(filename,'texmfscripts') or ""
- if resolved~="" then
- return resolved
- end
- return resolvers.findfile(filename,'luatexlibs') or ""
-end
-local stripindeed=false directives.register("system.compile.strip",function(v) stripindeed=v end)
-local function strippable(filename)
- if stripindeed then
- local modu=modules[file.nameonly(filename)]
- return modu and modu.dataonly
- else
- return false
- end
-end
-function environment.luafilechunk(filename,silent)
- filename=file.replacesuffix(filename,"lua")
- local fullname=environment.luafile(filename)
- if fullname and fullname~="" then
- local data=luautilities.loadedluacode(fullname,strippable,filename)
- if trace_locating then
- report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
- end
- return data
- else
- if trace_locating then
- report_lua("unknown file %a",filename)
- end
- return nil
- end
-end
-function environment.loadluafile(filename,version)
- local lucname,luaname,chunk
- local basename=file.removesuffix(filename)
- if basename==filename then
- luaname=file.addsuffix(basename,luasuffixes.lua)
- lucname=file.addsuffix(basename,luasuffixes.luc)
- else
- luaname=basename
- lucname=nil
- end
- local fullname=(lucname and environment.luafile(lucname)) or ""
- if fullname~="" then
- if trace_locating then
- report_lua("loading %a",fullname)
- end
- chunk=loadfile(fullname)
- end
- if chunk then
- assert(chunk)()
- if version then
- local v=version
- if modules and modules[filename] then
- v=modules[filename].version
- elseif versions and versions[filename] then
- v=versions[filename]
- end
- if v==version then
- return true
- else
- if trace_locating then
- report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version)
- end
- environment.loadluafile(filename)
- end
- else
- return true
- end
- end
- fullname=(luaname and environment.luafile(luaname)) or ""
- if fullname~="" then
- if trace_locating then
- report_lua("loading %a",fullname)
- end
- chunk=loadfile(fullname)
- if not chunk then
- if trace_locating then
- report_lua("unknown file %a",filename)
- end
- else
- assert(chunk)()
- return true
- end
- end
- return false
-end
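--- usage sketch: resolving and loading a lua companion file; "my-module" is a
--- hypothetical name.
---
---   local chunk = environment.luafilechunk("my-module")
---     -- resolves my-module.lua on the tex, texmfscripts and luatexlibs paths and runs it
---   environment.loadluafile("my-module",1.001)
---     -- prefers a byte compiled my-module.luc and falls back to the lua source
---     -- when the recorded module version does not match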
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
-
--- original size: 42614, stripped down to: 26694
-
-if not modules then modules={} end modules ['lxml-tab']={
- version=1.001,
- comment="this module is the basis for the lxml-* ones",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local trace_entities=false trackers.register("xml.entities",function(v) trace_entities=v end)
-local report_xml=logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
-if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
-xml=xml or {}
-local xml=xml
-local concat,remove,insert=table.concat,table.remove,table.insert
-local type,next,setmetatable,getmetatable,tonumber=type,next,setmetatable,getmetatable,tonumber
-local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
-local utfchar=utf.char
-local lpegmatch=lpeg.match
-local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
-local formatters=string.formatters
-xml.xmlns=xml.xmlns or {}
-local check=P(false)
-local parse=check
-function xml.registerns(namespace,pattern)
- check=check+C(P(lower(pattern)))/namespace
- parse=P { P(check)+1*V(1) }
-end
-function xml.checkns(namespace,url)
- local ns=lpegmatch(parse,lower(url))
- if ns and namespace~=ns then
- xml.xmlns[namespace]=ns
- end
-end
-function xml.resolvens(url)
- return lpegmatch(parse,lower(url)) or ""
-end
-local nsremap,resolvens=xml.xmlns,xml.resolvens
-local stack={}
-local top={}
-local dt={}
-local at={}
-local xmlns={}
-local errorstr=nil
-local entities={}
-local strip=false
-local cleanup=false
-local utfize=false
-local resolve=false
-local resolve_predefined=false
-local unify_predefined=false
-local dcache={}
-local hcache={}
-local acache={}
-local mt={}
-local function initialize_mt(root)
- mt={ __index=root }
-end
-function xml.setproperty(root,k,v)
- getmetatable(root).__index[k]=v
-end
-function xml.checkerror(top,toclose)
- return ""
-end
-local function add_attribute(namespace,tag,value)
- if cleanup and #value>0 then
- value=cleanup(value)
- end
- if tag=="xmlns" then
- xmlns[#xmlns+1]=resolvens(value)
- at[tag]=value
- elseif namespace=="" then
- at[tag]=value
- elseif namespace=="xmlns" then
- xml.checkns(tag,value)
- at["xmlns:"..tag]=value
- else
- at[namespace..":"..tag]=value
- end
-end
-local function add_empty(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
- end
- local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top=stack[#stack]
- dt=top.dt
- local t={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=top }
- dt[#dt+1]=t
- setmetatable(t,mt)
- if at.xmlns then
- remove(xmlns)
- end
- at={}
-end
-local function add_begin(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
- end
- local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=stack[#stack] }
- setmetatable(top,mt)
- dt=top.dt
- stack[#stack+1]=top
- at={}
-end
-local function add_end(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
- end
- local toclose=remove(stack)
- top=stack[#stack]
- if #stack<1 then
- errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
- elseif toclose.tg~=tag then
- errorstr=formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
- end
- dt=top.dt
- dt[#dt+1]=toclose
- if toclose.at.xmlns then
- remove(xmlns)
- end
-end
-local function add_text(text)
- if cleanup and #text>0 then
- dt[#dt+1]=cleanup(text)
- else
- dt[#dt+1]=text
- end
-end
-local function add_special(what,spacing,text)
- if #spacing>0 then
- dt[#dt+1]=spacing
- end
- if strip and (what=="@cm@" or what=="@dt@") then
- else
- dt[#dt+1]={ special=true,ns="",tg=what,dt={ text } }
- end
-end
-local function set_message(txt)
- errorstr="garbage at the end of the file: "..gsub(txt,"([ \n\r\t]*)","")
-end
-local reported_attribute_errors={}
-local function attribute_value_error(str)
- if not reported_attribute_errors[str] then
- report_xml("invalid attribute value %a",str)
- reported_attribute_errors[str]=true
- at._error_=str
- end
- return str
-end
-local function attribute_specification_error(str)
- if not reported_attribute_errors[str] then
- report_xml("invalid attribute specification %a",str)
- reported_attribute_errors[str]=true
- at._error_=str
- end
- return str
-end
-xml.placeholders={
- unknown_dec_entity=function(str) return str=="" and "&error;" or formatters["&%s;"](str) end,
- unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
- unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
-}
-local placeholders=xml.placeholders
-local function fromhex(s)
- local n=tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return formatters["h:%s"](s),true
- end
-end
-local function fromdec(s)
- local n=tonumber(s)
- if n then
- return utfchar(n)
- else
- return formatters["d:%s"](s),true
- end
-end
-local rest=(1-P(";"))^0
-local many=P(1)^0
-local parsedentity=P("&")*(P("#x")*(rest/fromhex)+P("#")*(rest/fromdec))*P(";")*P(-1)+(P("#x")*(many/fromhex)+P("#")*(many/fromdec))
-local predefined_unified={
- [38]="&amp;",
- [42]="&quot;",
- [47]="&apos;",
- [74]="&lt;",
- [76]="&gt;",
-}
-local predefined_simplified={
- [38]="&",amp="&",
- [42]='"',quot='"',
- [47]="'",apos="'",
- [74]="<",lt="<",
- [76]=">",gt=">",
-}
-local nofprivates=0xF0000
-local privates_u={
- [ [[&]] ]="&amp;",
- [ [["]] ]="&quot;",
- [ [[']] ]="&apos;",
- [ [[<]] ]="&lt;",
- [ [[>]] ]="&gt;",
-}
-local privates_p={}
-local privates_n={
-}
-local escaped=utf.remapper(privates_u)
-local function unescaped(s)
- local p=privates_n[s]
- if not p then
- nofprivates=nofprivates+1
- p=utfchar(nofprivates)
- privates_n[s]=p
- s="&"..s..";"
- privates_u[p]=s
- privates_p[p]=s
- end
- return p
-end
-local unprivatized=utf.remapper(privates_p)
-xml.privatetoken=unescaped
-xml.unprivatized=unprivatized
-xml.privatecodes=privates_n
-local function handle_hex_entity(str)
- local h=hcache[str]
- if not h then
- local n=tonumber(str,16)
- h=unify_predefined and predefined_unified[n]
- if h then
- if trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
- elseif utfize then
-   h=(n and utfchar(n)) or placeholders.unknown_hex_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring hex entity &#x%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
- else
- if trace_entities then
- report_xml("found entity &#x%s;",str)
- end
- h="&#x"..str..";"
- end
- hcache[str]=h
- end
- return h
-end
-local function handle_dec_entity(str)
- local d=dcache[str]
- if not d then
- local n=tonumber(str)
- d=unify_predefined and predefined_unified[n]
- if d then
- if trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
- end
- elseif utfize then
- d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring dec entity &#%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
- end
- else
- if trace_entities then
- report_xml("found entity &#%s;",str)
- end
- d="&#"..str..";"
- end
- dcache[str]=d
- end
- return d
-end
-xml.parsedentitylpeg=parsedentity
-local function handle_any_entity(str)
- if resolve then
- local a=acache[str]
- if not a then
- a=resolve_predefined and predefined_simplified[str]
- if a then
- if trace_entities then
- report_xml("resolving entity &%s; to predefined %a",str,a)
- end
- else
- if type(resolve)=="function" then
- a=resolve(str) or entities[str]
- else
- a=entities[str]
- end
- if a then
- if type(a)=="function" then
- if trace_entities then
- report_xml("expanding entity &%s; to function call",str)
- end
- a=a(str) or ""
- end
- a=lpegmatch(parsedentity,a) or a
- if trace_entities then
- report_xml("resolving entity &%s; to internal %a",str,a)
- end
- else
- local unknown_any_entity=placeholders.unknown_any_entity
- if unknown_any_entity then
- a=unknown_any_entity(str) or ""
- end
- if a then
- if trace_entities then
- report_xml("resolving entity &%s; to external %s",str,a)
- end
- else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
- end
- if str=="" then
- a="&error;"
- else
- a="&"..str..";"
- end
- end
- end
- end
- acache[str]=a
- elseif trace_entities then
- if not acache[str] then
- report_xml("converting entity &%s; to %a",str,a)
- acache[str]=a
- end
- end
- return a
- else
- local a=acache[str]
- if not a then
- a=resolve_predefined and predefined_simplified[str]
- if a then
- acache[str]=a
- if trace_entities then
- report_xml("entity &%s; becomes %a",str,a)
- end
- elseif str=="" then
- if trace_entities then
- report_xml("invalid entity &%s;",str)
- end
- a="&error;"
- acache[str]=a
- else
- if trace_entities then
- report_xml("entity &%s; is made private",str)
- end
- a=unescaped(str)
- acache[str]=a
- end
- end
- return a
- end
-end
-local function handle_end_entity(chr)
- report_xml("error in entity, %a found instead of %a",chr,";")
-end
-local space=S(' \r\n\t')
-local open=P('<')
-local close=P('>')
-local squote=S("'")
-local dquote=S('"')
-local equal=P('=')
-local slash=P('/')
-local colon=P(':')
-local semicolon=P(';')
-local ampersand=P('&')
-local valid=R('az','AZ','09')+S('_-.')
-local name_yes=C(valid^1)*colon*C(valid^1)
-local name_nop=C(P(true))*C(valid^1)
-local name=name_yes+name_nop
-local utfbom=lpeg.patterns.utfbom
-local spacing=C(space^0)
-local anyentitycontent=(1-open-semicolon-space-close)^0
-local hexentitycontent=R("AF","af","09")^0
-local decentitycontent=R("09")^0
-local parsedentity=P("#")/""*(
- P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
- )+(anyentitycontent/handle_any_entity)
-local entity=ampersand/""*parsedentity*((semicolon/"")+#(P(1)/handle_end_entity))
-local text_unparsed=C((1-open)^1)
-local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
-local somespace=space^1
-local optionalspace=space^0
-local value=(squote*Cs((entity+(1-squote))^0)*squote)+(dquote*Cs((entity+(1-dquote))^0)*dquote)
-local endofattributes=slash*close+close
-local whatever=space*name*optionalspace*equal
-local wrongvalue=Cs(P(entity+(1-space-endofattributes))^1)/attribute_value_error
-local attributevalue=value+wrongvalue
-local attribute=(somespace*name*optionalspace*equal*optionalspace*attributevalue)/add_attribute
-local attributes=(attribute+somespace^-1*(((1-endofattributes)^1)/attribute_specification_error))^0
-local parsedtext=text_parsed/add_text
-local unparsedtext=text_unparsed/add_text
-local balanced=P { "["*((1-S"[]")+V(1))^0*"]" }
-local emptyelement=(spacing*open*name*attributes*optionalspace*slash*close)/add_empty
-local beginelement=(spacing*open*name*attributes*optionalspace*close)/add_begin
-local endelement=(spacing*open*slash*name*optionalspace*close)/add_end
-local begincomment=open*P("!--")
-local endcomment=P("--")*close
-local begininstruction=open*P("?")
-local endinstruction=P("?")*close
-local begincdata=open*P("![CDATA[")
-local endcdata=P("]]")*close
-local someinstruction=C((1-endinstruction)^0)
-local somecomment=C((1-endcomment )^0)
-local somecdata=C((1-endcdata )^0)
-local function normalentity(k,v ) entities[k]=v end
-local function systementity(k,v,n) entities[k]=v end
-local function publicentity(k,v,n) entities[k]=v end
-local begindoctype=open*P("!DOCTYPE")
-local enddoctype=close
-local beginset=P("[")
-local endset=P("]")
-local doctypename=C((1-somespace-close)^0)
-local elementdoctype=optionalspace*P("<!ELEMENT")*(1-close)^0*close
-local basiccomment=begincomment*((1-endcomment)^0)*endcomment
-local normalentitytype=(doctypename*somespace*value)/normalentity
-local publicentitytype=(doctypename*somespace*P("PUBLIC")*somespace*value)/publicentity
-local systementitytype=(doctypename*somespace*P("SYSTEM")*somespace*value*somespace*P("NDATA")*somespace*doctypename)/systementity
-local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype)*optionalspace*close
-local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+basiccomment+space)^0*optionalspace*endset
-local definitiondoctype=doctypename*somespace*doctypeset
-local publicdoctype=doctypename*somespace*P("PUBLIC")*somespace*value*somespace*value*somespace*doctypeset
-local systemdoctype=doctypename*somespace*P("SYSTEM")*somespace*value*somespace*doctypeset
-local simpledoctype=(1-close)^1
-local somedoctype=C((somespace*(publicdoctype+systemdoctype+definitiondoctype+simpledoctype)*optionalspace)^0)
-local instruction=(spacing*begininstruction*someinstruction*endinstruction)/function(...) add_special("@pi@",...) end
-local comment=(spacing*begincomment*somecomment*endcomment )/function(...) add_special("@cm@",...) end
-local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special("@cd@",...) end
-local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
-local trailer=space^0*(text_unparsed/set_message)^0
-local grammar_parsed_text=P { "preamble",
- preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
- parent=beginelement*V("children")^0*endelement,
- children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction,
-}
-local grammar_unparsed_text=P { "preamble",
- preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
- parent=beginelement*V("children")^0*endelement,
- children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction,
-}
-local function _xmlconvert_(data,settings)
- settings=settings or {}
- strip=settings.strip_cm_and_dt
- utfize=settings.utfize_entities
- resolve=settings.resolve_entities
- resolve_predefined=settings.resolve_predefined_entities
- unify_predefined=settings.unify_predefined_entities
- cleanup=settings.text_cleanup
- entities=settings.entities or {}
- if utfize==nil then
- settings.utfize_entities=true
- utfize=true
- end
- if resolve_predefined==nil then
- settings.resolve_predefined_entities=true
- resolve_predefined=true
- end
- stack,top,at,xmlns,errorstr={},{},{},{},nil
- acache,hcache,dcache={},{},{}
- reported_attribute_errors={}
- if settings.parent_root then
- mt=getmetatable(settings.parent_root)
- else
- initialize_mt(top)
- end
- stack[#stack+1]=top
- top.dt={}
- dt=top.dt
- if not data or data=="" then
- errorstr="empty xml file"
- elseif utfize or resolve then
- if lpegmatch(grammar_parsed_text,data) then
- errorstr=""
- else
- errorstr="invalid xml file - parsed text"
- end
- elseif type(data)=="string" then
- if lpegmatch(grammar_unparsed_text,data) then
- errorstr=""
- else
- errorstr="invalid xml file - unparsed text"
- end
- else
- errorstr="invalid xml file - no text at all"
- end
- local result
- if errorstr and errorstr~="" then
- result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
- setmetatable(stack,mt)
- local errorhandler=settings.error_handler
- if errorhandler==false then
- else
- errorhandler=errorhandler or xml.errorhandler
- if errorhandler then
- local currentresource=settings.currentresource
- if currentresource and currentresource~="" then
- xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr))
- else
- xml.errorhandler(formatters["load error: %s"](errorstr))
- end
- end
- end
- else
- result=stack[1]
- end
- if not settings.no_root then
- result={ special=true,ns="",tg='@rt@',dt=result.dt,at={},entities=entities,settings=settings }
- setmetatable(result,mt)
- local rdt=result.dt
- for k=1,#rdt do
- local v=rdt[k]
- if type(v)=="table" and not v.special then
- result.ri=k
- v.__p__=result
- break
- end
- end
- end
- if errorstr and errorstr~="" then
- result.error=true
- else
- errorstr=nil
- end
- result.statistics={
- errormessage=errorstr,
- entities={
- decimals=dcache,
- hexadecimals=hcache,
- names=acache,
- }
- }
- strip,utfize,resolve,resolve_predefined=nil,nil,nil,nil
- unify_predefined,cleanup,entities=nil,nil,nil
- stack,top,at,xmlns,errorstr=nil,nil,nil,nil,nil
- acache,hcache,dcache=nil,nil,nil
- reported_attribute_errors,mt,errorhandler=nil,nil,nil
- return result
-end
-function xmlconvert(data,settings)
- local ok,result=pcall(function() return _xmlconvert_(data,settings) end)
- if ok then
- return result
- else
- return _xmlconvert_("",settings)
- end
-end
-xml.convert=xmlconvert
-function xml.inheritedconvert(data,xmldata)
- local settings=xmldata.settings
- if settings then
- settings.parent_root=xmldata
- end
- local xc=xmlconvert(data,settings)
- return xc
-end
-function xml.is_valid(root)
- return root and root.dt and root.dt[1] and type(root.dt[1])=="table" and not root.dt[1].er
-end
-function xml.package(tag,attributes,data)
- local ns,tg=match(tag,"^(.-):?([^:]+)$")
- local t={ ns=ns,tg=tg,dt=data or "",at=attributes or {} }
- setmetatable(t,mt)
- return t
-end
-function xml.is_valid(root)
- return root and not root.error
-end
-xml.errorhandler=report_xml
-function xml.load(filename,settings)
- local data=""
- if type(filename)=="string" then
- local f=io.open(filename,'r')
- if f then
- data=f:read("*all")
- f:close()
- end
- elseif filename then
- data=filename:read("*all")
- end
- if settings then
- settings.currentresource=filename
- local result=xmlconvert(data,settings)
- settings.currentresource=nil
- return result
- else
- return xmlconvert(data,{ currentresource=filename })
- end
-end
-local no_root={ no_root=true }
-function xml.toxml(data)
- if type(data)=="string" then
- local root={ xmlconvert(data,no_root) }
- return (#root>1 and root) or root[1]
- else
- return data
- end
-end
-local function copy(old,tables)
- if old then
- tables=tables or {}
- local new={}
- if not tables[old] then
- tables[old]=new
- end
- for k,v in next,old do
- new[k]=(type(v)=="table" and (tables[v] or copy(v,tables))) or v
- end
- local mt=getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- return new
- else
- return {}
- end
-end
-xml.copy=copy
-function xml.checkbom(root)
- if root.ri then
- local dt=root.dt
- for k=1,#dt do
- local v=dt[k]
- if type(v)=="table" and v.special and v.tg=="@pi@" and find(v.dt[1],"xml.*version=") then
- return
- end
- end
- insert(dt,1,{ special=true,ns="",tg="@pi@",dt={ "xml version='1.0' standalone='yes'" } } )
- insert(dt,2,"\n" )
- end
-end
-local function verbose_element(e,handlers)
- local handle=handlers.handle
- local serialize=handlers.serialize
- local ens,etg,eat,edt,ern=e.ns,e.tg,e.at,e.dt,e.rn
- local ats=eat and next(eat) and {}
- if ats then
- for k,v in next,eat do
- ats[#ats+1]=formatters['%s=%q'](k,escaped(v))
- end
- end
- if ern and trace_entities and ern~=ens then
- ens=ern
- end
- if ens~="" then
- if edt and #edt>0 then
- if ats then
- handle("<",ens,":",etg," ",concat(ats," "),">")
- else
- handle("<",ens,":",etg,">")
- end
- for i=1,#edt do
- local e=edt[i]
- if type(e)=="string" then
- handle(escaped(e))
- else
- serialize(e,handlers)
- end
- end
- handle("</",ens,":",etg,">")
- else
- if ats then
- handle("<",ens,":",etg," ",concat(ats," "),"/>")
- else
- handle("<",ens,":",etg,"/>")
- end
- end
- else
- if edt and #edt>0 then
- if ats then
- handle("<",etg," ",concat(ats," "),">")
- else
- handle("<",etg,">")
- end
- for i=1,#edt do
- local e=edt[i]
- if type(e)=="string" then
- handle(escaped(e))
- else
- serialize(e,handlers)
- end
- end
- handle("</",etg,">")
- else
- if ats then
- handle("<",etg," ",concat(ats," "),"/>")
- else
- handle("<",etg,"/>")
- end
- end
- end
-end
-local function verbose_pi(e,handlers)
- handlers.handle("<?",e.dt[1],"?>")
-end
-local function verbose_comment(e,handlers)
- handlers.handle("<!--",e.dt[1],"-->")
-end
-local function verbose_cdata(e,handlers)
- handlers.handle("<![CDATA[",e.dt[1],"]]>")
-end
-local function verbose_doctype(e,handlers)
- handlers.handle("<!DOCTYPE ",e.dt[1],">")
-end
-local function verbose_root(e,handlers)
- handlers.serialize(e.dt,handlers)
-end
-local function verbose_text(e,handlers)
- handlers.handle(escaped(e))
-end
-local function verbose_document(e,handlers)
- local serialize=handlers.serialize
- local functions=handlers.functions
- for i=1,#e do
- local ei=e[i]
- if type(ei)=="string" then
- functions["@tx@"](ei,handlers)
- else
- serialize(ei,handlers)
- end
- end
-end
-local function serialize(e,handlers,...)
- if e then
- local initialize=handlers.initialize
- local finalize=handlers.finalize
- local functions=handlers.functions
- if initialize then
- local state=initialize(...)
-   if not state then
- return state
- end
- end
- local etg=e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
- if finalize then
- return finalize()
- end
- end
-end
-local function xserialize(e,handlers)
- local functions=handlers.functions
- local etg=e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
-end
-local handlers={}
-local function newhandlers(settings)
- local t=table.copy(handlers[settings and settings.parent or "verbose"] or {})
- if settings then
- for k,v in next,settings do
- if type(v)=="table" then
- local tk=t[k] if not tk then tk={} t[k]=tk end
- for kk,vv in next,v do
- tk[kk]=vv
- end
- else
- t[k]=v
- end
- end
- if settings.name then
- handlers[settings.name]=t
- end
- end
- utilities.storage.mark(t)
- return t
-end
-local nofunction=function() end
-function xml.sethandlersfunction(handler,name,fnc)
- handler.functions[name]=fnc or nofunction
-end
-function xml.gethandlersfunction(handler,name)
- return handler.functions[name]
-end
-function xml.gethandlers(name)
- return handlers[name]
-end
-newhandlers {
- name="verbose",
- initialize=false,
- finalize=false,
- serialize=xserialize,
- handle=print,
- functions={
- ["@dc@"]=verbose_document,
- ["@dt@"]=verbose_doctype,
- ["@rt@"]=verbose_root,
- ["@el@"]=verbose_element,
- ["@pi@"]=verbose_pi,
- ["@cm@"]=verbose_comment,
- ["@cd@"]=verbose_cdata,
- ["@tx@"]=verbose_text,
- }
-}
-local result
-local xmlfilehandler=newhandlers {
- name="file",
- initialize=function(name)
- result=io.open(name,"wb")
- return result
- end,
- finalize=function()
- result:close()
- return true
- end,
- handle=function(...)
- result:write(...)
- end,
-}
-function xml.save(root,name)
- serialize(root,xmlfilehandler,name)
-end
-local result
-local xmlstringhandler=newhandlers {
- name="string",
- initialize=function()
- result={}
- return result
- end,
- finalize=function()
- return concat(result)
- end,
- handle=function(...)
- result[#result+1]=concat {... }
- end,
-}
-local function xmltostring(root)
- if not root then
- return ""
- elseif type(root)=="string" then
- return root
- else
- return serialize(root,xmlstringhandler) or ""
- end
-end
-local function __tostring(root)
- return (root and xmltostring(root)) or ""
-end
-initialize_mt=function(root)
- mt={ __tostring=__tostring,__index=root }
-end
-xml.defaulthandlers=handlers
-xml.newhandlers=newhandlers
-xml.serialize=serialize
-xml.tostring=xmltostring
-local function xmlstring(e,handle)
- if not handle or (e.special and e.tg~="@rt@") then
- elseif e.tg then
- local edt=e.dt
- if edt then
- for i=1,#edt do
- xmlstring(edt[i],handle)
- end
- end
- else
- handle(e)
- end
-end
-xml.string=xmlstring
-function xml.settings(e)
- while e do
- local s=e.settings
- if s then
- return s
- else
- e=e.__p__
- end
- end
- return nil
-end
-function xml.root(e)
- local r=e
- while e do
- e=e.__p__
- if e then
- r=e
- end
- end
- return r
-end
-function xml.parent(root)
- return root.__p__
-end
-function xml.body(root)
- return root.ri and root.dt[root.ri] or root
-end
-function xml.name(root)
- if not root then
- return ""
- end
- local ns=root.ns
- local tg=root.tg
- if ns=="" then
- return tg
- else
- return ns..":"..tg
- end
-end
-function xml.erase(dt,k)
- if dt then
- if k then
- dt[k]=""
-  else
-   for k=1,#dt do
-    dt[k]=""
-   end
-  end
- end
-end
-function xml.assign(dt,k,root)
- if dt and k then
- dt[k]=type(root)=="table" and xml.body(root) or root
- return dt[k]
- else
- return xml.body(root)
- end
-end
-function xml.tocdata(e,wrapper)
- local whatever=type(e)=="table" and xmltostring(e.dt) or e or ""
- if wrapper then
- whatever=formatters["<%s>%s</%s>"](wrapper,whatever,wrapper)
- end
- local t={ special=true,ns="",tg="@cd@",at={},rn="",dt={ whatever },__p__=e }
- setmetatable(t,getmetatable(e))
- e.dt={ t }
-end
-function xml.makestandalone(root)
- if root.ri then
- local dt=root.dt
- for k=1,#dt do
- local v=dt[k]
- if type(v)=="table" and v.special and v.tg=="@pi@" then
- local txt=v.dt[1]
- if find(txt,"xml.*version=") then
- v.dt[1]=txt.." standalone='yes'"
- break
- end
- end
- end
- end
- return root
-end
-function xml.kind(e)
- local dt=e and e.dt
- if dt then
- local n=#dt
- if n==1 then
- local d=dt[1]
- if d.special then
- local tg=d.tg
- if tg=="@cd@" then
- return "cdata"
- elseif tg=="@cm" then
- return "comment"
- elseif tg=="@pi@" then
- return "instruction"
- elseif tg=="@dt@" then
- return "declaration"
- end
- elseif type(d)=="string" then
- return "text"
- end
- return "element"
- elseif n>0 then
- return "mixed"
- end
- end
- return "empty"
-end
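--- usage sketch: parsing and serializing a small snippet with the loader
--- above; the file name is a placeholder.
---
---   local root = xml.convert("<doc><p>hello</p></doc>")
---   if xml.is_valid(root) then
---     print(xml.tostring(root))        -- serialized again via the "string" handler
---     print(xml.kind(xml.body(root)))  -- "element"
---   end
---   xml.save(root,"snippet.xml")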
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
-
--- original size: 48956, stripped down to: 30516
-
-if not modules then modules={} end modules ['lxml-lpt']={
- version=1.001,
- comment="this module is the basis for the lxml-* ones",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local concat,remove,insert=table.concat,table.remove,table.insert
-local type,next,tonumber,tostring,setmetatable,load,select=type,next,tonumber,tostring,setmetatable,load,select
-local format,upper,lower,gmatch,gsub,find,rep=string.format,string.upper,string.lower,string.gmatch,string.gsub,string.find,string.rep
-local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
-local setmetatableindex=table.setmetatableindex
-local formatters=string.formatters
-local trace_lpath=false if trackers then trackers.register("xml.path",function(v) trace_lpath=v end) end
-local trace_lparse=false if trackers then trackers.register("xml.parse",function(v) trace_lparse=v end) end
-local trace_lprofile=false if trackers then trackers.register("xml.profile",function(v) trace_lpath=v trace_lparse=v trace_lprofile=v end) end
-local report_lpath=logs.reporter("xml","lpath")
-local xml=xml
-local lpathcalls=0 function xml.lpathcalls () return lpathcalls end
-local lpathcached=0 function xml.lpathcached() return lpathcached end
-xml.functions=xml.functions or {}
-local functions=xml.functions
-xml.expressions=xml.expressions or {}
-local expressions=xml.expressions
-xml.finalizers=xml.finalizers or {}
-local finalizers=xml.finalizers
-xml.specialhandler=xml.specialhandler or {}
-local specialhandler=xml.specialhandler
-lpegpatterns.xml=lpegpatterns.xml or {}
-local xmlpatterns=lpegpatterns.xml
-finalizers.xml=finalizers.xml or {}
-finalizers.tex=finalizers.tex or {}
-local function fallback (t,name)
- local fn=finalizers[name]
- if fn then
- t[name]=fn
- else
- report_lpath("unknown sub finalizer %a",name)
- fn=function() end
- end
- return fn
-end
-setmetatableindex(finalizers.xml,fallback)
-setmetatableindex(finalizers.tex,fallback)
-xml.defaultprotocol="xml"
-local apply_axis={}
-apply_axis['root']=function(list)
- local collected={}
- for l=1,#list do
- local ll=list[l]
- local rt=ll
- while ll do
- ll=ll.__p__
- if ll then
- rt=ll
- end
- end
- collected[l]=rt
- end
- return collected
-end
-apply_axis['self']=function(list)
- return list
-end
-apply_axis['child']=function(list)
- local collected,c={},0
- for l=1,#list do
- local ll=list[l]
- local dt=ll.dt
- if dt then
- local en=0
- for k=1,#dt do
- local dk=dt[k]
- if dk.tg then
- c=c+1
- collected[c]=dk
- dk.ni=k
- en=en+1
- dk.ei=en
- end
- end
- ll.en=en
- end
- end
- return collected
-end
-local function collect(list,collected,c)
- local dt=list.dt
- if dt then
- local en=0
- for k=1,#dt do
- local dk=dt[k]
- if dk.tg then
- c=c+1
- collected[c]=dk
- dk.ni=k
- en=en+1
- dk.ei=en
- c=collect(dk,collected,c)
- end
- end
- list.en=en
- end
- return c
-end
-apply_axis['descendant']=function(list)
- local collected,c={},0
- for l=1,#list do
- c=collect(list[l],collected,c)
- end
- return collected
-end
-local function collect(list,collected,c)
- local dt=list.dt
- if dt then
- local en=0
- for k=1,#dt do
- local dk=dt[k]
- if dk.tg then
- c=c+1
- collected[c]=dk
- dk.ni=k
- en=en+1
- dk.ei=en
- c=collect(dk,collected,c)
- end
- end
- list.en=en
- end
- return c
-end
-apply_axis['descendant-or-self']=function(list)
- local collected,c={},0
- for l=1,#list do
- local ll=list[l]
- if ll.special~=true then
- c=c+1
- collected[c]=ll
- end
- c=collect(ll,collected,c)
- end
- return collected
-end
-apply_axis['ancestor']=function(list)
- local collected,c={},0
- for l=1,#list do
- local ll=list[l]
- while ll do
- ll=ll.__p__
- if ll then
- c=c+1
- collected[c]=ll
- end
- end
- end
- return collected
-end
-apply_axis['ancestor-or-self']=function(list)
- local collected,c={},0
- for l=1,#list do
- local ll=list[l]
- c=c+1
- collected[c]=ll
- while ll do
- ll=ll.__p__
- if ll then
- c=c+1
- collected[c]=ll
- end
- end
- end
- return collected
-end
-apply_axis['parent']=function(list)
- local collected,c={},0
- for l=1,#list do
- local pl=list[l].__p__
- if pl then
- c=c+1
- collected[c]=pl
- end
- end
- return collected
-end
-apply_axis['attribute']=function(list)
- return {}
-end
-apply_axis['namespace']=function(list)
- return {}
-end
-apply_axis['following']=function(list)
- return {}
-end
-apply_axis['preceding']=function(list)
- return {}
-end
-apply_axis['following-sibling']=function(list)
- local collected,c={},0
- for l=1,#list do
- local ll=list[l]
- local p=ll.__p__
- local d=p.dt
- for i=ll.ni+1,#d do
- local di=d[i]
- if type(di)=="table" then
- c=c+1
- collected[c]=di
- end
- end
- end
- return collected
-end
-apply_axis['preceding-sibling']=function(list)
- local collected,c={},0
- for l=1,#list do
- local ll=list[l]
- local p=ll.__p__
- local d=p.dt
- for i=1,ll.ni-1 do
- local di=d[i]
- if type(di)=="table" then
- c=c+1
- collected[c]=di
- end
- end
- end
- return collected
-end
-apply_axis['reverse-sibling']=function(list)
- local collected,c={},0
- for l=1,#list do
- local ll=list[l]
- local p=ll.__p__
- local d=p.dt
- for i=ll.ni-1,1,-1 do
- local di=d[i]
- if type(di)=="table" then
- c=c+1
- collected[c]=di
- end
- end
- end
- return collected
-end
-apply_axis['auto-descendant-or-self']=apply_axis['descendant-or-self']
-apply_axis['auto-descendant']=apply_axis['descendant']
-apply_axis['auto-child']=apply_axis['child']
-apply_axis['auto-self']=apply_axis['self']
-apply_axis['initial-child']=apply_axis['child']
-local function apply_nodes(list,directive,nodes)
- local maxn=#nodes
- if maxn==3 then
- local nns,ntg=nodes[2],nodes[3]
- if not nns and not ntg then
- if directive then
- return list
- else
- return {}
- end
- else
- local collected,c,m,p={},0,0,nil
- if not nns then
- for l=1,#list do
- local ll=list[l]
- local ltg=ll.tg
- if ltg then
- if directive then
- if ntg==ltg then
- local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
- c=c+1
- collected[c],ll.mi=ll,m
- end
- elseif ntg~=ltg then
- local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
- c=c+1
- collected[c],ll.mi=ll,m
- end
- end
- end
- elseif not ntg then
- for l=1,#list do
- local ll=list[l]
- local lns=ll.rn or ll.ns
- if lns then
- if directive then
- if lns==nns then
- local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
- c=c+1
- collected[c],ll.mi=ll,m
- end
- elseif lns~=nns then
- local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
- c=c+1
- collected[c],ll.mi=ll,m
- end
- end
- end
- else
- for l=1,#list do
- local ll=list[l]
- local ltg=ll.tg
- if ltg then
- local lns=ll.rn or ll.ns
- local ok=ltg==ntg and lns==nns
- if directive then
- if ok then
- local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
- c=c+1
- collected[c],ll.mi=ll,m
- end
- elseif not ok then
- local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
- c=c+1
- collected[c],ll.mi=ll,m
- end
- end
- end
- end
- return collected
- end
- else
- local collected,c,m,p={},0,0,nil
- for l=1,#list do
- local ll=list[l]
- local ltg=ll.tg
- if ltg then
- local lns=ll.rn or ll.ns
- local ok=false
- for n=1,maxn,3 do
- local nns,ntg=nodes[n+1],nodes[n+2]
- ok=(not ntg or ltg==ntg) and (not nns or lns==nns)
- if ok then
- break
- end
- end
- if directive then
- if ok then
- local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
- c=c+1
- collected[c],ll.mi=ll,m
- end
- elseif not ok then
- local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
- c=c+1
- collected[c],ll.mi=ll,m
- end
- end
- end
- return collected
- end
-end
-local quit_expression=false
-local function apply_expression(list,expression,order)
- local collected,c={},0
- quit_expression=false
- for l=1,#list do
- local ll=list[l]
- if expression(list,ll,l,order) then
- c=c+1
- collected[c]=ll
- end
- if quit_expression then
- break
- end
- end
- return collected
-end
-local P,V,C,Cs,Cc,Ct,R,S,Cg,Cb=lpeg.P,lpeg.V,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.R,lpeg.S,lpeg.Cg,lpeg.Cb
-local spaces=S(" \n\r\t\f")^0
-local lp_space=S(" \n\r\t\f")
-local lp_any=P(1)
-local lp_noequal=P("!=")/"~="+P("<=")+P(">=")+P("==")
-local lp_doequal=P("=")/"=="
-local lp_or=P("|")/" or "
-local lp_and=P("&")/" and "
-local lp_builtin=P (
- P("text")/"(ll.dt[1] or '')"+
- P("content")/"ll.dt"+
- P("name")/"((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)"+P("tag")/"ll.tg"+P("position")/"l"+
- P("firstindex")/"1"+P("lastindex")/"(#ll.__p__.dt or 1)"+P("firstelement")/"1"+P("lastelement")/"(ll.__p__.en or 1)"+P("first")/"1"+P("last")/"#list"+P("rootposition")/"order"+P("order")/"order"+P("element")/"(ll.ei or 1)"+P("index")/"(ll.ni or 1)"+P("match")/"(ll.mi or 1)"+
- P("ns")/"ll.ns"
- )*((spaces*P("(")*spaces*P(")"))/"")
-local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
-lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
-lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
-local lp_fastpos=lp_fastpos_n+lp_fastpos_p
-local lp_reserved=C("and")+C("or")+C("not")+C("div")+C("mod")+C("true")+C("false")
-local lp_lua_function=Cs((R("az","AZ","__")^1*(P(".")*R("az","AZ","__")^1)^1)*("("))/"%0"
-local lp_function=C(R("az","AZ","__")^1)*P("(")/function(t)
- if expressions[t] then
- return "expr."..t.."("
- else
- return "expr.error("
- end
-end
-local lparent=P("(")
-local rparent=P(")")
-local noparent=1-(lparent+rparent)
-local nested=P{lparent*(noparent+V(1))^0*rparent}
-local value=P(lparent*C((noparent+nested)^0)*rparent)
-local lp_child=Cc("expr.child(ll,'")*R("az","AZ","--","__")^1*Cc("')")
-local lp_number=S("+-")*R("09")^1
-local lp_string=Cc("'")*R("az","AZ","--","__")^1*Cc("'")
-local lp_content=(P("'")*(1-P("'"))^0*P("'")+P('"')*(1-P('"'))^0*P('"'))
-local cleaner
-local lp_special=(C(P("name")+P("text")+P("tag")+P("count")+P("child")))*value/function(t,s)
- if expressions[t] then
- s=s and s~="" and lpegmatch(cleaner,s)
- if s and s~="" then
- return "expr."..t.."(ll,"..s..")"
- else
- return "expr."..t.."(ll)"
- end
- else
- return "expr.error("..t..")"
- end
-end
-local content=lp_builtin+lp_attribute+lp_special+lp_noequal+lp_doequal+lp_or+lp_and+lp_reserved+lp_lua_function+lp_function+lp_content+
- lp_child+lp_any
-local converter=Cs (
- lp_fastpos+(P { lparent*(V(1))^0*rparent+content } )^0
-)
-cleaner=Cs ((
- lp_reserved+lp_number+lp_string+1 )^1 )
-local template_e=[[
- local expr = xml.expressions
- return function(list,ll,l,order)
- return %s
- end
-]]
-local template_f_y=[[
- local finalizer = xml.finalizers['%s']['%s']
- return function(collection)
- return finalizer(collection,%s)
- end
-]]
-local template_f_n=[[
- return xml.finalizers['%s']['%s']
-]]
-local register_self={ kind="axis",axis="self" }
-local register_parent={ kind="axis",axis="parent" }
-local register_descendant={ kind="axis",axis="descendant" }
-local register_child={ kind="axis",axis="child" }
-local register_descendant_or_self={ kind="axis",axis="descendant-or-self" }
-local register_root={ kind="axis",axis="root" }
-local register_ancestor={ kind="axis",axis="ancestor" }
-local register_ancestor_or_self={ kind="axis",axis="ancestor-or-self" }
-local register_attribute={ kind="axis",axis="attribute" }
-local register_namespace={ kind="axis",axis="namespace" }
-local register_following={ kind="axis",axis="following" }
-local register_following_sibling={ kind="axis",axis="following-sibling" }
-local register_preceding={ kind="axis",axis="preceding" }
-local register_preceding_sibling={ kind="axis",axis="preceding-sibling" }
-local register_reverse_sibling={ kind="axis",axis="reverse-sibling" }
-local register_auto_descendant_or_self={ kind="axis",axis="auto-descendant-or-self" }
-local register_auto_descendant={ kind="axis",axis="auto-descendant" }
-local register_auto_self={ kind="axis",axis="auto-self" }
-local register_auto_child={ kind="axis",axis="auto-child" }
-local register_initial_child={ kind="axis",axis="initial-child" }
-local register_all_nodes={ kind="nodes",nodetest=true,nodes={ true,false,false } }
-local skip={}
-local function errorrunner_e(str,cnv)
- if not skip[str] then
- report_lpath("error in expression: %s => %s",str,cnv)
- skip[str]=cnv or str
- end
- return false
-end
-local function errorrunner_f(str,arg)
- report_lpath("error in finalizer: %s(%s)",str,arg or "")
- return false
-end
-local function register_nodes(nodetest,nodes)
- return { kind="nodes",nodetest=nodetest,nodes=nodes }
-end
-local function register_expression(expression)
- local converted=lpegmatch(converter,expression)
- local runner=load(format(template_e,converted))
- runner=(runner and runner()) or function() errorrunner_e(expression,converted) end
- return { kind="expression",expression=expression,converted=converted,evaluator=runner }
-end
-local function register_finalizer(protocol,name,arguments)
- local runner
- if arguments and arguments~="" then
- runner=load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
- else
- runner=load(format(template_f_n,protocol or xml.defaultprotocol,name))
- end
- runner=(runner and runner()) or function() errorrunner_f(name,arguments) end
- return { kind="finalizer",name=name,arguments=arguments,finalizer=runner }
-end
-local expression=P { "ex",
- ex="["*C((V("sq")+V("dq")+(1-S("[]"))+V("ex"))^0)*"]",
- sq="'"*(1-S("'"))^0*"'",
- dq='"'*(1-S('"'))^0*'"',
-}
-local arguments=P { "ar",
- ar="("*Cs((V("sq")+V("dq")+V("nq")+P(1-P(")")))^0)*")",
- nq=((1-S("),'\""))^1)/function(s) return format("%q",s) end,
- sq=P("'")*(1-P("'"))^0*P("'"),
- dq=P('"')*(1-P('"'))^0*P('"'),
-}
-local function register_error(str)
- return { kind="error",error=format("unparsed: %s",str) }
-end
-local special_1=P("*")*Cc(register_auto_descendant)*Cc(register_all_nodes)
-local special_2=P("/")*Cc(register_auto_self)
-local special_3=P("")*Cc(register_auto_self)
-local no_nextcolon=P(-1)+#(1-P(":"))
-local no_nextlparent=P(-1)+#(1-P("("))
-local pathparser=Ct { "patterns",
- patterns=spaces*V("protocol")*spaces*(
- (V("special")*spaces*P(-1) )+(V("initial")*spaces*V("step")*spaces*(P("/")*spaces*V("step")*spaces)^0 )
- ),
- protocol=Cg(V("letters"),"protocol")*P("://")+Cg(Cc(nil),"protocol"),
- step=((V("shortcuts")+P("/")+V("axis"))*spaces*V("nodes")^0+V("error"))*spaces*V("expressions")^0*spaces*V("finalizer")^0,
- axis=V("descendant")+V("child")+V("parent")+V("self")+V("root")+V("ancestor")+V("descendant_or_self")+V("following_sibling")+V("following")+V("reverse_sibling")+V("preceding_sibling")+V("preceding")+V("ancestor_or_self")+#(1-P(-1))*Cc(register_auto_child),
- special=special_1+special_2+special_3,
- initial=(P("/")*spaces*Cc(register_initial_child))^-1,
- error=(P(1)^1)/register_error,
- shortcuts_a=V("s_descendant_or_self")+V("s_descendant")+V("s_child")+V("s_parent")+V("s_self")+V("s_root")+V("s_ancestor"),
- shortcuts=V("shortcuts_a")*(spaces*"/"*spaces*V("shortcuts_a"))^0,
- s_descendant_or_self=(P("***/")+P("/"))*Cc(register_descendant_or_self),
- s_descendant=P("**")*Cc(register_descendant),
- s_child=P("*")*no_nextcolon*Cc(register_child ),
- s_parent=P("..")*Cc(register_parent ),
- s_self=P("." )*Cc(register_self ),
- s_root=P("^^")*Cc(register_root ),
- s_ancestor=P("^")*Cc(register_ancestor ),
- descendant=P("descendant::")*Cc(register_descendant ),
- child=P("child::")*Cc(register_child ),
- parent=P("parent::")*Cc(register_parent ),
- self=P("self::")*Cc(register_self ),
- root=P('root::')*Cc(register_root ),
- ancestor=P('ancestor::')*Cc(register_ancestor ),
- descendant_or_self=P('descendant-or-self::')*Cc(register_descendant_or_self ),
- ancestor_or_self=P('ancestor-or-self::')*Cc(register_ancestor_or_self ),
- following=P('following::')*Cc(register_following ),
- following_sibling=P('following-sibling::')*Cc(register_following_sibling ),
- preceding=P('preceding::')*Cc(register_preceding ),
- preceding_sibling=P('preceding-sibling::')*Cc(register_preceding_sibling ),
- reverse_sibling=P('reverse-sibling::')*Cc(register_reverse_sibling ),
- nodes=(V("nodefunction")*spaces*P("(")*V("nodeset")*P(")")+V("nodetest")*V("nodeset"))/register_nodes,
- expressions=expression/register_expression,
- letters=R("az")^1,
- name=(1-S("/[]()|:*!"))^1,
- negate=P("!")*Cc(false),
- nodefunction=V("negate")+P("not")*Cc(false)+Cc(true),
- nodetest=V("negate")+Cc(true),
- nodename=(V("negate")+Cc(true))*spaces*((V("wildnodename")*P(":")*V("wildnodename"))+(Cc(false)*V("wildnodename"))),
- wildnodename=(C(V("name"))+P("*")*Cc(false))*no_nextlparent,
- nodeset=spaces*Ct(V("nodename")*(spaces*P("|")*spaces*V("nodename"))^0)*spaces,
- finalizer=(Cb("protocol")*P("/")^-1*C(V("name"))*arguments*P(-1))/register_finalizer,
-}
-xmlpatterns.pathparser=pathparser
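--- Editor's note, not part of the original module: a few pattern strings this grammar
--- is meant to accept, shown only as a hedged illustration (element names are made up):
---   "a/b/c"          -- plain child steps
---   "/a/(b|c)[@d]"   -- alternative tags plus an attribute-presence expression
---   "a/**/b"         -- descendant shortcut
---   "a/b/text()"     -- trailing finalizer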
-local cache={}
-local function nodesettostring(set,nodetest)
- local t={}
- for i=1,#set,3 do
- local directive,ns,tg=set[i],set[i+1],set[i+2]
- if not ns or ns=="" then ns="*" end
- if not tg or tg=="" then tg="*" end
- tg=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[i]=(directive and tg) or format("not(%s)",tg)
- end
- if nodetest==false then
- return format("not(%s)",concat(t,"|"))
- else
- return concat(t,"|")
- end
-end
-local function tagstostring(list)
- if #list==0 then
- return "no elements"
- else
- local t={}
- for i=1,#list do
- local li=list[i]
- local ns,tg=li.ns,li.tg
- if not ns or ns=="" then ns="*" end
- if not tg or tg=="" then tg="*" end
- t[i]=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
- end
- return concat(t," ")
- end
-end
-xml.nodesettostring=nodesettostring
-local lpath
-local lshowoptions={ functions=false }
-local function lshow(parsed)
- if type(parsed)=="string" then
- parsed=lpath(parsed)
- end
- report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
- table.serialize(parsed,false,lshowoptions))
-end
-xml.lshow=lshow
-local function add_comment(p,str)
- local pc=p.comment
- if not pc then
- p.comment={ str }
- else
- pc[#pc+1]=str
- end
-end
-lpath=function (pattern)
- lpathcalls=lpathcalls+1
- if type(pattern)=="table" then
- return pattern
- else
- local parsed=cache[pattern]
- if parsed then
- lpathcached=lpathcached+1
- else
- parsed=lpegmatch(pathparser,pattern)
- if parsed then
- parsed.pattern=pattern
- local np=#parsed
- if np==0 then
- parsed={ pattern=pattern,register_self,state="parsing error" }
- report_lpath("parsing error in pattern: %s",pattern)
- lshow(parsed)
- else
- local pi=parsed[1]
- if pi.axis=="auto-child" then
- if false then
- add_comment(parsed,"auto-child replaced by auto-descendant-or-self")
- parsed[1]=register_auto_descendant_or_self
- else
- add_comment(parsed,"auto-child replaced by auto-descendant")
- parsed[1]=register_auto_descendant
- end
- elseif pi.axis=="initial-child" and np>1 and parsed[2].axis then
- add_comment(parsed,"initial-child removed")
- remove(parsed,1)
- end
- local np=#parsed
- if np>1 then
- local pnp=parsed[np]
- if pnp.kind=="nodes" and pnp.nodetest==true then
- local nodes=pnp.nodes
- if nodes[1]==true and nodes[2]==false and nodes[3]==false then
- add_comment(parsed,"redundant final wildcard filter removed")
- remove(parsed,np)
- end
- end
- end
- end
- else
- parsed={ pattern=pattern }
- end
- cache[pattern]=parsed
- if trace_lparse and not trace_lprofile then
- lshow(parsed)
- end
- end
- return parsed
- end
-end
-xml.lpath=lpath
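--- Editor's note, not part of the original module: hedged usage sketch of the cache
--- above; the pattern string is hypothetical.
---   local parsed = xml.lpath("a/b[@c='d']") -- compiled and stored in 'cache'
---   local again  = xml.lpath("a/b[@c='d']") -- second call returns the cached table
---   xml.lshow(parsed)                       -- dumps the parsed steps via report_lpath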
-local profiled={} xml.profiled=profiled
-local function profiled_apply(list,parsed,nofparsed,order)
- local p=profiled[parsed.pattern]
- if p then
- p.tested=p.tested+1
- else
- p={ tested=1,matched=0,finalized=0 }
- profiled[parsed.pattern]=p
- end
- local collected=list
- for i=1,nofparsed do
- local pi=parsed[i]
- local kind=pi.kind
- if kind=="axis" then
- collected=apply_axis[pi.axis](collected)
- elseif kind=="nodes" then
- collected=apply_nodes(collected,pi.nodetest,pi.nodes)
- elseif kind=="expression" then
- collected=apply_expression(collected,pi.evaluator,order)
- elseif kind=="finalizer" then
- collected=pi.finalizer(collected)
- p.matched=p.matched+1
- p.finalized=p.finalized+1
- return collected
- end
- if not collected or #collected==0 then
- local pn=i<nofparsed and parsed[nofparsed]
- if pn and pn.kind=="finalizer" then
- collected=pn.finalizer(collected)
- p.finalized=p.finalized+1
- return collected
- end
- return nil
- end
- end
- if collected then
- p.matched=p.matched+1
- end
- return collected
-end
-local function traced_apply(list,parsed,nofparsed,order)
- if trace_lparse then
- lshow(parsed)
- end
- report_lpath("collecting: %s",parsed.pattern)
- report_lpath("root tags : %s",tagstostring(list))
- report_lpath("order : %s",order or "unset")
- local collected=list
- for i=1,nofparsed do
- local pi=parsed[i]
- local kind=pi.kind
- if kind=="axis" then
- collected=apply_axis[pi.axis](collected)
- report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
- elseif kind=="nodes" then
- collected=apply_nodes(collected,pi.nodetest,pi.nodes)
- report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
- elseif kind=="expression" then
- collected=apply_expression(collected,pi.evaluator,order)
- report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
- elseif kind=="finalizer" then
- collected=pi.finalizer(collected)
- report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
- return collected
- end
- if not collected or #collected==0 then
- local pn=i<nofparsed and parsed[nofparsed]
- if pn and pn.kind=="finalizer" then
- collected=pn.finalizer(collected)
- report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
- return collected
- end
- return nil
- end
- end
- return collected
-end
-local function normal_apply(list,parsed,nofparsed,order)
- local collected=list
- for i=1,nofparsed do
- local pi=parsed[i]
- local kind=pi.kind
- if kind=="axis" then
- local axis=pi.axis
- if axis~="self" then
- collected=apply_axis[axis](collected)
- end
- elseif kind=="nodes" then
- collected=apply_nodes(collected,pi.nodetest,pi.nodes)
- elseif kind=="expression" then
- collected=apply_expression(collected,pi.evaluator,order)
- elseif kind=="finalizer" then
- return pi.finalizer(collected)
- end
- if not collected or #collected==0 then
- local pf=i<nofparsed and parsed[nofparsed].finalizer
- if pf then
- return pf(collected)
- end
- return nil
- end
- end
- return collected
-end
-local function applylpath(list,pattern)
- if not list then
- return
- end
- local parsed=cache[pattern]
- if parsed then
- lpathcalls=lpathcalls+1
- lpathcached=lpathcached+1
- elseif type(pattern)=="table" then
- lpathcalls=lpathcalls+1
- parsed=pattern
- else
- parsed=lpath(pattern) or pattern
- end
- if not parsed then
- return
- end
- local nofparsed=#parsed
- if nofparsed==0 then
- return
- end
- if not trace_lpath then
- return normal_apply ({ list },parsed,nofparsed,list.mi)
- elseif trace_lprofile then
- return profiled_apply({ list },parsed,nofparsed,list.mi)
- else
- return traced_apply ({ list },parsed,nofparsed,list.mi)
- end
-end
-xml.applylpath=applylpath
-function xml.filter(root,pattern)
- return applylpath(root,pattern)
-end
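--- Editor's note, not part of the original module: minimal filter sketch; the document
--- string is made up and xml.convert comes from the lxml-tab module loaded earlier.
---   local root = xml.convert("<a><b n='1'/><b n='2'/></a>")
---   local all  = xml.filter(root,"a/b")          -- both <b> elements
---   local one  = xml.filter(root,"a/b[@n='2']")  -- filtered by an attribute expression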
-expressions.child=function(e,pattern)
- return applylpath(e,pattern)
-end
-expressions.count=function(e,pattern)
- local collected=applylpath(e,pattern)
- return pattern and (collected and #collected) or 0
-end
-expressions.oneof=function(s,...)
- for i=1,select("#",...) do
- if s==select(i,...) then
- return true
- end
- end
- return false
-end
-expressions.error=function(str)
- xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
- return false
-end
-expressions.undefined=function(s)
- return s==nil
-end
-expressions.quit=function(s)
- if s or s==nil then
- quit_expression=true
- end
- return true
-end
-expressions.print=function(...)
- print(...)
- return true
-end
-expressions.contains=find
-expressions.find=find
-expressions.upper=upper
-expressions.lower=lower
-expressions.number=tonumber
-expressions.boolean=toboolean
-function expressions.contains(str,pattern)
- local t=type(str)
- if t=="string" then
- if find(str,pattern) then
- return true
- end
- elseif t=="table" then
- for i=1,#str do
- local d=str[i]
- if type(d)=="string" and find(d,pattern) then
- return true
- end
- end
- end
- return false
-end
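--- Editor's note, not part of the original module: these helpers are callable inside
--- [] expressions; a hedged illustration with made-up element and attribute names:
---   xml.filter(root,"entry[contains(text(),'foo')]")
---   xml.filter(root,"entry[oneof(@lang,'en','nl')]")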
-local function traverse(root,pattern,handle)
- local collected=applylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e=collected[c]
- local r=e.__p__
- handle(r,r.dt,e.ni)
- end
- end
-end
-local function selection(root,pattern,handle)
- local collected=applylpath(root,pattern)
- if collected then
- if handle then
- for c=1,#collected do
- handle(collected[c])
- end
- else
- return collected
- end
- end
-end
-xml.traverse=traverse
-xml.selection=selection
-local function dofunction(collected,fnc,...)
- if collected then
- local f=functions[fnc]
- if f then
- for c=1,#collected do
- f(collected[c],...)
- end
- else
- report_lpath("unknown function %a",fnc)
- end
- end
-end
-finalizers.xml["function"]=dofunction
-finalizers.tex["function"]=dofunction
-expressions.text=function(e,n)
- local rdt=e.__p__.dt
- return rdt and rdt[n] or ""
-end
-expressions.name=function(e,n)
- local found=false
- n=tonumber(n) or 0
- if n==0 then
- found=type(e)=="table" and e
- elseif n<0 then
- local d,k=e.__p__.dt,e.ni
- for i=k-1,1,-1 do
- local di=d[i]
- if type(di)=="table" then
- if n==-1 then
- found=di
- break
- else
- n=n+1
- end
- end
- end
- else
- local d,k=e.__p__.dt,e.ni
- for i=k+1,#d,1 do
- local di=d[i]
- if type(di)=="table" then
- if n==1 then
- found=di
- break
- else
- n=n-1
- end
- end
- end
- end
- if found then
- local ns,tg=found.rn or found.ns or "",found.tg
- if ns~="" then
- return ns..":"..tg
- else
- return tg
- end
- else
- return ""
- end
-end
-expressions.tag=function(e,n)
- if not e then
- return ""
- else
- local found=false
- n=tonumber(n) or 0
- if n==0 then
- found=(type(e)=="table") and e
- elseif n<0 then
- local d,k=e.__p__.dt,e.ni
- for i=k-1,1,-1 do
- local di=d[i]
- if type(di)=="table" then
- if n==-1 then
- found=di
- break
- else
- n=n+1
- end
- end
- end
- else
- local d,k=e.__p__.dt,e.ni
- for i=k+1,#d,1 do
- local di=d[i]
- if type(di)=="table" then
- if n==1 then
- found=di
- break
- else
- n=n-1
- end
- end
- end
- end
- return (found and found.tg) or ""
- end
-end
-local dummy=function() end
-function xml.elements(root,pattern,reverse)
- local collected=applylpath(root,pattern)
- if not collected then
- return dummy
- elseif reverse then
- local c=#collected+1
- return function()
- if c>1 then
- c=c-1
- local e=collected[c]
- local r=e.__p__
- return r,r.dt,e.ni
- end
- end
- else
- local n,c=#collected,0
- return function()
- if c<n then
- c=c+1
- local e=collected[c]
- local r=e.__p__
- return r,r.dt,e.ni
- end
- end
- end
-end
-function xml.collected(root,pattern,reverse)
- local collected=applylpath(root,pattern)
- if not collected then
- return dummy
- elseif reverse then
- local c=#collected+1
- return function()
- if c>1 then
- c=c-1
- return collected[c]
- end
- end
- else
- local n,c=#collected,0
- return function()
- if c<n then
- c=c+1
- return collected[c]
- end
- end
- end
-end
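--- Editor's note, not part of the original module: hedged iteration sketch; names are
--- made up and xml.text is defined further down in the lxml-xml module.
---   for e in xml.collected(root,"a/b") do
---     print(xml.text(e))
---   end
---   for r,d,k in xml.elements(root,"a/b") do
---     d[k]="replacement" -- parent element, its dt table and the child index
---   end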
-function xml.inspect(collection,pattern)
- pattern=pattern or "."
- for e in xml.collected(collection,pattern) do
- report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
- end
-end
-local function split(e)
- local dt=e.dt
- if dt then
- for i=1,#dt do
- local dti=dt[i]
- if type(dti)=="string" then
- dti=gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1")
- dti=gsub(dti,"[\n\r]+","\n\n")
- dt[i]=dti
- else
- split(dti)
- end
- end
- end
- return e
-end
-function xml.finalizers.paragraphs(c)
- for i=1,#c do
- split(c[i])
- end
- return c
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["lxml-mis"] = package.loaded["lxml-mis"] or true
-
--- original size: 3684, stripped down to: 1957
-
-if not modules then modules={} end modules ['lxml-mis']={
- version=1.001,
- comment="this module is the basis for the lxml-* ones",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local xml,lpeg,string=xml,lpeg,string
-local concat=table.concat
-local type,next,tonumber,tostring,setmetatable,loadstring=type,next,tonumber,tostring,setmetatable,loadstring
-local format,gsub,match,rep=string.format,string.gsub,string.match,string.rep -- rep is used by xml.stripleadingspaces below
-local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
-local P,S,R,C,V,Cc,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.Cc,lpeg.Cs
-lpegpatterns.xml=lpegpatterns.xml or {}
-local xmlpatterns=lpegpatterns.xml
-local function xmlgsub(t,old,new)
- local dt=t.dt
- if dt then
- for k=1,#dt do
- local v=dt[k]
- if type(v)=="string" then
- dt[k]=gsub(v,old,new)
- else
- xmlgsub(v,old,new)
- end
- end
- end
-end
-function xml.stripleadingspaces(dk,d,k)
- if d and k then
- local dkm=d[k-1]
- if dkm and type(dkm)=="string" then
-   local s=match(dkm,"\n(%s+)")
-   if s then -- the match can fail when there is no indented line break
-    xmlgsub(dk,"\n"..rep(" ",#s),"\n")
-   end
- end
- end
-end
-local normal=(1-S("<&>"))^0
-local special=P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"
-local escaped=Cs(normal*(special*normal)^0)
-local normal=(1-S"&")^0
-local special=P("&lt;")/"<"+P("&gt;")/">"+P("&amp;")/"&"
-local unescaped=Cs(normal*(special*normal)^0)
-local cleansed=Cs(((P("<")*(1-P(">"))^0*P(">"))/""+1)^0)
-xmlpatterns.escaped=escaped
-xmlpatterns.unescaped=unescaped
-xmlpatterns.cleansed=cleansed
-function xml.escaped (str) return lpegmatch(escaped,str) end
-function xml.unescaped(str) return lpegmatch(unescaped,str) end
-function xml.cleansed (str) return lpegmatch(cleansed,str) end
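--- Editor's note, not part of the original module: behaviour sketch of the three
--- helpers above (traced from the patterns, so treat as illustrative):
---   xml.escaped  ("a<b&c")       -- "a&lt;b&amp;c"
---   xml.unescaped("a&lt;b")      -- "a<b"
---   xml.cleansed ("a<b>c</b>d")  -- "acd", tags stripped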
-function xml.fillin(root,pattern,str,check)
- local e=xml.first(root,pattern)
- if e then
- local n=#e.dt
- if not check or n==0 or (n==1 and e.dt[1]=="") then
- e.dt={ str }
- end
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["lxml-aux"] = package.loaded["lxml-aux"] or true
-
--- original size: 23804, stripped down to: 16817
-
-if not modules then modules={} end modules ['lxml-aux']={
- version=1.001,
- comment="this module is the basis for the lxml-* ones",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local trace_manipulations=false trackers.register("lxml.manipulations",function(v) trace_manipulations=v end)
-local report_xml=logs.reporter("xml")
-local xml=xml
-local xmlconvert,xmlcopy,xmlname=xml.convert,xml.copy,xml.name
-local xmlinheritedconvert=xml.inheritedconvert
-local xmlapplylpath=xml.applylpath
-local xmlfilter=xml.filter
-local type,setmetatable,getmetatable=type,setmetatable,getmetatable
-local insert,remove,fastcopy,concat=table.insert,table.remove,table.fastcopy,table.concat
-local gmatch,gsub,format,find,strip=string.gmatch,string.gsub,string.format,string.find,string.strip
-local utfbyte=utf.byte
-local function report(what,pattern,c,e)
- report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
-end
-local function withelements(e,handle,depth)
- if e and handle then
- local edt=e.dt
- if edt then
- depth=depth or 0
- for i=1,#edt do
- local e=edt[i]
- if type(e)=="table" then
- handle(e,depth)
- withelements(e,handle,depth+1)
- end
- end
- end
- end
-end
-xml.withelements=withelements
-function xml.withelement(e,n,handle)
- if e and n~=0 and handle then
- local edt=e.dt
- if edt then
- if n>0 then
- for i=1,#edt do
- local ei=edt[i]
- if type(ei)=="table" then
- if n==1 then
- handle(ei)
- return
- else
- n=n-1
- end
- end
- end
- elseif n<0 then
- for i=#edt,1,-1 do
- local ei=edt[i]
- if type(ei)=="table" then
- if n==-1 then
- handle(ei)
- return
- else
- n=n+1
- end
- end
- end
- end
- end
- end
-end
-function xml.each(root,pattern,handle,reverse)
- local collected=xmlapplylpath(root,pattern)
- if collected then
- if reverse then
- for c=#collected,1,-1 do
- handle(collected[c])
- end
- else
- for c=1,#collected do
- handle(collected[c])
- end
- end
- return collected
- end
-end
-function xml.processattributes(root,pattern,handle)
- local collected=xmlapplylpath(root,pattern)
- if collected and handle then
- for c=1,#collected do
- handle(collected[c].at)
- end
- end
- return collected
-end
-function xml.collect(root,pattern)
- return xmlapplylpath(root,pattern)
-end
-function xml.collecttexts(root,pattern,flatten)
- local collected=xmlapplylpath(root,pattern)
- if collected and flatten then
- local xmltostring=xml.tostring
- for c=1,#collected do
- collected[c]=xmltostring(collected[c].dt)
- end
- end
- return collected or {}
-end
-function xml.collect_tags(root,pattern,nonamespace)
- local collected=xmlapplylpath(root,pattern)
- if collected then
- local t,n={},0
- for c=1,#collected do
- local e=collected[c]
- local ns,tg=e.ns,e.tg
- n=n+1
- if nonamespace then
- t[n]=tg
- elseif ns=="" then
- t[n]=tg
- else
- t[n]=ns..":"..tg
- end
- end
- return t
- end
-end
-local no_root={ no_root=true }
-local function redo_ni(d)
- for k=1,#d do
- local dk=d[k]
- if type(dk)=="table" then
- dk.ni=k
- end
- end
-end
-local function xmltoelement(whatever,root)
- if not whatever then
- return nil
- end
- local element
- if type(whatever)=="string" then
- element=xmlinheritedconvert(whatever,root)
- else
- element=whatever
- end
- if element.error then
- return whatever
- end
- if element then
- end
- return element
-end
-xml.toelement=xmltoelement
-local function copiedelement(element,newparent)
- if type(element)=="string" then
- return element
- else
- element=xmlcopy(element).dt
- if newparent and type(element)=="table" then
- element.__p__=newparent
- end
- return element
- end
-end
-function xml.delete(root,pattern)
- if not pattern or pattern=="" then
- local p=root.__p__
- if p then
- if trace_manipulations then
- report('deleting',"--",c,root)
- end
- local d=p.dt
- remove(d,root.ni)
- redo_ni(d)
- end
- else
- local collected=xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e=collected[c]
- local p=e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
- end
- local d=p.dt
- remove(d,e.ni)
- redo_ni(d)
- end
- end
- end
- end
-end
-function xml.replace(root,pattern,whatever)
- local element=root and xmltoelement(whatever,root)
- local collected=element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e=collected[c]
- local p=e.__p__
- if p then
- if trace_manipulations then
- report('replacing',pattern,c,e)
- end
- local d=p.dt
- d[e.ni]=copiedelement(element,p)
- redo_ni(d)
- end
- end
- end
-end
-local function wrap(e,wrapper)
- local t={
- rn=e.rn,
- tg=e.tg,
- ns=e.ns,
- at=e.at,
- dt=e.dt,
- __p__=e,
- }
- setmetatable(t,getmetatable(e))
- e.rn=wrapper.rn or e.rn or ""
- e.tg=wrapper.tg or e.tg or ""
- e.ns=wrapper.ns or e.ns or ""
- e.at=fastcopy(wrapper.at)
- e.dt={ t }
-end
-function xml.wrap(root,pattern,whatever)
- if whatever then
- local wrapper=xmltoelement(whatever,root)
- local collected=xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e=collected[c]
- if trace_manipulations then
- report('wrapping',pattern,c,e)
- end
- wrap(e,wrapper)
- end
- end
- else
- wrap(root,xmltoelement(pattern))
- end
-end
-local function inject_element(root,pattern,whatever,prepend)
- local element=root and xmltoelement(whatever,root)
- local collected=element and xmlapplylpath(root,pattern)
- local function inject_e(e)
- local r=e.__p__
- local d,k,rri=r.dt,e.ni,r.ri
- local edt=(rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be,af
- local cp=copiedelement(element,e)
- if prepend then
- be,af=cp,edt
- else
- be,af=edt,cp
- end
- local bn=#be
- for i=1,#af do
- bn=bn+1
- be[bn]=af[i]
- end
- if rri then
- r.dt[rri].dt=be
- else
- d[k].dt=be
- end
- redo_ni(d)
- end
- end
- if not collected then
- elseif collected.tg then
- inject_e(collected)
- else
- for c=1,#collected do
- inject_e(collected[c])
- end
- end
-end
-local function insert_element(root,pattern,whatever,before)
- local element=root and xmltoelement(whatever,root)
- local collected=element and xmlapplylpath(root,pattern)
- local function insert_e(e)
- local r=e.__p__
- local d,k=r.dt,e.ni
- if not before then
- k=k+1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
- end
- if not collected then
- elseif collected.tg then
- insert_e(collected)
- else
- for c=1,#collected do
- insert_e(collected[c])
- end
- end
-end
-xml.insert_element=insert_element
-xml.insertafter=insert_element
-xml.insertbefore=function(r,p,e) insert_element(r,p,e,true) end
-xml.injectafter=inject_element
-xml.injectbefore=function(r,p,e) inject_element(r,p,e,true) end
-local function include(xmldata,pattern,attribute,recursive,loaddata)
- pattern=pattern or 'include'
- loaddata=loaddata or io.loaddata
- local collected=xmlapplylpath(xmldata,pattern)
- if collected then
- for c=1,#collected do
- local ek=collected[c]
- local name=nil
- local ekdt=ek.dt
- local ekat=ek.at
- local epdt=ek.__p__.dt
- if not attribute or attribute=="" then
- name=(type(ekdt)=="table" and ekdt[1]) or ekdt
- end
- if not name then
- for a in gmatch(attribute or "href","([^|]+)") do
- name=ekat[a]
- if name then break end
- end
- end
- local data=(name and name~="" and loaddata(name)) or ""
- if data=="" then
- epdt[ek.ni]=""
- elseif ekat["parse"]=="text" then
- epdt[ek.ni]=xml.escaped(data)
- else
- local xi=xmlinheritedconvert(data,xmldata)
- if not xi then
- epdt[ek.ni]=""
- else
- if recursive then
- include(xi,pattern,attribute,recursive,loaddata)
- end
- epdt[ek.ni]=xml.body(xi)
- end
- end
- end
- end
-end
-xml.include=include
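--- Editor's note, not part of the original module: hedged sketch; the file name and
--- markup are hypothetical. Given <doc><include href="chapter.xml"/></doc> the include
--- elements can be resolved in place with:
---   xml.include(root,"include","href",true,io.loaddata)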
-local function stripelement(e,nolines,anywhere)
- local edt=e.dt
- if edt then
- if anywhere then
- local t,n={},0
- for e=1,#edt do
- local str=edt[e]
- if type(str)~="string" then
- n=n+1
- t[n]=str
- elseif str~="" then
- if nolines then
- str=gsub(str,"%s+"," ")
- end
- str=gsub(str,"^%s*(.-)%s*$","%1")
- if str~="" then
- n=n+1
- t[n]=str
- end
- end
- end
- e.dt=t
- else
- if #edt>0 then
- local str=edt[1]
- if type(str)~="string" then
- elseif str=="" then
- remove(edt,1)
- else
- if nolines then
- str=gsub(str,"%s+"," ")
- end
- str=gsub(str,"^%s+","")
- if str=="" then
- remove(edt,1)
- else
- edt[1]=str
- end
- end
- end
- local nedt=#edt
- if nedt>0 then
- local str=edt[nedt]
- if type(str)~="string" then
- elseif str=="" then
- remove(edt)
- else
- if nolines then
- str=gsub(str,"%s+"," ")
- end
- str=gsub(str,"%s+$","")
- if str=="" then
- remove(edt)
- else
- edt[nedt]=str
- end
- end
- end
- end
- end
- return e
-end
-xml.stripelement=stripelement
-function xml.strip(root,pattern,nolines,anywhere)
- local collected=xmlapplylpath(root,pattern)
- if collected then
- for i=1,#collected do
- stripelement(collected[i],nolines,anywhere)
- end
- end
-end
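--- Editor's note, not part of the original module: hedged sketch of the stripper above;
--- 'nolines' collapses whitespace runs, 'anywhere' strips every string child.
---   xml.strip(root,"a/b",true,true)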
-local function renamespace(root,oldspace,newspace)
- local ndt=#root.dt
- for i=1,ndt or 0 do
- local e=root[i]
- if type(e)=="table" then
- if e.ns==oldspace then
- e.ns=newspace
- if e.rn then
- e.rn=newspace
- end
- end
- local edt=e.dt
- if edt then
- renamespace(edt,oldspace,newspace)
- end
- end
- end
-end
-xml.renamespace=renamespace
-function xml.remaptag(root,pattern,newtg)
- local collected=xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- collected[c].tg=newtg
- end
- end
-end
-function xml.remapnamespace(root,pattern,newns)
- local collected=xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- collected[c].ns=newns
- end
- end
-end
-function xml.checknamespace(root,pattern,newns)
- local collected=xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e=collected[c]
- if (not e.rn or e.rn=="") and e.ns=="" then
- e.rn=newns
- end
- end
- end
-end
-function xml.remapname(root,pattern,newtg,newns,newrn)
- local collected=xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e=collected[c]
- e.tg,e.ns,e.rn=newtg,newns,newrn
- end
- end
-end
-function xml.cdatatotext(e)
- local dt=e.dt
- if #dt==1 then
- local first=dt[1]
- if first.tg=="@cd@" then
- e.dt=first.dt
- end
- else
- end
-end
-function xml.texttocdata(e)
- local dt=e.dt
- local s=xml.tostring(dt)
- e.tg="@cd@"
- e.special=true
- e.ns=""
- e.rn=""
- e.dt={ s }
- e.at=nil
-end
-function xml.elementtocdata(e)
- local dt=e.dt
- local s=xml.tostring(e)
- e.tg="@cd@"
- e.special=true
- e.ns=""
- e.rn=""
- e.dt={ s }
- e.at=nil
-end
-xml.builtinentities=table.tohash { "amp","quot","apos","lt","gt" }
-local entities=characters and characters.entities or nil
-local builtinentities=xml.builtinentities
-function xml.addentitiesdoctype(root,option)
- if not entities then
- require("char-ent")
- entities=characters.entities
- end
- if entities and root and root.tg=="@rt@" and root.statistics then
- local list={}
- local hexify=option=="hexadecimal"
- for k,v in table.sortedhash(root.statistics.entities.names) do
- if not builtinentities[k] then
- local e=entities[k]
- if not e then
- e=format("[%s]",k)
- elseif hexify then
- e=format("&#%05X;",utfbyte(k))
- end
- list[#list+1]=format(" <!ENTITY %s %q >",k,e)
- end
- end
- local dt=root.dt
- local n=dt[1].tg=="@pi@" and 2 or 1
- if #list>0 then
- insert(dt,n,{ "\n" })
- insert(dt,n,{
- tg="@dt@",
- dt={ format("Something [\n%s\n] ",concat(list)) },
- ns="",
- special=true,
- })
- insert(dt,n,{ "\n\n" })
- else
- end
- end
-end
-xml.all=xml.each
-xml.insert=xml.insertafter
-xml.inject=xml.injectafter
-xml.after=xml.insertafter
-xml.before=xml.insertbefore
-xml.process=xml.each
-xml.obsolete=xml.obsolete or {}
-local obsolete=xml.obsolete
-xml.strip_whitespace=xml.strip obsolete.strip_whitespace=xml.strip
-xml.collect_elements=xml.collect obsolete.collect_elements=xml.collect
-xml.delete_element=xml.delete obsolete.delete_element=xml.delete
-xml.replace_element=xml.replace obsolete.replace_element=xml.replace
-xml.each_element=xml.each obsolete.each_element=xml.each
-xml.process_elements=xml.process obsolete.process_elements=xml.process
-xml.insert_element_after=xml.insertafter obsolete.insert_element_after=xml.insertafter
-xml.insert_element_before=xml.insertbefore obsolete.insert_element_before=xml.insertbefore
-xml.inject_element_after=xml.injectafter obsolete.inject_element_after=xml.injectafter
-xml.inject_element_before=xml.injectbefore obsolete.inject_element_before=xml.injectbefore
-xml.process_attributes=xml.processattributes obsolete.process_attributes=xml.processattributes
-xml.collect_texts=xml.collecttexts obsolete.collect_texts=xml.collecttexts
-xml.inject_element=xml.inject obsolete.inject_element=xml.inject
-xml.remap_tag=xml.remaptag obsolete.remap_tag=xml.remaptag
-xml.remap_name=xml.remapname obsolete.remap_name=xml.remapname
-xml.remap_namespace=xml.remapnamespace obsolete.remap_namespace=xml.remapnamespace
-function xml.cdata(e)
- if e then
- local dt=e.dt
- if dt and #dt==1 then
- local first=dt[1]
- return first.tg=="@cd@" and first.dt[1] or ""
- end
- end
- return ""
-end
-function xml.finalizers.xml.cdata(collected)
- if collected then
- local e=collected[1]
- if e then
- local dt=e.dt
- if dt and #dt==1 then
- local first=dt[1]
- return first.tg=="@cd@" and first.dt[1] or ""
- end
- end
- end
- return ""
-end
-function xml.insertcomment(e,str,n)
- table.insert(e.dt,n or 1,{
- tg="@cm@",
- ns="",
- special=true,
- at={},
- dt={ str },
- })
-end
-function xml.setcdata(e,str)
- e.dt={ {
- tg="@cd@",
- ns="",
- special=true,
- at={},
- dt={ str },
- } }
-end
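--- Editor's note, not part of the original module: hedged sketch of the cdata helpers
--- above; 'e' is any element.
---   xml.setcdata(e,"<raw/>") -- replaces e's content by a single @cd@ node
---   xml.cdata(e)             -- returns "<raw/>" again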
-function xml.separate(x,pattern)
- local collected=xmlapplylpath(x,pattern)
- if collected then
- for c=1,#collected do
- local e=collected[c]
- local d=e.dt
- if d==x then
- report_xml("warning: xml.separate changes root")
- x=d
- end
- local t,n={ "\n" },1
- local i,nd=1,#d
- while i<=nd do
- while i<=nd do
- local di=d[i]
- if type(di)=="string" then
- if di=="\n" or find(di,"^%s+$") then
- i=i+1
- else
- d[i]=strip(di)
- break
- end
- else
- break
- end
- end
- if i>nd then
- break
- end
- t[n+1]="\n"
- t[n+2]=d[i]
- t[n+3]="\n"
- n=n+3
- i=i+1
- end
- t[n+1]="\n"
- setmetatable(t,getmetatable(d))
- e.dt=t
- end
- end
- return x
-end
-local helpers=xml.helpers or {}
-xml.helpers=helpers
-local function normal(e,action)
- local edt=e.dt
- if edt then
- for i=1,#edt do
- local str=edt[i]
- if type(str)=="string" and str~="" then
- edt[i]=action(str)
- end
- end
- end
-end
-local function recurse(e,action)
- local edt=e.dt
- if edt then
- for i=1,#edt do
- local str=edt[i]
- if type(str)~="string" then
-    recurse(str,action)
- elseif str~="" then
- edt[i]=action(str)
- end
- end
- end
-end
-function helpers.recursetext(collected,action,recursive)
- if recursive then
- for i=1,#collected do
- recurse(collected[i],action)
- end
- else
- for i=1,#collected do
- normal(collected[i],action)
- end
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["lxml-xml"] = package.loaded["lxml-xml"] or true
-
--- original size: 10274, stripped down to: 7538
-
-if not modules then modules={} end modules ['lxml-xml']={
- version=1.001,
- comment="this module is the basis for the lxml-* ones",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local concat=table.concat
-local find,lower,upper=string.find,string.lower,string.upper
-local xml=xml
-local finalizers=xml.finalizers.xml
-local xmlfilter=xml.filter
-local xmltostring=xml.tostring
-local xmlserialize=xml.serialize
-local xmlcollected=xml.collected
-local xmlnewhandlers=xml.newhandlers
-local function first(collected)
- return collected and collected[1]
-end
-local function last(collected)
- return collected and collected[#collected]
-end
-local function all(collected)
- return collected
-end
-local reverse=table.reversed
-local function attribute(collected,name)
- if collected and #collected>0 then
- local at=collected[1].at
- return at and at[name]
- end
-end
-local function att(id,name)
- local at=id.at
- return at and at[name]
-end
-local function count(collected)
- return collected and #collected or 0
-end
-local function position(collected,n)
- if not collected then
- return 0
- end
- local nc=#collected
- if nc==0 then
- return 0
- end
- n=tonumber(n) or 0
- if n<0 then
- return collected[nc+n+1]
- elseif n>0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
-end
-local function match(collected)
- return collected and #collected>0 and collected[1].mi or 0
-end
-local function index(collected)
- return collected and #collected>0 and collected[1].ni or 0
-end
-local function attributes(collected,arguments)
- if collected and #collected>0 then
- local at=collected[1].at
- if arguments then
- return at[arguments]
- elseif next(at) then
- return at
- end
- end
-end
-local function chainattribute(collected,arguments)
- if collected and #collected>0 then
- local e=collected[1]
- while e do
- local at=e.at
- if at then
- local a=at[arguments]
- if a then
- return a
- end
- else
- break
- end
- e=e.__p__
- end
- end
- return ""
-end
-local function raw(collected)
- if collected and #collected>0 then
- local e=collected[1] or collected
- return e and xmltostring(e) or ""
- else
- return ""
- end
-end
-local xmltexthandler=xmlnewhandlers {
- name="string",
- initialize=function()
- result={}
- return result
- end,
- finalize=function()
- return concat(result)
- end,
- handle=function(...)
- result[#result+1]=concat {... }
- end,
- escape=false,
-}
-local function xmltotext(root)
- local dt=root.dt
- if not dt then
- return ""
- end
- local nt=#dt
- if nt==0 then
- return ""
- elseif nt==1 and type(dt[1])=="string" then
- return dt[1]
- else
- return xmlserialize(root,xmltexthandler) or ""
- end
-end
-local function text(collected)
- if collected then
- local e=collected[1] or collected
- return e and xmltotext(e) or ""
- else
- return ""
- end
-end
-local function texts(collected)
- if not collected then
- return {}
- end
- local nc=#collected
- if nc==0 then
- return {}
- end
- local t,n={},0
- for c=1,nc do
- local e=collected[c]
- if e and e.dt then
- n=n+1
- t[n]=e.dt
- end
- end
- return t
-end
-local function tag(collected,n)
- if not collected then
- return
- end
- local nc=#collected
- if nc==0 then
- return
- end
- local c
- if n==0 or not n then
- c=collected[1]
- elseif n>1 then
- c=collected[n]
- else
- c=collected[nc-n+1]
- end
- return c and c.tg
-end
-local function name(collected,n)
- if not collected then
- return
- end
- local nc=#collected
- if nc==0 then
- return
- end
- local c
- if n==0 or not n then
- c=collected[1]
- elseif n>1 then
- c=collected[n]
- else
- c=collected[nc-n+1]
- end
- if not c then
- elseif c.ns=="" then
- return c.tg
- else
- return c.ns..":"..c.tg
- end
-end
-local function tags(collected,nonamespace)
- if not collected then
- return
- end
- local nc=#collected
- if nc==0 then
- return
- end
- local t,n={},0
- for c=1,nc do
- local e=collected[c]
- local ns,tg=e.ns,e.tg
- n=n+1
- if nonamespace or ns=="" then
- t[n]=tg
- else
- t[n]=ns..":"..tg
- end
- end
- return t
-end
-local function empty(collected,spacesonly)
- if not collected then
- return true
- end
- local nc=#collected
- if nc==0 then
- return true
- end
- for c=1,nc do
- local e=collected[c]
- if e then
- local edt=e.dt
- if edt then
- local n=#edt
- if n==1 then
- local edk=edt[1]
- local typ=type(edk)
- if typ=="table" then
- return false
- elseif edk~="" then
- return false
- elseif spacesonly and not find(edk,"%S") then
- return false
- end
- elseif n>1 then
- return false
- end
- end
- end
- end
- return true
-end
-finalizers.first=first
-finalizers.last=last
-finalizers.all=all
-finalizers.reverse=reverse
-finalizers.elements=all
-finalizers.default=all
-finalizers.attribute=attribute
-finalizers.att=att
-finalizers.count=count
-finalizers.position=position
-finalizers.match=match
-finalizers.index=index
-finalizers.attributes=attributes
-finalizers.chainattribute=chainattribute
-finalizers.text=text
-finalizers.texts=texts
-finalizers.tag=tag
-finalizers.name=name
-finalizers.tags=tags
-finalizers.empty=empty
-function xml.first(id,pattern)
- return first(xmlfilter(id,pattern))
-end
-function xml.last(id,pattern)
- return last(xmlfilter(id,pattern))
-end
-function xml.count(id,pattern)
- return count(xmlfilter(id,pattern))
-end
-function xml.attribute(id,pattern,a,default)
- return attribute(xmlfilter(id,pattern),a,default)
-end
-function xml.raw(id,pattern)
- if pattern then
- return raw(xmlfilter(id,pattern))
- else
- return raw(id)
- end
-end
-function xml.text(id,pattern)
- if pattern then
- local collected=xmlfilter(id,pattern)
- return collected and #collected>0 and xmltotext(collected[1]) or ""
- elseif id then
- return xmltotext(id) or ""
- else
- return ""
- end
-end
-xml.content=text
-function xml.position(id,pattern,n)
- return position(xmlfilter(id,pattern),n)
-end
-function xml.match(id,pattern)
- return match(xmlfilter(id,pattern))
-end
-function xml.empty(id,pattern,spacesonly)
- return empty(xmlfilter(id,pattern),spacesonly)
-end
-xml.all=xml.filter
-xml.index=xml.position
-xml.found=xml.filter
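--- Editor's note, not part of the original module: hedged sketch of the finalizers
--- registered above, used as trailing steps in patterns (names are made up):
---   xml.filter(root,"a/b/first()")       -- first matching element
---   xml.filter(root,"a/b/attribute(n)")  -- attribute 'n' of the first match
---   xml.count (root,"a/b")               -- number of matches
---   xml.text  (root,"a/b")               -- flattened text of the first match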
-local function totable(x)
- local t={}
- for e in xmlcollected(x[1] or x,"/*") do
- t[e.tg]=xmltostring(e.dt) or ""
- end
- return next(t) and t or nil
-end
-xml.table=totable
-finalizers.table=totable
-local function textonly(e,t)
- if e then
- local edt=e.dt
- if edt then
- for i=1,#edt do
- local e=edt[i]
- if type(e)=="table" then
- textonly(e,t)
- else
- t[#t+1]=e
- end
- end
- end
- end
- return t
-end
-function xml.textonly(e)
- return concat(textonly(e,{}))
-end
-function finalizers.lowerall(collected)
- for c=1,#collected do
- local e=collected[c]
- if not e.special then
- e.tg=lower(e.tg)
- local eat=e.at
- if eat then
- local t={}
- for k,v in next,eat do
- t[lower(k)]=v
- end
- e.at=t
- end
- end
- end
-end
-function finalizers.upperall(collected)
- for c=1,#collected do
- local e=collected[c]
- if not e.special then
- e.tg=upper(e.tg)
- local eat=e.at
- if eat then
- local t={}
- for k,v in next,eat do
- t[upper(k)]=v
- end
- e.at=t
- end
- end
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["trac-xml"] = package.loaded["trac-xml"] or true
-
--- original size: 6351, stripped down to: 4919
-
-if not modules then modules={} end modules ['trac-xml']={
- version=1.001,
- comment="companion to trac-log.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local formatters=string.formatters
-local reporters=logs.reporters
-local xmlserialize=xml.serialize
-local xmlcollected=xml.collected
-local xmltext=xml.text
-local xmlfirst=xml.first
-local function showhelp(specification,...)
- local root=xml.convert(specification.helpinfo or "")
- if not root then
- return
- end
- local xs=xml.gethandlers("string")
- xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
- xml.sethandlersfunction(xs,"ref",function(e,handler) handler.handle("--"..e.at.name) end)
- local wantedcategories=select("#",...)==0 and true or table.tohash {... }
- local nofcategories=xml.count(root,"/application/flags/category")
- local report=specification.report
- for category in xmlcollected(root,"/application/flags/category") do
- local categoryname=category.at.name or ""
- if wantedcategories==true or wantedcategories[categoryname] then
- if nofcategories>1 then
- report("%s options:",categoryname)
- report()
- end
- for subcategory in xmlcollected(category,"/subcategory") do
- for flag in xmlcollected(subcategory,"/flag") do
- local name=flag.at.name
- local value=flag.at.value
- local short=xmltext(xmlfirst(flag,"/short"))
- if value then
- report("--%-20s %s",formatters["%s=%s"](name,value),short)
- else
- report("--%-20s %s",name,short)
- end
- end
- report()
- end
- end
- end
- for category in xmlcollected(root,"/application/examples/category") do
- local title=xmltext(xmlfirst(category,"/title"))
- if title and title~="" then
- report()
- report(title)
- report()
- end
- for subcategory in xmlcollected(category,"/subcategory") do
- for example in xmlcollected(subcategory,"/example") do
- local command=xmltext(xmlfirst(example,"/command"))
- local comment=xmltext(xmlfirst(example,"/comment"))
- report(command)
- end
- report()
- end
- end
- for comment in xmlcollected(root,"/application/comments/comment") do
- local comment=xmltext(comment)
- report()
- report(comment)
- report()
- end
-end
-local reporthelp=reporters.help
-local exporthelp=reporters.export
-local function xmlfound(t)
- local helpinfo=t.helpinfo
- if type(helpinfo)=="table" then
- return false
- end
- if type(helpinfo)~="string" then
- helpinfo="Warning: no helpinfo found."
- t.helpinfo=helpinfo
- return false
- end
- if string.find(helpinfo,".xml$") then
- local ownscript=environment.ownscript
- local helpdata=false
- if ownscript then
- local helpfile=file.join(file.pathpart(ownscript),helpinfo)
- helpdata=io.loaddata(helpfile)
- if helpdata=="" then
- helpdata=false
- end
- end
- if not helpdata then
- local helpfile=resolvers.findfile(helpinfo,"tex")
- helpdata=helpfile and io.loaddata(helpfile)
- end
- if helpdata and helpdata~="" then
- helpinfo=helpdata
- else
- helpinfo=formatters["Warning: help file %a is not found."](helpinfo)
- end
- end
- t.helpinfo=helpinfo
- return string.find(t.helpinfo,"^<%?xml") and true or false
-end
-function reporters.help(t,...)
- if xmlfound(t) then
- showhelp(t,...)
- else
- reporthelp(t,...)
- end
-end
-function reporters.export(t,methods,filename)
- if not xmlfound(t) then
- return exporthelp(t)
- end
- if not methods or methods=="" then
- methods=environment.arguments["exporthelp"]
- end
- if not filename or filename=="" then
- filename=environment.files[1]
- end
- dofile(resolvers.findfile("trac-exp.lua","tex"))
- local exporters=logs.exporters
- if not exporters or not methods then
- return exporthelp(t)
- end
- if methods=="all" then
- methods=table.keys(exporters)
- elseif type(methods)=="string" then
- methods=utilities.parsers.settings_to_array(methods)
- else
- return exporthelp(t)
- end
- if type(filename)~="string" or filename=="" then
- filename=false
- elseif file.pathpart(filename)=="" then
- t.report("export file %a will not be saved on the current path (safeguard)",filename)
- return
- end
- for i=1,#methods do
- local method=methods[i]
- local exporter=exporters[method]
- if exporter then
- local result=exporter(t,method)
- if result and result~="" then
- if filename then
- local fullname=file.replacesuffix(filename,method)
- t.report("saving export in %a",fullname)
- io.savedata(fullname,result)
- else
- reporters.lines(t,result)
- end
- else
- t.report("no output from exporter %a",method)
- end
- else
- t.report("unknown exporter %a",method)
- end
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-ini"] = package.loaded["data-ini"] or true
-
--- original size: 7898, stripped down to: 5501
-
-if not modules then modules={} end modules ['data-ini']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files",
-}
-local gsub,find,gmatch,char=string.gsub,string.find,string.gmatch,string.char
-local next,type=next,type
-local filedirname,filebasename,filejoin=file.dirname,file.basename,file.join
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
-local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
-local report_initialization=logs.reporter("resolvers","initialization")
-local ostype,osname,ossetenv,osgetenv=os.type,os.name,os.setenv,os.getenv
-resolvers=resolvers or {}
-local resolvers=resolvers
-texconfig.kpse_init=false
-texconfig.shell_escape='t'
-if not (environment and environment.default_texmfcnf) and kpse and kpse.default_texmfcnf then
- local default_texmfcnf=kpse.default_texmfcnf()
- default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:")
- default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:")
- default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:")
- default_texmfcnf=gsub(default_texmfcnf,"$HOME","home:")
- environment.default_texmfcnf=default_texmfcnf
-end
-kpse={ original=kpse }
-setmetatable(kpse,{
- __index=function(kp,name)
- report_initialization("fatal error: kpse library is accessed (key: %s)",name)
- os.exit()
- end
-} )
-do
- local osfontdir=osgetenv("OSFONTDIR")
- if osfontdir and osfontdir~="" then
- elseif osname=="windows" then
- ossetenv("OSFONTDIR","c:/windows/fonts//")
- elseif osname=="macosx" then
- ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- end
-end
-do
- local homedir=osgetenv(ostype=="windows" and 'USERPROFILE' or 'HOME') or ''
- if not homedir or homedir=="" then
- homedir=char(127)
- end
- homedir=file.collapsepath(homedir)
- ossetenv("HOME",homedir)
- ossetenv("USERPROFILE",homedir)
- environment.homedir=homedir
-end
-do
- local args=environment.originalarguments or arg
- if not environment.ownmain then
- environment.ownmain=status and string.match(string.lower(status.banner),"this is ([%a]+)") or "luatex"
- end
- local ownbin=environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
- local ownpath=environment.ownpath or os.selfdir
- ownbin=file.collapsepath(ownbin)
- ownpath=file.collapsepath(ownpath)
- if not ownpath or ownpath=="" or ownpath=="unset" then
- ownpath=args[-1] or arg[-1]
- ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath=="" then
- ownpath=args[-0] or arg[-0]
- ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
- end
- local binary=ownbin
- if not ownpath or ownpath=="" then
- ownpath=ownpath and filedirname(binary)
- end
- if not ownpath or ownpath=="" then
- if os.binsuffix~="" then
- binary=file.replacesuffix(binary,os.binsuffix)
- end
- local path=osgetenv("PATH")
- if path then
- for p in gmatch(path,"[^"..io.pathseparator.."]+") do
- local b=filejoin(p,binary)
- if lfs.isfile(b) then
- local olddir=lfs.currentdir()
- if lfs.chdir(p) then
- local pp=lfs.currentdir()
- if trace_locating and p~=pp then
- report_initialization("following symlink %a to %a",p,pp)
- end
- ownpath=pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- report_initialization("unable to check path %a",p)
- end
- ownpath=p
- end
- break
- end
- end
- end
- end
- if not ownpath or ownpath=="" then
- ownpath="."
- report_initialization("forcing fallback to ownpath %a",ownpath)
- elseif trace_locating then
- report_initialization("using ownpath %a",ownpath)
- end
- end
- environment.ownbin=ownbin
- environment.ownpath=ownpath
-end
-resolvers.ownpath=environment.ownpath
-function resolvers.getownpath()
- return environment.ownpath
-end
-do
- local ownpath=environment.ownpath or dir.current()
- if ownpath then
- ossetenv('SELFAUTOLOC',file.collapsepath(ownpath))
- ossetenv('SELFAUTODIR',file.collapsepath(ownpath.."/.."))
- ossetenv('SELFAUTOPARENT',file.collapsepath(ownpath.."/../.."))
- else
- report_initialization("error: unable to locate ownpath")
- os.exit()
- end
-end
-local texos=environment.texos or osgetenv("TEXOS")
-local texmfos=environment.texmfos or osgetenv('SELFAUTODIR')
-if not texos or texos=="" then
- texos=file.basename(texmfos)
-end
-ossetenv('TEXMFOS',texmfos)
-ossetenv('TEXOS',texos)
-ossetenv('SELFAUTOSYSTEM',os.platform)
-environment.texos=texos
-environment.texmfos=texmfos
-local texroot=environment.texroot or osgetenv("TEXROOT")
-if not texroot or texroot=="" then
- texroot=osgetenv('SELFAUTOPARENT')
- ossetenv('TEXROOT',texroot)
-end
-environment.texroot=file.collapsepath(texroot)
-if profiler then
- directives.register("system.profile",function()
- profiler.start("luatex-profile.log")
- end)
-end
-if not resolvers.resolve then
- function resolvers.resolve (s) return s end
- function resolvers.unresolve(s) return s end
- function resolvers.repath (s) return s end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-exp"] = package.loaded["data-exp"] or true
-
--- original size: 15303, stripped down to: 9716
-
-if not modules then modules={} end modules ['data-exp']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files",
-}
-local format,find,gmatch,lower,char,sub=string.format,string.find,string.gmatch,string.lower,string.char,string.sub
-local concat,sort=table.concat,table.sort
-local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
-local Ct,Cs,Cc,Carg,P,C,S=lpeg.Ct,lpeg.Cs,lpeg.Cc,lpeg.Carg,lpeg.P,lpeg.C,lpeg.S
-local type,next=type,next
-local ostype=os.type
-local collapsepath=file.collapsepath
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
-local report_expansions=logs.reporter("resolvers","expansions")
-local resolvers=resolvers
-local function f_both(a,b)
- local t,n={},0
- for sb in gmatch(b,"[^,]+") do
- for sa in gmatch(a,"[^,]+") do
- n=n+1;t[n]=sa..sb
- end
- end
- return concat(t,",")
-end
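--- the string based f_both above appears to be superseded by the lpeg based
--- f_first/f_second/f_both variants defined below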
-local comma=P(",")
-local nocomma=(1-comma)^1
-local docomma=comma^1/","
-local before=Cs((nocomma*Carg(1)+docomma)^0)
-local after=Cs((Carg(1)*nocomma+docomma)^0)
-local both=Cs(((C(nocomma)*Carg(1))/function(a,b) return lpegmatch(before,b,1,a) end+docomma)^0)
-local function f_first (a,b) return lpegmatch(after,b,1,a) end
-local function f_second(a,b) return lpegmatch(before,a,1,b) end
-local function f_both (a,b) return lpegmatch(both,b,1,a) end
-local left=P("{")
-local right=P("}")
-local var=P((1-S("{}" ))^0)
-local set=P((1-S("{},"))^0)
-local other=P(1)
-local l_first=Cs((Cc("{")*(C(set)*left*C(var)*right/f_first)*Cc("}")+other )^0 )
-local l_second=Cs((Cc("{")*(left*C(var)*right*C(set)/f_second)*Cc("}")+other )^0 )
-local l_both=Cs((Cc("{")*(left*C(var)*right*left*C(var)*right/f_both)*Cc("}")+other )^0 )
-local l_rest=Cs((left*var*(left/"")*var*(right/"")*var*right+other )^0 )
-local stripper_1=lpeg.stripper ("{}@")
-local replacer_1=lpeg.replacer { { ",}",",@}" },{ "{,","{@," },}
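--- splitpathexpr repeatedly applies the l_first/l_second/l_both/l_rest patterns until the
--- string no longer changes, which expands nested brace groups into a flat comma separated
--- list; for instance "{a,b}{1,2}" expands to the combinations a1, b1, a2 and b2, each of
--- which is then optionally passed through the validate function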
-local function splitpathexpr(str,newlist,validate)
- if trace_expansions then
- report_expansions("expanding variable %a",str)
- end
- local t,ok,done=newlist or {},false,false
- local n=#t
- str=lpegmatch(replacer_1,str)
- repeat
- local old=str
- repeat
- local old=str
- str=lpegmatch(l_first,str)
- until old==str
- repeat
- local old=str
- str=lpegmatch(l_second,str)
- until old==str
- repeat
- local old=str
- str=lpegmatch(l_both,str)
- until old==str
- repeat
- local old=str
- str=lpegmatch(l_rest,str)
- until old==str
- until old==str
- str=lpegmatch(stripper_1,str)
- if validate then
- for s in gmatch(str,"[^,]+") do
- s=validate(s)
- if s then
- n=n+1
- t[n]=s
- end
- end
- else
- for s in gmatch(str,"[^,]+") do
- n=n+1
- t[n]=s
- end
- end
- if trace_expansions then
- for k=1,#t do
- report_expansions("% 4i: %s",k,t[k])
- end
- end
- return t
-end
-local function validate(s)
- s=collapsepath(s)
- return s~="" and not find(s,"^!*unset/*$") and s
-end
-resolvers.validatedpath=validate
-function resolvers.expandedpathfromlist(pathlist)
- local newlist={}
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- return newlist
-end
-local cleanup=lpeg.replacer {
- { "!","" },
- { "\\","/" },
-}
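--- cleanpath strips '!' markers, turns backslashes into forward slashes and, when a usable
--- home directory is known, expands '~' and '$HOME'; the function replaces itself on the
--- first call so the homedir check is only done once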
-function resolvers.cleanpath(str)
- local doslashes=(P("\\")/"/"+1)^0
- local donegation=(P("!")/"" )^0
- local homedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
- if homedir=="~" or homedir=="" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if not str or find(str,"~") then
- return ""
- else
- return lpegmatch(cleanup,str)
- end
- end
- else
- local dohome=((P("~")+P("$HOME"))/homedir)^0
- local cleanup=Cs(donegation*dohome*doslashes)
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str) or ""
- end
- end
- return resolvers.cleanpath(str)
-end
-local expandhome=P("~")/"$HOME"
-local dodouble=P('"')/""*(expandhome+(1-P('"')))^0*P('"')/""
-local dosingle=P("'")/""*(expandhome+(1-P("'")))^0*P("'")/""
-local dostring=(expandhome+1 )^0
-local stripper=Cs(
- lpegpatterns.unspacer*(dosingle+dodouble+dostring)*lpegpatterns.unspacer
-)
-function resolvers.checkedvariable(str)
- return type(str)=="string" and lpegmatch(stripper,str) or str
-end
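--- splitconfigurationpath splits a (cached) ';' separated specification into a list of
--- paths, after normalizing backslashes and dropping '{unset}' placeholders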
-local cache={}
-local splitter=lpeg.tsplitat(";")
-local backslashswapper=lpeg.replacer("\\","/")
-local function splitconfigurationpath(str)
- if str then
- local found=cache[str]
- if not found then
- if str=="" then
- found={}
- else
- local split=lpegmatch(splitter,lpegmatch(backslashswapper,str))
- found={}
- local noffound=0
- for i=1,#split do
- local s=split[i]
- if not find(s,"^{*unset}*") then
- noffound=noffound+1
- found[noffound]=s
- end
- end
- if trace_expansions then
- report_expansions("splitting path specification %a",str)
- for k=1,noffound do
- report_expansions("% 4i: %s",k,found[k])
- end
- end
- cache[str]=found
- end
- end
- return found
- end
-end
-resolvers.splitconfigurationpath=splitconfigurationpath
-function resolvers.splitpath(str)
- if type(str)=='table' then
- return str
- else
- return splitconfigurationpath(str)
- end
-end
-function resolvers.joinpath(str)
- if type(str)=='table' then
- return file.joinpath(str)
- else
- return str
- end
-end
-local attributes,directory=lfs.attributes,lfs.dir
-local weird=P(".")^1+lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-local timer={}
-local scanned={}
-local nofscans=0
-local scancache={}
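--- scan recursively walks a directory tree and collects a files table that maps each file
--- name to its (list of) relative path(s); lowercase aliases are stored under
--- "remap:<name>" so lookups can be case tolerant; scanfiles wraps this with timing,
--- tracing and an optional cache keyed on the resolved path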
-local function scan(files,spec,path,n,m,r)
- local full=(path=="" and spec) or (spec..path..'/')
- local dirs={}
- local nofdirs=0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode=attributes(full..name,'mode')
- if mode=='file' then
- n=n+1
- local f=files[name]
- if f then
- if type(f)=='string' then
- files[name]={ f,path }
- else
- f[#f+1]=path
- end
- else
- files[name]=path
- local lower=lower(name)
- if name~=lower then
- files["remap:"..lower]=name
- r=r+1
- end
- end
- elseif mode=='directory' then
- m=m+1
- nofdirs=nofdirs+1
- if path~="" then
- dirs[nofdirs]=path..'/'..name
- else
- dirs[nofdirs]=name
- end
- end
- end
- end
- if nofdirs>0 then
- sort(dirs)
- for i=1,nofdirs do
- files,n,m,r=scan(files,spec,dirs[i],n,m,r)
- end
- end
- scancache[sub(full,1,-2)]=files
- return files,n,m,r
-end
-local fullcache={}
-function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath=resolvers.resolve(path)
- if usecache then
- local files=fullcache[realpath]
- if files then
- if trace_locating then
- report_expansions("using caches scan of path %a, branch %a",path,branch or path)
- end
- return files
- end
- end
- if trace_locating then
- report_expansions("scanning path %a, branch %a",path,branch or path)
- end
- local files,n,m,r=scan({},realpath..'/',"",0,0,0)
- files.__path__=path
- files.__files__=n
- files.__directories__=m
- files.__remappings__=r
- if trace_locating then
- report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
- end
- if usecache then
- scanned[#scanned+1]=realpath
- fullcache[realpath]=files
- end
- nofscans=nofscans+1
- statistics.stoptiming(timer)
- return files
-end
-local function simplescan(files,spec,path)
- local full=(path=="" and spec) or (spec..path..'/')
- local dirs={}
- local nofdirs=0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode=attributes(full..name,'mode')
- if mode=='file' then
- if not files[name] then
- files[name]=path
- end
- elseif mode=='directory' then
- nofdirs=nofdirs+1
- if path~="" then
- dirs[nofdirs]=path..'/'..name
- else
- dirs[nofdirs]=name
- end
- end
- end
- end
- if nofdirs>0 then
- sort(dirs)
- for i=1,nofdirs do
- files=simplescan(files,spec,dirs[i])
- end
- end
- return files
-end
-local simplecache={}
-local nofsharedscans=0
-function resolvers.simplescanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath=resolvers.resolve(path)
- if usecache then
- local files=simplecache[realpath]
- if not files then
- files=scancache[realpath]
- if files then
- nofsharedscans=nofsharedscans+1
- end
- end
- if files then
- if trace_locating then
- report_expansions("using caches scan of path %a, branch %a",path,branch or path)
- end
- return files
- end
- end
- if trace_locating then
- report_expansions("scanning path %a, branch %a",path,branch or path)
- end
- local files=simplescan({},realpath..'/',"")
- if trace_locating then
- report_expansions("%s files found",table.count(files))
- end
- if usecache then
- scanned[#scanned+1]=realpath
- simplecache[realpath]=files
- end
- nofscans=nofscans+1
- statistics.stoptiming(timer)
- return files
-end
-function resolvers.scandata()
- table.sort(scanned)
- return {
- n=nofscans,
- shared=nofsharedscans,
- time=statistics.elapsedtime(timer),
- paths=scanned,
- }
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-env"] = package.loaded["data-env"] or true
-
--- original size: 8769, stripped down to: 6490
-
-if not modules then modules={} end modules ['data-env']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files",
-}
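--- data-env: declares the relations between file formats, their associated environment
--- variables and file suffixes, and derives the formats, suffixes and suffixmap lookup
--- tables from them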
-local lower,gsub=string.lower,string.gsub
-local resolvers=resolvers
-local allocate=utilities.storage.allocate
-local setmetatableindex=table.setmetatableindex
-local suffixonly=file.suffixonly
-local formats=allocate()
-local suffixes=allocate()
-local dangerous=allocate()
-local suffixmap=allocate()
-resolvers.formats=formats
-resolvers.suffixes=suffixes
-resolvers.dangerous=dangerous
-resolvers.suffixmap=suffixmap
-local luasuffixes=utilities.lua.suffixes
-local relations=allocate {
- core={
- ofm={
- names={ "ofm","omega font metric","omega font metrics" },
- variable='OFMFONTS',
- suffixes={ 'ofm','tfm' },
- },
- ovf={
- names={ "ovf","omega virtual font","omega virtual fonts" },
- variable='OVFFONTS',
- suffixes={ 'ovf','vf' },
- },
- tfm={
- names={ "tfm","tex font metric","tex font metrics" },
- variable='TFMFONTS',
- suffixes={ 'tfm' },
- },
- vf={
- names={ "vf","virtual font","virtual fonts" },
- variable='VFFONTS',
- suffixes={ 'vf' },
- },
- otf={
- names={ "otf","opentype","opentype font","opentype fonts"},
- variable='OPENTYPEFONTS',
- suffixes={ 'otf' },
- },
- ttf={
- names={ "ttf","truetype","truetype font","truetype fonts","truetype collection","truetype collections","truetype dictionary","truetype dictionaries" },
- variable='TTFONTS',
- suffixes={ 'ttf','ttc','dfont' },
- },
- afm={
- names={ "afm","adobe font metric","adobe font metrics" },
- variable="AFMFONTS",
- suffixes={ "afm" },
- },
- pfb={
- names={ "pfb","type1","type 1","type1 font","type 1 font","type1 fonts","type 1 fonts" },
- variable='T1FONTS',
- suffixes={ 'pfb','pfa' },
- },
- fea={
- names={ "fea","font feature","font features","font feature file","font feature files" },
- variable='FONTFEATURES',
- suffixes={ 'fea' },
- },
- cid={
- names={ "cid","cid map","cid maps","cid file","cid files" },
- variable='FONTCIDMAPS',
- suffixes={ 'cid','cidmap' },
- },
- fmt={
- names={ "fmt","format","tex format" },
- variable='TEXFORMATS',
- suffixes={ 'fmt' },
- },
- mem={
- names={ 'mem',"metapost format" },
- variable='MPMEMS',
- suffixes={ 'mem' },
- },
- mp={
- names={ "mp" },
- variable='MPINPUTS',
- suffixes={ 'mp','mpvi','mpiv','mpii' },
- },
- tex={
- names={ "tex" },
- variable='TEXINPUTS',
- suffixes={ 'tex',"mkvi","mkiv","mkii" },
- },
- icc={
- names={ "icc","icc profile","icc profiles" },
- variable='ICCPROFILES',
- suffixes={ 'icc' },
- },
- texmfscripts={
- names={ "texmfscript","texmfscripts","script","scripts" },
- variable='TEXMFSCRIPTS',
- suffixes={ 'lua','rb','pl','py' },
- },
- lua={
- names={ "lua" },
- variable='LUAINPUTS',
- suffixes={ luasuffixes.lua,luasuffixes.luc,luasuffixes.tma,luasuffixes.tmc },
- },
- lib={
- names={ "lib" },
- variable='CLUAINPUTS',
- suffixes=os.libsuffix and { os.libsuffix } or { 'dll','so' },
- },
- bib={
- names={ 'bib' },
- suffixes={ 'bib' },
- },
- bst={
- names={ 'bst' },
- suffixes={ 'bst' },
- },
- fontconfig={
- names={ 'fontconfig','fontconfig file','fontconfig files' },
- variable='FONTCONFIG_PATH',
- },
- },
- obsolete={
- enc={
- names={ "enc","enc files","enc file","encoding files","encoding file" },
- variable='ENCFONTS',
- suffixes={ 'enc' },
- },
- map={
- names={ "map","map files","map file" },
- variable='TEXFONTMAPS',
- suffixes={ 'map' },
- },
- lig={
- names={ "lig files","lig file","ligature file","ligature files" },
- variable='LIGFONTS',
- suffixes={ 'lig' },
- },
- opl={
- names={ "opl" },
- variable='OPLFONTS',
- suffixes={ 'opl' },
- },
- ovp={
- names={ "ovp" },
- variable='OVPFONTS',
- suffixes={ 'ovp' },
- },
- },
- kpse={
- base={
- names={ 'base',"metafont format" },
- variable='MFBASES',
- suffixes={ 'base','bas' },
- },
- cmap={
- names={ 'cmap','cmap files','cmap file' },
- variable='CMAPFONTS',
- suffixes={ 'cmap' },
- },
- cnf={
- names={ 'cnf' },
- suffixes={ 'cnf' },
- },
- web={
- names={ 'web' },
- suffixes={ 'web','ch' }
- },
- cweb={
- names={ 'cweb' },
- suffixes={ 'w','web','ch' },
- },
- gf={
- names={ 'gf' },
- suffixes={ '<resolution>gf' },
- },
- mf={
- names={ 'mf' },
- variable='MFINPUTS',
- suffixes={ 'mf' },
- },
- mft={
- names={ 'mft' },
- suffixes={ 'mft' },
- },
- pk={
- names={ 'pk' },
- suffixes={ '<resolution>pk' },
- },
- },
-}
-resolvers.relations=relations
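--- updaterelations flattens the relations table into the lookup tables: names are
--- lowercased and stripped of spaces, so for example formats["tex"] becomes "TEXINPUTS"
--- and suffixmap["mkiv"] becomes "tex"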
-function resolvers.updaterelations()
- for category,categories in next,relations do
- for name,relation in next,categories do
- local rn=relation.names
- local rv=relation.variable
- local rs=relation.suffixes
- if rn and rv then
- for i=1,#rn do
- local rni=lower(gsub(rn[i]," ",""))
- formats[rni]=rv
- if rs then
- suffixes[rni]=rs
- for i=1,#rs do
- local rsi=rs[i]
- suffixmap[rsi]=rni
- end
- end
- end
- end
- if rs then
- end
- end
- end
-end
-resolvers.updaterelations()
-local function simplified(t,k)
- return k and rawget(t,lower(gsub(k," ",""))) or nil
-end
-setmetatableindex(formats,simplified)
-setmetatableindex(suffixes,simplified)
-setmetatableindex(suffixmap,simplified)
-function resolvers.suffixofformat(str)
- local s=suffixes[str]
- return s and s[1] or ""
-end
-function resolvers.suffixesofformat(str) -- plural variant: returns the whole suffix list
- return suffixes[str] or {}
-end
-for name,format in next,formats do
- dangerous[name]=true
-end
-dangerous.tex=nil
-function resolvers.formatofvariable(str)
- return formats[str] or ''
-end
-function resolvers.formatofsuffix(str)
- return suffixmap[suffixonly(str)] or 'tex'
-end
-function resolvers.variableofformat(str)
- return formats[str] or ''
-end
-function resolvers.variableofformatorsuffix(str)
- local v=formats[str]
- if v then
- return v
- end
- v=suffixmap[suffixonly(str)]
- if v then
- return formats[v]
- end
- return ''
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-tmp"] = package.loaded["data-tmp"] or true
-
--- original size: 15532, stripped down to: 11648
-
-if not modules then modules={} end modules ['data-tmp']={
- version=1.100,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
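--- data-tmp: identifies the writable and readable cache paths and provides loading and
--- saving of serialized Lua tables, optionally compiled to bytecode (tma/tmc pairs)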
-local format,lower,gsub,concat=string.format,string.lower,string.gsub,table.concat
-local concat,serialize,serializetofile=table.concat,table.serialize,table.tofile
-local mkdirs,isdir,isfile=dir.mkdirs,lfs.isdir,lfs.isfile
-local addsuffix,is_writable,is_readable=file.addsuffix,file.is_writable,file.is_readable
-local formatters=string.formatters
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
-local report_caches=logs.reporter("resolvers","caches")
-local report_resolvers=logs.reporter("resolvers","caching")
-local resolvers=resolvers
-local directive_cleanup=false directives.register("system.compile.cleanup",function(v) directive_cleanup=v end)
-local directive_strip=false directives.register("system.compile.strip",function(v) directive_strip=v end)
-local compile=utilities.lua.compile
-function utilities.lua.compile(luafile,lucfile,cleanup,strip)
- if cleanup==nil then cleanup=directive_cleanup end
- if strip==nil then strip=directive_strip end
- return compile(luafile,lucfile,cleanup,strip)
-end
-caches=caches or {}
-local caches=caches
-local luasuffixes=utilities.lua.suffixes
-caches.base=caches.base or "luatex-cache"
-caches.more=caches.more or "context"
-caches.direct=false
-caches.tree=false
-caches.force=true
-caches.ask=false
-caches.relocate=false
-caches.defaults={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
-local writable,readables,usedreadables=nil,{},{}
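--- identify walks the TEXMFCACHE paths first and then the fallback variables in
--- caches.defaults, remembers all readable cache paths, picks (or, when caches.force is
--- set, creates) the first writable one, and then memoizes its result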
-local function identify()
- local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
- if texmfcaches then
- for k=1,#texmfcaches do
- local cachepath=texmfcaches[k]
- if cachepath~="" then
- cachepath=resolvers.resolve(cachepath)
- cachepath=resolvers.cleanpath(cachepath)
- cachepath=file.collapsepath(cachepath)
- local valid=isdir(cachepath)
- if valid then
- if is_readable(cachepath) then
- readables[#readables+1]=cachepath
- if not writable and is_writable(cachepath) then
- writable=cachepath
- end
- end
- elseif not writable and caches.force then
- local cacheparent=file.dirname(cachepath)
- if is_writable(cacheparent) then
- if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath),"no",{ "yes","no" })=="yes" then
- mkdirs(cachepath)
- if isdir(cachepath) and is_writable(cachepath) then
- report_caches("path %a created",cachepath)
- writable=cachepath
- readables[#readables+1]=cachepath
- end
- end
- end
- end
- end
- end
- end
- local texmfcaches=caches.defaults
- if texmfcaches then
- for k=1,#texmfcaches do
- local cachepath=texmfcaches[k]
- cachepath=resolvers.expansion(cachepath)
- if cachepath~="" then
- cachepath=resolvers.resolve(cachepath)
- cachepath=resolvers.cleanpath(cachepath)
- local valid=isdir(cachepath)
- if valid and is_readable(cachepath) then
- if not writable and is_writable(cachepath) then
- readables[#readables+1]=cachepath
- writable=cachepath
- break
- end
- end
- end
- end
- end
- if not writable then
- report_caches("fatal error: there is no valid writable cache path defined")
- os.exit()
- elseif #readables==0 then
- report_caches("fatal error: there is no valid readable cache path defined")
- os.exit()
- end
- writable=dir.expandname(resolvers.cleanpath(writable))
- local base,more,tree=caches.base,caches.more,caches.tree or caches.treehash()
- if tree then
- caches.tree=tree
- writable=mkdirs(writable,base,more,tree)
- for i=1,#readables do
- readables[i]=file.join(readables[i],base,more,tree)
- end
- else
- writable=mkdirs(writable,base,more)
- for i=1,#readables do
- readables[i]=file.join(readables[i],base,more)
- end
- end
- if trace_cache then
- for i=1,#readables do
- report_caches("using readable path %a (order %s)",readables[i],i)
- end
- report_caches("using writable path %a",writable)
- end
- identify=function()
- return writable,readables
- end
- return writable,readables
-end
-function caches.usedpaths(separator)
- local writable,readables=identify()
- if #readables>1 then
- local result={}
- local done={}
- for i=1,#readables do
- local readable=readables[i]
- if readable==writable then
- done[readable]=true
- result[#result+1]=formatters["readable+writable: %a"](readable)
- elseif usedreadables[i] then
- done[readable]=true
- result[#result+1]=formatters["readable: %a"](readable)
- end
- end
- if not done[writable] then
- result[#result+1]=formatters["writable: %a"](writable)
- end
- return concat(result,separator or " | ")
- else
- return writable or "?"
- end
-end
-function caches.configfiles()
- return concat(resolvers.instance.specification,";")
-end
-function caches.hashed(tree)
- tree=gsub(tree,"[\\/]+$","")
- tree=lower(tree)
- local hash=md5.hex(tree)
- if trace_cache or trace_locating then
- report_caches("hashing tree %a, hash %a",tree,hash)
- end
- return hash
-end
-function caches.treehash()
- local tree=caches.configfiles()
- if not tree or tree=="" then
- return false
- else
- return caches.hashed(tree)
- end
-end
-local r_cache,w_cache={},{}
-local function getreadablepaths(...)
- local tags={... }
- local hash=concat(tags,"/")
- local done=r_cache[hash]
- if not done then
- local writable,readables=identify()
- if #tags>0 then
- done={}
- for i=1,#readables do
- done[i]=file.join(readables[i],...)
- end
- else
- done=readables
- end
- r_cache[hash]=done
- end
- return done
-end
-local function getwritablepath(...)
- local tags={... }
- local hash=concat(tags,"/")
- local done=w_cache[hash]
- if not done then
- local writable,readables=identify()
- if #tags>0 then
- done=mkdirs(writable,...)
- else
- done=writable
- end
- w_cache[hash]=done
- end
- return done
-end
-caches.getreadablepaths=getreadablepaths
-caches.getwritablepath=getwritablepath
-function caches.getfirstreadablefile(filename,...)
- local rd=getreadablepaths(...)
- for i=1,#rd do
- local path=rd[i]
- local fullname=file.join(path,filename)
- if is_readable(fullname) then
- usedreadables[i]=true
- return fullname,path
- end
- end
- return caches.setfirstwritablefile(filename,...)
-end
-function caches.getfirstreadablefile_TEST_ME_FIRST(filename,...)
- local fullname,path=caches.setfirstwritablefile(filename,...)
- if is_readable(fullname) then
- return fullname,path
- end
- local rd=getreadablepaths(...)
- for i=1,#rd do
- local path=rd[i]
- local fullname=file.join(path,filename)
- if is_readable(fullname) then
- usedreadables[i]=true
- return fullname,path
- end
- end
- return fullname,path
-end
-function caches.setfirstwritablefile(filename,...)
- local wr=getwritablepath(...)
- local fullname=file.join(wr,filename)
- return fullname,wr
-end
-function caches.define(category,subcategory)
- return function()
- return getwritablepath(category,subcategory)
- end
-end
-function caches.setluanames(path,name)
- return format("%s/%s.%s",path,name,luasuffixes.tma),format("%s/%s.%s",path,name,luasuffixes.tmc)
-end
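--- loaddata tries the compiled tmc file first and falls back to (and compiles) the tma
--- source; savedata serializes to the tma file and then compiles it to tmc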
-function caches.loaddata(readables,name)
- if type(readables)=="string" then
- readables={ readables }
- end
- for i=1,#readables do
- local path=readables[i]
- local tmaname,tmcname=caches.setluanames(path,name)
- local loader=false
- if isfile(tmcname) then
- loader=loadfile(tmcname)
- end
- if not loader and isfile(tmaname) then
- utilities.lua.compile(tmaname,tmcname)
- if isfile(tmcname) then
- loader=loadfile(tmcname)
- end
- if not loader then
- loader=loadfile(tmaname)
- end
- end
- if loader then
- loader=loader()
- collectgarbage("step")
- return loader
- end
- end
- return false
-end
-function caches.is_writable(filepath,filename)
- local tmaname,tmcname=caches.setluanames(filepath,filename)
- return is_writable(tmaname)
-end
-local saveoptions={ compact=true }
-function caches.savedata(filepath,filename,data,raw)
- local tmaname,tmcname=caches.setluanames(filepath,filename)
- local reduce,simplify=true,true
- if raw then
- reduce,simplify=false,false
- end
- data.cache_uuid=os.uuid()
- if caches.direct then
- file.savedata(tmaname,serialize(data,true,saveoptions))
- else
- serializetofile(tmaname,data,true,saveoptions)
- end
- utilities.lua.compile(tmaname,tmcname)
-end
-local content_state={}
-function caches.contentstate()
- return content_state or {}
-end
-function caches.loadcontent(cachename,dataname)
- local name=caches.hashed(cachename)
- local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
- local filename=file.join(path,name)
- local blob=loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
- if blob then
- local data=blob()
- if data and data.content then
- if data.type==dataname then
- if data.version==resolvers.cacheversion then
- content_state[#content_state+1]=data.uuid
- if trace_locating then
- report_resolvers("loading %a for %a from %a",dataname,cachename,filename)
- end
- return data.content
- else
- report_resolvers("skipping %a for %a from %a (version mismatch)",dataname,cachename,filename)
- end
- else
- report_resolvers("skipping %a for %a from %a (datatype mismatch)",dataname,cachename,filename)
- end
- elseif trace_locating then
- report_resolvers("skipping %a for %a from %a (no content)",dataname,cachename,filename)
- end
- elseif trace_locating then
- report_resolvers("skipping %a for %a from %a (invalid file)",dataname,cachename,filename)
- end
-end
-function caches.collapsecontent(content)
- for k,v in next,content do
- if type(v)=="table" and #v==1 then
- content[k]=v[1]
- end
- end
-end
-function caches.savecontent(cachename,dataname,content)
- local name=caches.hashed(cachename)
- local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
- local filename=file.join(path,name)
- local luaname=addsuffix(filename,luasuffixes.lua)
- local lucname=addsuffix(filename,luasuffixes.luc)
- if trace_locating then
- report_resolvers("preparing %a for %a",dataname,cachename)
- end
- local data={
- type=dataname,
- root=cachename,
- version=resolvers.cacheversion,
- date=os.date("%Y-%m-%d"),
- time=os.date("%H:%M:%S"),
- content=content,
- uuid=os.uuid(),
- }
- local ok=io.savedata(luaname,serialize(data,true))
- if ok then
- if trace_locating then
- report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
- end
- if utilities.lua.compile(luaname,lucname) then
- if trace_locating then
- report_resolvers("%a compiled to %a",dataname,lucname)
- end
- return true
- else
- if trace_locating then
- report_resolvers("compiling failed for %a, deleting file %a",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- report_resolvers("unable to save %a in %a (access error)",dataname,luaname)
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-met"] = package.loaded["data-met"] or true
-
--- original size: 5453, stripped down to: 4007
-
-if not modules then modules={} end modules ['data-met']={
- version=1.100,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
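--- data-met: registers resolver method namespaces (concatinators, locators, hashers,
--- generators) and dispatches calls to them either by tag or by the scheme of an url like
--- specification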
-local find,format=string.find,string.format
-local sequenced=table.sequenced
-local addurlscheme,urlhashed=url.addscheme,url.hashed
-local getcurrentdir=lfs.currentdir
-local trace_locating=false
-local trace_methods=false
-trackers.register("resolvers.locating",function(v) trace_methods=v end)
-trackers.register("resolvers.methods",function(v) trace_methods=v end)
-local report_methods=logs.reporter("resolvers","methods")
-local allocate=utilities.storage.allocate
-local resolvers=resolvers
-local registered={}
-local function splitmethod(filename)
- if not filename then
- return { scheme="unknown",original=filename }
- end
- if type(filename)=="table" then
- return filename
- end
- filename=file.collapsepath(filename,".")
- if not find(filename,"://") then
- return { scheme="file",path=filename,original=filename,filename=filename }
- end
- local specification=url.hashed(filename)
- if not specification.scheme or specification.scheme=="" then
- return { scheme="file",path=filename,original=filename,filename=filename }
- else
- return specification
- end
-end
-resolvers.splitmethod=splitmethod
-local function methodhandler(what,first,...)
- local method=registered[what]
- if method then
- local how,namespace=method.how,method.namespace
- if how=="uri" or how=="url" then
- local specification=splitmethod(first)
- local scheme=specification.scheme
- local resolver=namespace and namespace[scheme]
- if resolver then
- if trace_methods then
- report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,scheme,first)
- end
- return resolver(specification,...)
- else
- resolver=namespace.default or namespace.file
- if resolver then
- if trace_methods then
- report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"default",first)
- end
- return resolver(specification,...)
- elseif trace_methods then
- report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"unset")
- end
- end
- elseif how=="tag" then
- local resolver=namespace and namespace[first]
- if resolver then
- if trace_methods then
- report_methods("resolving, method %a, how %a, tag %a",what,how,first)
- end
- return resolver(...)
- else
- resolver=namespace.default or namespace.file
- if resolver then
- if trace_methods then
- report_methods("resolving, method %a, how %a, tag %a",what,how,"default")
- end
- return resolver(...)
- elseif trace_methods then
- report_methods("resolving, method %a, how %a, tag %a",what,how,"unset")
- end
- end
- end
- else
- report_methods("resolving, invalid method %a")
- end
-end
-resolvers.methodhandler=methodhandler
-function resolvers.registermethod(name,namespace,how)
- registered[name]={ how=how or "tag",namespace=namespace }
- namespace["byscheme"]=function(scheme,filename,...)
- if scheme=="file" then
- return methodhandler(name,filename,...)
- else
- return methodhandler(name,addurlscheme(filename,scheme),...)
- end
- end
-end
-local concatinators=allocate { notfound=file.join }
-local locators=allocate { notfound=function() end }
-local hashers=allocate { notfound=function() end }
-local generators=allocate { notfound=function() end }
-resolvers.concatinators=concatinators
-resolvers.locators=locators
-resolvers.hashers=hashers
-resolvers.generators=generators
-local registermethod=resolvers.registermethod
-registermethod("concatinators",concatinators,"tag")
-registermethod("locators",locators,"uri")
-registermethod("hashers",hashers,"uri")
-registermethod("generators",generators,"uri")
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-res"] = package.loaded["data-res"] or true
-
--- original size: 61799, stripped down to: 42957
-
-if not modules then modules={} end modules ['data-res']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files",
-}
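--- data-res: the core resolver; it identifies and loads the texmfcnf.lua configuration
--- files, manages the per tree file databases (hashes) and path expansions, and implements
--- the findfile(s) lookup used everywhere else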
-local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
-local concat,insert,sortedkeys=table.concat,table.insert,table.sortedkeys
-local next,type,rawget=next,type,rawget
-local os=os
-local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
-local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
-local formatters=string.formatters
-local filedirname=file.dirname
-local filebasename=file.basename
-local suffixonly=file.suffixonly
-local filejoin=file.join
-local collapsepath=file.collapsepath
-local joinpath=file.joinpath
-local allocate=utilities.storage.allocate
-local settings_to_array=utilities.parsers.settings_to_array
-local setmetatableindex=table.setmetatableindex
-local luasuffixes=utilities.lua.suffixes
-local getcurrentdir=lfs.currentdir
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
-local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
-local report_resolving=logs.reporter("resolvers","resolving")
-local resolvers=resolvers
-local expandedpathfromlist=resolvers.expandedpathfromlist
-local checkedvariable=resolvers.checkedvariable
-local splitconfigurationpath=resolvers.splitconfigurationpath
-local methodhandler=resolvers.methodhandler
-local initializesetter=utilities.setters.initialize
-local ostype,osname,osenv,ossetenv,osgetenv=os.type,os.name,os.env,os.setenv,os.getenv
-resolvers.cacheversion='1.0.1'
-resolvers.configbanner=''
-resolvers.homedir=environment.homedir
-resolvers.criticalvars=allocate { "SELFAUTOLOC","SELFAUTODIR","SELFAUTOPARENT","TEXMFCNF","TEXMF","TEXOS" }
-resolvers.luacnfname="texmfcnf.lua"
-resolvers.luacnfstate="unknown"
-if environment.default_texmfcnf then
- resolvers.luacnfspec="home:texmf/web2c;"..environment.default_texmfcnf
-else
- resolvers.luacnfspec=concat ({
- "home:texmf/web2c",
- "selfautoparent:/texmf-local/web2c",
- "selfautoparent:/texmf-context/web2c",
- "selfautoparent:/texmf-dist/web2c",
- "selfautoparent:/texmf/web2c",
- },";")
-end
-local unset_variable="unset"
-local formats=resolvers.formats
-local suffixes=resolvers.suffixes
-local dangerous=resolvers.dangerous
-local suffixmap=resolvers.suffixmap
-resolvers.defaultsuffixes={ "tex" }
-resolvers.instance=resolvers.instance or nil
-local instance=resolvers.instance or nil
-function resolvers.setenv(key,value,raw)
- if instance then
- instance.environment[key]=value
- ossetenv(key,raw and value or resolvers.resolve(value))
- end
-end
-local function getenv(key)
- local value=rawget(instance.environment,key)
- if value and value~="" then
- return value
- else
- local e=osgetenv(key)
- return e~=nil and e~="" and checkedvariable(e) or ""
- end
-end
-resolvers.getenv=getenv
-resolvers.env=getenv
-local function resolve(k)
- return instance.expansions[k]
-end
-local dollarstripper=lpeg.stripper("$")
-local inhibitstripper=P("!")^0*Cs(P(1)^0)
-local backslashswapper=lpeg.replacer("\\","/")
-local somevariable=P("$")/""
-local somekey=C(R("az","AZ","09","__","--")^1)
-local somethingelse=P(";")*((1-S("!{}/\\"))^1*P(";")/"")+P(";")*(P(";")/"")+P(1)
-local variableexpander=Cs((somevariable*(somekey/resolve)+somethingelse)^1 )
-local cleaner=P("\\")/"/"+P(";")*S("!{}/\\")^0*P(";")^1/";"
-local variablecleaner=Cs((cleaner+P(1))^0)
-local somevariable=R("az","AZ","09","__","--")^1/resolve
-local variable=(P("$")/"")*(somevariable+(P("{")/"")*somevariable*(P("}")/""))
-local variableresolver=Cs((variable+P(1))^0)
-local function expandedvariable(var)
- return lpegmatch(variableexpander,var) or var
-end
-function resolvers.newinstance()
- if trace_locating then
- report_resolving("creating instance")
- end
- local environment,variables,expansions,order=allocate(),allocate(),allocate(),allocate()
- local newinstance={
- environment=environment,
- variables=variables,
- expansions=expansions,
- order=order,
- files=allocate(),
- setups=allocate(),
- found=allocate(),
- foundintrees=allocate(),
- hashes=allocate(),
- hashed=allocate(),
- specification=allocate(),
- lists=allocate(),
- data=allocate(),
- fakepaths=allocate(),
- remember=true,
- diskcache=true,
- renewcache=false,
- renewtree=false,
- loaderror=false,
- savelists=true,
- pattern=nil,
- force_suffixes=true,
- }
- setmetatableindex(variables,function(t,k)
- local v
- for i=1,#order do
- v=order[i][k]
- if v~=nil then
- t[k]=v
- return v
- end
- end
- if v==nil then
- v=""
- end
- t[k]=v
- return v
- end)
- setmetatableindex(environment,function(t,k)
- local v=osgetenv(k)
- if v==nil then
- v=variables[k]
- end
- if v~=nil then
- v=checkedvariable(v) or ""
- end
- v=resolvers.repath(v)
- t[k]=v
- return v
- end)
- setmetatableindex(expansions,function(t,k)
- local v=environment[k]
- if type(v)=="string" then
- v=lpegmatch(variableresolver,v)
- v=lpegmatch(variablecleaner,v)
- end
- t[k]=v
- return v
- end)
- return newinstance
-end
-function resolvers.setinstance(someinstance)
- instance=someinstance
- resolvers.instance=someinstance
- return someinstance
-end
-function resolvers.reset()
- return resolvers.setinstance(resolvers.newinstance())
-end
-local function reset_hashes()
- instance.lists={}
- instance.found={}
-end
-local slash=P("/")
-local pathexpressionpattern=Cs (
- Cc("^")*(
-  Cc("%")*S(".-")
-  +slash^2*P(-1)/"/.*"
-  +slash^2/"/"
-  +(1-slash)*P(-1)*Cc("/")
-  +P(1)
- )^1*Cc("$")
-)
-local cache={}
-local function makepathexpression(str)
- if str=="." then
- return "^%./$"
- else
- local c=cache[str]
- if not c then
- c=lpegmatch(pathexpressionpattern,str)
- cache[str]=c
- end
- return c
- end
-end
-local function reportcriticalvariables(cnfspec)
- if trace_locating then
- for i=1,#resolvers.criticalvars do
- local k=resolvers.criticalvars[i]
- local v=resolvers.getenv(k) or "unknown"
- report_resolving("variable %a set to %a",k,v)
- end
- report_resolving()
- if cnfspec then
- report_resolving("using configuration specification %a",type(cnfspec)=="table" and concat(cnfspec,",") or cnfspec)
- end
- report_resolving()
- end
- reportcriticalvariables=function() end
-end
-local function identify_configuration_files()
- local specification=instance.specification
- if #specification==0 then
- local cnfspec=getenv("TEXMFCNF")
- if cnfspec=="" then
- cnfspec=resolvers.luacnfspec
- resolvers.luacnfstate="default"
- else
- resolvers.luacnfstate="environment"
- end
- reportcriticalvariables(cnfspec)
- local cnfpaths=expandedpathfromlist(resolvers.splitpath(cnfspec))
- local luacnfname=resolvers.luacnfname
- for i=1,#cnfpaths do
- local filepath=cnfpaths[i]
- local filename=collapsepath(filejoin(filepath,luacnfname))
- local realname=resolvers.resolve(filename)
- if trace_locating then
- local fullpath=gsub(resolvers.resolve(collapsepath(filepath)),"//","/")
- local weirdpath=find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c")
- report_resolving("looking for %a on %s path %a from specification %a",luacnfname,weirdpath and "weird" or "given",fullpath,filepath)
- end
- if lfs.isfile(realname) then
- specification[#specification+1]=filename
- if trace_locating then
- report_resolving("found configuration file %a",realname)
- end
- end
- end
- if trace_locating then
- report_resolving()
- end
- elseif trace_locating then
- report_resolving("configuration files already identified")
- end
-end
-local function load_configuration_files()
- local specification=instance.specification
- if #specification>0 then
- local luacnfname=resolvers.luacnfname
- for i=1,#specification do
- local filename=specification[i]
- local pathname=filedirname(filename)
- local filename=filejoin(pathname,luacnfname)
- local realname=resolvers.resolve(filename)
- local blob=loadfile(realname)
- if blob then
- local setups=instance.setups
- local data=blob()
- local parent=data and data.parent
- if parent then
- local filename=filejoin(pathname,parent)
- local realname=resolvers.resolve(filename)
- local blob=loadfile(realname)
- if blob then
- local parentdata=blob()
- if parentdata then
- report_resolving("loading configuration file %a",filename)
- data=table.merged(parentdata,data)
- end
- end
- end
- data=data and data.content
- if data then
- if trace_locating then
- report_resolving("loading configuration file %a",filename)
- report_resolving()
- end
- local variables=data.variables or {}
- local warning=false
- for k,v in next,data do
- local variant=type(v)
- if variant=="table" then
- initializesetter(filename,k,v)
- elseif variables[k]==nil then
- if trace_locating and not warning then
- report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
- k,resolvers.resolve(filename))
- warning=true
- end
- variables[k]=v
- end
- end
- setups[pathname]=variables
- if resolvers.luacnfstate=="default" then
- local cnfspec=variables["TEXMFCNF"]
- if cnfspec then
- if trace_locating then
- report_resolving("reloading configuration due to TEXMF redefinition")
- end
- resolvers.setenv("TEXMFCNF",cnfspec)
- instance.specification={}
- identify_configuration_files()
- load_configuration_files()
- resolvers.luacnfstate="configuration"
- break
- end
- end
- else
- if trace_locating then
- report_resolving("skipping configuration file %a (no content)",filename)
- end
- setups[pathname]={}
- instance.loaderror=true
- end
- elseif trace_locating then
- report_resolving("skipping configuration file %a (no valid format)",filename)
- end
- instance.order[#instance.order+1]=instance.setups[pathname]
- if instance.loaderror then
- break
- end
- end
- elseif trace_locating then
- report_resolving("warning: no lua configuration files found")
- end
-end
-local function load_file_databases()
- instance.loaderror,instance.files=false,allocate()
- if not instance.renewcache then
- local hashes=instance.hashes
- for k=1,#hashes do
- local hash=hashes[k]
- resolvers.hashers.byscheme(hash.type,hash.name)
- if instance.loaderror then break end
- end
- end
-end
-local function locate_file_databases()
- local texmfpaths=resolvers.expandedpathlist("TEXMF")
- if #texmfpaths>0 then
- for i=1,#texmfpaths do
- local path=collapsepath(texmfpaths[i])
- path=gsub(path,"/+$","")
- local stripped=lpegmatch(inhibitstripper,path)
- if stripped~="" then
- local runtime=stripped==path
- path=resolvers.cleanpath(path)
- local spec=resolvers.splitmethod(stripped)
- if runtime and (spec.noscheme or spec.scheme=="file") then
- stripped="tree:///"..stripped
- elseif spec.scheme=="cache" or spec.scheme=="file" then
- stripped=spec.path
- end
- if trace_locating then
- if runtime then
- report_resolving("locating list of %a (runtime) (%s)",path,stripped)
- else
- report_resolving("locating list of %a (cached)",path)
- end
- end
- methodhandler('locators',stripped)
- end
- end
- if trace_locating then
- report_resolving()
- end
- elseif trace_locating then
- report_resolving("no texmf paths are defined (using TEXMF)")
- end
-end
-local function generate_file_databases()
- local hashes=instance.hashes
- for k=1,#hashes do
- local hash=hashes[k]
- methodhandler('generators',hash.name)
- end
- if trace_locating then
- report_resolving()
- end
-end
-local function save_file_databases()
- for i=1,#instance.hashes do
- local hash=instance.hashes[i]
- local cachename=hash.name
- if hash.cache then
- local content=instance.files[cachename]
- caches.collapsecontent(content)
- if trace_locating then
- report_resolving("saving tree %a",cachename)
- end
- caches.savecontent(cachename,"files",content)
- elseif trace_locating then
- report_resolving("not saving runtime tree %a",cachename)
- end
- end
-end
-function resolvers.renew(hashname)
- if hashname and hashname~="" then
- local expanded=resolvers.expansion(hashname) or ""
- if expanded~="" then
- if trace_locating then
- report_resolving("identifying tree %a from %a",expanded,hashname)
- end
- hashname=expanded
- else
- if trace_locating then
- report_resolving("identifying tree %a",hashname)
- end
- end
- local realpath=resolvers.resolve(hashname)
- if lfs.isdir(realpath) then
- if trace_locating then
- report_resolving("using path %a",realpath)
- end
- methodhandler('generators',hashname)
- local content=instance.files[hashname]
- caches.collapsecontent(content)
- if trace_locating then
- report_resolving("saving tree %a",hashname)
- end
- caches.savecontent(hashname,"files",content)
- else
- report_resolving("invalid path %a",realpath)
- end
- end
-end
-local function load_databases()
- locate_file_databases()
- if instance.diskcache and not instance.renewcache then
- load_file_databases()
- if instance.loaderror then
- generate_file_databases()
- save_file_databases()
- end
- else
- generate_file_databases()
- if instance.renewcache then
- save_file_databases()
- end
- end
-end
-function resolvers.appendhash(type,name,cache)
- if not instance.hashed[name] then
- if trace_locating then
- report_resolving("hash %a appended",name)
- end
- insert(instance.hashes,{ type=type,name=name,cache=cache } )
- instance.hashed[name]=cache
- end
-end
-function resolvers.prependhash(type,name,cache)
- if not instance.hashed[name] then
- if trace_locating then
- report_resolving("hash %a prepended",name)
- end
- insert(instance.hashes,1,{ type=type,name=name,cache=cache } )
- instance.hashed[name]=cache
- end
-end
-function resolvers.extendtexmfvariable(specification)
- local t=resolvers.splitpath(getenv("TEXMF"))
- insert(t,1,specification)
- local newspec=concat(t,",")
- if instance.environment["TEXMF"] then
- instance.environment["TEXMF"]=newspec
- elseif instance.variables["TEXMF"] then
- instance.variables["TEXMF"]=newspec
- else
- end
- reset_hashes()
-end
-function resolvers.splitexpansions()
- local ie=instance.expansions
- for k,v in next,ie do
- local t,tn,h,p={},0,{},splitconfigurationpath(v)
- for kk=1,#p do
- local vv=p[kk]
- if vv~="" and not h[vv] then
- tn=tn+1
- t[tn]=vv
- h[vv]=true
- end
- end
- if #t>1 then
- ie[k]=t
- else
- ie[k]=t[1]
- end
- end
-end
-function resolvers.datastate()
- return caches.contentstate()
-end
-function resolvers.variable(name)
- local name=name and lpegmatch(dollarstripper,name)
- local result=name and instance.variables[name]
- return result~=nil and result or ""
-end
-function resolvers.expansion(name)
- local name=name and lpegmatch(dollarstripper,name)
- local result=name and instance.expansions[name]
- return result~=nil and result or ""
-end
-function resolvers.unexpandedpathlist(str)
- local pth=resolvers.variable(str)
- local lst=resolvers.splitpath(pth)
- return expandedpathfromlist(lst)
-end
-function resolvers.unexpandedpath(str)
- return joinpath(resolvers.unexpandedpathlist(str))
-end
-local done={}
-function resolvers.resetextrapath()
- local ep=instance.extra_paths
- if not ep then
- ep,done={},{}
- instance.extra_paths=ep
- elseif #ep>0 then
- instance.lists,done={},{}
- end
-end
-function resolvers.registerextrapath(paths,subpaths)
- paths=settings_to_array(paths)
- subpaths=settings_to_array(subpaths)
- local ep=instance.extra_paths or {}
- local oldn=#ep
- local newn=oldn
- local nofpaths=#paths
- local nofsubpaths=#subpaths
- if nofpaths>0 then
- if nofsubpaths>0 then
- for i=1,nofpaths do
- local p=paths[i]
- for j=1,nofsubpaths do
- local s=subpaths[j]
- local ps=p.."/"..s
- if not done[ps] then
- newn=newn+1
- ep[newn]=resolvers.cleanpath(ps)
- done[ps]=true
- end
- end
- end
- else
- for i=1,nofpaths do
- local p=paths[i]
- if not done[p] then
- newn=newn+1
- ep[newn]=resolvers.cleanpath(p)
- done[p]=true
- end
- end
- end
- elseif nofsubpaths>0 then
- for i=1,oldn do
- for j=1,nofsubpaths do
- local s=subpaths[j]
- local ps=ep[i].."/"..s
- if not done[ps] then
- newn=newn+1
- ep[newn]=resolvers.cleanpath(ps)
- done[ps]=true
- end
- end
- end
- end
- if newn>0 then
- instance.extra_paths=ep
- end
- if newn>oldn then
- instance.lists={}
- end
-end
-local function made_list(instance,list)
- local ep=instance.extra_paths
- if not ep or #ep==0 then
- return list
- else
- local done,new,newn={},{},0
- for k=1,#list do
- local v=list[k]
- if not done[v] then
- if find(v,"^[%.%/]$") then
- done[v]=true
- newn=newn+1
- new[newn]=v
- else
- break
- end
- end
- end
- for k=1,#ep do
- local v=ep[k]
- if not done[v] then
- done[v]=true
- newn=newn+1
- new[newn]=v
- end
- end
- for k=1,#list do
- local v=list[k]
- if not done[v] then
- done[v]=true
- newn=newn+1
- new[newn]=v
- end
- end
- return new
- end
-end
-function resolvers.cleanpathlist(str)
- local t=resolvers.expandedpathlist(str)
- if t then
- for i=1,#t do
- t[i]=collapsepath(resolvers.cleanpath(t[i]))
- end
- end
- return t
-end
-function resolvers.expandpath(str)
- return joinpath(resolvers.expandedpathlist(str))
-end
-function resolvers.expandedpathlist(str)
- if not str then
- return {}
- elseif instance.savelists then
- str=lpegmatch(dollarstripper,str)
- local lists=instance.lists
- local lst=lists[str]
- if not lst then
- local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
- lst=expandedpathfromlist(l)
- lists[str]=lst
- end
- return lst
- else
- local lst=resolvers.splitpath(resolvers.expansion(str))
- return made_list(instance,expandedpathfromlist(lst))
- end
-end
-function resolvers.expandedpathlistfromvariable(str)
- str=lpegmatch(dollarstripper,str)
- local tmp=resolvers.variableofformatorsuffix(str)
- return resolvers.expandedpathlist(tmp~="" and tmp or str)
-end
-function resolvers.expandpathfromvariable(str)
- return joinpath(resolvers.expandedpathlistfromvariable(str))
-end
-function resolvers.expandbraces(str)
- local ori=str
- local pth=expandedpathfromlist(resolvers.splitpath(ori))
- return joinpath(pth)
-end
-function resolvers.registerfilehash(name,content,someerror)
- if content then
- instance.files[name]=content
- else
- instance.files[name]={}
- if someerror==true then
- instance.loaderror=someerror
- end
- end
-end
-local function isreadable(name)
- local readable=lfs.isfile(name)
- if trace_detail then
- if readable then
- report_resolving("file %a is readable",name)
- else
- report_resolving("file %a is not readable",name)
- end
- end
- return readable
-end
-local function collect_files(names)
- local filelist,noffiles={},0
- for k=1,#names do
- local fname=names[k]
- if trace_detail then
- report_resolving("checking name %a",fname)
- end
- local bname=filebasename(fname)
- local dname=filedirname(fname)
- if dname=="" or find(dname,"^%.") then
- dname=false
- else
- dname=gsub(dname,"%*",".*")
- dname="/"..dname.."$"
- end
- local hashes=instance.hashes
- for h=1,#hashes do
- local hash=hashes[h]
- local blobpath=hash.name
- local files=blobpath and instance.files[blobpath]
- if files then
- if trace_detail then
- report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname)
- end
- local blobfile=files[bname]
- if not blobfile then
- local rname="remap:"..bname
- blobfile=files[rname]
- if blobfile then
- bname=files[rname]
- blobfile=files[bname]
- end
- end
- if blobfile then
- local blobroot=files.__path__ or blobpath
- if type(blobfile)=='string' then
- if not dname or find(blobfile,dname) then
- local variant=hash.type
- local search=filejoin(blobroot,blobfile,bname)
- local result=methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
- if trace_detail then
- report_resolving("match: variant %a, search %a, result %a",variant,search,result)
- end
- noffiles=noffiles+1
- filelist[noffiles]={ variant,search,result }
- end
- else
- for kk=1,#blobfile do
- local vv=blobfile[kk]
- if not dname or find(vv,dname) then
- local variant=hash.type
- local search=filejoin(blobroot,vv,bname)
- local result=methodhandler('concatinators',hash.type,blobroot,vv,bname)
- if trace_detail then
- report_resolving("match: variant %a, search %a, result %a",variant,search,result)
- end
- noffiles=noffiles+1
- filelist[noffiles]={ variant,search,result }
- end
- end
- end
- end
- elseif trace_locating then
- report_resolving("no match in %a (%s)",blobpath,bname)
- end
- end
- end
- return noffiles>0 and filelist or nil
-end
-local fit={}
-function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
- local foundintrees=instance.foundintrees
- if usedmethod=="direct" and filename==foundname and fit[foundname] then
- else
- local t={
- filename=filename,
- format=format~="" and format or nil,
- filetype=filetype~="" and filetype or nil,
- usedmethod=usedmethod,
- foundname=foundname,
- }
- fit[foundname]=t
- foundintrees[#foundintrees+1]=t
- end
-end
-local function can_be_dir(name)
- local fakepaths=instance.fakepaths
- if not fakepaths[name] then
- if lfs.isdir(name) then
- fakepaths[name]=1
- else
- fakepaths[name]=2
- end
- end
- return fakepaths[name]==1
-end
-local preparetreepattern=Cs((P(".")/"%%."+P("-")/"%%-"+P(1))^0*Cc("$"))
-local collect_instance_files
-local function find_analyze(filename,askedformat,allresults)
- local filetype,wantedfiles,ext='',{},suffixonly(filename)
- wantedfiles[#wantedfiles+1]=filename
- if askedformat=="" then
- if ext=="" or not suffixmap[ext] then
- local defaultsuffixes=resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname=filename..'.'..defaultsuffixes[i]
- wantedfiles[#wantedfiles+1]=forcedname
- filetype=resolvers.formatofsuffix(forcedname)
- if trace_locating then
- report_resolving("forcing filetype %a",filetype)
- end
- end
- else
- filetype=resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype %a",filetype)
- end
- end
- else
- if ext=="" or not suffixmap[ext] then
- local format_suffixes=suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1]=filename.."."..format_suffixes[i]
- end
- end
- end
- filetype=askedformat
- if trace_locating then
- report_resolving("using given filetype %a",filetype)
- end
- end
- return filetype,wantedfiles
-end
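--- the finders below are tried in this order by collect_instance_files: direct, wildcard,
--- qualified, intree (database or filesystem), onpath and otherwise; note that find_direct
--- references askedformat which does not appear to be in scope there, so the dangerous[]
--- check is effectively a no-op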
-local function find_direct(filename,allresults)
- if not dangerous[askedformat] and isreadable(filename) then
- if trace_detail then
- report_resolving("file %a found directly",filename)
- end
- return "direct",{ filename }
- end
-end
-local function find_wildcard(filename,allresults)
- if find(filename,'%*') then
- if trace_locating then
- report_resolving("checking wildcard %a",filename)
- end
- local method,result=resolvers.findwildcardfiles(filename)
- if result then
- return "wildcard",result
- end
- end
-end
-local function find_qualified(filename,allresults,askedformat,alsostripped)
- if not file.is_qualified_path(filename) then
- return
- end
- if trace_locating then
- report_resolving("checking qualified name %a",filename)
- end
- if isreadable(filename) then
- if trace_detail then
- report_resolving("qualified file %a found",filename)
- end
- return "qualified",{ filename }
- end
- if trace_detail then
- report_resolving("locating qualified file %a",filename)
- end
- local forcedname,suffix="",suffixonly(filename)
- if suffix=="" then
- local format_suffixes=askedformat=="" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s=format_suffixes[i]
- forcedname=filename.."."..s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype %a",s)
- end
- return "qualified",{ forcedname }
- end
- end
- end
- end
- if alsostripped and suffix and suffix~="" then
- local basename=filebasename(filename)
- local pattern=lpegmatch(preparetreepattern,filename)
- local savedformat=askedformat
- local format=savedformat or ""
- if format=="" then
- askedformat=resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat="othertextfiles"
- end
- if basename~=filename then
- local resolved=collect_instance_files(basename,askedformat,allresults)
- if #resolved==0 then
- local lowered=lower(basename)
- if filename~=lowered then
- resolved=collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format=savedformat
- if #resolved>0 then
- local result={}
- for r=1,#resolved do
- local rr=resolved[r]
- if find(rr,pattern) then
- result[#result+1]=rr
- end
- end
- if #result>0 then
- return "qualified",result
- end
- end
- end
- end
-end
-local function check_subpath(fname)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found %a by deep scanning",fname)
- end
- return fname
- end
-end
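--- find_intree first matches the wanted files against the hashed file databases using a
--- pattern built by makepathexpression; when the database yields nothing it probes the
--- path directly on disk, and for paths ending in '//' it additionally consults a
--- simplescanfiles scan of that subtree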
-local function find_intree(filename,filetype,wantedfiles,allresults)
- local typespec=resolvers.variableofformat(filetype)
- local pathlist=resolvers.expandedpathlist(typespec)
- local method="intree"
- if pathlist and #pathlist>0 then
- local filelist=collect_files(wantedfiles)
- local dirlist={}
- if filelist then
- for i=1,#filelist do
- dirlist[i]=filedirname(filelist[i][3]).."/"
- end
- end
- if trace_detail then
- report_resolving("checking filename %a",filename)
- end
- local resolve=resolvers.resolve
- local result={}
- for k=1,#pathlist do
- local path=pathlist[k]
- local pathname=lpegmatch(inhibitstripper,path)
- local doscan=path==pathname
- if not find (pathname,'//$') then
- doscan=false
- end
- local done=false
- if filelist then
- local expression=makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern %a for path %a",expression,pathname)
- end
- for k=1,#filelist do
- local fl=filelist[k]
- local f=fl[2]
- local d=dirlist[k]
- if find(d,expression) or find(resolve(d),expression) then
- result[#result+1]=resolve(fl[3])
- done=true
- if allresults then
- if trace_detail then
- report_resolving("match to %a in hash for file %a and path %a, continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to %a in hash for file %a and path %a, quit scanning",expression,f,d)
- end
- break
- end
- elseif trace_detail then
- report_resolving("no match to %a in hash for file %a and path %a",expression,f,d)
- end
- end
- end
- if done then
- method="database"
- else
- method="filesystem"
- pathname=gsub(pathname,"/+$","")
- pathname=resolve(pathname)
- local scheme=url.hasscheme(pathname)
- if not scheme or scheme=="file" then
- local pname=gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- if can_be_dir(pname) then
- for k=1,#wantedfiles do
- local w=wantedfiles[k]
- local fname=check_subpath(filejoin(pname,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
- end
- end
- end
- if not done and doscan then
- local files=resolvers.simplescanfiles(pname,false,true)
- for k=1,#wantedfiles do
- local w=wantedfiles[k]
- local subpath=files[w]
- if not subpath or subpath=="" then
- elseif type(subpath)=="string" then
- local fname=check_subpath(filejoin(pname,subpath,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
- end
- end
- else
- for i=1,#subpath do
- local sp=subpath[i]
- if sp=="" then
- else
- local fname=check_subpath(filejoin(pname,sp,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
- end
- end
- end
- end
- if done and not allresults then
- break
- end
- end
- end
- end
- end
- else
- end
- end
- end
- if done and not allresults then
- break
- end
- end
- if #result>0 then
- return method,result
- end
- end
-end
-local function find_onpath(filename,filetype,wantedfiles,allresults)
- if trace_detail then
- report_resolving("checking filename %a, filetype %a, wanted files %a",filename,filetype,concat(wantedfiles," | "))
- end
- local result={}
- for k=1,#wantedfiles do
- local fname=wantedfiles[k]
- if fname and isreadable(fname) then
- filename=fname
- result[#result+1]=filejoin('.',fname)
- if not allresults then
- break
- end
- end
- end
- if #result>0 then
- return "onpath",result
- end
-end
-local function find_otherwise(filename,filetype,wantedfiles,allresults)
- local filelist=collect_files(wantedfiles)
- local fl=filelist and filelist[1]
- if fl then
- return "otherwise",{ resolvers.resolve(fl[3]) }
- end
-end
-collect_instance_files=function(filename,askedformat,allresults)
- askedformat=askedformat or ""
- filename=collapsepath(filename,".")
- filename=gsub(filename,"^%./",getcurrentdir().."/")
- if allresults then
- local filetype,wantedfiles=find_analyze(filename,askedformat)
- local results={
- { find_direct (filename,true) },
- { find_wildcard (filename,true) },
- { find_qualified(filename,true,askedformat) },
- { find_intree (filename,filetype,wantedfiles,true) },
- { find_onpath (filename,filetype,wantedfiles,true) },
- { find_otherwise(filename,filetype,wantedfiles,true) },
- }
- local result,status,done={},{},{}
- for k,r in next,results do
- local method,list=r[1],r[2]
- if method and list then
- for i=1,#list do
- local c=collapsepath(list[i])
- if not done[c] then
- result[#result+1]=c
- done[c]=true
- end
- status[#status+1]=formatters["%-10s: %s"](method,c)
- end
- end
- end
- if trace_detail then
- report_resolving("lookup status: %s",table.serialize(status,filename))
- end
- return result,status
- else
- local method,result,stamp,filetype,wantedfiles
- if instance.remember then
- stamp=formatters["%s--%s"](filename,askedformat)
- result=stamp and instance.found[stamp]
- if result then
- if trace_locating then
- report_resolving("remembered file %a",filename)
- end
- return result
- end
- end
- method,result=find_direct(filename)
- if not result then
- method,result=find_wildcard(filename)
- if not result then
- method,result=find_qualified(filename,false,askedformat)
- if not result then
- filetype,wantedfiles=find_analyze(filename,askedformat)
- method,result=find_intree(filename,filetype,wantedfiles)
- if not result then
- method,result=find_onpath(filename,filetype,wantedfiles)
- if not result then
- method,result=find_otherwise(filename,filetype,wantedfiles)
- end
- end
- end
- end
- end
- if result and #result>0 then
- local foundname=collapsepath(result[1])
- resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
- result={ foundname }
- else
- result={}
- end
- if stamp then
- if trace_locating then
- report_resolving("remembering file %a",filename)
- end
- instance.found[stamp]=result
- end
- return result
- end
-end
-local function findfiles(filename,filetype,allresults)
- local result,status=collect_instance_files(filename,filetype or "",allresults)
- if not result or #result==0 then
- local lowered=lower(filename)
- if filename~=lowered then
- result,status=collect_instance_files(lowered,filetype or "",allresults)
- end
- end
- return result or {},status
-end
-function resolvers.findfiles(filename,filetype)
- return findfiles(filename,filetype,true)
-end
-function resolvers.findfile(filename,filetype)
- return findfiles(filename,filetype,false)[1] or ""
-end
-function resolvers.findpath(filename,filetype)
- return filedirname(findfiles(filename,filetype,false)[1] or "")
-end
-local function findgivenfiles(filename,allresults)
- local bname,result=filebasename(filename),{}
- local hashes=instance.hashes
- local noffound=0
- for k=1,#hashes do
- local hash=hashes[k]
- local files=instance.files[hash.name] or {}
- local blist=files[bname]
- if not blist then
- local rname="remap:"..bname
- blist=files[rname]
- if blist then
- bname=files[rname]
- blist=files[bname]
- end
- end
- if blist then
- if type(blist)=='string' then
- local found=methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
- if found~="" then
- noffound=noffound+1
- result[noffound]=resolvers.resolve(found)
- if not allresults then
- break
- end
- end
- else
- for kk=1,#blist do
- local vv=blist[kk]
- local found=methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
- if found~="" then
- noffound=noffound+1
- result[noffound]=resolvers.resolve(found)
- if not allresults then break end
- end
- end
- end
- end
- end
- return result
-end
-function resolvers.findgivenfiles(filename)
- return findgivenfiles(filename,true)
-end
-function resolvers.findgivenfile(filename)
- return findgivenfiles(filename,false)[1] or ""
-end
-local function doit(path,blist,bname,tag,variant,result,allresults)
- local done=false
- if blist and variant then
- local resolve=resolvers.resolve
- if type(blist)=='string' then
- if find(lower(blist),path) then
- local full=methodhandler('concatinators',variant,tag,blist,bname) or ""
- result[#result+1]=resolve(full)
- done=true
- end
- else
- for kk=1,#blist do
- local vv=blist[kk]
- if find(lower(vv),path) then
- local full=methodhandler('concatinators',variant,tag,vv,bname) or ""
- result[#result+1]=resolve(full)
- done=true
- if not allresults then break end
- end
- end
- end
- end
- return done
-end
-local makewildcard=Cs(
- (P("^")^0*P("/")*P(-1)+P(-1))/".*"+(P("^")^0*P("/")/"")^0*(P("*")/".*"+P("-")/"%%-"+P(".")/"%%."+P("?")/"."+P("\\")/"/"+P(1))^0
-)
-function resolvers.wildcardpattern(pattern)
- return lpegmatch(makewildcard,pattern) or pattern
-end
-local function findwildcardfiles(filename,allresults,result)
- result=result or {}
- local base=filebasename(filename)
- local dirn=filedirname(filename)
- local path=lower(lpegmatch(makewildcard,dirn) or dirn)
- local name=lower(lpegmatch(makewildcard,base) or base)
- local files,done=instance.files,false
- if find(name,"%*") then
- local hashes=instance.hashes
- for k=1,#hashes do
- local hash=hashes[k]
- local hashname,hashtype=hash.name,hash.type
- for kk,hh in next,files[hashname] do
- if not find(kk,"^remap:") then
- if find(lower(kk),name) then
- if doit(path,hh,kk,hashname,hashtype,result,allresults) then done=true end
- if done and not allresults then break end
- end
- end
- end
- end
- else
- local hashes=instance.hashes
- for k=1,#hashes do
- local hash=hashes[k]
- local hashname,hashtype=hash.name,hash.type
- if doit(path,files[hashname][base],base,hashname,hashtype,result,allresults) then done=true end
- if done and not allresults then break end
- end
- end
- return result
-end
-function resolvers.findwildcardfiles(filename,result)
- return findwildcardfiles(filename,true,result)
-end
-function resolvers.findwildcardfile(filename)
- return findwildcardfiles(filename,false)[1] or ""
-end
-function resolvers.automount()
-end
-function resolvers.load(option)
- statistics.starttiming(instance)
- identify_configuration_files()
- load_configuration_files()
- if option~="nofiles" then
- load_databases()
- resolvers.automount()
- end
- statistics.stoptiming(instance)
- local files=instance.files
- return files and next(files) and true
-end
-function resolvers.loadtime()
- return statistics.elapsedtime(instance)
-end
-local function report(str)
- if trace_locating then
- report_resolving(str)
- else
- print(str)
- end
-end
-function resolvers.dowithfilesandreport(command,files,...)
- if files and #files>0 then
- if trace_locating then
- report('')
- end
- if type(files)=="string" then
- files={ files }
- end
- for f=1,#files do
- local file=files[f]
- local result=command(file,...)
- if type(result)=='string' then
- report(result)
- else
- for i=1,#result do
- report(result[i])
- end
- end
- end
- end
-end
-function resolvers.showpath(str)
- return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
-end
-function resolvers.registerfile(files,name,path)
- if files[name] then
- if type(files[name])=='string' then
- files[name]={ files[name],path }
- else
- files[name]=path
- end
- else
- files[name]=path
- end
-end
-function resolvers.dowithpath(name,func)
- local pathlist=resolvers.expandedpathlist(name)
- for i=1,#pathlist do
- func("^"..resolvers.cleanpath(pathlist[i]))
- end
-end
-function resolvers.dowithvariable(name,func)
- func(expandedvariable(name))
-end
-function resolvers.locateformat(name)
- local engine=environment.ownmain or "luatex"
- local barename=file.removesuffix(name)
- local fullname=file.addsuffix(barename,"fmt")
- local fmtname=caches.getfirstreadablefile(fullname,"formats",engine) or ""
- if fmtname=="" then
- fmtname=resolvers.findfile(fullname)
- fmtname=resolvers.cleanpath(fmtname)
- end
- if fmtname~="" then
- local barename=file.removesuffix(fmtname)
- local luaname=file.addsuffix(barename,luasuffixes.lua)
- local lucname=file.addsuffix(barename,luasuffixes.luc)
- local luiname=file.addsuffix(barename,luasuffixes.lui)
- if lfs.isfile(luiname) then
- return barename,luiname
- elseif lfs.isfile(lucname) then
- return barename,lucname
- elseif lfs.isfile(luaname) then
- return barename,luaname
- end
- end
- return nil,nil
-end
-function resolvers.booleanvariable(str,default)
- local b=resolvers.expansion(str)
- if b=="" then
- return default
- else
- b=toboolean(b)
- return (b==nil and default) or b
- end
-end
-function resolvers.dowithfilesintree(pattern,handle,before,after)
- local instance=resolvers.instance
- local hashes=instance.hashes
- for i=1,#hashes do
- local hash=hashes[i]
- local blobtype=hash.type
- local blobpath=hash.name
- if blobpath then
- if before then
- before(blobtype,blobpath,pattern)
- end
- local files=instance.files[blobpath]
- local total,checked,done=0,0,0
- if files then
- for k,v in table.sortedhash(files) do
- total=total+1
- if find(k,"^remap:") then
- elseif find(k,pattern) then
- if type(v)=="string" then
- checked=checked+1
- if handle(blobtype,blobpath,v,k) then
- done=done+1
- end
- else
- checked=checked+#v
- for i=1,#v do
- if handle(blobtype,blobpath,v[i],k) then
- done=done+1
- end
- end
- end
- end
- end
- end
- if after then
- after(blobtype,blobpath,pattern,total,checked,done)
- end
- end
- end
-end
-resolvers.obsolete=resolvers.obsolete or {}
-local obsolete=resolvers.obsolete
-resolvers.find_file=resolvers.findfile obsolete.find_file=resolvers.findfile
-resolvers.find_files=resolvers.findfiles obsolete.find_files=resolvers.findfiles
-
-
-end -- of closure
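
For reference, a minimal usage sketch of the file-lookup API defined in the closure above (findfile, findfiles, findpath, findwildcardfile). It assumes a resolver instance has been set up, as mtxrun does at startup; the file names are illustrative only.

    resolvers.load()
    local one   = resolvers.findfile("context.mkiv")       -- first match, or ""
    local all   = resolvers.findfiles("texmf.cnf")          -- table with every match
    local where = resolvers.findpath("context.mkiv")        -- directory of the first match
    local wild  = resolvers.findwildcardfile("*.mkiv")      -- first wildcard match, or ""
    print(one, #all, where, wild)
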
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-pre"] = package.loaded["data-pre"] or true
-
--- original size: 6643, stripped down to: 4401
-
-if not modules then modules={} end modules ['data-pre']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local resolvers=resolvers
-local prefixes=utilities.storage.allocate()
-resolvers.prefixes=prefixes
-local cleanpath,findgivenfile,expansion=resolvers.cleanpath,resolvers.findgivenfile,resolvers.expansion
-local getenv=resolvers.getenv
-local P,S,R,C,Cs,Cc,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.match
-local joinpath,basename,dirname=file.join,file.basename,file.dirname
-local getmetatable,rawset,type=getmetatable,rawset,type
-prefixes.environment=function(str)
- return cleanpath(expansion(str))
-end
-prefixes.relative=function(str,n)
- if io.exists(str) then
- elseif io.exists("./"..str) then
- str="./"..str
- else
- local p="../"
- for i=1,n or 2 do
- if io.exists(p..str) then
- str=p..str
- break
- else
- p=p.."../"
- end
- end
- end
- return cleanpath(str)
-end
-prefixes.auto=function(str)
- local fullname=prefixes.relative(str)
- if not lfs.isfile(fullname) then
- fullname=prefixes.locate(str)
- end
- return fullname
-end
-prefixes.locate=function(str)
- local fullname=findgivenfile(str) or ""
- return cleanpath((fullname~="" and fullname) or str)
-end
-prefixes.filename=function(str)
- local fullname=findgivenfile(str) or ""
- return cleanpath(basename((fullname~="" and fullname) or str))
-end
-prefixes.pathname=function(str)
- local fullname=findgivenfile(str) or ""
- return cleanpath(dirname((fullname~="" and fullname) or str))
-end
-prefixes.selfautoloc=function(str)
- return cleanpath(joinpath(getenv('SELFAUTOLOC'),str))
-end
-prefixes.selfautoparent=function(str)
- return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str))
-end
-prefixes.selfautodir=function(str)
- return cleanpath(joinpath(getenv('SELFAUTODIR'),str))
-end
-prefixes.home=function(str)
- return cleanpath(joinpath(getenv('HOME'),str))
-end
-local function toppath()
- local inputstack=resolvers.inputstack
- if not inputstack then
- return "."
- end
- local pathname=dirname(inputstack[#inputstack] or "")
- if pathname=="" then
- return "."
- else
- return pathname
- end
-end
-resolvers.toppath=toppath
-prefixes.toppath=function(str)
- return cleanpath(joinpath(toppath(),str))
-end
-prefixes.env=prefixes.environment
-prefixes.rel=prefixes.relative
-prefixes.loc=prefixes.locate
-prefixes.kpse=prefixes.locate
-prefixes.full=prefixes.locate
-prefixes.file=prefixes.filename
-prefixes.path=prefixes.pathname
-function resolvers.allprefixes(separator)
- local all=table.sortedkeys(prefixes)
- if separator then
- for i=1,#all do
- all[i]=all[i]..":"
- end
- end
- return all
-end
-local function _resolve_(method,target)
- local action=prefixes[method]
- if action then
- return action(target)
- else
- return method..":"..target
- end
-end
-local resolved,abstract={},{}
-function resolvers.resetresolve(str)
- resolved,abstract={},{}
-end
-local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
-local prefix=C(R("az")^2)*P(":")
-local target=C((1-S(" \"\';,"))^1)
-local notarget=(#S(";,")+P(-1))*Cc("")
-local pattern=Cs(((prefix*(target+notarget))/_resolve_+P(1))^0)
-local function resolve(str)
- if type(str)=="table" then
- local t={}
- for i=1,#str do
- t[i]=resolve(str[i])
- end
- return t
- else
- local res=resolved[str]
- if not res then
- res=lpegmatch(pattern,str)
- resolved[str]=res
- abstract[res]=str
- end
- return res
- end
-end
-local function unresolve(str)
- return abstract[str] or str
-end
-resolvers.resolve=resolve
-resolvers.unresolve=unresolve
-if type(os.uname)=="function" then
- for k,v in next,os.uname() do
- if not prefixes[k] then
- prefixes[k]=function() return v end
- end
- end
-end
-if os.type=="unix" then
- local pattern
- local function makepattern(t,k,v)
- if t then
- rawset(t,k,v)
- end
- local colon=P(":")
- local p
- for k,v in table.sortedpairs(prefixes) do
- if p then
- p=P(k)+p
- else
- p=P(k)
- end
- end
- pattern=Cs((p*colon+colon/";"+P(1))^0)
- end
- makepattern()
- getmetatable(prefixes).__newindex=makepattern
- function resolvers.repath(str)
- return lpegmatch(pattern,str)
- end
-else
- function resolvers.repath(str)
- return str
- end
-end
-
-
-end -- of closure
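
A small sketch of the prefix expansion provided by data-pre above: resolvers.resolve rewrites "prefix:" notation through the handlers in resolvers.prefixes. The concrete names and paths are hypothetical.

    print(resolvers.resolve("home:texmf"))          -- $HOME/texmf, cleaned up
    print(resolvers.resolve("env:TEXMFCNF"))        -- expansion of the TEXMFCNF variable
    print(resolvers.resolve("loc:context.mkiv"))    -- located via the file database
    print(resolvers.resolve("rel:mysetup.tex"))     -- searched upward from the current path
    print(table.concat(resolvers.allprefixes(true)," "))
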
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-inp"] = package.loaded["data-inp"] or true
-
--- original size: 910, stripped down to: 823
-
-if not modules then modules={} end modules ['data-inp']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local allocate=utilities.storage.allocate
-local resolvers=resolvers
-local methodhandler=resolvers.methodhandler
-local registermethod=resolvers.registermethod
-local finders=allocate { helpers={},notfound=function() end }
-local openers=allocate { helpers={},notfound=function() end }
-local loaders=allocate { helpers={},notfound=function() return false,nil,0 end }
-registermethod("finders",finders,"uri")
-registermethod("openers",openers,"uri")
-registermethod("loaders",loaders,"uri")
-resolvers.finders=finders
-resolvers.openers=openers
-resolvers.loaders=loaders
-
-
-end -- of closure
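
data-inp only allocates the finders/openers/loaders method tables; concrete schemes hook in by adding entries, as data-zip and data-sch do further down. A hedged sketch with a made-up "dummy" scheme:

    local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
    finders.dummy = function(specification,filetype)
        -- pretend every dummy: request is found as-is
        return specification.original
    end
    openers.dummy = function(specification,filetype)
        return openers.notfound()
    end
    loaders.dummy = function(specification,filetype)
        return false, nil, 0   -- same protocol as loaders.notfound()
    end
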
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-out"] = package.loaded["data-out"] or true
-
--- original size: 530, stripped down to: 475
-
-if not modules then modules={} end modules ['data-out']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local allocate=utilities.storage.allocate
-local resolvers=resolvers
-local registermethod=resolvers.registermethod
-local savers=allocate { helpers={} }
-resolvers.savers=savers
-registermethod("savers",savers,"uri")
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-fil"] = package.loaded["data-fil"] or true
-
--- original size: 3801, stripped down to: 3231
-
-if not modules then modules={} end modules ['data-fil']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local report_files=logs.reporter("resolvers","files")
-local resolvers=resolvers
-local finders,openers,loaders,savers=resolvers.finders,resolvers.openers,resolvers.loaders,resolvers.savers
-local locators,hashers,generators,concatinators=resolvers.locators,resolvers.hashers,resolvers.generators,resolvers.concatinators
-local checkgarbage=utilities.garbagecollector and utilities.garbagecollector.check
-function locators.file(specification)
- local name=specification.filename
- local realname=resolvers.resolve(name)
- if realname and realname~='' and lfs.isdir(realname) then
- if trace_locating then
- report_files("file locator %a found as %a",name,realname)
- end
- resolvers.appendhash('file',name,true)
- elseif trace_locating then
- report_files("file locator %a not found",name)
- end
-end
-function hashers.file(specification)
- local name=specification.filename
- local content=caches.loadcontent(name,'files')
- resolvers.registerfilehash(name,content,content==nil)
-end
-function generators.file(specification)
- local path=specification.filename
- local content=resolvers.scanfiles(path,false,true)
- resolvers.registerfilehash(path,content,true)
-end
-concatinators.file=file.join
-function finders.file(specification,filetype)
- local filename=specification.filename
- local foundname=resolvers.findfile(filename,filetype)
- if foundname and foundname~="" then
- if trace_locating then
- report_files("file finder: %a found",filename)
- end
- return foundname
- else
- if trace_locating then
- report_files("file finder: %a not found",filename)
- end
- return finders.notfound()
- end
-end
-function openers.helpers.textopener(tag,filename,f)
- return {
- reader=function() return f:read () end,
- close=function() logs.show_close(filename) return f:close() end,
- }
-end
-function openers.file(specification,filetype)
- local filename=specification.filename
- if filename and filename~="" then
- local f=io.open(filename,"r")
- if f then
- if trace_locating then
- report_files("file opener: %a opened",filename)
- end
- return openers.helpers.textopener("file",filename,f)
- end
- end
- if trace_locating then
- report_files("file opener: %a not found",filename)
- end
- return openers.notfound()
-end
-function loaders.file(specification,filetype)
- local filename=specification.filename
- if filename and filename~="" then
- local f=io.open(filename,"rb")
- if f then
- logs.show_load(filename)
- if trace_locating then
- report_files("file loader: %a loaded",filename)
- end
- local s=f:read("*a")
- if checkgarbage then
- checkgarbage(#s)
- end
- f:close()
- if s then
- return true,s,#s
- end
- end
- end
- if trace_locating then
- report_files("file loader: %a not found",filename)
- end
- return loaders.notfound()
-end
-
-
-end -- of closure
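
The file locator/finder/opener/loader above are normally reached via resolvers.methodhandler, but they can also be exercised directly with a specification table; the file name below is hypothetical.

    local foundname = resolvers.finders.file({ filename = "somefile.tex" }, "tex")
    if foundname then
        local ok, data, size = resolvers.loaders.file({ filename = foundname })
        if ok then
            print(size .. " bytes loaded from " .. foundname)
        end
    end
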
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-con"] = package.loaded["data-con"] or true
-
--- original size: 5010, stripped down to: 3588
-
-if not modules then modules={} end modules ['data-con']={
- version=1.100,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local format,lower,gsub=string.format,string.lower,string.gsub
-local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
-local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
-local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
-containers=containers or {}
-local containers=containers
-containers.usecache=true
-local report_containers=logs.reporter("resolvers","containers")
-local allocated={}
-local mt={
- __index=function(t,k)
- if k=="writable" then
- local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
- t.writable=writable
- return writable
- elseif k=="readables" then
- local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
- t.readables=readables
- return readables
- end
- end,
- __storage__=true
-}
-function containers.define(category,subcategory,version,enabled)
- if category and subcategory then
- local c=allocated[category]
- if not c then
- c={}
- allocated[category]=c
- end
- local s=c[subcategory]
- if not s then
- s={
- category=category,
- subcategory=subcategory,
- storage={},
- enabled=enabled,
- version=version or math.pi,
- trace=false,
- }
- setmetatable(s,mt)
- c[subcategory]=s
- end
- return s
- end
-end
-function containers.is_usable(container,name)
- return container.enabled and caches and caches.is_writable(container.writable,name)
-end
-function containers.is_valid(container,name)
- if name and name~="" then
- local storage=container.storage[name]
- return storage and storage.cache_version==container.version
- else
- return false
- end
-end
-function containers.read(container,name)
- local storage=container.storage
- local stored=storage[name]
- if not stored and container.enabled and caches and containers.usecache then
- stored=caches.loaddata(container.readables,name)
- if stored and stored.cache_version==container.version then
- if trace_cache or trace_containers then
- report_containers("action %a, category %a, name %a","load",container.subcategory,name)
- end
- else
- stored=nil
- end
- storage[name]=stored
- elseif stored then
- if trace_cache or trace_containers then
- report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
- end
- end
- return stored
-end
-function containers.write(container,name,data)
- if data then
- data.cache_version=container.version
- if container.enabled and caches then
- local unique,shared=data.unique,data.shared
- data.unique,data.shared=nil,nil
- caches.savedata(container.writable,name,data)
- if trace_cache or trace_containers then
- report_containers("action %a, category %a, name %a","save",container.subcategory,name)
- end
- data.unique,data.shared=unique,shared
- end
- if trace_cache or trace_containers then
- report_containers("action %a, category %a, name %a","store",container.subcategory,name)
- end
- container.storage[name]=data
- end
- return data
-end
-function containers.content(container,name)
- return container.storage[name]
-end
-function containers.cleanname(name)
- return (gsub(lower(name),"[^%w\128-\255]+","-"))
-end
-
-
-end -- of closure
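
A minimal sketch of the container cache cycle (define, read, write) implemented above, assuming the caches module is available; the category, name and payload are made up.

    local cache = containers.define("editorial","examples",1.000,true)
    local data  = containers.read(cache,"demo")
    if not data then
        data = { answer = 42 }
        containers.write(cache,"demo",data)   -- stamps data.cache_version and saves it
    end
    print(containers.is_valid(cache,"demo"))  -- true once version and storage agree
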
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-use"] = package.loaded["data-use"] or true
-
--- original size: 3899, stripped down to: 2984
-
-if not modules then modules={} end modules ['data-use']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local format,lower,gsub,find=string.format,string.lower,string.gsub,string.find
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local report_mounts=logs.reporter("resolvers","mounts")
-local resolvers=resolvers
-resolvers.automounted=resolvers.automounted or {}
-function resolvers.automount(usecache)
- local mountpaths=resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
- if (not mountpaths or #mountpaths==0) and usecache then
- mountpaths=caches.getreadablepaths("mount")
- end
- if mountpaths and #mountpaths>0 then
- statistics.starttiming(resolvers.instance)
- for k=1,#mountpaths do
- local root=mountpaths[k]
- local f=io.open(root.."/url.tmi")
- if f then
- for line in f:lines() do
- if line then
- if find(line,"^[%%#%-]") then
- elseif find(line,"^zip://") then
- if trace_locating then
- report_mounts("mounting %a",line)
- end
- table.insert(resolvers.automounted,line)
- resolvers.usezipfile(line)
- end
- end
- end
- f:close()
- end
- end
- statistics.stoptiming(resolvers.instance)
- end
-end
-statistics.register("used config file",function() return caches.configfiles() end)
-statistics.register("used cache path",function() return caches.usedpaths() end)
-function statistics.savefmtstatus(texname,formatbanner,sourcefile)
- local enginebanner=status.banner
- if formatbanner and enginebanner and sourcefile then
- local luvname=file.replacesuffix(texname,"luv")
- local luvdata={
- enginebanner=enginebanner,
- formatbanner=formatbanner,
- sourcehash=md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
- sourcefile=sourcefile,
- }
- io.savedata(luvname,table.serialize(luvdata,true))
- end
-end
-function statistics.checkfmtstatus(texname)
- local enginebanner=status.banner
- if enginebanner and texname then
- local luvname=file.replacesuffix(texname,"luv")
- if lfs.isfile(luvname) then
- local luv=dofile(luvname)
- if luv and luv.sourcefile then
- local sourcehash=md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
- local luvbanner=luv.enginebanner or "?"
- if luvbanner~=enginebanner then
- return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
- end
- local luvhash=luv.sourcehash or "?"
- if luvhash~=sourcehash then
- return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
- end
- else
- return "invalid status file"
- end
- else
- return "missing status file"
- end
- end
- return true
-end
-
-
-end -- of closure
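
A sketch of how a format-building wrapper might use the format status helpers above; the format and source names are illustrative and status.banner must be provided by the engine.

    statistics.savefmtstatus("cont-en","ConTeXt ver: example","context.mkiv")
    local status = statistics.checkfmtstatus("cont-en")
    if status == true then
        print("format matches the current engine and source")
    else
        print("format needs regenerating: " .. status)
    end
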
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-zip"] = package.loaded["data-zip"] or true
-
--- original size: 8489, stripped down to: 6757
-
-if not modules then modules={} end modules ['data-zip']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local format,find,match=string.format,string.find,string.match
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local report_zip=logs.reporter("resolvers","zip")
-local resolvers=resolvers
-zip=zip or {}
-local zip=zip
-zip.archives=zip.archives or {}
-local archives=zip.archives
-zip.registeredfiles=zip.registeredfiles or {}
-local registeredfiles=zip.registeredfiles
-local limited=false
-directives.register("system.inputmode",function(v)
- if not limited then
- local i_limiter=io.i_limiter(v)
- if i_limiter then
- zip.open=i_limiter.protect(zip.open)
- limited=true
- end
- end
-end)
-local function validzip(str)
- if not find(str,"^zip://") then
- return "zip:///"..str
- else
- return str
- end
-end
-function zip.openarchive(name)
- if not name or name=="" then
- return nil
- else
- local arch=archives[name]
- if not arch then
- local full=resolvers.findfile(name) or ""
- arch=(full~="" and zip.open(full)) or false
- archives[name]=arch
- end
- return arch
- end
-end
-function zip.closearchive(name)
- if name and name~="" and archives[name] then
- zip.close(archives[name])
- archives[name]=nil
- end
-end
-function resolvers.locators.zip(specification)
- local archive=specification.filename
- local zipfile=archive and archive~="" and zip.openarchive(archive)
- if trace_locating then
- if zipfile then
- report_zip("locator: archive %a found",archive)
- else
- report_zip("locator: archive %a not found",archive)
- end
- end
-end
-function resolvers.hashers.zip(specification)
- local archive=specification.filename
- if trace_locating then
- report_zip("loading file %a",archive)
- end
- resolvers.usezipfile(specification.original)
-end
-function resolvers.concatinators.zip(zipfile,path,name)
- if not path or path=="" then
- return format('%s?name=%s',zipfile,name)
- else
- return format('%s?name=%s/%s',zipfile,path,name)
- end
-end
-function resolvers.finders.zip(specification)
- local original=specification.original
- local archive=specification.filename
- if archive then
- local query=url.query(specification.query)
- local queryname=query.name
- if queryname then
- local zfile=zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("finder: archive %a found",archive)
- end
- local dfile=zfile:open(queryname)
- if dfile then
- dfile=zfile:close()
- if trace_locating then
- report_zip("finder: file %a found",queryname)
- end
- return specification.original
- elseif trace_locating then
- report_zip("finder: file %a not found",queryname)
- end
- elseif trace_locating then
- report_zip("finder: unknown archive %a",archive)
- end
- end
- end
- if trace_locating then
- report_zip("finder: %a not found",original)
- end
- return resolvers.finders.notfound()
-end
-function resolvers.openers.zip(specification)
- local original=specification.original
- local archive=specification.filename
- if archive then
- local query=url.query(specification.query)
- local queryname=query.name
- if queryname then
- local zfile=zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("opener; archive %a opened",archive)
- end
- local dfile=zfile:open(queryname)
- if dfile then
- if trace_locating then
- report_zip("opener: file %a found",queryname)
- end
- return resolvers.openers.helpers.textopener('zip',original,dfile)
- elseif trace_locating then
- report_zip("opener: file %a not found",queryname)
- end
- elseif trace_locating then
- report_zip("opener: unknown archive %a",archive)
- end
- end
- end
- if trace_locating then
- report_zip("opener: %a not found",original)
- end
- return resolvers.openers.notfound()
-end
-function resolvers.loaders.zip(specification)
- local original=specification.original
- local archive=specification.filename
- if archive then
- local query=url.query(specification.query)
- local queryname=query.name
- if queryname then
- local zfile=zip.openarchive(archive)
- if zfile then
- if trace_locating then
- report_zip("loader: archive %a opened",archive)
- end
- local dfile=zfile:open(queryname)
- if dfile then
- logs.show_load(original)
- if trace_locating then
- report_zip("loader; file %a loaded",original)
- end
- local s=dfile:read("*all")
- dfile:close()
- return true,s,#s
- elseif trace_locating then
- report_zip("loader: file %a not found",queryname)
- end
- elseif trace_locating then
- report_zip("loader; unknown archive %a",archive)
- end
- end
- end
- if trace_locating then
- report_zip("loader: %a not found",original)
- end
- return resolvers.loaders.notfound()
-end
-function resolvers.usezipfile(archive)
- local specification=resolvers.splitmethod(archive)
- local archive=specification.filename
- if archive and not registeredfiles[archive] then
- local z=zip.openarchive(archive)
- if z then
- local instance=resolvers.instance
- local tree=url.query(specification.query).tree or ""
- if trace_locating then
- report_zip("registering: archive %a",archive)
- end
- statistics.starttiming(instance)
- resolvers.prependhash('zip',archive)
- resolvers.extendtexmfvariable(archive)
- registeredfiles[archive]=z
- instance.files[archive]=resolvers.registerzipfile(z,tree)
- statistics.stoptiming(instance)
- elseif trace_locating then
- report_zip("registering: unknown archive %a",archive)
- end
- elseif trace_locating then
- report_zip("registering: archive %a not found",archive)
- end
-end
-function resolvers.registerzipfile(z,tree)
- local files,filter={},""
- if tree=="" then
- filter="^(.+)/(.-)$"
- else
- filter=format("^%s/(.+)/(.-)$",tree)
- end
- if trace_locating then
- report_zip("registering: using filter %a",filter)
- end
- local register,n=resolvers.registerfile,0
- for i in z:files() do
- local path,name=match(i.filename,filter)
- if path then
- if name and name~='' then
- register(files,name,path)
- n=n+1
- else
- end
- else
- register(files,i.filename,'')
- n=n+1
- end
- end
- report_zip("registering: %s files registered",n)
- return files
-end
-
-
-end -- of closure
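
A sketch of the zip:// support above: registering an archive prepends a zip hash so files inside it resolve like files in a TDS tree. The archive path and query below are placeholders.

    resolvers.usezipfile("zip:///data/extra-tree.zip?tree=texmf")
    -- a file inside the archive now resolves to a zip url such as
    --   zip:///data/extra-tree.zip?name=texmf/tex/context/somestyle.tex
    print(resolvers.findgivenfile("somestyle.tex"))
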
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-tre"] = package.loaded["data-tre"] or true
-
--- original size: 2508, stripped down to: 2074
-
-if not modules then modules={} end modules ['data-tre']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local find,gsub,format=string.find,string.gsub,string.format
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local report_trees=logs.reporter("resolvers","trees")
-local resolvers=resolvers
-local done,found,notfound={},{},resolvers.finders.notfound
-function resolvers.finders.tree(specification)
- local spec=specification.filename
- local fnd=found[spec]
- if fnd==nil then
- if spec~="" then
- local path,name=file.dirname(spec),file.basename(spec)
- if path=="" then path="." end
- local hash=done[path]
- if not hash then
- local pattern=path.."/*"
- hash=dir.glob(pattern)
- done[path]=hash
- end
- local pattern="/"..gsub(name,"([%.%-%+])","%%%1").."$"
- for k=1,#hash do
- local v=hash[k]
- if find(v,pattern) then
- found[spec]=v
- return v
- end
- end
- end
- fnd=notfound()
- found[spec]=fnd
- end
- return fnd
-end
-function resolvers.locators.tree(specification)
- local name=specification.filename
- local realname=resolvers.resolve(name)
- if realname and realname~='' and lfs.isdir(realname) then
- if trace_locating then
- report_trees("locator %a found",realname)
- end
- resolvers.appendhash('tree',name,false)
- elseif trace_locating then
- report_trees("locator %a not found",name)
- end
-end
-function resolvers.hashers.tree(specification)
- local name=specification.filename
- if trace_locating then
- report_trees("analysing %a",name)
- end
- resolvers.methodhandler("hashers",name)
- resolvers.generators.file(specification)
-end
-resolvers.concatinators.tree=resolvers.concatinators.file
-resolvers.generators.tree=resolvers.generators.file
-resolvers.openers.tree=resolvers.openers.file
-resolvers.loaders.tree=resolvers.loaders.file
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-sch"] = package.loaded["data-sch"] or true
-
--- original size: 6202, stripped down to: 5149
-
-if not modules then modules={} end modules ['data-sch']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local load=load
-local gsub,concat,format=string.gsub,table.concat,string.format
-local finders,openers,loaders=resolvers.finders,resolvers.openers,resolvers.loaders
-local trace_schemes=false trackers.register("resolvers.schemes",function(v) trace_schemes=v end)
-local report_schemes=logs.reporter("resolvers","schemes")
-local http=require("socket.http")
-local ltn12=require("ltn12")
-local resolvers=resolvers
-local schemes=resolvers.schemes or {}
-resolvers.schemes=schemes
-local cleaners={}
-schemes.cleaners=cleaners
-local threshold=24*60*60
-directives.register("schemes.threshold",function(v) threshold=tonumber(v) or threshold end)
-function cleaners.none(specification)
- return specification.original
-end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-"))
-end
-function cleaners.md5(specification)
- return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
-end
-local cleaner=cleaners.strip
-directives.register("schemes.cleanmethod",function(v) cleaner=cleaners[v] or cleaners.strip end)
-function resolvers.schemes.cleanname(specification)
- local hash=cleaner(specification)
- if trace_schemes then
- report_schemes("hashing %a to %a",specification.original,hash)
- end
- return hash
-end
-local cached,loaded,reused,thresholds,handlers={},{},{},{},{}
-local function runcurl(name,cachename)
- local command="curl --silent --create-dirs --output "..cachename.." "..name
- os.spawn(command)
-end
-local function fetch(specification)
- local original=specification.original
- local scheme=specification.scheme
- local cleanname=schemes.cleanname(specification)
- local cachename=caches.setfirstwritablefile(cleanname,"schemes")
- if not cached[original] then
- statistics.starttiming(schemes)
- if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification)>(thresholds[scheme] or threshold)) then
- cached[original]=cachename
- local handler=handlers[scheme]
- if handler then
- if trace_schemes then
- report_schemes("fetching %a, protocol %a, method %a",original,scheme,"built-in")
- end
- logs.flush()
- handler(specification,cachename)
- else
- if trace_schemes then
- report_schemes("fetching %a, protocol %a, method %a",original,scheme,"curl")
- end
- logs.flush()
- runcurl(original,cachename)
- end
- end
- if io.exists(cachename) then
- cached[original]=cachename
- if trace_schemes then
- report_schemes("using cached %a, protocol %a, cachename %a",original,scheme,cachename)
- end
- else
- cached[original]=""
- if trace_schemes then
- report_schemes("using missing %a, protocol %a",original,scheme)
- end
- end
- loaded[scheme]=loaded[scheme]+1
- statistics.stoptiming(schemes)
- else
- if trace_schemes then
- report_schemes("reusing %a, protocol %a",original,scheme)
- end
- reused[scheme]=reused[scheme]+1
- end
- return cached[original]
-end
-local function finder(specification,filetype)
- return resolvers.methodhandler("finders",fetch(specification),filetype)
-end
-local opener=openers.file
-local loader=loaders.file
-local function install(scheme,handler,newthreshold)
- handlers [scheme]=handler
- loaded [scheme]=0
- reused [scheme]=0
- finders [scheme]=finder
- openers [scheme]=opener
- loaders [scheme]=loader
- thresholds[scheme]=newthreshold or threshold
-end
-schemes.install=install
-local function http_handler(specification,cachename)
- local tempname=cachename..".tmp"
- local f=io.open(tempname,"wb")
- local status,message=http.request {
- url=specification.original,
- sink=ltn12.sink.file(f)
- }
- if not status then
- os.remove(tempname)
- else
- os.remove(cachename)
- os.rename(tempname,cachename)
- end
- return cachename
-end
-install('http',http_handler)
-install('https')
-install('ftp')
-statistics.register("scheme handling time",function()
- local l,r,nl,nr={},{},0,0
- for k,v in table.sortedhash(loaded) do
- if v>0 then
- nl=nl+1
- l[nl]=k..":"..v
- end
- end
- for k,v in table.sortedhash(reused) do
- if v>0 then
- nr=nr+1
- r[nr]=k..":"..v
- end
- end
- local n=nl+nr
- if n>0 then
- l=nl>0 and concat(l) or "none"
- r=nr>0 and concat(r) or "none"
- return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s",
- statistics.elapsedtime(schemes),n,threshold,l,r)
- else
- return nil
- end
-end)
-local httprequest=http.request
-local toquery=url.toquery
-local function fetchstring(url,data)
- local q=data and toquery(data)
- if q then
- url=url.."?"..q
- end
- local reply=httprequest(url)
- return reply
-end
-schemes.fetchstring=fetchstring
-function schemes.fetchtable(url,data)
- local reply=fetchstring(url,data)
- if reply then
- local s=load("return "..reply)
- if s then
- return s()
- end
- end
-end
-
-
-end -- of closure
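
A sketch of the scheme support above: install wires a scheme into the finder/opener/loader tables (curl is the fallback when no handler is given), and fetchtable retrieves a Lua table literal over http. The url and field are placeholders.

    resolvers.schemes.install("dummy")            -- fetched via curl, default threshold
    local t = resolvers.schemes.fetchtable("http://example.com/status.lua")
    if t then
        print("fetched " .. tostring(t.version))
    end
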
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-lua"] = package.loaded["data-lua"] or true
-
--- original size: 4237, stripped down to: 3177
-
-if not modules then modules={} end modules ['data-lua']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local resolvers,package=resolvers,package
-local gsub=string.gsub
-local concat=table.concat
-local addsuffix=file.addsuffix
-local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
-local luasuffixes={ 'tex','lua' }
-local libsuffixes={ 'lib' }
-local luaformats={ 'TEXINPUTS','LUAINPUTS' }
-local libformats={ 'CLUAINPUTS' }
-local helpers=package.helpers or {}
-local methods=helpers.methods or {}
-trackers.register("resolvers.libraries",function(v) helpers.trace=v end)
-trackers.register("resolvers.locating",function(v) helpers.trace=v end)
-helpers.report=logs.reporter("resolvers","libraries")
-helpers.sequence={
- "already loaded",
- "preload table",
- "lua variable format",
- "lib variable format",
- "lua extra list",
- "lib extra list",
- "path specification",
- "cpath specification",
- "all in one fallback",
- "not loaded",
-}
-local pattern=Cs(P("!")^0/""*(P("/")*P(-1)/"/"+P("/")^1/"/"+1)^0)
-function helpers.cleanpath(path)
- return resolvers.resolve(lpegmatch(pattern,path))
-end
-local loadedaslib=helpers.loadedaslib
-local getextraluapaths=package.extraluapaths
-local getextralibpaths=package.extralibpaths
-local registerpath=helpers.registerpath
-local lualibfile=helpers.lualibfile
-local luaformatpaths
-local libformatpaths
-local function getluaformatpaths()
- if not luaformatpaths then
- luaformatpaths={}
- for i=1,#luaformats do
- registerpath("lua format","lua",luaformatpaths,resolvers.expandedpathlistfromvariable(luaformats[i]))
- end
- end
- return luaformatpaths
-end
-local function getlibformatpaths()
- if not libformatpaths then
- libformatpaths={}
- for i=1,#libformats do
- registerpath("lib format","lib",libformatpaths,resolvers.expandedpathlistfromvariable(libformats[i]))
- end
- end
- return libformatpaths
-end
-local function loadedbyformat(name,rawname,suffixes,islib,what)
- local trace=helpers.trace
- local report=helpers.report
- for i=1,#suffixes do
- local format=suffixes[i]
- local resolved=resolvers.findfile(name,format) or ""
- if trace then
- report("%s format, identifying %a using format %a",what,name,format)
- end
- if resolved~="" then
- if trace then
- report("%s format, %a found on %a",what,name,resolved)
- end
- if islib then
- return loadedaslib(resolved,rawname)
- else
- return loadfile(resolved)
- end
- end
- end
-end
-helpers.loadedbyformat=loadedbyformat
-methods["lua variable format"]=function(name)
- if helpers.trace then
- helpers.report("%s format, checking %s paths","lua",#getluaformatpaths())
- end
- return loadedbyformat(addsuffix(lualibfile(name),"lua"),name,luasuffixes,false,"lua")
-end
-methods["lib variable format"]=function(name)
- if helpers.trace then
- helpers.report("%s format, checking %s paths","lib",#getlibformatpaths())
- end
- return loadedbyformat(addsuffix(lualibfile(name),os.libsuffix),name,libsuffixes,true,"lib")
-end
-resolvers.loadlualib=require
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-aux"] = package.loaded["data-aux"] or true
-
--- original size: 2431, stripped down to: 1996
-
-if not modules then modules={} end modules ['data-aux']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local find=string.find
-local type,next=type,next
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local resolvers=resolvers
-local report_scripts=logs.reporter("resolvers","scripts")
-function resolvers.updatescript(oldname,newname)
- local scriptpath="context/lua"
- newname=file.addsuffix(newname,"lua")
- local oldscript=resolvers.cleanpath(oldname)
- if trace_locating then
- report_scripts("to be replaced old script %a",oldscript)
- end
- local newscripts=resolvers.findfiles(newname) or {}
- if #newscripts==0 then
- if trace_locating then
- report_scripts("unable to locate new script")
- end
- else
- for i=1,#newscripts do
- local newscript=resolvers.cleanpath(newscripts[i])
- if trace_locating then
- report_scripts("checking new script %a",newscript)
- end
- if oldscript==newscript then
- if trace_locating then
- report_scripts("old and new script are the same")
- end
- elseif not find(newscript,scriptpath) then
- if trace_locating then
- report_scripts("new script should come from %a",scriptpath)
- end
- elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
- if trace_locating then
- report_scripts("invalid new script name")
- end
- else
- local newdata=io.loaddata(newscript)
- if newdata then
- if trace_locating then
- report_scripts("old script content replaced by new content")
- end
- io.savedata(oldscript,newdata)
- break
- elseif trace_locating then
- report_scripts("unable to load new script")
- end
- end
- end
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-tmf"] = package.loaded["data-tmf"] or true
-
--- original size: 2600, stripped down to: 1627
-
-if not modules then modules={} end modules ['data-tmf']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local resolvers=resolvers
-local report_tds=logs.reporter("resolvers","tds")
-function resolvers.load_tree(tree,resolve)
- if type(tree)=="string" and tree~="" then
- local getenv,setenv=resolvers.getenv,resolvers.setenv
- local texos="texmf-"..os.platform
- local oldroot=environment.texroot
- local newroot=file.collapsepath(tree)
- local newtree=file.join(newroot,texos)
- local newpath=file.join(newtree,"bin")
- if not lfs.isdir(newtree) then
- report_tds("no %a under tree %a",texos,tree)
- os.exit()
- end
- if not lfs.isdir(newpath) then
- report_tds("no '%s/bin' under tree %a",texos,tree)
- os.exit()
- end
- local texmfos=newtree
- environment.texroot=newroot
- environment.texos=texos
- environment.texmfos=texmfos
- if resolve then
- resolvers.luacnfspec=resolvers.resolve(resolvers.luacnfspec)
- end
- setenv('SELFAUTOPARENT',newroot)
- setenv('SELFAUTODIR',newtree)
- setenv('SELFAUTOLOC',newpath)
- setenv('TEXROOT',newroot)
- setenv('TEXOS',texos)
- setenv('TEXMFOS',texmfos)
- setenv('TEXMFCNF',resolvers.luacnfspec,true)
- setenv('PATH',newpath..io.pathseparator..getenv('PATH'))
- report_tds("changing from root %a to %a",oldroot,newroot)
- report_tds("prepending %a to PATH",newpath)
- report_tds("setting TEXMFCNF to %a",resolvers.luacnfspec)
- report_tds()
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["data-lst"] = package.loaded["data-lst"] or true
-
--- original size: 2654, stripped down to: 2301
-
-if not modules then modules={} end modules ['data-lst']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local find,concat,upper,format=string.find,table.concat,string.upper,string.format
-local fastcopy,sortedpairs=table.fastcopy,table.sortedpairs
-resolvers.listers=resolvers.listers or {}
-local resolvers=resolvers
-local report_lists=logs.reporter("resolvers","lists")
-local function tabstr(str)
- if type(str)=='table' then
- return concat(str," | ")
- else
- return str
- end
-end
-function resolvers.listers.variables(pattern)
- local instance=resolvers.instance
- local environment=instance.environment
- local variables=instance.variables
- local expansions=instance.expansions
- local pattern=upper(pattern or "")
- local configured={}
- local order=instance.order
- for i=1,#order do
- for k,v in next,order[i] do
- if v~=nil and configured[k]==nil then
- configured[k]=v
- end
- end
- end
- local env=fastcopy(environment)
- local var=fastcopy(variables)
- local exp=fastcopy(expansions)
- for key,value in sortedpairs(configured) do
- if key~="" and (pattern=="" or find(upper(key),pattern)) then
- report_lists(key)
- report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
- report_lists(" var: %s",tabstr(configured[key]) or "unset")
- report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
- report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
- end
- end
- instance.environment=fastcopy(env)
- instance.variables=fastcopy(var)
- instance.expansions=fastcopy(exp)
-end
-local report_resolved=logs.reporter("system","resolved")
-function resolvers.listers.configurations()
- local configurations=resolvers.instance.specification
- for i=1,#configurations do
- report_resolved("file : %s",resolvers.resolve(configurations[i]))
- end
- report_resolved("")
- local list=resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
- for i=1,#list do
- local li=resolvers.resolve(list[i])
- if lfs.isdir(li) then
- report_resolved("path - %s",li)
- else
- report_resolved("path + %s",li)
- end
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["util-lib"] = package.loaded["util-lib"] or true
-
--- original size: 11549, stripped down to: 5905
-
-if not modules then modules={} end modules ['util-lib']={
- version=1.001,
- comment="companion to luat-lib.mkiv",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files",
-}
-local gsub,find=string.gsub,string.find
-local pathpart,nameonly,joinfile=file.pathpart,file.nameonly,file.join
-local findfile,findfiles=resolvers and resolvers.findfile,resolvers and resolvers.findfiles
-local loaded=package.loaded
-local report_swiglib=logs.reporter("swiglib")
-local trace_swiglib=false trackers.register("resolvers.swiglib",function(v) trace_swiglib=v end)
-local done=false
-local function requireswiglib(required,version)
- local trace_swiglib=trace_swiglib or package.helpers.trace
- local library=loaded[required]
- if library==nil then
- if trace_swiglib then
- report_swiglib("requiring library %a with version %a",required,version or "any")
- end
- local required_full=gsub(required,"%.","/")
- local required_path=pathpart(required_full)
- local required_base=nameonly(required_full)
- local required_name=required_base.."."..os.libsuffix
- local version=type(version)=="string" and version~="" and version or false
- local engine=environment.ownmain or false
- if trace_swiglib and not done then
- local list=resolvers.expandedpathlistfromvariable("lib")
- for i=1,#list do
- report_swiglib("tds path %i: %s",i,list[i])
- end
- end
- local function found(locate,asked_library,how,...)
- if trace_swiglib then
- report_swiglib("checking %s: %a",how,asked_library)
- end
- return locate(asked_library,...)
- end
- local function check(locate,...)
- local found=nil
- if version then
- local asked_library=joinfile(required_path,version,required_name)
- if trace_swiglib then
- report_swiglib("checking %s: %a","with version",asked_library)
- end
- found=locate(asked_library,...)
- end
- if not found or found=="" then
- local asked_library=joinfile(required_path,required_name)
- if trace_swiglib then
- report_swiglib("checking %s: %a","with version",asked_library)
- end
- found=locate(asked_library,...)
- end
- return found and found~="" and found or false
- end
- local function attempt(checkpattern)
- if trace_swiglib then
- report_swiglib("checking tds lib paths strictly")
- end
- local found=findfile and check(findfile,"lib")
- if found and (not checkpattern or find(found,checkpattern)) then
- return found
- end
- if trace_swiglib then
- report_swiglib("checking tds lib paths with wildcard")
- end
- local asked_library=joinfile(required_path,".*",required_name)
- if trace_swiglib then
- report_swiglib("checking %s: %a","latest version",asked_library)
- end
- local list=findfiles(asked_library,"lib",true)
- if list and #list>0 then
- table.sort(list)
- local found=list[#list]
- if found and (not checkpattern or find(found,checkpattern)) then
- return found
- end
- end
- if trace_swiglib then
- report_swiglib("checking lib paths")
- end
- package.extralibpath(environment.ownpath)
- local paths=package.libpaths()
- for i=1,#paths do
- local found=check(lfs.isfile)
- if found and (not checkpattern or find(found,checkpattern)) then
- return found
- end
- end
- return false
- end
- local found_library=nil
- if engine then
- if trace_swiglib then
- report_swiglib("attemp 1, engine %a",engine)
- end
- found_library=attempt("/"..engine.."/")
- if not found_library then
- if trace_swiglib then
- report_swiglib("attemp 2, no engine",asked_library)
- end
- found_library=attempt()
- end
- else
- found_library=attempt()
- end
- if not found_library then
- if trace_swiglib then
- report_swiglib("not found: %a",required)
- end
- library=false
- else
- local path=pathpart(found_library)
- local base=nameonly(found_library)
- dir.push(path)
- if trace_swiglib then
- report_swiglib("found: %a",found_library)
- end
- local message=nil
- local opener="luaopen_"..required_base
- library,message=package.loadlib(found_library,opener)
- local libtype=type(library)
- if libtype=="function" then
- library=library()
- else
- report_swiglib("load error: %a returns %a, message %a, library %a",opener,libtype,(string.gsub(message or "no message","[%s]+$","")),found_library or "no library")
- library=false
- end
- dir.pop()
- end
- if not library then
- report_swiglib("unknown: %a",required)
- elseif trace_swiglib then
- report_swiglib("stored: %a",required)
- end
- loaded[required]=library
- else
- report_swiglib("reused: %a",required)
- end
- return library
-end
-local savedrequire=require
-function require(name,version)
- if find(name,"^swiglib%.") then
- return requireswiglib(name,version)
- else
- return savedrequire(name)
- end
-end
-local swiglibs={}
-local initializer="core"
-function swiglib(name,version)
- local library=swiglibs[name]
- if not library then
- statistics.starttiming(swiglibs)
- if trace_swiglib then
- report_swiglib("loading %a",name)
- end
- if not find(name,"%."..initializer.."$") then
- fullname="swiglib."..name.."."..initializer
- else
- fullname="swiglib."..name
- end
- library=requireswiglib(fullname,version)
- swiglibs[name]=library
- statistics.stoptiming(swiglibs)
- end
- return library
-end
-statistics.register("used swiglibs",function()
- if next(swiglibs) then
- return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs))
- end
-end)
-
-
-end -- of closure
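
A sketch of the swiglib loader above; the library name is hypothetical and only resolves when a matching swiglib binary is present in the TDS tree (optionally in a versioned subdirectory).

    local core = swiglib("mysql.core")            -- expands to swiglib.mysql.core
    -- or explicitly versioned, via the patched require:
    -- local core = require("swiglib.mysql.core","5.6")
    if core then
        print("swig library loaded")
    end
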
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["luat-sta"] = package.loaded["luat-sta"] or true
-
--- original size: 5703, stripped down to: 2507
-
-if not modules then modules={} end modules ['luat-sta']={
- version=1.001,
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local gmatch,match=string.gmatch,string.match
-local type=type
-states=states or {}
-local states=states
-states.data=states.data or {}
-local data=states.data
-states.hash=states.hash or {}
-local hash=states.hash
-states.tag=states.tag or ""
-states.filename=states.filename or ""
-function states.save(filename,tag)
- tag=tag or states.tag
- filename=file.addsuffix(filename or states.filename,'lus')
- io.savedata(filename,
- "-- generator : luat-sta.lua\n".."-- state tag : "..tag.."\n\n"..table.serialize(data[tag or states.tag] or {},true)
- )
-end
-function states.load(filename,tag)
- states.filename=filename
- states.tag=tag or "whatever"
- states.filename=file.addsuffix(states.filename,'lus')
- data[states.tag],hash[states.tag]=(io.exists(filename) and dofile(filename)) or {},{}
-end
-local function set_by_tag(tag,key,value,default,persistent)
- local d,h=data[tag],hash[tag]
- if d then
- if type(d)=="table" then
- local dkey,hkey=key,key
- local pre,post=match(key,"(.+)%.([^%.]+)$")
- if pre and post then
- for k in gmatch(pre,"[^%.]+") do
- local dk=d[k]
- if not dk then
- dk={}
- d[k]=dk
- elseif type(dk)=="string" then
- break
- end
- d=dk
- end
- dkey,hkey=post,key
- end
- if value==nil then
- value=default
- elseif value==false then
- elseif persistent then
- value=value or d[dkey] or default
- else
- value=value or default
- end
- d[dkey],h[hkey]=value,value
- elseif type(d)=="string" then
- data[tag],hash[tag]=value,value
- end
- end
-end
-local function get_by_tag(tag,key,default)
- local h=hash[tag]
- if h and h[key] then
- return h[key]
- else
- local d=data[tag]
- if d then
- for k in gmatch(key,"[^%.]+") do
- local dk=d[k]
- if dk~=nil then
- d=dk
- else
- return default
- end
- end
- if d==false then
- return false
- else
- return d or default
- end
- end
- end
-end
-states.set_by_tag=set_by_tag
-states.get_by_tag=get_by_tag
-function states.set(key,value,default,persistent)
- set_by_tag(states.tag,key,value,default,persistent)
-end
-function states.get(key,default)
- return get_by_tag(states.tag,key,default)
-end
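--- A minimal illustrative sketch of the dotted keys handled above (the file,
--- tag and key names are made up):
---
---   states.load("install.lus","install")
---   states.set("formats.en.status","ok")   -- stored as data.install.formats.en.status
---   states.get("formats.en.status")        -- returns "ok" (via the hash or by walking data)
---   states.save()                          -- serializes the table back to install.lus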
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-package.loaded["luat-fmt"] = package.loaded["luat-fmt"] or true
-
--- original size: 5951, stripped down to: 4922
-
-if not modules then modules={} end modules ['luat-fmt']={
- version=1.001,
- comment="companion to mtxrun",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-local format=string.format
-local concat=table.concat
-local quoted=string.quoted
-local luasuffixes=utilities.lua.suffixes
-local report_format=logs.reporter("resolvers","formats")
-local function primaryflags()
- local trackers=environment.argument("trackers")
- local directives=environment.argument("directives")
- local flags={}
- if trackers and trackers~="" then
- flags={ "--trackers="..quoted(trackers) }
- end
- if directives and directives~="" then
- flags={ "--directives="..quoted(directives) }
- end
- if environment.argument("jit") then
- flags={ "--jiton" }
- end
- return concat(flags," ")
-end
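--- Note that each branch above reassigns the whole flags table, so only the
--- last matching option survives; with illustrative values,
---
---   mtxrun --trackers=resolvers.locating --jit ...
---
--- ends up passing just "--jiton" to the engine and drops the trackers flag.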
-function environment.make_format(name)
- local engine=environment.ownmain or "luatex"
- local olddir=dir.current()
- local path=caches.getwritablepath("formats",engine) or ""
- if path~="" then
- lfs.chdir(path)
- end
- report_format("using format path %a",dir.current())
- local texsourcename=file.addsuffix(name,"mkiv")
- local fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
- if fulltexsourcename=="" then
- texsourcename=file.addsuffix(name,"tex")
- fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
- end
- if fulltexsourcename=="" then
- report_format("no tex source file with name %a (mkiv or tex)",name)
- lfs.chdir(olddir)
- return
- else
- report_format("using tex source file %a",fulltexsourcename)
- end
- local texsourcepath=dir.expandname(file.dirname(fulltexsourcename))
- local specificationname=file.replacesuffix(fulltexsourcename,"lus")
- local fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
- if fullspecificationname=="" then
- specificationname=file.join(texsourcepath,"context.lus")
- fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
- end
- if fullspecificationname=="" then
- report_format("unknown stub specification %a",specificationname)
- lfs.chdir(olddir)
- return
- end
- local specificationpath=file.dirname(fullspecificationname)
- local usedluastub=nil
- local usedlualibs=dofile(fullspecificationname)
- if type(usedlualibs)=="string" then
- usedluastub=file.join(file.dirname(fullspecificationname),usedlualibs)
- elseif type(usedlualibs)=="table" then
- report_format("using stub specification %a",fullspecificationname)
- local texbasename=file.basename(name)
- local luastubname=file.addsuffix(texbasename,luasuffixes.lua)
- local lucstubname=file.addsuffix(texbasename,luasuffixes.luc)
- report_format("creating initialization file %a",luastubname)
- utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
- if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
- report_format("using compiled initialization file %a",lucstubname)
- usedluastub=lucstubname
- else
- report_format("using uncompiled initialization file %a",luastubname)
- usedluastub=luastubname
- end
- else
- report_format("invalid stub specification %a",fullspecificationname)
- lfs.chdir(olddir)
- return
- end
- local command=format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform=="unix" and "\\\\" or "\\")
- report_format("running command: %s\n",command)
- os.spawn(command)
- local pattern=file.removesuffix(file.basename(usedluastub)).."-*.mem"
- local mp=dir.glob(pattern)
- if mp then
- for i=1,#mp do
- local name=mp[i]
- report_format("removing related mplib format %a",file.basename(name))
- os.remove(name)
- end
- end
- lfs.chdir(olddir)
-end
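--- With illustrative names, the command assembled above comes out roughly as
---
---   luatex --ini <primaryflags> --lua="cont-en.luc" "cont-en.mkiv" \\dump
---
--- where on unix the doubled backslash survives shell unquoting so that the
--- engine still sees \dump, while on windows a single backslash is used.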
-function environment.run_format(name,data,more)
- if name and name~="" then
- local engine=environment.ownmain or "luatex"
- local barename=file.removesuffix(name)
- local fmtname=caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine)
- if fmtname=="" then
- fmtname=resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
- end
- fmtname=resolvers.cleanpath(fmtname)
- if fmtname=="" then
- report_format("no format with name %a",name)
- else
- local barename=file.removesuffix(name)
- local luaname=file.addsuffix(barename,"luc")
- if not lfs.isfile(luaname) then
- luaname=file.addsuffix(barename,"lua")
- end
- if not lfs.isfile(luaname) then
- report_format("using format name %a",fmtname)
- report_format("no luc/lua file with name %a",barename)
- else
- local command=format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more~="" and quoted(more) or "")
- report_format("running command: %s",command)
- os.spawn(command)
- end
- end
- end
-end
-
-
-end -- of closure
-
--- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
--- skipped libraries : -
--- original bytes : 689993
--- stripped bytes : 244562
-
--- end library merge
-
--- We need this hack till luatex is fixed.
---
--- for k,v in pairs(arg) do print(k,v) end
-
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
-end
-
--- End of hack.
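--- Illustrative effect of the hack above: started as
---
---   luatex --luaonly mtxrun.lua --script fonts
---
--- the original arg table { [0]="luatex", [1]="--luaonly", [2]="mtxrun.lua",
--- [3]="--script", [4]="fonts" } is shifted down to
--- arg[-1]="luatex", arg[0]="mtxrun.lua", arg[1]="--script", arg[2]="fonts".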
-
-local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
-local concat = table.concat
-
-local ownname = environment and environment.ownname or arg[0] or 'mtxrun.lua'
-local ownpath = gsub(match(ownname,"^(.+)[\\/].-$") or ".","\\","/")
-local owntree = environment and environment.ownpath or ownpath
-
-local ownlibs = { -- order can be made better
-
- 'l-lua.lua',
- 'l-package.lua',
- 'l-lpeg.lua',
- 'l-function.lua',
- 'l-string.lua',
- 'l-table.lua',
- 'l-io.lua',
- 'l-number.lua',
- 'l-set.lua',
- 'l-os.lua',
- 'l-file.lua',
- 'l-gzip.lua',
- 'l-md5.lua',
- 'l-url.lua',
- 'l-dir.lua',
- 'l-boolean.lua',
- 'l-unicode.lua',
- 'l-math.lua',
-
- 'util-str.lua', -- code might move to l-string
- 'util-tab.lua',
- 'util-sto.lua',
- 'util-prs.lua',
- 'util-fmt.lua',
-
- 'trac-set.lua',
- 'trac-log.lua',
- 'trac-inf.lua', -- was before trac-set
- 'trac-pro.lua', -- not really needed
- 'util-lua.lua', -- indeed here?
- 'util-deb.lua',
-
- 'util-mrg.lua',
- 'util-tpl.lua',
-
- 'util-env.lua',
- 'luat-env.lua', -- can come before inf (as in mkiv)
-
- 'lxml-tab.lua',
- 'lxml-lpt.lua',
- -- 'lxml-ent.lua',
- 'lxml-mis.lua',
- 'lxml-aux.lua',
- 'lxml-xml.lua',
-
- 'trac-xml.lua',
-
- 'data-ini.lua',
- 'data-exp.lua',
- 'data-env.lua',
- 'data-tmp.lua',
- 'data-met.lua',
- 'data-res.lua',
- 'data-pre.lua',
- 'data-inp.lua',
- 'data-out.lua',
- 'data-fil.lua',
- 'data-con.lua',
- 'data-use.lua',
--- 'data-tex.lua',
--- 'data-bin.lua',
- 'data-zip.lua',
- 'data-tre.lua',
- 'data-sch.lua',
- 'data-lua.lua',
- 'data-aux.lua', -- updater
- 'data-tmf.lua',
- 'data-lst.lua',
-
- 'util-lib.lua', -- swiglib
-
- 'luat-sta.lua',
- 'luat-fmt.lua',
-
-}
-
--- c:/data/develop/tex-context/tex/texmf-win64/bin/../../texmf-context/tex/context/base/data-tmf.lua
--- c:/data/develop/context/sources/data-tmf.lua
-
-local ownlist = {
- -- '.',
- -- ownpath ,
- owntree .. "/../../../../context/sources", -- HH's development path
- owntree .. "/../../texmf-local/tex/context/base",
- owntree .. "/../../texmf-context/tex/context/base",
- owntree .. "/../../texmf/tex/context/base",
- owntree .. "/../../../texmf-local/tex/context/base",
- owntree .. "/../../../texmf-context/tex/context/base",
- owntree .. "/../../../texmf/tex/context/base",
-}
-
-if ownpath == "." then table.remove(ownlist,1) end
-
-own = {
- name = ownname,
- path = ownpath,
- tree = owntree,
- list = ownlist,
- libs = ownlibs,
-}
-
-local function locate_libs()
- for l=1,#ownlibs do
- local lib = ownlibs[l]
- for p =1,#ownlist do
- local pth = ownlist[p]
- local filename = pth .. "/" .. lib
- local found = lfs.isfile(filename)
- if found then
- package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require
- return pth
- end
- end
- end
-end
-
-local function load_libs()
- local found = locate_libs()
- if found then
- for l=1,#ownlibs do
- local filename = found .. "/" .. ownlibs[l]
- local codeblob = loadfile(filename)
- if codeblob then
- codeblob()
- end
- end
- else
- resolvers = nil
- end
-end
-
-if not resolvers then
- load_libs()
-end
-
-if not resolvers then
- print("")
- print("Mtxrun is unable to start up due to lack of libraries. You may")
- print("try to run 'lua mtxrun.lua --selfmerge' in the path where this")
- print("script is located (normally under ..../scripts/context/lua) which")
- print("will make this script library independent.")
- os.exit()
-end
-
--- verbosity
-
------ e_verbose = environment.arguments["verbose"]
-
-local e_verbose = false
-
--- some common flags (also passed through environment)
-
-local e_silent = environment.argument("silent")
-local e_noconsole = environment.argument("noconsole")
-
-local e_trackers = environment.argument("trackers")
-local e_directives = environment.argument("directives")
-local e_experiments = environment.argument("experiments")
-
-if e_silent == true then
- e_silent = "*"
-end
-
-if type(e_silent) == "string" then
- if type(e_directives) == "string" then
- e_directives = format("%s,logs.blocked={%s}",e_directives,e_silent)
- else
- e_directives = format("logs.blocked={%s}",e_silent)
- end
-end
-
-if e_noconsole then
- if type(e_directives) == "string" then
- e_directives = format("%s,logs.target=file",e_directives)
- else
- e_directives = format("logs.target=file")
- end
-end
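--- Illustrative effect of the two blocks above (assuming no --directives flag
--- was given): a call like
---
---   mtxrun --silent=resolvers --noconsole ...
---
--- yields the directives string "logs.blocked={resolvers},logs.target=file",
--- while a bare --silent blocks all log categories via "logs.blocked={*}".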
-
-if e_trackers then trackers .enable(e_trackers) end
-if e_directives then directives .enable(e_directives) end
-if e_experiments then experiments.enable(e_experiments) end
-
-if not environment.trackers then environment.trackers = e_trackers end
-if not environment.directives then environment.directives = e_directives end
-if not environment.experiments then environment.experiments = e_experiments end
-
---
-
-local instance = resolvers.reset()
-
-local helpinfo = [[
-<?xml version="1.0" ?>
-<application>
- <metadata>
- <entry name="name">mtxrun</entry>
- <entry name="detail">ConTeXt TDS Runner Tool</entry>
- <entry name="version">1.31</entry>
- </metadata>
- <flags>
- <category name="basic">
- <subcategory>
- <flag name="script"><short>run an mtx script (lua prefered method) (<ref name="noquotes"/>), no script gives list</short></flag>
- <flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
- <flag name="resolve"><short>resolve prefixed arguments</short></flag>
- <flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
- <flag name="internal"><short>run script using built in libraries (same as <ref name="ctxlua"/>)</short></flag>
- <flag name="locate"><short>locate given filename in database (default) or system (<ref name="first"/> <ref name="all"/> <ref name="detail"/>)</short></flag>
- </subcategory>
- <subcategory>
- <flag name="autotree"><short>use texmf tree cf. env texmfstart_tree or texmfstarttree</short></flag>
- <flag name="tree" value="pathtotree"><short>use given texmf tree (default file: setuptex.tmf)</short></flag>
- <flag name="environment" value="name"><short>use given (tmf) environment file</short></flag>
- <flag name="path" value="runpath"><short>go to given path before execution</short></flag>
- <flag name="ifchanged" value="filename"><short>only execute when given file has changed (md checksum)</short></flag>
- <flag name="iftouched" value="old,new"><short>only execute when given file has changed (time stamp)</short></flag>
- </subcategory>
- <subcategory>
- <flag name="makestubs"><short>create stubs for (context related) scripts</short></flag>
- <flag name="removestubs"><short>remove stubs (context related) scripts</short></flag>
- <flag name="stubpath" value="binpath"><short>paths where stubs wil be written</short></flag>
- <flag name="windows"><short>create windows (mswin) stubs</short></flag>
- <flag name="unix"><short>create unix (linux) stubs</short></flag>
- </subcategory>
- <subcategory>
- <flag name="verbose"><short>give a bit more info</short></flag>
- <flag name="trackers" value="list"><short>enable given trackers</short></flag>
- <flag name="progname" value="str"><short>format or backend</short></flag>
- </subcategory>
- <subcategory>
- <flag name="edit"><short>launch editor with found file</short></flag>
- <flag name="launch"><short>launch files like manuals, assumes os support (<ref name="all"/>)</short></flag>
- </subcategory>
- <subcategory>
- <flag name="timedrun"><short>run a script and time its run</short></flag>
- <flag name="autogenerate"><short>regenerate databases if needed (handy when used to run context in an editor)</short></flag>
- </subcategory>
- <subcategory>
- <flag name="usekpse"><short>use kpse as fallback (when no mkiv and cache installed, often slower)</short></flag>
- <flag name="forcekpse"><short>force using kpse (handy when no mkiv and cache installed but less functionality)</short></flag>
- </subcategory>
- <subcategory>
- <flag name="prefixes"><short>show supported prefixes</short></flag>
- </subcategory>
- <subcategory>
- <flag name="generate"><short>generate file database</short></flag>
- </subcategory>
- <subcategory>
- <flag name="variables"><short>show configuration variables</short></flag>
- <flag name="configurations"><short>show configuration order</short></flag>
- </subcategory>
- <subcategory>
- <flag name="directives"><short>show (known) directives</short></flag>
- <flag name="trackers"><short>show (known) trackers</short></flag>
- <flag name="experiments"><short>show (known) experiments</short></flag>
- </subcategory>
- <subcategory>
- <flag name="expand-braces"><short>expand complex variable</short></flag>
- <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
- <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
- <flag name="show-path"><short>show path expansion of ...</short></flag>
- <flag name="var-value"><short>report value of variable</short></flag>
- <flag name="find-file"><short>report file location</short></flag>
- <flag name="find-path"><short>report path of file</short></flag>
- </subcategory>
- <subcategory>
- <flag name="pattern" value="string"><short>filter variables</short></flag>
- </subcategory>
- </category>
- </flags>
-</application>
-]]
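--- A few typical invocations of the flags documented above (the file and
--- script names are only examples):
---
---   mtxrun --generate                  -- rebuild the file database
---   mtxrun --find-file context.mkiv    -- report where a file lives
---   mtxrun --script fonts --reload     -- run the mtx-fonts script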
-
-local application = logs.application {
- name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.31",
- helpinfo = helpinfo,
-}
-
-local report = application.report
-
-messages = messages or { } -- for the moment
-
-runners = runners or { } -- global (might become local)
-
-runners.applications = {
- ["lua"] = "luatex --luaonly",
- ["luc"] = "luatex --luaonly",
- ["pl"] = "perl",
- ["py"] = "python",
- ["rb"] = "ruby",
-}
-
-runners.suffixes = {
- 'rb', 'lua', 'py', 'pl'
-}
-
-runners.registered = {
- texexec = { 'texexec.rb', false }, -- context mkii runner (only tool not to be luafied)
- texutil = { 'texutil.rb', true }, -- old perl based index sorter for mkii (old versions need it)
- texfont = { 'texfont.pl', true }, -- perl script that makes mkii font metric files
- texfind = { 'texfind.pl', false }, -- perltk based tex searching tool, mostly used at pragma
- texshow = { 'texshow.pl', false }, -- perltk based context help system, will be luafied
- -- texwork = { 'texwork.pl', false }, -- perltk based editing environment, only used at pragma
- makempy = { 'makempy.pl', true },
- mptopdf = { 'mptopdf.pl', true },
- pstopdf = { 'pstopdf.rb', true }, -- converts ps (and some more) images, does some cleaning (replaced)
- -- examplex = { 'examplex.rb', false },
- concheck = { 'concheck.rb', false },
- runtools = { 'runtools.rb', true },
- textools = { 'textools.rb', true },
- tmftools = { 'tmftools.rb', true },
- ctxtools = { 'ctxtools.rb', true },
- rlxtools = { 'rlxtools.rb', true },
- pdftools = { 'pdftools.rb', true },
- mpstools = { 'mpstools.rb', true },
- -- exatools = { 'exatools.rb', true },
- xmltools = { 'xmltools.rb', true },
- -- luatools = { 'luatools.lua', true },
- mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false },
-}
-
-runners.launchers = {
- windows = { },
- unix = { },
-}
-
--- like runners.libpath("framework"): looks on script's subpath
-
-function runners.libpath(...)
- package.prepend_libpath(file.dirname(environment.ownscript),...)
- package.prepend_libpath(file.dirname(environment.ownname) ,...)
-end
-
-function runners.prepare()
- local checkname = environment.argument("ifchanged")
- if type(checkname) == "string" and checkname ~= "" then
- local oldchecksum = file.loadchecksum(checkname)
- local newchecksum = file.checksum(checkname)
- if oldchecksum == newchecksum then
- if e_verbose then
- report("file '%s' is unchanged",checkname)
- end
- return "skip"
- elseif e_verbose then
- report("file '%s' is changed, processing started",checkname)
- end
- file.savechecksum(checkname)
- end
- local touchname = environment.argument("iftouched")
- if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.splitup(touchname, ",")
- if oldname and newname and oldname ~= "" and newname ~= "" then
- if not file.needs_updating(oldname,newname) then
- if e_verbose then
- report("file '%s' and '%s' have same age",oldname,newname)
- end
- return "skip"
- elseif e_verbose then
- report("file '%s' is older than '%s'",oldname,newname)
- end
- end
- end
- local runpath = environment.argument("path")
- if type(runpath) == "string" and not lfs.chdir(runpath) then
- report("unable to change to path '%s'",runpath)
- return "error"
- end
- runners.prepare = function() end
- return "run"
-end
-
-function runners.execute_script(fullname,internal,nosplit)
- local noquote = environment.argument("noquotes")
- if fullname and fullname ~= "" then
- local state = runners.prepare()
- if state == 'error' then
- return false
- elseif state == 'skip' then
- return true
- elseif state == "run" then
- local path, name, suffix = file.splitname(fullname)
- local result = ""
- if path ~= "" then
- result = fullname
- elseif name then
- name = gsub(name,"^int[%a]*:",function()
- internal = true
- return ""
- end )
- name = gsub(name,"^script:","")
- if suffix == "" and runners.registered[name] and runners.registered[name][1] then
- name = runners.registered[name][1]
- suffix = file.suffix(name)
- end
- if suffix == "" then
- -- loop over known suffixes
- for _,s in pairs(runners.suffixes) do
- result = resolvers.findfile(name .. "." .. s, 'texmfscripts')
- if result ~= "" then
- break
- end
- end
- elseif runners.applications[suffix] then
- result = resolvers.findfile(name, 'texmfscripts')
- else
- -- maybe look on path
- result = resolvers.findfile(name, 'other text files')
- end
- end
- if result and result ~= "" then
- if not nosplit then
- local before, after = environment.splitarguments(fullname) -- already done
- environment.arguments_before, environment.arguments_after = before, after
- end
- if internal then
- arg = { } for _,v in pairs(environment.arguments_after) do arg[#arg+1] = v end
- environment.ownscript = result
- dofile(result)
- else
-local texmfcnf = resolvers.getenv("TEXMFCNF")
-if not texmfcnf or texmfcnf == "" then
- texmfcnf = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.resolve(resolvers.luacnfspec)))
- resolvers.setenv("TEXMFCNF",table.concat(texmfcnf,";")) -- for running texexec etc (after tl change to texmf-dist)
-end
- local binary = runners.applications[file.suffix(result)]
- result = string.quoted(string.unquoted(result))
- -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
- -- result = '"' .. result .. '"'
- -- end
- if binary and binary ~= "" then
- result = binary .. " " .. result
- end
- local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
- if e_verbose then
- report()
- report("executing: %s",command)
- report()
- report()
- io.flush()
- end
- -- no os.exec because otherwise we get the wrong return value
- local code = os.execute(command) -- maybe spawn
- if code == 0 then
- return true
- else
- if binary then
- binary = file.addsuffix(binary,os.binsuffix)
- for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
- if lfs.isfile(file.join(p,binary)) then
- return false
- end
- end
- report()
- report("This script needs '%s' which seems not to be installed.",binary)
- report()
- end
- return false
- end
- end
- end
- end
- end
- return false
-end
-
-function runners.execute_program(fullname)
- local noquote = environment.argument("noquotes")
- if fullname and fullname ~= "" then
- local state = runners.prepare()
- if state == 'error' then
- return false
- elseif state == 'skip' then
- return true
- elseif state == "run" then
- local before, after = environment.splitarguments(fullname)
- for k=1,#after do after[k] = resolvers.resolve(after[k]) end
- environment.initializearguments(after)
- fullname = gsub(fullname,"^bin:","")
- local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
- report()
- report("executing: %s",command)
- report()
- report()
- io.flush()
- local code = os.exec(command) -- (fullname,unpack(after)) does not work / maybe spawn
- return code == 0
- end
- end
- return false
-end
-
--- the --usekpse flag will fall back (not by default) on kpse (hm, we can better update mtx-stubs)
-
-local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010'
-local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010'
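--- For reference, format(unix_stub,"ctxtools.rb") renders the stub that
--- handle_stubs below writes for a registered mkii script:
---
---   #!/bin/sh
---   mtxrun --usekpse --execute ctxtools.rb "$@"
---
--- the windows template wraps the same idea in a small batch file that calls
--- texlua on mtxrun.lua.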
-
-function runners.handle_stubs(create)
- local stubpath = environment.argument('stubpath') or '.' -- 'auto' (subpaths) no longer supported
- local windows = environment.argument('windows') or environment.argument('mswin') or false
- local unix = environment.argument('unix') or environment.argument('linux') or false
- if not windows and not unix then
- if os.platform == "unix" then
- unix = true
- else
- windows = true
- end
- end
- for _,v in pairs(runners.registered) do
- local name, doit = v[1], v[2]
- if doit then
- local base = gsub(file.basename(name), "%.(.-)$", "")
- if create then
- if windows then
- io.savedata(file.join(stubpath,base..".bat"),format(windows_stub,name))
- report("windows stub for '%s' created",base)
- end
- if unix then
- io.savedata(file.join(stubpath,base),format(unix_stub,name))
- report("unix stub for '%s' created",base)
- end
- else
- if windows and (os.remove(file.join(stubpath,base..'.bat')) or os.remove(file.join(stubpath,base..'.cmd'))) then
- report("windows stub for '%s' removed", base)
- end
- if unix and (os.remove(file.join(stubpath,base)) or os.remove(file.join(stubpath,base..'.sh'))) then
- report("unix stub for '%s' removed",base)
- end
- end
- end
- end
-end
-
-function runners.resolve_string(filename)
- if filename and filename ~= "" then
- runners.report_location(resolvers.resolve(filename))
- end
-end
-
--- differs from texmfstart where locate appends .com .exe .bat ... todo
-
-function runners.locate_file(filename) -- was given file but only searches in tree
- if filename and filename ~= "" then
- if environment.argument("first") then
- runners.report_location(resolvers.findfile(filename))
- -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
- elseif environment.argument("all") then
- local result, status = resolvers.findfiles(filename)
- if status and environment.argument("detail") then
- runners.report_location(status)
- else
- runners.report_location(result)
- end
- else
- runners.report_location(resolvers.findgivenfile(filename))
- -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
- end
- end
-end
-
-function runners.locate_platform()
- runners.report_location(os.platform)
-end
-
-function runners.report_location(result)
- if type(result) == "table" then
- for i=1,#result do
- if i > 1 then
- io.write("\n")
- end
- io.write(result[i])
- end
- else
- io.write(result)
- end
-end
-
-function runners.edit_script(filename) -- we assume that gvim is present on most systems (todo: also in cnf file)
- local editor = os.getenv("MTXRUN_EDITOR") or os.getenv("TEXMFSTART_EDITOR") or os.getenv("EDITOR") or 'gvim'
- local rest = resolvers.resolve(filename)
- if rest ~= "" then
- local command = editor .. " " .. rest
- if e_verbose then
- report()
- report("starting editor: %s",command)
- report()
- report()
- end
- os.launch(command)
- end
-end
-
-function runners.save_script_session(filename, list)
- local t = { }
- for i=1,#list do
- local key = list[i]
- t[key] = environment.arguments[key]
- end
- io.savedata(filename,table.serialize(t,true))
-end
-
-function runners.load_script_session(filename)
- if lfs.isfile(filename) then
- local t = io.loaddata(filename)
- if t then
- t = loadstring(t)
- if t then t = t() end
- for key, value in pairs(t) do
- environment.arguments[key] = value
- end
- end
- end
-end
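--- Illustrative sketch of the session files handled above (names and values
--- are made up): save_script_session("mtx-fonts.cfg",{ "pattern", "all" })
--- writes a loadable chunk along the lines of
---
---   return {
---    ["pattern"]="lm*",
---    ["all"]=true,
---   }
---
--- which load_script_session later executes and copies back into
--- environment.arguments.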
-
-function resolvers.launch(str)
- -- maybe we also need to test on mtxrun.launcher.suffix environment
- -- variable or on windows consult the assoc and ftype vars and such
- local launchers = runners.launchers[os.platform] if launchers then
- local suffix = file.suffix(str) if suffix then
- local runner = launchers[suffix] if runner then
- str = runner .. " " .. str
- end
- end
- end
- os.launch(str)
-end
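--- The launchers table consulted above starts out empty; as a hypothetical
--- sketch, if the entry for the current os.platform contained
---
---   { pdf = "xdg-open" }
---
--- then resolvers.launch("manual.pdf") would call os.launch with
--- "xdg-open manual.pdf" instead of the bare filename.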
-
-function runners.launch_file(filename)
- trackers.enable("resolvers.locating")
- local allresults = environment.arguments["all"]
- local pattern = environment.arguments["pattern"]
- if not pattern or pattern == "" then
- pattern = filename
- end
- if not pattern or pattern == "" then
- report("provide name or --pattern=")
- else
- local t = resolvers.findfiles(pattern,nil,allresults)
- if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern,nil,allresults)
- end
- if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
- end
- if t and #t > 0 then
- if allresults then
- for _, v in pairs(t) do
- report("launching %s", v)
- resolvers.launch(v)
- end
- else
- report("launching %s", t[1])
- resolvers.launch(t[1])
- end
- else
- report("no match for %s", pattern)
- end
- end
-end
-
-local mtxprefixes = {
- { "^mtx%-", "mtx-" },
- { "^mtx%-t%-", "mtx-t-" },
-}
-
-function runners.find_mtx_script(filename)
- local function found(name)
- local path = file.dirname(name)
- if path and path ~= "" then
- return false
- else
- local fullname = own and own.path and file.join(own.path,name)
- return io.exists(fullname) and fullname
- end
- end
- filename = file.addsuffix(filename,"lua")
- local basename = file.removesuffix(file.basename(filename))
- local suffix = file.suffix(filename)
- -- qualified path, raw name
- local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename
- if fullname and fullname ~= "" then
- return fullname
- end
- -- current path, raw name
- fullname = "./" .. filename
- fullname = io.exists(fullname) and fullname
- if fullname and fullname ~= "" then
- return fullname
- end
- -- mtx- prefix checking
- for i=1,#mtxprefixes do
- local mtxprefix = mtxprefixes[i]
- mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- end
- -- context namespace, just <filename>
- fullname = resolvers.findfile(filename)
- return fullname
-end
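--- Illustrative lookup order of the function above for "fonts": a qualified
--- "fonts.lua", then "./fonts.lua", then in the context namespace
--- "mtx-fonts.lua", "mtx-fontss.lua" and "mtx-font.lua" (trailing s stripped),
--- the same three with the "mtx-t-" prefix, and finally a plain "fonts.lua"
--- anywhere in the tree.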
-
-function runners.register_arguments(...)
- local arguments = environment.arguments_after
- local passedon = { ... }
- for i=#passedon,1,-1 do
- local pi = passedon[i]
- if pi then
- table.insert(arguments,1,pi)
- end
- end
-end
-
-function runners.execute_ctx_script(filename,...)
- runners.register_arguments(...)
- local arguments = environment.arguments_after
- local fullname = runners.find_mtx_script(filename) or ""
- if file.suffix(fullname) == "cld" then
- -- handy in editors where we force --autopdf
- report("running cld script: %s",filename)
- table.insert(arguments,1,fullname)
- table.insert(arguments,"--autopdf")
- fullname = runners.find_mtx_script("context") or ""
- end
- -- retry after generate but only if --autogenerate
- if fullname == "" and environment.argument("autogenerate") then -- might become the default
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
- --
- fullname = runners.find_mtx_script(filename) or ""
- end
- -- that should do it
- if fullname ~= "" then
- local state = runners.prepare()
- if state == 'error' then
- return false
- elseif state == 'skip' then
- return true
- elseif state == "run" then
- -- load and save ... kind of undocumented
- arg = { } for _,v in pairs(arguments) do arg[#arg+1] = resolvers.resolve(v) end
- environment.initializearguments(arg)
- local loadname = environment.arguments['load']
- if loadname then
- if type(loadname) ~= "string" then loadname = file.basename(fullname) end
- loadname = file.replacesuffix(loadname,"cfg")
- runners.load_script_session(loadname)
- end
- filename = environment.files[1]
- if e_verbose then
- report("using script: %s\n",fullname)
- end
- environment.ownscript = fullname
- dofile(fullname)
- local savename = environment.arguments['save']
- if savename then
- local save_list = runners.save_list
- if save_list and next(save_list) then
- if type(savename) ~= "string" then savename = file.basename(fullname) end
- savename = file.replacesuffix(savename,"cfg")
- runners.save_script_session(savename,save_list)
- end
- end
- return true
- end
- else
- if filename == "" or filename == "help" then
- local context = resolvers.findfile("mtx-context.lua")
- trackers.enable("resolvers.locating")
- if context ~= "" then
- local result = dir.glob((gsub(context,"mtx%-context","mtx-*"))) -- () needed
- local valid = { }
- table.sort(result)
- for i=1,#result do
- local scriptname = result[i]
- local scriptbase = match(scriptname,".*mtx%-([^%-]-)%.lua")
- if scriptbase then
- local data = io.loaddata(scriptname)
-local application = match(data,"local application.-=.-(%{.-%})")
-if application then
- application = loadstring("return " .. application)
- if application then
- application = application()
- local banner = application.banner
- if banner then
- local description, version = match(banner,"^(.-) ([%d.]+)$")
- if description then
- valid[#valid+1] = { scriptbase, version, description }
- else
- valid[#valid+1] = { scriptbase, "", banner }
- end
- end
- end
-end
- end
- end
- if #valid > 0 then
- application.identify()
- report("no script name given, known scripts:")
- report()
- for k=1,#valid do
- local v = valid[k]
- report("%-12s %4s %s",v[1],v[2],v[3])
- end
- end
- else
- report("no script name given")
- end
- else
- filename = file.addsuffix(filename,"lua")
- if file.is_qualified_path(filename) then
- report("unknown script '%s'",filename)
- else
- report("unknown script '%s' or 'mtx-%s'",filename,filename)
- end
- end
- return false
- end
-end
-
-function runners.prefixes()
- application.identify()
- report()
- report(concat(resolvers.allprefixes(true)," "))
-end
-
-function runners.timedrun(filename) -- just for me
- if filename and filename ~= "" then
- runners.timed(function() os.execute(filename) end)
- end
-end
-
-function runners.timed(action)
- statistics.timed(action)
-end
-
-function runners.associate(filename)
- os.launch(filename)
-end
-
-function runners.gethelp(filename)
- local url = environment.argument("url")
- if url and url ~= "" then
- local command = string.gsub(environment.argument("command") or "unknown","^%s*\\*(.-)%s*$","%1")
- url = utilities.templates.replace(url,{ command = command })
- os.launch(url)
- else
- report("no --url given")
- end
-end
-
--- this is a bit dirty ... first we store the first filename and next we
--- split the arguments so that we only see the ones meant for this script
--- ... later we will use the second half
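--- Roughly, for "mtxrun --verbose --script fonts --reload" the first file name
--- is "fonts", the arguments before it ("--verbose --script") configure mtxrun
--- itself, and the ones after it ("--reload") end up in arguments_after and are
--- later handed to the mtx script.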
-
-local filename = environment.files[1] or ""
-local ok = true
-
-local before, after = environment.splitarguments(filename)
-environment.arguments_before, environment.arguments_after = before, after
-environment.initializearguments(before)
-
-instance.lsrmode = environment.argument("lsr") or false
-
-e_verbose = environment.arguments["verbose"] -- delayed till here (we need the ones before script)
-
-if e_verbose then
- trackers.enable("resolvers.locating")
-end
-
--- maybe the unset has to go to this level
-
-local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-
-local e_argument = environment.argument
-
-if e_argument("timedlog") then
- logs.settimedlog()
-end
-
-if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
-
- resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
-
- os.setenv("engine","")
- os.setenv("progname","")
-
- local remapper = {
- otf = "opentype fonts",
- ttf = "truetype fonts",
- ttc = "truetype fonts",
- pfb = "type1 fonts",
- other = "other text files",
- }
-
- local progname = e_argument("progname") or 'context'
-
- local function kpse_initialized()
- texconfig.kpse_init = true
- local t = os.clock()
- local k = kpse.original.new("luatex",progname)
- local dummy = k:find_file("mtxrun.lua") -- so that we're initialized
- report("kpse fallback with progname '%s' initialized in %s seconds",progname,os.clock()-t)
- kpse_initialized = function() return k end
- return k
- end
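--- Illustrative behaviour of the fallback above: the first lookup pays for one
--- kpse initialization, after which kpse_initialized() keeps returning the
--- same object, and suffixes are remapped before the call, e.g.
---
---   resolvers.findfile("lmroman10-regular.otf","otf")
---     -> k:find_file("lmroman10-regular.otf","opentype fonts")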
-
- local findfile = resolvers.findfile
- local showpath = resolvers.showpath
-
- if e_argument("forcekpse") then
-
- function resolvers.findfile(name,kind)
- return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
- end
- function resolvers.showpath(name)
- return (kpse_initialized():show_path(name)) or ""
- end
-
- elseif e_argument("usekpse") or is_mkii_stub then
-
- resolvers.load()
-
- function resolvers.findfile(name,kind)
- local found = findfile(name,kind) or ""
- if found ~= "" then
- return found
- else
- return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
- end
- end
- function resolvers.showpath(name)
- local found = showpath(name) or ""
- if found ~= "" then
- return found
- else
- return (kpse_initialized():show_path(name)) or ""
- end
- end
-
- end
-
- function runners.loadbase()
- end
-
-else
-
- function runners.loadbase(...)
- if not resolvers.load(...) then
- report("forcing cache reload")
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- if not resolvers.load(...) then
- report("the resolver databases are not present or outdated")
- end
- end
- end
-
- resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
-
-end
-
--- joke .. reminds me of messing with gigi terminals
-
-if e_argument("ansi") then
-
- local formatters = string.formatters
-
- logs.setformatters {
- report_yes = formatters["%-15s | %s"],
- report_nop = formatters["%-15s |"],
- subreport_yes = formatters["%-15s | %s | %s"],
- subreport_nop = formatters["%-15s | %s |"],
- status_yes = formatters["%-15s : %s\n"],
- status_nop = formatters["%-15s :\n"],
- }
-
- local script = e_argument("script") or e_argument("scripts")
-
- if type(script) == "string" then
- logs.writer("]0;"..script.."") -- for Alan to test
- end
-
-end
-
-if e_argument("script") or e_argument("scripts") then
-
- -- run a script by loading it (using libs), pass args
-
- if e_argument("nofiledatabase") then
- -- handy for mtx-update
- else
- runners.loadbase()
- end
- if is_mkii_stub then
- ok = runners.execute_script(filename,false,true)
- else
- ok = runners.execute_ctx_script(filename)
- end
-
-elseif e_argument("selfmerge") then
-
- -- embed used libraries
-
- runners.loadbase()
- local found = locate_libs()
-
- if found then
- local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
- if lfs.isfile(mtxrun) then
- utilities.merger.selfmerge(mtxrun,own.libs,{ found })
- application.report("runner updated on resolved path: %s",mtxrun)
- else
- utilities.merger.selfmerge(own.name,own.libs,{ found })
- application.report("runner updated on relative path: %s",own.name)
- end
- end
-
-elseif e_argument("selfclean") then
-
- -- remove embedded libraries
-
- runners.loadbase()
-
- local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
- if lfs.isfile(mtxrun) then
- utilities.merger.selfclean(mtxrun)
- application.report("runner cleaned on resolved path: %s",mtxrun)
- else
- utilities.merger.selfclean(own.name)
- application.report("runner cleaned on relative path: %s",own.name)
- end
-
-elseif e_argument("selfupdate") then
-
- runners.loadbase()
- trackers.enable("resolvers.locating")
- resolvers.updatescript(own.name,"mtxrun")
-
-elseif e_argument("ctxlua") or e_argument("internal") then
-
- -- run a script by loading it (using libs)
-
- runners.loadbase()
- ok = runners.execute_script(filename,true)
-
-elseif e_argument("execute") then
-
- -- execute script
-
- runners.loadbase()
- ok = runners.execute_script(filename)
-
-elseif e_argument("direct") then
-
- -- equals bin:
-
- runners.loadbase()
- ok = runners.execute_program(filename)
-
-elseif e_argument("edit") then
-
- -- edit file
-
- runners.loadbase()
- runners.edit_script(filename)
-
-elseif e_argument("launch") then
-
- runners.loadbase()
- runners.launch_file(filename)
-
-elseif e_argument("associate") then
-
- runners.associate(filename)
-
-elseif e_argument("gethelp") then
-
- runners.gethelp()
-
-elseif e_argument("makestubs") then
-
- -- make stubs (deprecated)
-
- runners.handle_stubs(true)
-
-elseif e_argument("removestubs") then
-
- -- remove stubs (deprecated)
-
- runners.loadbase()
- runners.handle_stubs(false)
-
-elseif e_argument("resolve") then
-
- -- resolve string
-
- runners.loadbase()
- runners.resolve_string(filename)
-
-elseif e_argument("locate") then
-
- -- locate file (only database)
-
- runners.loadbase()
- runners.locate_file(filename)
-
-elseif e_argument("platform") or e_argument("show-platform") then
-
- -- locate platform
-
- runners.loadbase()
- runners.locate_platform()
-
-elseif e_argument("prefixes") then
-
- runners.loadbase()
- runners.prefixes()
-
-elseif e_argument("timedrun") then
-
- -- timed run
-
- runners.loadbase()
- runners.timedrun(filename)
-
-elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
-
- resolvers.load("nofiles")
- resolvers.listers.variables(e_argument("pattern"))
-
-elseif e_argument("configurations") or e_argument("show-configurations") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
-
- resolvers.load("nofiles")
- resolvers.listers.configurations()
-
-elseif e_argument("find-file") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
-
- resolvers.load()
- local e_all = e_argument("all")
- local e_pattern = e_argument("pattern")
- local e_format = e_argument("format")
- local finder = e_all and resolvers.findfiles or resolvers.findfile
- if not e_pattern then
- runners.register_arguments(filename)
- environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(finder,environment.files,e_format)
- elseif type(e_pattern) == "string" then
- resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
- end
-
-elseif e_argument("find-path") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
-
- resolvers.load()
- local path = resolvers.findpath(filename, instance.my_format)
- if e_verbose then
- report(path)
- else
- print(path)
- end
-
-elseif e_argument("expand-braces") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
-
- resolvers.load("nofiles")
- runners.register_arguments(filename)
- environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-
-elseif e_argument("expand-path") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
-
- resolvers.load("nofiles")
- runners.register_arguments(filename)
- environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-
-elseif e_argument("expand-var") or e_argument("expand-variable") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
-
- resolvers.load("nofiles")
- runners.register_arguments(filename)
- environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-
-elseif e_argument("show-path") or e_argument("path-value") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
-
- resolvers.load("nofiles")
- runners.register_arguments(filename)
- environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-
-elseif e_argument("var-value") or e_argument("show-value") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
-
- resolvers.load("nofiles")
- runners.register_arguments(filename)
- environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-
-elseif e_argument("format-path") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
-
- resolvers.load()
- report(caches.getwritablepath("format"))
-
-elseif e_argument("pattern") then
-
- -- luatools
-
- runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-
-elseif e_argument("generate") then
-
- -- luatools
-
- if filename and filename ~= "" then
- resolvers.load("nofiles")
- trackers.enable("resolvers.locating")
- resolvers.renew(filename)
- else
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
- end
-
- e_verbose = true
-
-elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-
- -- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
-
- resolvers.load()
- trackers.enable("resolvers.locating")
- environment.make_format(filename)
-
-elseif e_argument("run") then
-
- -- luatools
-
- runners.execute_ctx_script("mtx-base","--run",filename)
-
-elseif e_argument("fmt") then
-
- -- luatools
-
- runners.execute_ctx_script("mtx-base","--fmt",filename)
-
-elseif e_argument("help") and filename=='base' then
-
- -- luatools
-
- runners.execute_ctx_script("mtx-base","--help")
-
-elseif e_argument("version") then
-
- application.version()
-
- application.report("source path",environment.ownbin)
-
-elseif e_argument("directives") then
-
- directives.show()
-
-elseif e_argument("trackers") then
-
- trackers.show()
-
-elseif e_argument("experiments") then
-
- experiments.show()
-
-elseif e_argument("exporthelp") then
-
- runners.loadbase()
- application.export(e_argument("exporthelp"),filename)
-
-elseif e_argument("help") or filename=='help' or filename == "" then
-
- application.help()
-
-elseif find(filename,"^bin:") then
-
- runners.loadbase()
- ok = runners.execute_program(filename)
-
-elseif is_mkii_stub then
-
- -- execute mkii script
-
- runners.loadbase()
- ok = runners.execute_script(filename,false,true)
-
-elseif false then
-
- runners.loadbase()
- ok = runners.execute_ctx_script(filename)
- if not ok then
- ok = runners.execute_script(filename)
- end
-
-elseif environment.files[1] == 'texmfcnf.lua' then -- so that we don't need to load mtx-base
-
- resolvers.load("nofiles")
- resolvers.listers.configurations()
-
-else
- runners.loadbase()
- runners.execute_ctx_script("mtx-base",filename)
-
-end
-
-if e_verbose then
- report()
- report("elapsed lua time: %0.3f seconds",os.runtime())
-end
-
-if os.type ~= "windows" then
- texio.write("\n") -- is this still valid?
-end
-
-if ok == false then ok = 1 elseif ok == true or ok == nil then ok = 0 end
-
--- os.exit(ok,true) -- true forces a cleanup in 5.2+
-
-os.exit(ok) -- true forces a cleanup in 5.2+ but reports a wrong number then
diff --git a/scripts/context/stubs/win64/mtxrunjit.exe b/scripts/context/stubs/win64/mtxrunjit.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/mtxrunjit.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/mtxworks.exe b/scripts/context/stubs/win64/mtxworks.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/mtxworks.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/pstopdf.exe b/scripts/context/stubs/win64/pstopdf.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/pstopdf.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/texexec.exe b/scripts/context/stubs/win64/texexec.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/texexec.exe
+++ /dev/null
Binary files differ
diff --git a/scripts/context/stubs/win64/texmfstart.exe b/scripts/context/stubs/win64/texmfstart.exe
deleted file mode 100644
index 93290a6e0..000000000
--- a/scripts/context/stubs/win64/texmfstart.exe
+++ /dev/null
Binary files differ
diff --git a/tex/context/base/anch-bar.mkiv b/tex/context/base/anch-bar.mkiv
index 501507b3b..c7c6190be 100644
--- a/tex/context/base/anch-bar.mkiv
+++ b/tex/context/base/anch-bar.mkiv
@@ -81,6 +81,9 @@
\let\setupsidebars\setupsidebar
\unexpanded\def\startsidebar
+ {\dosingleempty\anch_sidebars_start}
+
+\unexpanded\def\startsidebar
{\dodoubleempty\anch_sidebars_start}
\def\anch_sidebars_start[#1][#2]%
@@ -120,7 +123,7 @@
\d_anch_sidebars_distance\dimexpr\scratchdimen+\numexpr\m_level-\plusone\relax\dimexpr\scratchdistance\relax\relax
\fi
\fi
- \startpositionoverlay{\v!text-1}%
+ \startpositionoverlay{text-1}%
\normalexpanded{\setMPpositiongraphicrange % maybe expand in definition
{b:sidebar:\the\c_anch_sidebars_n}%
{e:sidebar:\the\c_anch_sidebars_n}%
@@ -208,6 +211,9 @@
\unexpanded\def\startmarginrule
{\dosingleempty\anch_marginrules_start}
+\unexpanded\def\startmarginrule
+ {\dosingleempty\anch_marginrules_start}
+
\def\anch_marginrules_start[#1]% pretty inefficient checking
{\edef\m_anch_marginrules_kind{#1}%
\ifx\m_anch_marginrules_kind\empty
diff --git a/tex/context/base/anch-pos.lua b/tex/context/base/anch-pos.lua
index 0bd945c8a..9cc9fb128 100644
--- a/tex/context/base/anch-pos.lua
+++ b/tex/context/base/anch-pos.lua
@@ -30,25 +30,15 @@ local texsp = tex.sp
----- texsp = string.todimen -- because we cache this, it is much faster but there is no rounding
local texgetcount = tex.getcount
+local texgetbox = tex.getbox
local texsetcount = tex.setcount
local texget = tex.get
local pdf = pdf -- h and v are variables
local setmetatableindex = table.setmetatableindex
-
-local nuts = nodes.nuts
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getlist = nuts.getlist
-local getbox = nuts.getbox
-local getskip = nuts.getskip
-
-local find_tail = nuts.tail
-
-local new_latelua = nuts.pool.latelua
-local new_latelua_node = nodes.pool.latelua
+local new_latelua = nodes.pool.latelua
+local find_tail = node.slide
local variables = interfaces.variables
local v_text = variables.text
@@ -312,13 +302,13 @@ function commands.bcolumn(tag,register) -- name will change
insert(columns,tag)
column = tag
if register then
- context(new_latelua_node(f_b_column(tag)))
+ context(new_latelua(f_b_column(tag)))
end
end
function commands.ecolumn(register) -- name will change
if register then
- context(new_latelua_node(f_e_column()))
+ context(new_latelua(f_e_column()))
end
remove(columns)
column = columns[#columns]
@@ -350,10 +340,10 @@ function jobpositions.markregionbox(n,tag,correct)
nofregions = nofregions + 1
tag = f_region(nofregions)
end
- local box = getbox(n)
- local w = getfield(box,"width")
- local h = getfield(box,"height")
- local d = getfield(box,"depth")
+ local box = texgetbox(n)
+ local w = box.width
+ local h = box.height
+ local d = box.depth
tobesaved[tag] = {
p = true,
x = true,
@@ -365,18 +355,18 @@ function jobpositions.markregionbox(n,tag,correct)
local push = new_latelua(f_b_region(tag))
local pop = new_latelua(f_e_region(tostring(correct))) -- todo: check if tostring is needed with formatter
-- maybe we should construct a hbox first (needs experimenting) so that we can avoid some at the tex end
- local head = getlist(box)
+ local head = box.list
if head then
local tail = find_tail(head)
- setfield(head,"prev",push)
- setfield(push,"next",head)
- setfield(pop,"prev",tail)
- setfield(tail,"next",pop)
+ head.prev = push
+ push.next = head
+ pop .prev = tail
+ tail.next = pop
else -- we can have a simple push/pop
- setfield(push,"next",pop)
- setfield(pop,"prev",push)
+ push.next = pop
+ pop.prev = push
end
- setfield(box,"list",push)
+ box.list = push
end
function jobpositions.enhance(name)
@@ -385,7 +375,7 @@ end
function commands.pos(name,t)
tobesaved[name] = t
- context(new_latelua_node(f_enhance(name)))
+ context(new_latelua(f_enhance(name)))
end
local nofparagraphs = 0
@@ -393,19 +383,19 @@ local nofparagraphs = 0
function commands.parpos() -- todo: relate to localpar (so this is an intermediate variant)
nofparagraphs = nofparagraphs + 1
texsetcount("global","c_anch_positions_paragraph",nofparagraphs)
- local strutbox = getbox("strutbox")
+ local strutbox = texgetbox("strutbox")
local t = {
p = true,
c = true,
r = true,
x = true,
y = true,
- h = getfield(strutbox,"height"),
- d = getfield(strutbox,"depth"),
+ h = strutbox.height,
+ d = strutbox.depth,
hs = texget("hsize"),
}
- local leftskip = getfield(getskip("leftskip"),"width")
- local rightskip = getfield(getskip("rightskip"),"width")
+ local leftskip = texget("leftskip").width
+ local rightskip = texget("rightskip").width
local hangindent = texget("hangindent")
local hangafter = texget("hangafter")
local parindent = texget("parindent")
@@ -430,7 +420,7 @@ function commands.parpos() -- todo: relate to localpar (so this is an intermedia
end
local tag = f_p_tag(nofparagraphs)
tobesaved[tag] = t
- context(new_latelua_node(f_enhance(tag)))
+ context(new_latelua(f_enhance(tag)))
end
function commands.posxy(name) -- can node.write be used here?
@@ -442,7 +432,7 @@ function commands.posxy(name) -- can node.write be used here?
y = true,
n = nofparagraphs > 0 and nofparagraphs or nil,
}
- context(new_latelua_node(f_enhance(name)))
+ context(new_latelua(f_enhance(name)))
end
function commands.poswhd(name,w,h,d)
@@ -457,7 +447,7 @@ function commands.poswhd(name,w,h,d)
d = d,
n = nofparagraphs > 0 and nofparagraphs or nil,
}
- context(new_latelua_node(f_enhance(name)))
+ context(new_latelua(f_enhance(name)))
end
function commands.posplus(name,w,h,d,extra)
@@ -473,22 +463,22 @@ function commands.posplus(name,w,h,d,extra)
n = nofparagraphs > 0 and nofparagraphs or nil,
e = extra,
}
- context(new_latelua_node(f_enhance(name)))
+ context(new_latelua(f_enhance(name)))
end
function commands.posstrut(name,w,h,d)
- local strutbox = getbox("strutbox")
+ local strutbox = texgetbox("strutbox")
tobesaved[name] = {
p = true,
c = column,
r = true,
x = true,
y = true,
- h = getfield(strutbox,"height"),
- d = getfield(strutbox,"depth"),
+ h = strutbox.height,
+ d = strutbox.depth,
n = nofparagraphs > 0 and nofparagraphs or nil,
}
- context(new_latelua_node(f_enhance(name)))
+ context(new_latelua(f_enhance(name)))
end
function jobpositions.getreserved(tag,n)
diff --git a/tex/context/base/attr-ini.mkiv b/tex/context/base/attr-ini.mkiv
index 3f49e67a9..9dfa7baae 100644
--- a/tex/context/base/attr-ini.mkiv
+++ b/tex/context/base/attr-ini.mkiv
@@ -85,7 +85,7 @@
%D For the moment we put this here (later it will move to where it's used):
\definesystemattribute [state]
-\definesystemattribute [color] [public] % global
+\definesystemattribute [color] [public]
\definesystemattribute [colormodel] [public,global]
\definesystemattribute [skip]
\definesystemattribute [penalty]
diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua
index d4133396b..18a339247 100644
--- a/tex/context/base/back-exp.lua
+++ b/tex/context/base/back-exp.lua
@@ -95,22 +95,10 @@ local a_reference = attributes.private('reference')
local a_textblock = attributes.private("textblock")
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getnext = nuts.getnext
-local getsubtype = nuts.getsubtype
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getlist = nuts.getlist
-local getid = nuts.getid
-local getfield = nuts.getfield
-local getattr = nuts.getattr
-
-local setattr = nuts.setattr
-
-local traverse_id = nuts.traverse_id
-local traverse_nodes = nuts.traverse
+local traverse_id = node.traverse_id
+local traverse_nodes = node.traverse
+local slide_nodelist = node.slide
+local locate_node = nodes.locate
local references = structures.references
local structurestags = structures.tags
@@ -454,26 +442,13 @@ local function checkdocument(root)
if data then
for i=1,#data do
local di = data[i]
- local tg = di.tg
- if tg == "noexport" then
- local ud = userdata[di.fulltag]
- local comment = ud and ud.comment
- if comment then
- di.element = "comment"
- di.data = { { content = comment } }
- ud.comment = nil
- else
- data[i] = false
- -- di.element = ""
- -- di.data = nil
- end
- elseif di.content then
- -- okay
- elseif tg == "ignore" then
+ if di.content then
+ -- ok
+ elseif di.tg == "ignore" then
di.element = ""
checkdocument(di)
else
- checkdocument(di) -- new, else no noexport handling
+ -- can't happen
end
end
end
@@ -1210,28 +1185,23 @@ function structurestags.settabulatecell(align)
end
end
-local function hascontent(data)
- for i=1,#data do
- local di = data[i]
- if not di then
- --
- elseif di.content then
- return true
- else
- local d = di.data
- if d and #d > 0 and hascontent(d) then
- return true
- end
- end
- end
-end
-
function extras.tabulate(result,element,detail,n,fulltag,di)
local data = di.data
for i=1,#data do
local di = data[i]
- if di.tg == "tabulaterow" and not hascontent(di.data) then
- di.element = "" -- or simply remove
+ if di.tg == "tabulaterow" then
+ local did = di.data
+ local content = false
+ for i=1,#did do
+ local d = did[i].data
+ if d and #d > 0 and d[1].content then
+ content = true
+ break
+ end
+ end
+ if not content then
+ di.element = "" -- or simply remove
+ end
end
end
end
@@ -1784,9 +1754,9 @@ local function pushentry(current)
end
end
-local function pushcontent(oldparagraph,newparagraph)
+local function pushcontent(currentparagraph,newparagraph)
if nofcurrentcontent > 0 then
- if oldparagraph then
+ if currentparagraph then
if currentcontent[nofcurrentcontent] == "\n" then
if trace_export then
report_export("%w<!-- removing newline -->",currentdepth)
@@ -1796,9 +1766,9 @@ local function pushcontent(oldparagraph,newparagraph)
end
local content = concat(currentcontent,"",1,nofcurrentcontent)
if content == "" then
- -- omit; when oldparagraph we could push, remove spaces, pop
- elseif somespace[content] and oldparagraph then
- -- omit; when oldparagraph we could push, remove spaces, pop
+ -- omit; when currentparagraph we could push, remove spaces, pop
+ elseif somespace[content] and currentparagraph then
+ -- omit; when currentparagraph we could push, remove spaces, pop
else
local olddepth, newdepth
local list = taglist[currentattribute]
@@ -1807,7 +1777,7 @@ local function pushcontent(oldparagraph,newparagraph)
end
local td = tree.data
local nd = #td
- td[nd+1] = { parnumber = oldparagraph or currentparagraph, content = content }
+ td[nd+1] = { parnumber = currentparagraph, content = content }
if trace_export then
report_export("%w<!-- start content with length %s -->",currentdepth,#content)
report_export("%w%s",currentdepth,(gsub(content,"\n","\\n")))
@@ -1821,10 +1791,10 @@ local function pushcontent(oldparagraph,newparagraph)
end
nofcurrentcontent = 0
end
- if oldparagraph then
+ if currentparagraph then
pushentry(makebreaklist(currentnesting))
if trace_export then
- report_export("%w<!-- break added betweep paragraph %a and %a -->",currentdepth,oldparagraph,newparagraph)
+ report_export("%w<!-- break added betweep paragraph %a and %a -->",currentdepth,currentparagraph,newparagraph)
end
end
end
@@ -1856,25 +1826,25 @@ end
local function collectresults(head,list) -- is last used (we also have currentattribute)
local p
for n in traverse_nodes(head) do
- local id = getid(n) -- 14: image, 8: literal (mp)
+ local id = n.id -- 14: image, 8: literal (mp)
if id == glyph_code then
- local at = getattr(n,a_tagged)
+ local at = n[a_tagged]
if not at then
-- we need to tag the pagebody stuff as being valid skippable
--
-- report_export("skipping character: %C (no attribute)",n.char)
else
-- we could add tonunicodes for ligatures (todo)
- local components = getfield(n,"components")
+ local components = n.components
if components then -- we loose data
collectresults(components,nil)
else
- local c = getchar(n)
+ local c = n.char
if last ~= at then
local tl = taglist[at]
pushcontent()
currentnesting = tl
- currentparagraph = getattr(n,a_taggedpar)
+ currentparagraph = n[a_taggedpar]
currentattribute = at
last = at
pushentry(currentnesting)
@@ -1883,13 +1853,13 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
-- We need to intercept this here; maybe I will also move this
-- to a regular setter at the tex end.
- local r = getattr(n,a_reference)
+ local r = n[a_reference]
if r then
referencehash[tl[#tl]] = r -- fulltag
end
--
elseif last then
- local ap = getattr(n,a_taggedpar)
+ local ap = n[a_taggedpar]
if ap ~= currentparagraph then
pushcontent(currentparagraph,ap)
pushentry(currentnesting)
@@ -1904,7 +1874,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
report_export("%w<!-- processing glyph %C tagged %a) -->",currentdepth,c,at)
end
end
- local s = getattr(n,a_exportstatus)
+ local s = n[a_exportstatus]
if s then
c = s
end
@@ -1913,7 +1883,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
report_export("%w<!-- skipping last glyph -->",currentdepth)
end
elseif c == 0x20 then
- local a = getattr(n,a_characters)
+ local a = n[a_characters]
nofcurrentcontent = nofcurrentcontent + 1
if a then
if trace_export then
@@ -1924,7 +1894,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
currentcontent[nofcurrentcontent] = " "
end
else
- local fc = fontchar[getfont(n)]
+ local fc = fontchar[n.font]
if fc then
fc = fc and fc[c]
if fc then
@@ -1949,23 +1919,20 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
elseif id == disc_code then -- probably too late
if keephyphens then
- local pre = getfield(n,"pre")
- if pre and not getnext(pre) and getid(pre) == glyph_code and getchar(pre) == hyphencode then
+ local pre = n.pre
+ if pre and not pre.next and pre.id == glyph_code and pre.char == hyphencode then
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = hyphen
end
end
- local replace = getfield(n,"replace")
- if replace then
- collectresults(replace,nil)
- end
+ collectresults(n.replace,nil)
elseif id == glue_code then
-- we need to distinguish between hskips and vskips
- local ca = getattr(n,a_characters)
+ local ca = n[a_characters]
if ca == 0 then
-- skip this one ... already converted special character (node-acc)
elseif ca then
- local a = getattr(n,a_tagged)
+ local a = n[a_tagged]
if a then
local c = specialspaces[ca]
if last ~= a then
@@ -1975,13 +1942,13 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
pushcontent()
currentnesting = tl
- currentparagraph = getattr(n,a_taggedpar)
+ currentparagraph = n[a_taggedpar]
currentattribute = a
last = a
pushentry(currentnesting)
-- no reference check (see above)
elseif last then
- local ap = getattr(n,a_taggedpar)
+ local ap = n[a_taggedpar]
if ap ~= currentparagraph then
pushcontent(currentparagraph,ap)
pushentry(currentnesting)
@@ -2002,11 +1969,11 @@ local function collectresults(head,list) -- is last used (we also have currentat
currentcontent[nofcurrentcontent] = c
end
else
- local subtype = getsubtype(n)
+ local subtype = n.subtype
if subtype == userskip_code then
- if getfield(getfield(n,"spec"),"width") > threshold then
+ if n.spec.width > threshold then
if last and not somespace[currentcontent[nofcurrentcontent]] then
- local a = getattr(n,a_tagged)
+ local a = n[a_tagged]
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 5a -->",currentdepth)
@@ -2033,7 +2000,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
elseif subtype == spaceskip_code or subtype == xspaceskip_code then
if not somespace[currentcontent[nofcurrentcontent]] then
- local a = getattr(n,a_tagged)
+ local a = n[a_tagged]
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 7 (stay in element) -->",currentdepth)
@@ -2062,7 +2029,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
nofcurrentcontent = nofcurrentcontent - 1
end
elseif not somespace[r] then
- local a = getattr(n,a_tagged)
+ local a = n[a_tagged]
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 1 (end of line, stay in element) -->",currentdepth)
@@ -2090,9 +2057,9 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
end
elseif id == hlist_code or id == vlist_code then
- local ai = getattr(n,a_image)
+ local ai = n[a_image]
if ai then
- local at = getattr(n,a_tagged)
+ local at = n[a_tagged]
if nofcurrentcontent > 0 then
pushcontent()
pushentry(currentnesting) -- ??
@@ -2105,21 +2072,18 @@ local function collectresults(head,list) -- is last used (we also have currentat
currentparagraph = nil
else
-- we need to determine an end-of-line
- local list = getlist(n)
- if list then
- collectresults(list,n)
- end
+ collectresults(n.list,n)
end
elseif id == kern_code then
- local kern = getfield(n,"kern")
+ local kern = n.kern
if kern > 0 then
local limit = threshold
- if p and getid(p) == glyph_code then
- limit = fontquads[getfont(p)] / 4
+ if p and p.id == glyph_code then
+ limit = fontquads[p.font] / 4
end
if kern > limit then
if last and not somespace[currentcontent[nofcurrentcontent]] then
- local a = getattr(n,a_tagged)
+ local a = n[a_tagged]
if a == last then
if not somespace[currentcontent[nofcurrentcontent]] then
if trace_export then
@@ -2159,7 +2123,7 @@ function nodes.handlers.export(head) -- hooks into the page builder
end
-- continueexport()
restart = true
- collectresults(tonut(head))
+ collectresults(head)
if trace_export then
report_export("%w<!-- stop flushing page -->",currentdepth)
end
@@ -2169,12 +2133,12 @@ end
function builders.paragraphs.tag(head)
noftextblocks = noftextblocks + 1
- for n in traverse_id(hlist_code,tonut(head)) do
- local subtype = getsubtype(n)
+ for n in traverse_id(hlist_code,head) do
+ local subtype = n.subtype
if subtype == line_code then
- setattr(n,a_textblock,noftextblocks)
+ n[a_textblock] = noftextblocks
elseif subtype == glue_code or subtype == kern_code then
- setattr(n,a_textblock,0)
+ n[a_textblock] = 0
end
end
return false
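
builders.paragraphs.tag, restored above, walks the boxed lines of a freshly broken paragraph and stamps them with a textblock attribute so the exporter can later detect line boundaries. A simplified sketch of that traversal using only the stock LuaTeX node helpers; the attribute register number is made up, the glue/kern special-casing from the hunk is omitted, and the return convention of the ConTeXt task handler is not reproduced:

local hlist_id      = node.id("hlist")
local a_textblock   = 900   -- hypothetical attribute register
local noftextblocks = 0

local function tagparagraph(head)
    noftextblocks = noftextblocks + 1
    for n in node.traverse_id(hlist_id, head) do
        node.set_attribute(n, a_textblock, noftextblocks)
    end
    return head -- list itself is left untouched
end
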
diff --git a/tex/context/base/bibl-tra.lua b/tex/context/base/bibl-tra.lua
index 75dc3e86f..82f8dc2aa 100644
--- a/tex/context/base/bibl-tra.lua
+++ b/tex/context/base/bibl-tra.lua
@@ -10,7 +10,7 @@ if not modules then modules = { } end modules ['bibl-tra'] = {
-- temporary hack, needed for transition
-if not publications then
+if not publications then
local hacks = utilities.storage.allocate()
diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv
index 6c4fb6fc1..bdde5df9d 100644
--- a/tex/context/base/buff-ver.mkiv
+++ b/tex/context/base/buff-ver.mkiv
@@ -758,9 +758,8 @@
\buff_verbatim_setup_line_numbering
\buff_verbatim_initialize_typing_one
\buff_verbatim_initialize_typing_two
- \dostarttagged\t!verbatimblock{#1}%
\beginofverbatimlines
- \dostarttagged\t!verbatimlines\empty
+ \dostarttagged\t!verbatimblock{#1}%
\ctxcommand{typebuffer {
name = "#2",
strip = "\typingparameter\c!strip",
@@ -773,7 +772,6 @@
}}%
\dostoptagged
\endofverbatimlines
- \dostoptagged
\stoppacked
\typingparameter\c!after}
diff --git a/tex/context/base/char-def.lua b/tex/context/base/char-def.lua
index 3e1d56009..9642d1736 100644
--- a/tex/context/base/char-def.lua
+++ b/tex/context/base/char-def.lua
@@ -1829,9 +1829,7 @@ characters.data={
direction="on",
linebreak="al",
mathclass="topaccent",
- mathfiller="barfill",
mathname="bar",
- mathmleq=0x203E,
specials={ "compat", 0x0020, 0x0304 },
unicodeslot=0x00AF,
},
diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua
index d406b8bfe..95ed48279 100644
--- a/tex/context/base/char-utf.lua
+++ b/tex/context/base/char-utf.lua
@@ -357,55 +357,44 @@ function utffilters.collapse(str,filename) -- we can make high a seperate pass
return str
end
--- function utffilters.decompose(str)
--- if str and str ~= "" then
--- local nstr = #str
--- if nstr > 1 then
--- -- if initialize then -- saves a call
--- -- initialize()
--- -- end
--- local tokens, t, done, n = { }, 0, false, 0
--- for s in utfcharacters(str) do
--- local dec = decomposed[s]
--- if dec then
--- if not done then
--- if n > 0 then
--- for s in utfcharacters(str) do
--- if n == 0 then
--- break
--- else
--- t = t + 1
--- tokens[t] = s
--- n = n - 1
--- end
--- end
--- end
--- done = true
--- end
--- t = t + 1
--- tokens[t] = dec
--- elseif done then
--- t = t + 1
--- tokens[t] = s
--- else
--- n = n + 1
--- end
--- end
--- if done then
--- return concat(tokens) -- seldom called
--- end
--- end
--- end
--- return str
--- end
-
-local tree = lpeg.utfchartabletopattern(table.keys(decomposed))
-local finder = lpeg.finder(tree,false,true)
-local replacer = lpeg.replacer(tree,decomposed,false,true)
-
-function utffilters.decompose(str) -- 3 to 4 times faster than the above
- if str and str ~= "" and #str > 1 and lpegmatch(finder,str) then
- return lpegmatch(replacer,str)
+function utffilters.decompose(str)
+ if str and str ~= "" then
+ local nstr = #str
+ if nstr > 1 then
+ -- if initialize then -- saves a call
+ -- initialize()
+ -- end
+ local tokens, t, done, n = { }, 0, false, 0
+ for s in utfcharacters(str) do
+ local dec = decomposed[s]
+ if dec then
+ if not done then
+ if n > 0 then
+ for s in utfcharacters(str) do
+              if n == 0 then
+ break
+ else
+ t = t + 1
+ tokens[t] = s
+ n = n - 1
+ end
+ end
+ end
+ done = true
+ end
+ t = t + 1
+ tokens[t] = dec
+ elseif done then
+ t = t + 1
+ tokens[t] = s
+ else
+ n = n + 1
+ end
+ end
+ if done then
+ return concat(tokens) -- seldom called
+ end
+ end
end
return str
end
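
The restored utffilters.decompose delays all copying until the first decomposable character is found: it merely counts characters until then, and only on the first hit flushes the counted prefix into the token list (the flush loop stops when the counter reaches zero). A self-contained sketch of that strategy in plain Lua, where byte-wise iteration and a toy single-character mapping stand in for ConTeXt's utfcharacters and the real decomposition table:

local decomposed = { ["@"] = "(at)" }   -- toy stand-in for the real table

local function decompose(str)
    local tokens, t, done, n = { }, 0, false, 0
    for s in str:gmatch(".") do
        local dec = decomposed[s]
        if dec then
            if not done then
                if n > 0 then
                    -- flush the n characters scanned before the first hit
                    for c in str:gmatch(".") do
                        if n == 0 then break end
                        t = t + 1 ; tokens[t] = c ; n = n - 1
                    end
                end
                done = true
            end
            t = t + 1 ; tokens[t] = dec
        elseif done then
            t = t + 1 ; tokens[t] = s
        else
            n = n + 1
        end
    end
    return done and table.concat(tokens) or str
end

print(decompose("user@host"))  --> user(at)host
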
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 733afc6d0..134b1f08a 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2014.02.14 17:07}
+\newcontextversion{2014.01.03 00:40}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
@@ -24,16 +24,6 @@
%D Maybe:
-% \appendtoks
-% \inheritmaintextcolor
-% \to \everybeforenoteinsert
-
-% \appendtoks
-% \inheritmaintextcolor
-% \to \everymargindatacontent
-
-%D Maybe:
-
\unexpanded\def\tightvbox{\dowithnextbox{\dp\nextbox\zeropoint\box\nextbox}\vbox}
\unexpanded\def\tightvtop{\dowithnextbox{\ht\nextbox\zeropoint\box\nextbox}\vtop}
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 275625528..30d18d29b 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context-version.png b/tex/context/base/context-version.png
index 39c348e48..67edf8a53 100644
--- a/tex/context/base/context-version.png
+++ b/tex/context/base/context-version.png
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 8c67fbd50..0214c2bfa 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -28,8 +28,8 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2014.02.14 17:07}
-\edef\contextkind {beta}
+\edef\contextversion{2014.01.03 00:40}
+\edef\contextkind {current}
%D For those who want to use this:
@@ -143,6 +143,7 @@
\loadmarkfile{node-fin}
\loadmarkfile{node-mig}
+\loadmarkfile{typo-bld} % par builders
%loadmarkfile{node-pag}
\loadmarkfile{back-ini}
@@ -170,7 +171,6 @@
%loadmarkfile{supp-num} % obsolete
\loadmarkfile{typo-ini}
-\loadmarkfile{typo-bld} % par builders
\loadmkvifile{file-syn}
\loadmkvifile{file-mod}
@@ -357,7 +357,6 @@
\loadmkvifile{font-sel}
\loadmarkfile{typo-tal}
-\loadmarkfile{typo-par} % par builders (uses fonts)
\loadmarkfile{tabl-com}
\loadmarkfile{tabl-pln}
@@ -402,7 +401,7 @@
\loadmarkfile{scrp-ini}
\loadmarkfile{lang-wrd} % can be optional (discussion with mm sideeffect)
-\loadmarkfile{lang-rep} % can be optional (bt 2013 side effect)
+%loadmarkfile{lang-rep} % can be optional (bt 2013 side effect)
\loadmarkfile{prop-ini} % only for downward compatibility
@@ -477,17 +476,8 @@
\loadmarkfile{lang-spa} % will become obsolete
-% old bibtex support: (will be m-oldbibtex.mkiv)
-
-% \loadmarkfile{bibl-bib}
-% \loadmarkfile{bibl-tra}
-
-% new bibtex support:
-
-\loadmarkfile{publ-ini}
-\loadmarkfile{publ-tra}
-\loadmarkfile{publ-xml}
-\loadmarkfile{publ-old}
+\loadmarkfile{bibl-bib}
+\loadmarkfile{bibl-tra}
%loadmarkfile{x-xtag} % no longer preloaded
diff --git a/tex/context/base/core-env.lua b/tex/context/base/core-env.lua
index 2cc84299b..a4d1fdd92 100644
--- a/tex/context/base/core-env.lua
+++ b/tex/context/base/core-env.lua
@@ -31,7 +31,6 @@ tex.systemmodes = allocate { }
tex.constants = allocate { }
tex.conditionals = allocate { }
tex.ifs = allocate { }
-tex.isdefined = allocate { }
local modes = { }
local systemmodes = { }
@@ -79,19 +78,6 @@ setmetatableindex(tex.ifs, function(t,k)
return csname_id(k) ~= undefined and create(k)[2] == iftrue -- inefficient, this create, we need a helper
end)
-setmetatableindex(tex.isdefined, function(t,k)
- return csname_id(k) ~= undefined
-end)
-
-function context.setconditional(name,value)
- if value then
- context.settruevalue(name)
- else
- context.setfalsevalue(name)
- end
-end
-
-
-- todo : global
-- not possible as we let at the tex end to zerocount and plusone
diff --git a/tex/context/base/core-sys.mkiv b/tex/context/base/core-sys.mkiv
index c07722f56..8f56b6f16 100644
--- a/tex/context/base/core-sys.mkiv
+++ b/tex/context/base/core-sys.mkiv
@@ -89,8 +89,6 @@
% \ctxcommand{updatefilenames("\jobname","\inputfilename","\outputfilename")}%
% \to \everysetupsystem
-\newconditional\prerollrun % when true it means that we have a forced number of runs
-
% Some mechanisms (see x-res-01) use either \jobfilename or
% \jobfilename.somesuffix, in which case we need to use the
% full name if given or a default (like \jobfilename.xml);
diff --git a/tex/context/base/core-two.lua b/tex/context/base/core-two.lua
index 1f2bc7c6c..d6e006e04 100644
--- a/tex/context/base/core-two.lua
+++ b/tex/context/base/core-two.lua
@@ -54,7 +54,7 @@ end
function jobpasses.getdata(id,index,default)
local jti = collected[id]
- local value = jti and jti[index]
+    local value = jti and jti[index]
return value ~= "" and value or default or ""
end
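
jobpasses.getdata guards the lookup because the multipass table for a given id may not exist yet on a first run; the and/or chain then falls back to the default for both a missing table and an empty entry. A small sketch of the same lookup outside ConTeXt (the sample pass data is made up):

local collected = { toc = { "1.1", "", "3.4" } }   -- hypothetical pass data

local function getdata(id, index, default)
    local jti   = collected[id]
    local value = jti and jti[index]
    return value ~= "" and value or default or ""
end

print(getdata("toc", 1))        --> 1.1
print(getdata("toc", 2, "?"))   --> ?   (empty entry falls back)
print(getdata("lof", 1, "?"))   --> ?   (unknown id falls back)
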
diff --git a/tex/context/base/core-uti.lua b/tex/context/base/core-uti.lua
index 71b80170c..ef792c1d8 100644
--- a/tex/context/base/core-uti.lua
+++ b/tex/context/base/core-uti.lua
@@ -126,10 +126,6 @@ function jobvariables.save(cs,value)
tobesaved[cs] = value
end
-function jobvariables.restore(cs)
- return collected[cs] or tobesaved[cs]
-end
-
-- checksums
function jobvariables.getchecksum(tag)
@@ -327,7 +323,7 @@ if jit then
local saved = watts_per_core * runtime * kg_per_watt_per_second / speedup_by_other_engine
local saved = used_wood_factor * runtime
-- return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second, %f kg tree saved by using luajittex",runtime,pages,shipped,persecond,saved)
- return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second, %f mg tree saved by using luajittex",runtime,pages,shipped,persecond,saved*1000*1000)
+ return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second, %f g tree saved by using luajittex",runtime,pages,shipped,persecond,saved*1000)
else
return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second",runtime,pages,shipped,persecond)
end
diff --git a/tex/context/base/data-aux.lua b/tex/context/base/data-aux.lua
index dae96ce62..b969e6070 100644
--- a/tex/context/base/data-aux.lua
+++ b/tex/context/base/data-aux.lua
@@ -16,8 +16,7 @@ local resolvers = resolvers
local report_scripts = logs.reporter("resolvers","scripts")
function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix
- -- local scriptpath = "scripts/context/lua"
- local scriptpath = "context/lua"
+ local scriptpath = "scripts/context/lua"
newname = file.addsuffix(newname,"lua")
local oldscript = resolvers.cleanpath(oldname)
if trace_locating then
diff --git a/tex/context/base/data-use.lua b/tex/context/base/data-use.lua
index 7598506e4..9c15263bb 100644
--- a/tex/context/base/data-use.lua
+++ b/tex/context/base/data-use.lua
@@ -57,7 +57,7 @@ statistics.register("used cache path", function() return caches.usedpaths() end
-- experiment (code will move)
function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == formatname
- local enginebanner = status.banner
+ local enginebanner = status.list().banner
if formatbanner and enginebanner and sourcefile then
local luvname = file.replacesuffix(texname,"luv") -- utilities.lua.suffixes.luv
local luvdata = {
@@ -75,7 +75,7 @@ end
-- a remake
function statistics.checkfmtstatus(texname)
- local enginebanner = status.banner
+ local enginebanner = status.list().banner
if enginebanner and texname then
local luvname = file.replacesuffix(texname,"luv") -- utilities.lua.suffixes.luv
if lfs.isfile(luvname) then
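
Both savefmtstatus and checkfmtstatus key the stored format data on the engine banner, so a format generated by one LuaTeX binary is flagged as stale when another binary loads it. A rough sketch of that comparison; plain tables stand in here for status.list() and for the saved .luv data:

local running = { banner       = "This is LuaTeX, Version beta-0.77.0" } -- made-up banner
local saved   = { enginebanner = "This is LuaTeX, Version beta-0.76.0" } -- made-up banner

local function fmtstatus(saved, running)
    if not saved.enginebanner then
        return "unknown"
    elseif saved.enginebanner ~= running.banner then
        return "different engine, remake needed"
    else
        return "okay"
    end
end

print(fmtstatus(saved, running))  --> different engine, remake needed
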
diff --git a/tex/context/base/enco-ini.mkiv b/tex/context/base/enco-ini.mkiv
index ab3aa488d..77fcbe483 100644
--- a/tex/context/base/enco-ini.mkiv
+++ b/tex/context/base/enco-ini.mkiv
@@ -87,26 +87,17 @@
%D Accent handling (try to avoid this):
-% \buildtextaccent\greekdasia\greekalphamacron
-% \buildtextaccent\textacute q
-
\newbox\b_enco_accent
\def\buildmathaccent#1%
{\mathaccent#1 }
-% \unexpanded\def\buildtextaccent#1#2% we could do all at the lua end
-% {\begingroup % but that's no fun (yet)
-% \setbox\b_enco_accent\hbox{#1}%
-% \scratchcounter\cldcontext{nodes.firstcharinbox(\number\b_enco_accent)}\relax
-% \ifcase\scratchcounter\else\accent\scratchcounter\fi
-% \relax#2%
-% \endgroup}
-
\unexpanded\def\buildtextaccent#1#2% we could do all at the lua end
{\begingroup % but that's no fun (yet)
\setbox\b_enco_accent\hbox{#1}%
- \ctxcommand{buildtextaccent(\number\b_enco_accent)}#2%
+ \scratchcounter\cldcontext{nodes.firstcharinbox(\number\b_enco_accent)}\relax
+ \ifcase\scratchcounter\else\accent\scratchcounter\fi
+ \relax#2%
\endgroup}
\unexpanded\def\bottomaccent#1#2#3#4#5% down right slantcorrection accent char
diff --git a/tex/context/base/export-example.css b/tex/context/base/export-example.css
index dbecc01fe..06d51c587 100644
--- a/tex/context/base/export-example.css
+++ b/tex/context/base/export-example.css
@@ -709,18 +709,3 @@ a[href]:hover {
color : rgb(50%,0%,0%) ;
text-decoration : underline ;
}
-
-/* setups */
-
-setup {
- display : block ;
-}
-
-comment {
- background-color : rgb(50%,75%,100%) ;
- display : block ;
- padding : 1em ;
- margin-bottom : 1em ;
- margin-top : 1em ;
- font-family : "Lucida Console", "DejaVu Sans Mono", monospace ;
-}
diff --git a/tex/context/base/file-job.lua b/tex/context/base/file-job.lua
index c88eb7e9d..288a690d2 100644
--- a/tex/context/base/file-job.lua
+++ b/tex/context/base/file-job.lua
@@ -801,45 +801,23 @@ function commands.getcommandline() -- has to happen at the tex end in order to e
inputfile = basename(inputfile)
end
- local forcedruns = arguments.forcedruns
local kindofrun = arguments.kindofrun
- local currentrun = arguments.currentrun
- local maxnofruns = arguments.maxnofruns or arguments.runs
-
- -- context.setupsystem {
- -- [constants.directory] = validstring(arguments.setuppath),
- -- [constants.inputfile] = inputfile,
- -- [constants.file] = validstring(arguments.result),
- -- [constants.random] = validstring(arguments.randomseed),
- -- -- old:
- -- [constants.n] = validstring(kindofrun),
- -- [constants.m] = validstring(currentrun),
- -- }
+    local currentrun = arguments.currentrun
+    local maxnofruns = arguments.maxnofruns
context.setupsystem {
- directory = validstring(arguments.setuppath),
- inputfile = inputfile,
- file = validstring(arguments.result),
- random = validstring(arguments.randomseed),
+ [constants.directory] = validstring(arguments.setuppath),
+ [constants.inputfile] = inputfile,
+ [constants.file] = validstring(arguments.result),
+ [constants.random] = validstring(arguments.randomseed),
-- old:
- n = validstring(kindofrun),
- m = validstring(currentrun),
+ [constants.n] = validstring(kindofrun),
+ [constants.m] = validstring(currentrun),
}
- forcedruns = tonumber(forcedruns) or 0
- kindofrun = tonumber(kindofrun) or 0
- maxnofruns = tonumber(maxnofruns) or 0
- currentrun = tonumber(currentrun) or 0
-
- local prerollrun = forcedruns > 0 and currentrun > 0 and currentrun < forcedruns
-
- environment.forcedruns = forcedruns
- environment.kindofrun = kindofrun
- environment.maxnofruns = maxnofruns
- environment.currentrun = currentrun
- environment.prerollrun = prerollrun
-
- context.setconditional("prerollrun",prerollrun)
+ environment.kindofrun = tonumber(kindofrun) or 0
+ environment.maxnofruns = tonumber(maxnofruns) or 0
+ environment.currentrun = tonumber(currentrun) or 0
if validstring(arguments.arguments) then
context.setupenv { arguments.arguments }
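
The run bookkeeping restored here arrives from the script side as strings (or not at all), so each counter is coerced once with tonumber and defaults to zero. A tiny sketch of that normalization; the sample argument table is made up:

local arguments = { kindofrun = "1", currentrun = "3" }  -- hypothetical input, maxnofruns absent

local environment = { }
environment.kindofrun  = tonumber(arguments.kindofrun)  or 0
environment.maxnofruns = tonumber(arguments.maxnofruns) or 0
environment.currentrun = tonumber(arguments.currentrun) or 0

print(environment.kindofrun, environment.maxnofruns, environment.currentrun)  --> 1  0  3
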
diff --git a/tex/context/base/file-job.mkvi b/tex/context/base/file-job.mkvi
index fa395a32e..ce0d54ece 100644
--- a/tex/context/base/file-job.mkvi
+++ b/tex/context/base/file-job.mkvi
@@ -274,7 +274,7 @@
%D Relatively new (might move as it depends on setups):
-%newtoks\everysetupdocument
+\newtoks\everysetupdocument
\unexpanded\def\startdocument % todo: dostarttagged\t!document
{\dosingleargument\syst_structure_document_start}
diff --git a/tex/context/base/file-res.lua b/tex/context/base/file-res.lua
index 8a50c0d58..8e65ba4c7 100644
--- a/tex/context/base/file-res.lua
+++ b/tex/context/base/file-res.lua
@@ -6,14 +6,13 @@ if not modules then modules = { } end modules ['file-res'] = {
license = "see context related readme files"
}
-local format, find = string.format, string.find
+local format = string.format
local isfile = lfs.isfile
local is_qualified_path = file.is_qualified_path
-local hasscheme, urlescape = url.hasscheme, url.escape
+local hasscheme = url.hasscheme
-local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end)
-local trace_details = false trackers.register("resolvers.readfile.details", function(v) trace_details = v end)
-local report_files = logs.reporter("files","readfile")
+local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end)
+local report_files = logs.reporter("files","readfile")
resolvers.maxreadlevel = 2
@@ -24,9 +23,6 @@ local finders, loaders, openers = resolvers.finders, resolvers.loaders, resolver
local found = { } -- can best be done in the resolver itself
local function readfilename(specification,backtrack,treetoo)
- if trace_details then
- report_files(table.serialize(specification,"specification"))
- end
local name = specification.filename
local fnd = name and found[name]
if not fnd then
@@ -136,11 +132,9 @@ function getreadfilename(scheme,path,name) -- better do a split and then pass ta
if hasscheme(name) or is_qualified_path(name) then
fullname = name
else
- if not find(name,"%%") then
- name = urlescape(name) -- if no % in names
- end
fullname = ((path == "") and format("%s:///%s",scheme,name)) or format("%s:///%s/%s",scheme,path,name)
end
+--~ print(">>>",fullname)
return resolvers.findtexfile(fullname) or "" -- can be more direct
end
diff --git a/tex/context/base/font-chk.lua b/tex/context/base/font-chk.lua
index 5d4f6059b..6dc1667bb 100644
--- a/tex/context/base/font-chk.lua
+++ b/tex/context/base/font-chk.lua
@@ -41,18 +41,9 @@ local enableaction = tasks.enableaction
local disableaction = tasks.disableaction
local glyph_code = nodes.nodecodes.glyph
-
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local setfield = nuts.setfield
-
-local traverse_id = nuts.traverse_id
-local remove_node = nuts.remove
-local insert_node_after = nuts.insert_after
+local traverse_id = node.traverse_id
+local remove_node = nodes.remove
+local insert_node_after = node.insert_after
-- maybe in fonts namespace
-- deletion can be option
@@ -214,10 +205,9 @@ end
function checkers.missing(head)
local lastfont, characters, found = nil, nil, nil
- head = tonut(head)
for n in traverse_id(glyph_code,head) do -- faster than while loop so we delay removal
- local font = getfont(n)
- local char = getchar(n)
+ local font = n.font
+ local char = n.char
if font ~= lastfont then
characters = fontcharacters[font]
lastfont = font
@@ -246,8 +236,8 @@ function checkers.missing(head)
elseif action == "replace" then
for i=1,#found do
local n = found[i]
- local font = getfont(n)
- local char = getchar(n)
+ local font = n.font
+ local char = n.char
local tfmdata = fontdata[font]
local properties = tfmdata.properties
local privates = properties.privates
@@ -265,13 +255,13 @@ function checkers.missing(head)
head = remove_node(head,n,true)
else
-- good, we have \definefontfeature[default][default][missing=yes]
- setfield(n,"char",p)
+ n.char = p
end
end
else
-- maybe write a report to the log
end
- return tonode(head), false
+ return head, false
end
local relevant = { "missing (will be deleted)", "missing (will be flagged)", "missing" }
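
checkers.missing above goes back to direct node field access: it walks the glyph nodes, caches the character table per font, and collects the glyphs whose character is not present in that table. A reduced sketch of the traversal with the stock LuaTeX node library; the character tables are passed in here rather than taken from ConTeXt's font hashes:

local glyph_id = node.id("glyph")

local function findmissing(head, fontcharacters)
    local lastfont, characters, found = nil, nil, nil
    for n in node.traverse_id(glyph_id, head) do
        local font, char = n.font, n.char
        if font ~= lastfont then
            characters = fontcharacters[font] -- cache per font, glyph runs share fonts
            lastfont   = font
        end
        if characters and not characters[char] then
            found = found or { }
            found[#found+1] = n
        end
    end
    return found
end
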
diff --git a/tex/context/base/font-col.lua b/tex/context/base/font-col.lua
index 187e33311..f5e17f1da 100644
--- a/tex/context/base/font-col.lua
+++ b/tex/context/base/font-col.lua
@@ -17,12 +17,7 @@ local type, next, toboolean = type, next, toboolean
local gmatch = string.gmatch
local fastcopy = table.fastcopy
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local setfield = nuts.setfield
-local traverse_id = nuts.traverse_id
+local traverse_id = nodes.traverse_id
local settings_to_hash = utilities.parsers.settings_to_hash
@@ -204,7 +199,7 @@ end
--
-- if lpegmatch(okay,name) then
-function collections.prepare(name) -- we can do this in lua now .. todo
+function collections.prepare(name) -- we can do this in lua now
current = currentfont()
if vectors[current] then
return
@@ -249,23 +244,23 @@ end
function collections.process(head) -- this way we keep feature processing
local done = false
- for n in traverse_id(glyph_code,tonut(head)) do
- local v = vectors[getfont(n)]
+ for n in traverse_id(glyph_code,head) do
+ local v = vectors[n.font]
if v then
- local id = v[getchar(n)]
+ local id = v[n.char]
if id then
if type(id) == "table" then
local newid, newchar = id[1], id[2]
if trace_collecting then
report_fonts("remapping character %C in font %a to character %C in font %a",getchar(n),getfont(n),newchar,newid)
end
- setfield(n,"font",newid)
- setfield(n,"char",newchar)
+ n.font = newid
+ n.char = newchar
else
if trace_collecting then
report_fonts("remapping font %a to %a for character %C",getfont(n),id,getchar(n))
end
- setfield(n,"font",id)
+ n.font = id
end
end
end
diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua
index e251cc9c1..b08a6aed2 100644
--- a/tex/context/base/font-ctx.lua
+++ b/tex/context/base/font-ctx.lua
@@ -57,16 +57,6 @@ local helpers = fonts.helpers
local hashes = fonts.hashes
local currentfont = font.current
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getattr = nuts.getattr
-local getfont = nuts.getfont
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
local texgetdimen = tex.getdimen
@@ -137,8 +127,8 @@ function fonts.helpers.name(tfmdata)
return file.basename(type(tfmdata) == "number" and properties[tfmdata].name or tfmdata.properties.name)
end
-utilities.strings.formatters.add(formatters,"font:name", [["'"..fontname(%s).."'"]], { fontname = fonts.helpers.name })
-utilities.strings.formatters.add(formatters,"font:features",[["'"..sequenced(%s," ",true).."'"]], { sequenced = table.sequenced })
+utilities.strings.formatters.add(formatters,"font:name", [["'"..fonts.helpers.name(%s).."'"]])
+utilities.strings.formatters.add(formatters,"font:features",[["'"..table.sequenced(%s," ",true).."'"]])
-- ... like font-sfm or so
@@ -1911,25 +1901,24 @@ end
-- a fontkern plug:
+local copy_node = node.copy
+local kern = nodes.pool.register(nodes.pool.kern())
-local copy_node = nuts.copy
-local kern = nuts.pool.register(nuts.pool.kern())
-
-setattr(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared
+node.set_attribute(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared
nodes.injections.installnewkern(function(k)
local c = copy_node(kern)
- setfield(c,"kern",k)
+ c.kern = k
return c
end)
-directives.register("nodes.injections.fontkern", function(v) setfield(kern,"subtype",v and 0 or 1) end)
+directives.register("nodes.injections.fontkern", function(v) kern.subtype = v and 0 or 1 end)
-- here
local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
-local otffeatures = constructors.newfeatures("otf")
+local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
local analyzers = fonts.analyzers
@@ -1937,7 +1926,7 @@ local methods = analyzers.methods
local unsetvalue = attributes.unsetvalue
-local traverse_by_id = nuts.traverse_id
+local traverse_by_id = node.traverse_id
local a_color = attributes.private('color')
local a_colormodel = attributes.private('colormodel')
@@ -1964,17 +1953,16 @@ local names = {
local function markstates(head)
if head then
- head = tonut(head)
- local model = getattr(head,a_colormodel) or 1
+ local model = head[a_colormodel] or 1
for glyph in traverse_by_id(glyph_code,head) do
- local a = getattr(glyph,a_state)
+ local a = glyph[a_state]
if a then
local name = names[a]
if name then
local color = m_color[name]
if color then
- setattr(glyph,a_colormodel,model)
- setattr(glyph,a_color,color)
+ glyph[a_colormodel] = model
+ glyph[a_color] = color
end
end
end
@@ -2017,8 +2005,8 @@ registerotffeature { -- adapts
function methods.nocolor(head,font,attr)
for n in traverse_by_id(glyph_code,head) do
- if not font or getfont(n) == font then
- setattr(n,a_color,unsetvalue)
+ if not font or n.font == font then
+ n[a_color] = unsetvalue
end
end
return head, true
diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua
index e57f784a0..7131ecad5 100644
--- a/tex/context/base/font-gds.lua
+++ b/tex/context/base/font-gds.lua
@@ -46,12 +46,7 @@ local findfile = resolvers.findfile
local glyph_code = nodes.nodecodes.glyph
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-local traverse_id = nuts.traverse_id
+local traverse_id = nodes.traverse_id
function fontgoodies.report(what,trace,goodies)
if trace_goodies or trace then
@@ -316,16 +311,16 @@ local setnodecolor = nodes.tracers.colors.set
-- function colorschemes.coloring(head)
-- local lastfont, lastscheme
-- local done = false
--- for n in traverse_id(glyph_code,tonut(head)) do
--- local a = getattr(n,a_colorscheme)
+-- for n in traverse_id(glyph_code,head) do
+-- local a = n[a_colorscheme]
-- if a then
--- local f = getfont(n)
+-- local f = n.font
-- if f ~= lastfont then
-- lastscheme = fontproperties[f].colorscheme
-- lastfont = f
-- end
-- if lastscheme then
--- local sc = lastscheme[getchar(n)]
+-- local sc = lastscheme[n.char]
-- if sc then
-- done = true
-- setnodecolor(n,"colorscheme:"..a..":"..sc) -- slow
@@ -343,21 +338,21 @@ local setnodecolor = nodes.tracers.colors.set
-- local lastattr = nil
-- local lastscheme = nil
-- local lastprefix = nil
--- local done = nil
--- for n in traverse_id(glyph_code,tonut(head)) do
--- local a = getattr(n,a_colorscheme)
+-- local done = nil
+-- for n in traverse_id(glyph_code,head) do
+-- local a = n[a_colorscheme]
-- if a then
-- if a ~= lastattr then
-- lastattr = a
-- lastprefix = "colorscheme:" .. a .. ":"
-- end
--- local f = getfont(n)
+-- local f = n.font
-- if f ~= lastfont then
-- lastfont = f
-- lastscheme = fontproperties[f].colorscheme
-- end
-- if lastscheme then
--- local sc = lastscheme[getchar(n)]
+-- local sc = lastscheme[n.char]
-- if sc then
-- setnodecolor(n,lastprefix .. sc) -- slow
-- done = true
@@ -389,10 +384,10 @@ function colorschemes.coloring(head)
local lastcache = nil
local lastscheme = nil
local done = nil
- for n in traverse_id(glyph_code,tonut(head)) do
- local a = getattr(n,a_colorscheme)
+ for n in traverse_id(glyph_code,head) do
+ local a = n[a_colorscheme]
if a then
- local f = getfont(n)
+ local f = n.font
if f ~= lastfont then
lastfont = f
lastscheme = fontproperties[f].colorscheme
@@ -402,7 +397,7 @@ function colorschemes.coloring(head)
lastcache = cache[a]
end
if lastscheme then
- local sc = lastscheme[getchar(n)]
+ local sc = lastscheme[n.char]
if sc then
setnodecolor(n,lastcache[sc]) -- we could inline this one
done = true
diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua
index f74e13e81..ce724b973 100644
--- a/tex/context/base/font-map.lua
+++ b/tex/context/base/font-map.lua
@@ -66,6 +66,21 @@ local function makenameparser(str)
end
end
+-- local parser = makenameparser("Japan1")
+-- local parser = makenameparser()
+-- local function test(str)
+-- local b, a = lpegmatch(parser,str)
+-- print((a and table.serialize(b)) or b)
+-- end
+-- test("a.sc")
+-- test("a")
+-- test("uni1234")
+-- test("uni1234.xx")
+-- test("uni12349876")
+-- test("u123400987600")
+-- test("index1234")
+-- test("Japan1.123")
+
local function tounicode16(unicode,name)
if unicode < 0x10000 then
return format("%04X",unicode)
@@ -331,18 +346,3 @@ function mappings.addtounicode(data,filename)
report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
end
end
-
--- local parser = makenameparser("Japan1")
--- local parser = makenameparser()
--- local function test(str)
--- local b, a = lpegmatch(parser,str)
--- print((a and table.serialize(b)) or b)
--- end
--- test("a.sc")
--- test("a")
--- test("uni1234")
--- test("uni1234.xx")
--- test("uni12349876")
--- test("u123400987600")
--- test("index1234")
--- test("Japan1.123")
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index 63cae37f3..e1d1ebeb9 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -22,7 +22,7 @@ local handlers = fonts.handlers
handlers.otf = handlers.otf or { }
local otf = handlers.otf
-otf.version = otf.version or 2.751
+otf.version = otf.version or 2.749
otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
function otf.loadcached(filename,format,sub)
diff --git a/tex/context/base/font-nod.lua b/tex/context/base/font-nod.lua
index 8251dc588..7fa3297d4 100644
--- a/tex/context/base/font-nod.lua
+++ b/tex/context/base/font-nod.lua
@@ -41,6 +41,13 @@ tracers.characters = char_tracers
local step_tracers = tracers.steppers or { }
tracers.steppers = step_tracers
+local texsetbox = tex.setbox
+
+local copy_node_list = nodes.copy_list
+local hpack_node_list = nodes.hpack
+local free_node_list = nodes.flush_list
+local traverse_nodes = nodes.traverse
+
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -52,32 +59,12 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local rule_code = nodecodes.rule
local whatsit_code = nodecodes.whatsit
+local spec_code = nodecodes.glue_spec
local localpar_code = whatcodes.localpar
local dir_code = whatcodes.dir
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getlist = nuts.getlist
-local setbox = nuts.setbox
-
-local copy_node_list = nuts.copy_list
-local hpack_node_list = nuts.hpack
-local free_node_list = nuts.flush_list
-local traverse_nodes = nuts.traverse
-local protect_glyphs = nuts.protect_glyphs
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_glyph = nodepool.glyph
local formatters = string.formatters
@@ -92,17 +79,16 @@ local fontproperties = hashes.properties
local fontparameters = hashes.parameters
function char_tracers.collect(head,list,tag,n)
- head = tonut(head)
n = n or 0
local ok, fn = false, nil
while head do
- local id = getid(head)
+ local id = head.id
if id == glyph_code then
- local f = getfont(head)
+ local f = head.font
if f ~= fn then
ok, fn = false, f
end
- local c = getchar(head)
+ local c = head.char
local i = fontidentifiers[f].indices[c] or 0
if not ok then
ok = true
@@ -117,7 +103,7 @@ function char_tracers.collect(head,list,tag,n)
else
ok = false
end
- head = getnext(head)
+ head = head.next
end
end
@@ -176,12 +162,12 @@ function char_tracers.indices(t,decimal)
end
function char_tracers.start()
- local npc = handlers.characters -- should accept nuts too
+ local npc = handlers.characters
local list = { }
function handlers.characters(head)
local n = #list
char_tracers.collect(head,list,'before',n)
- local h, d = npc(tonode(head)) -- for the moment tonode
+ local h, d = npc(head)
char_tracers.collect(head,list,'after',n)
if #list > n then
list[#list+1] = { }
@@ -248,8 +234,8 @@ end
function step_tracers.glyphs(n,i)
local c = collection[i]
if c then
- local b = hpack_node_list(copy_node_list(c)) -- multiple arguments
- setbox(n,b)
+ local b = hpack_node_list(copy_node_list(c)) -- multiple arguments
+ texsetbox(n,b)
end
end
@@ -257,8 +243,8 @@ function step_tracers.features()
-- we cannot use first_glyph here as it only finds characters with subtype < 256
local f = collection[1]
while f do
- if getid(f) == glyph_code then
- local tfmdata, t = fontidentifiers[getfont(f)], { }
+ if f.id == glyph_code then
+ local tfmdata, t = fontidentifiers[f.font], { }
for feature, value in table.sortedhash(tfmdata.shared.features) do
if feature == "number" or feature == "features" then
-- private
@@ -279,24 +265,22 @@ function step_tracers.features()
end
return
end
- f = getnext(f)
+ f = f.next
end
end
function tracers.fontchar(font,char)
local n = new_glyph()
- setfield(n,"font",font)
- setfield(n,"char",char)
- setfield(n,"subtype",256)
- context(tonode(n))
+ n.font, n.char, n.subtype = font, char, 256
+ context(n)
end
function step_tracers.font(command)
local c = collection[1]
while c do
- local id = getid(c)
+ local id = c.id
if id == glyph_code then
- local font = getfont(c)
+ local font = c.font
local name = file.basename(fontproperties[font].filename or "unknown")
local size = fontparameters[font].size or 0
if command then
@@ -306,7 +290,7 @@ function step_tracers.font(command)
end
return
else
- c = getnext(c)
+ c = c.next
end
end
end
@@ -314,22 +298,22 @@ end
function step_tracers.codes(i,command)
local c = collection[i]
while c do
- local id = getid(c)
+ local id = c.id
if id == glyph_code then
if command then
- local f, c = getfont(c), getchar(c)
+ local f, c = c.font,c.char
local d = fontdescriptions[f]
local d = d and d[c]
context[command](f,c,d and d.class or "")
else
- context("[%s:U+%04X]",getfont(c),getchar(c))
+ context("[%s:U+%04X]",c.font,c.char)
end
- elseif id == whatsit_code and (getsubtype(c) == localpar_code or getsubtype(c) == dir_code) then
- context("[%s]",getfield(c,"dir"))
+ elseif id == whatsit_code and (c.subtype == localpar_code or c.subtype == dir_code) then
+ context("[%s]",c.dir)
else
context("[%s]",nodecodes[id])
end
- c = getnext(c)
+ c = c.next
end
end
@@ -355,10 +339,9 @@ end
function step_tracers.check(head)
if collecting then
step_tracers.reset()
- local n = copy_node_list(tonut(head))
+ local n = copy_node_list(head)
injections.handler(n,nil,"trace",true)
- -- handlers.protectglyphs(n) -- can be option
- protect_glyphs(n)
+ handlers.protectglyphs(n) -- can be option
collection[1] = n
end
end
@@ -367,10 +350,9 @@ function step_tracers.register(head)
if collecting then
local nc = #collection+1
if messages[nc] then
- local n = copy_node_list(tonut(head))
+ local n = copy_node_list(head)
injections.handler(n,nil,"trace",true)
- -- handlers.protectglyphs(n) -- can be option
- protect_glyphs(n)
+ handlers.protectglyphs(n) -- can be option
collection[nc] = n
end
end
@@ -393,28 +375,21 @@ local threshold = 65536
local function toutf(list,result,nofresult,stopcriterium)
if list then
- for n in traverse_nodes(tonut(list)) do
- local id = getid(n)
+ for n in traverse_nodes(list) do
+ local id = n.id
if id == glyph_code then
- local components = getfield(n,"components")
+ local components = n.components
if components then
result, nofresult = toutf(components,result,nofresult)
else
- local c = getchar(n)
- local fc = fontcharacters[getfont(n)]
+ local c = n.char
+ local fc = fontcharacters[n.font]
if fc then
- local fcc = fc[c]
- if fcc then
- -- == fromunicode
- local u = fcc.tounicode
- if u then
- for s in gmatch(u,"....") do
- nofresult = nofresult + 1
- result[nofresult] = utfchar(tonumber(s,16))
- end
- else
+ local u = fc[c].tounicode
+ if u then
+ for s in gmatch(u,"....") do
nofresult = nofresult + 1
- result[nofresult] = utfchar(c)
+ result[nofresult] = utfchar(tonumber(s,16))
end
else
nofresult = nofresult + 1
@@ -422,23 +397,23 @@ local function toutf(list,result,nofresult,stopcriterium)
end
else
nofresult = nofresult + 1
- result[nofresult] = f_unicode(c)
+ result[nofresult] = utfchar(c)
end
end
elseif id == disc_code then
- result, nofresult = toutf(getfield(n,"replace"),result,nofresult) -- needed?
+ result, nofresult = toutf(n.replace,result,nofresult) -- needed?
elseif id == hlist_code or id == vlist_code then
-- if nofresult > 0 and result[nofresult] ~= " " then
-- nofresult = nofresult + 1
-- result[nofresult] = " "
-- end
- result, nofresult = toutf(getlist(n),result,nofresult)
+ result, nofresult = toutf(n.list,result,nofresult)
elseif id == glue_code then
if nofresult > 0 and result[nofresult] ~= " " then
nofresult = nofresult + 1
result[nofresult] = " "
end
- elseif id == kern_code and getfield(n,"kern") > threshold then
+ elseif id == kern_code and n.kern > threshold then
if nofresult > 0 and result[nofresult] ~= " " then
nofresult = nofresult + 1
result[nofresult] = " "
diff --git a/tex/context/base/font-odv.lua b/tex/context/base/font-odv.lua
index d07c38d9a..69f74dfa5 100644
--- a/tex/context/base/font-odv.lua
+++ b/tex/context/base/font-odv.lua
@@ -15,9 +15,6 @@ if not modules then modules = { } end modules ['font-odv'] = {
-- deva: http://www.microsoft.com/typography/OpenType%20Dev/devanagari/introO.mspx
-- dev2: http://www.microsoft.com/typography/OpenType%20Dev/devanagari/intro.mspx
--
--- Rajeesh Nambiar provided patches for the malayalam variant. Thanks to feedback from
--- the mailing list some aspects could be improved.
---
-- As I touched nearly all code, reshuffled it, optimized a lot, etc. etc. (imagine how
-- much can get messed up in over a week work) it could be that I introduced bugs. There
-- is more to gain (esp in the functions applied to a range) but I'll do that when
@@ -50,10 +47,7 @@ if not modules then modules = { } end modules ['font-odv'] = {
-- Some data will move to char-def.lua (some day).
--
-- Hans Hagen, PRAGMA-ADE, Hasselt NL
---
--- We could have c_nukta, c_halant, c_ra is we know that they are never used mixed within
--- one script .. yes or no?
---
+
-- Matras: according to Microsoft typography specifications "up to one of each type:
-- pre-, above-, below- or post- base", but that does not seem to be right. It could
-- become an option.
@@ -63,9 +57,9 @@ if not modules then modules = { } end modules ['font-odv'] = {
--
-- local function ms_matra(c)
-- local prebase, abovebase, belowbase, postbase = true, true, true, true
--- local n = getnext(c)
--- while n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font do
--- local char = getchar(n)
+-- local n = c.next
+-- while n and n.id == glyph_code and n.subtype < 256 and n.font == font do
+-- local char = n.char
-- if not dependent_vowel[char] then
-- break
-- elseif pre_mark[char] and prebase then
@@ -79,7 +73,7 @@ if not modules then modules = { } end modules ['font-odv'] = {
-- else
-- return c
-- end
--- c = getnext(c)
+-- c = c.next
-- end
-- return c
-- end
@@ -106,26 +100,11 @@ local methods = fonts.analyzers.methods
local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getchar = nuts.getchar
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-
-local insert_node_after = nuts.insert_after
-local copy_node = nuts.copy
-local free_node = nuts.free
-local remove_node = nuts.remove
-local flush_list = nuts.flush_list
+local insert_node_after = nodes.insert_after
+local copy_node = nodes.copy
+local free_node = nodes.free
+local remove_node = nodes.remove
+local flush_list = nodes.flush_list
local unsetvalue = attributes.unsetvalue
@@ -162,7 +141,7 @@ xprocesscharacters = function(head,font)
end
local function processcharacters(head,font)
- return tonut(xprocesscharacters(tonode(head)))
+ return xprocesscharacters(head)
end
-- function processcharacters(head,font)
@@ -177,10 +156,6 @@ end
-- Gurmukhi, Kannada, Malayalam, Oriya, Tamil, Telugu. Feel free to provide the
-- code points.
--- We can assume that script are not mixed in the source but if that is the case
--- we might need to have consonants etc per script and initialize a local table
--- pointing to the right one.
-
local consonant = {
-- devanagari
[0x0915] = true, [0x0916] = true, [0x0917] = true, [0x0918] = true,
@@ -207,17 +182,6 @@ local consonant = {
[0x0CB5] = true, [0x0CB6] = true, [0x0CB7] = true, [0x0CB8] = true,
[0x0CB9] = true,
[0x0CDE] = true, -- obsolete
- -- malayalam
- [0x0D15] = true, [0x0D16] = true, [0x0D17] = true, [0x0D18] = true,
- [0x0D19] = true, [0x0D1A] = true, [0x0D1B] = true, [0x0D1C] = true,
- [0x0D1D] = true, [0x0D1E] = true, [0x0D1F] = true, [0x0D20] = true,
- [0x0D21] = true, [0x0D22] = true, [0x0D23] = true, [0x0D24] = true,
- [0x0D25] = true, [0x0D26] = true, [0x0D27] = true, [0x0D28] = true,
- [0x0D29] = true, [0x0D2A] = true, [0x0D2B] = true, [0x0D2C] = true,
- [0x0D2D] = true, [0x0D2E] = true, [0x0D2F] = true, [0x0D30] = true,
- [0x0D31] = true, [0x0D32] = true, [0x0D33] = true, [0x0D34] = true,
- [0x0D35] = true, [0x0D36] = true, [0x0D37] = true, [0x0D38] = true,
- [0x0D39] = true, [0x0D3A] = true,
}
local independent_vowel = {
@@ -234,11 +198,6 @@ local independent_vowel = {
[0x0C89] = true, [0x0C8A] = true, [0x0C8B] = true, [0x0C8C] = true,
[0x0C8D] = true, [0x0C8E] = true, [0x0C8F] = true, [0x0C90] = true,
[0x0C91] = true, [0x0C92] = true, [0x0C93] = true, [0x0C94] = true,
- -- malayalam
- [0x0D05] = true, [0x0D06] = true, [0x0D07] = true, [0x0D08] = true,
- [0x0D09] = true, [0x0D0A] = true, [0x0D0B] = true, [0x0D0C] = true,
- [0x0D0E] = true, [0x0D0F] = true, [0x0D10] = true, [0x0D12] = true,
- [0x0D13] = true, [0x0D14] = true,
}
local dependent_vowel = { -- matra
@@ -254,11 +213,6 @@ local dependent_vowel = { -- matra
[0x0CC2] = true, [0x0CC3] = true, [0x0CC4] = true, [0x0CC5] = true,
[0x0CC6] = true, [0x0CC7] = true, [0x0CC8] = true, [0x0CC9] = true,
[0x0CCA] = true, [0x0CCB] = true, [0x0CCC] = true,
- -- malayalam
- [0x0D3E] = true, [0x0D3F] = true, [0x0D40] = true, [0x0D41] = true,
- [0x0D42] = true, [0x0D43] = true, [0x0D44] = true, [0x0D46] = true,
- [0x0D47] = true, [0x0D48] = true, [0x0D4A] = true, [0x0D4B] = true,
- [0x0D4C] = true, [0x0D57] = true,
}
local vowel_modifier = {
@@ -270,16 +224,10 @@ local vowel_modifier = {
[0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true,
[0xA8EC] = true, [0xA8ED] = true, [0xA8EE] = true, [0xA8EF] = true,
[0xA8F0] = true, [0xA8F1] = true,
- -- malayalam
- [0x0D02] = true, [0x0D03] = true,
}
local stress_tone_mark = {
[0x0951] = true, [0x0952] = true, [0x0953] = true, [0x0954] = true,
- -- kannada
- [0x0CCD] = true,
- -- malayalam
- [0x0D4D] = true,
}
local nukta = {
@@ -294,19 +242,9 @@ local halant = {
[0x094D] = true,
-- kannada
[0x0CCD] = true,
- -- malayalam
- [0x0D4D] = true,
-}
-
-local ra = {
- -- devanagari
- [0x0930] = true,
- -- kannada
- [0x0CB0] = true,
- -- malayalam
- [0x0D30] = true,
}
+local c_ra = 0x0930 -- used to be tables (also used as constant)
local c_anudatta = 0x0952 -- used to be tables
local c_nbsp = 0x00A0 -- used to be tables
local c_zwnj = 0x200C -- used to be tables
@@ -332,8 +270,6 @@ local zw_char = { -- could also be inlined
local pre_mark = {
[0x093F] = true, [0x094E] = true,
- -- malayalam
- [0x0D46] = true, [0x0D47] = true, [0x0D48] = true,
}
local above_mark = {
@@ -345,8 +281,6 @@ local above_mark = {
[0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true,
[0xA8EC] = true, [0xA8ED] = true, [0xA8EE] = true, [0xA8EF] = true,
[0xA8F0] = true, [0xA8F1] = true,
- -- malayalam
- [0x0D4E] = true,
}
local below_mark = {
@@ -361,13 +295,6 @@ local post_mark = {
[0x094F] = true,
}
-local twopart_mark = {
- -- malayalam
- [0x0D4A] = { 0x0D46, 0x0D3E, }, -- ൊ
- [0x0D4B] = { 0x0D47, 0x0D3E, }, -- ോ
- [0x0D4C] = { 0x0D46, 0x0D57, }, -- ൌ
-}
-
local mark_four = { } -- As we access these frequently an extra hash is used.
for k, v in next, pre_mark do mark_four[k] = pre_mark end
@@ -419,7 +346,6 @@ local reorder_class = {
[0x0CC4] = "after subscript",
[0x0CD5] = "after subscript",
[0x0CD6] = "after subscript",
- -- malayalam
}
-- We use some pseudo features as we need to manipulate the nodelist based
@@ -500,7 +426,7 @@ local basic_shaping_forms = {
local function initializedevanagi(tfmdata)
local script, language = otf.scriptandlanguage(tfmdata,attr) -- todo: take fast variant
- if script == "deva" or script == "dev2" or script =="mlym" or script == "mlm2" then
+ if script == "deva" or script == "dev2" then
local resources = tfmdata.resources
local lookuphash = resources.lookuphash
if not lookuphash["dv01"] then
@@ -538,20 +464,15 @@ local function initializedevanagi(tfmdata)
--
if script == "deva" then
sharedfeatures["dv04"] = true -- dv04_remove_joiners
- elseif script == "dev2" then
+ end
+ --
+ if script == "dev2" then
sharedfeatures["dv01"] = true -- dv01_reorder_matras
sharedfeatures["dv02"] = true -- dv02_reorder_reph
sharedfeatures["dv03"] = true -- dv03_reorder_pre_base_reordering_consonants
sharedfeatures["dv04"] = true -- dv04_remove_joiners
- elseif script == "mlym" then
- sharedfeatures["pstf"] = true
- elseif script == "mlm2" then
- sharedfeatures["pstf"] = true
- sharedfeatures["pref"] = true
- sharedfeatures["dv03"] = true -- dv03_reorder_pre_base_reordering_consonants
- gsubfeatures["dv03"] = dev2_defaults -- reorder pre base reordering consonants
- insert(sequences,insertindex,sequence_reorder_pre_base_reordering_consonants)
end
+ --
end
end
end
@@ -633,30 +554,30 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
local lookuphash, reph, vattu, blwfcache = deva_initialize(font,attr) -- could be inlines but ugly
local current = start
- local n = getnext(start)
+ local n = start.next
local base = nil
local firstcons = nil
local lastcons = nil
local basefound = false
- if ra[getchar(start)] and halant[getchar(n)] and reph then
+ if start.char == c_ra and halant[n.char] and reph then
-- if syllable starts with Ra + H and script has 'Reph' then exclude Reph
-- from candidates for base consonants
if n == stop then
return head, stop, nbspaces
end
- if getchar(getnext(n)) == c_zwj then
+ if n.next.char == c_zwj then
current = start
else
- current = getnext(n)
- setattr(start,a_state,s_rphf)
+ current = n.next
+ start[a_state] = s_rphf
end
end
- if getchar(current) == c_nbsp then
+ if current.char == c_nbsp then
-- Stand Alone cluster
if current == stop then
- stop = getprev(stop)
+ stop = stop.prev
head = remove_node(head,current)
free_node(current)
return head, stop, nbspaces
@@ -665,37 +586,37 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
base = current
firstcons = current
lastcons = current
- current = getnext(current)
+ current = current.next
if current ~= stop then
- if nukta[getchar(current)] then
- current = getnext(current)
+ if nukta[current.char] then
+ current = current.next
end
- if getchar(current) == c_zwj then
+ if current.char == c_zwj then
if current ~= stop then
- local next = getnext(current)
- if next ~= stop and halant[getchar(next)] then
+ local next = current.next
+ if next ~= stop and halant[next.char] then
current = next
- next = getnext(current)
- local tmp = next and getnext(next) or nil -- needs checking
+ next = current.next
+ local tmp = next and next.next or nil -- needs checking
local changestop = next == stop
local tempcurrent = copy_node(next)
local nextcurrent = copy_node(current)
- setfield(tempcurrent,"next",nextcurrent)
- setfield(nextcurrent,"prev",tempcurrent)
- setattr(tempcurrent,a_state,s_blwf)
+ tempcurrent.next = nextcurrent
+ nextcurrent.prev = tempcurrent
+ tempcurrent[a_state] = s_blwf
tempcurrent = processcharacters(tempcurrent,font)
- setattr(tempcurrent,a_state,unsetvalue)
- if getchar(next) == getchar(tempcurrent) then
+ tempcurrent[a_state] = unsetvalue
+ if next.char == tempcurrent.char then
flush_list(tempcurrent)
local n = copy_node(current)
- setfield(current,"char",dotted_circle)
+ current.char = dotted_circle
head = insert_node_after(head, current, n)
else
- setfield(current,"char",getchar(tempcurrent)) -- (assumes that result of blwf consists of one node)
- local freenode = getnext(current)
- setfield(current,"next",tmp)
- if tmp then
- setfield(tmp,"prev",current)
+ current.char = tempcurrent.char -- (assumes that result of blwf consists of one node)
+ local freenode = current.next
+ current.next = tmp
+ if tmp then
+ tmp.prev = current
end
free_node(freenode)
flush_list(tempcurrent)
@@ -712,82 +633,83 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
while not basefound do
-- find base consonant
- if consonant[getchar(current)] then
- setattr(current,a_state,s_half)
+ if consonant[current.char] then
+ current[a_state] = s_half
if not firstcons then
firstcons = current
end
lastcons = current
if not base then
base = current
- elseif blwfcache[getchar(current)] then
+ elseif blwfcache[current.char] then
-- consonant has below-base (or post-base) form
- setattr(current,a_state,s_blwf)
+ current[a_state] = s_blwf
else
base = current
end
end
basefound = current == stop
- current = getnext(current)
+ current = current.next
end
if base ~= lastcons then
-- if base consonant is not last one then move halant from base consonant to last one
local np = base
- local n = getnext(base)
- if nukta[getchar(n)] then
+ local n = base.next
+ if nukta[n.char] then
np = n
- n = getnext(n)
+ n = n.next
end
- if halant[getchar(n)] then
+ if halant[n.char] then
if lastcons ~= stop then
- local ln = getnext(lastcons)
- if nukta[getchar(ln)] then
+ local ln = lastcons.next
+ if nukta[ln.char] then
lastcons = ln
end
end
- -- local np = getprev(n)
- local nn = getnext(n)
- local ln = getnext(lastcons) -- what if lastcons is nn ?
- setfield(np,"next",nn)
- setfield(nn,"prev",np)
- setfield(lastcons,"next",n)
+ -- local np = n.prev
+ local nn = n.next
+ local ln = lastcons.next -- what if lastcons is nn ?
+ np.next = nn
+ nn.prev = np
+ lastcons.next = n
if ln then
- setfield(ln,"prev",n)
+ ln.prev = n
end
- setfield(n,"next",ln)
- setfield(n,"prev",lastcons)
+ n.next = ln
+ n.prev = lastcons
if lastcons == stop then
stop = n
end
end
end
- n = getnext(start)
- if n ~= stop and ra[getchar(start)] and halant[getchar(n)] and not zw_char[getchar(getnext(n))] then
+ n = start.next
+ -- if start.char == c_ra and halant[n.char] and not (n ~= stop and zw_char[n.next.char]) then
+ if n ~= stop and start.char == c_ra and halant[n.char] and not zw_char[n.next.char] then
-- if syllable starts with Ra + H then move this combination so that it follows either:
-- the post-base 'matra' (if any) or the base consonant
local matra = base
if base ~= stop then
- local next = getnext(base)
- if dependent_vowel[getchar(next)] then
+ local next = base.next
+ if dependent_vowel[next.char] then
matra = next
end
end
-- [sp][start][n][nn] [matra|base][?]
-- [matra|base][start] [n][?] [sp][nn]
- local sp = getprev(start)
- local nn = getnext(n)
- local mn = getnext(matra)
+ local sp = start.prev
+ local nn = n.next
+ local mn = matra.next
if sp then
- setfield(sp,"next",nn)
+ sp.next = nn
end
- setfield(nn,"prev",sp)
- setfield(matra,"next",start)
- setfield(start,"prev",matra)
- setfield(n,"next",mn)
+ nn.prev = sp
+ matra.next = start
+ start.prev = matra
+ n.next = mn
if mn then
- setfield(mn,"prev",n)
+ mn.prev = n
end
if head == start then
head = nn
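
The base-consonant search above keeps the first and last consonant it meets and lets any consonant with a below-base (or post-base) form, i.e. one present in blwfcache, be passed over as base. A rough stand-alone version of that selection over an array of codepoints; the classification tables here are illustrative, not the module's real ones:

-- illustrative classification sets
local consonant = { [0x0915] = true, [0x0930] = true, [0x0932] = true }
local blwfcache = { [0x0930] = true }      -- pretend RA has a below-base form here

local function find_base(chars)
    local base, firstcons, lastcons
    for i=1,#chars do
        local c = chars[i]
        if consonant[c] then
            firstcons = firstcons or i
            lastcons  = i
            if not base then
                base = i
            elseif not blwfcache[c] then   -- below-base forms never become the base
                base = i
            end
        end
    end
    return base, firstcons, lastcons
end

print(find_base { 0x0915, 0x094D, 0x0930, 0x093F })  -- KA + halant + RA + matra --> 1 1 3
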
@@ -800,17 +722,17 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
local current = start
while current ~= stop do
- local next = getnext(current)
- if next ~= stop and halant[getchar(next)] and getchar(getnext(next)) == c_zwnj then
- setattr(current,a_state,unsetvalue)
+ local next = current.next
+ if next ~= stop and halant[next.char] and next.next.char == c_zwnj then
+ current[a_state] = unsetvalue
end
current = next
end
- if base ~= stop and getattr(base,a_state) then
- local next = getnext(base)
- if halant[getchar(next)] and not (next ~= stop and getchar(getnext(next)) == c_zwj) then
- setattr(base,a_state,unsetvalue)
+ if base ~= stop and base[a_state] then
+ local next = base.next
+ if halant[next.char] and not (next ~= stop and next.next.char == c_zwj) then
+ base[a_state] = unsetvalue
end
end
@@ -820,62 +742,62 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
-- classify consonants and 'matra' parts as pre-base, above-base (Reph), below-base or post-base, and group elements of the syllable (consonants and 'matras') according to this classification
local current, allreordered, moved = start, false, { [base] = true }
- local a, b, p, bn = base, base, base, getnext(base)
- if base ~= stop and nukta[getchar(bn)] then
+ local a, b, p, bn = base, base, base, base.next
+ if base ~= stop and nukta[bn.char] then
a, b, p = bn, bn, bn
end
while not allreordered do
-- current is always consonant
local c = current
- local n = getnext(current)
+ local n = current.next
local l = nil -- used ?
if c ~= stop then
- if nukta[getchar(n)] then
+ if nukta[n.char] then
c = n
- n = getnext(n)
+ n = n.next
end
if c ~= stop then
- if halant[getchar(n)] then
+ if halant[n.char] then
c = n
- n = getnext(n)
+ n = n.next
end
- while c ~= stop and dependent_vowel[getchar(n)] do
+ while c ~= stop and dependent_vowel[n.char] do
c = n
- n = getnext(n)
+ n = n.next
end
if c ~= stop then
- if vowel_modifier[getchar(n)] then
+ if vowel_modifier[n.char] then
c = n
- n = getnext(n)
+ n = n.next
end
- if c ~= stop and stress_tone_mark[getchar(n)] then
+ if c ~= stop and stress_tone_mark[n.char] then
c = n
- n = getnext(n)
+ n = n.next
end
end
end
end
- local bp = getprev(firstcons)
- local cn = getnext(current)
- local last = getnext(c)
+ local bp = firstcons.prev
+ local cn = current.next
+ local last = c.next
while cn ~= last do
-- move pre-base matras...
- if pre_mark[getchar(cn)] then
+ if pre_mark[cn.char] then
if bp then
- setfield(bp,"next",cn)
+ bp.next = cn
end
- local next = getnext(cn)
- local prev = getprev(cn)
+ local next = cn.next
+ local prev = cn.prev
if next then
- setfield(next,"prev",prev)
+ next.prev = prev
end
- setfield(prev,"next",next)
+ prev.next = next
if cn == stop then
stop = prev
end
- setfield(cn,"prev",bp)
- setfield(cn,"next",firstcons)
- setfield(firstcons,"prev",cn)
+ cn.prev = bp
+ cn.next = firstcons
+ firstcons.prev = cn
if firstcons == start then
if head == start then
head = cn
@@ -884,29 +806,29 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
end
break
end
- cn = getnext(cn)
+ cn = cn.next
end
allreordered = c == stop
- current = getnext(c)
+ current = c.next
end
if reph or vattu then
local current, cns = start, nil
while current ~= stop do
local c = current
- local n = getnext(current)
- if ra[getchar(current)] and halant[getchar(n)] then
+ local n = current.next
+ if current.char == c_ra and halant[n.char] then
c = n
- n = getnext(n)
+ n = n.next
local b, bn = base, base
while bn ~= stop do
- local next = getnext(bn)
- if dependent_vowel[getchar(next)] then
+ local next = bn.next
+ if dependent_vowel[next.char] then
b = next
end
bn = next
end
- if getattr(current,a_state) == s_rphf then
+ if current[a_state] == s_rphf then
-- position Reph (Ra + H) after post-base 'matra' (if any) since these
-- become marks on the 'matra', not on the base glyph
if b ~= current then
@@ -919,65 +841,65 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
if b == stop then
stop = c
end
- local prev = getprev(current)
+ local prev = current.prev
if prev then
- setfield(prev,"next",n)
+ prev.next = n
end
if n then
- setfield(n,"prev",prev)
+ n.prev = prev
end
- local next = getnext(b)
- setfield(c,"next",next)
+ local next = b.next
+ c.next = next
if next then
- setfield(next,"prev",c)
+ next.prev = c
end
- setfield(c,"next",next)
- setfield(b,"next",current)
- setfield(current,"prev",b)
+ c.next = next
+ b.next = current
+ current.prev = b
end
- elseif cns and getnext(cns) ~= current then -- todo: optimize next
+ elseif cns and cns.next ~= current then
-- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants)
- local cp, cnsn = getprev(current), getnext(cns)
+ local cp, cnsn = current.prev, cns.next
if cp then
- setfield(cp,"next",n)
+ cp.next = n
end
if n then
- setfield(n,"prev",cp)
+ n.prev = cp
end
- setfield(cns,"next",current)
- setfield(current,"prev",cns)
- setfield(c,"next",cnsn)
+ cns.next = current
+ current.prev = cns
+ c.next = cnsn
if cnsn then
- setfield(cnsn,"prev",c)
+ cnsn.prev = c
end
if c == stop then
stop = cp
break
end
- current = getprev(n)
+ current = n.prev
end
else
- local char = getchar(current)
+ local char = current.char
if consonant[char] then
cns = current
- local next = getnext(cns)
- if halant[getchar(next)] then
+ local next = cns.next
+ if halant[next.char] then
cns = next
end
elseif char == c_nbsp then
nbspaces = nbspaces + 1
cns = current
- local next = getnext(cns)
- if halant[getchar(next)] then
+ local next = cns.next
+ if halant[next.char] then
cns = next
end
end
end
- current = getnext(current)
+ current = current.next
end
end
- if getchar(base) == c_nbsp then
+ if base.char == c_nbsp then
nbspaces = nbspaces - 1
head = remove_node(head,base)
free_node(base)
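
Positioning the Reph and the vattu above comes down to cutting a short chain (Ra plus its halant) out of the list and splicing it back in after a target node. A plain-table sketch of that splice, with made-up node values, just to show the pointer bookkeeping:

-- move the chain first..last so that it follows target (all plain tables)
local function move_after(head,first,last,target)
    local before, after = first.prev, last.next
    if before then before.next = after else head = after end
    if after then after.prev = before end
    local tn = target.next
    target.next, first.prev = first, target
    last.next = tn
    if tn then tn.prev = last end
    return head
end

-- build r -> h -> b, then move r..h after b: result is b -> r -> h
local r, h, b = { v = "ra" }, { v = "halant" }, { v = "base" }
r.next, h.prev, h.next, b.prev = h, r, b, h
local head = move_after(r,r,h,b)
assert(head == b and b.next == r and h.next == nil)
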
@@ -997,24 +919,24 @@ end
function handlers.devanagari_reorder_matras(head,start,kind,lookupname,replacement) -- no leak
local current = start -- we could cache attributes here
- local startfont = getfont(start)
- local startattr = getattr(start,a_syllabe)
+ local startfont = start.font
+ local startattr = start[a_syllabe]
-- can be fast loop
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font and getattr(current,a_syllabe) == startattr do
- local next = getnext(current)
- if halant[getchar(current)] and not getattr(current,a_state) then
- if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == font and getattr(next,a_syllabe) == startattr and zw_char[getchar(next)] then
+ while current and current.id == glyph_code and current.subtype<256 and current.font == font and current[a_syllabe] == startattr do
+ local next = current.next
+ if halant[current.char] and not current[a_state] then
+ if next and next.id == glyph_code and next.subtype<256 and next.font == font and next[a_syllabe] == startattr and zw_char[next.char] then
current = next
end
- local startnext = getnext(start)
+ local startnext = start.next
head = remove_node(head,start)
- local next = getnext(current)
+ local next = current.next
if next then
- setfield(next,"prev",start)
+ next.prev = start
end
- setfield(start,"next",next)
- setfield(current,"next",start)
- setfield(start,"prev",current)
+ start.next = next
+ current.next = start
+ start.prev = current
start = startnext
break
end
@@ -1050,98 +972,98 @@ end
function handlers.devanagari_reorder_reph(head,start,kind,lookupname,replacement)
-- since in Devanagari reph has reordering position 'before postscript', dev2 only follows steps 2, 4 and 6;
-- the other steps are still ToDo (required for scripts other than dev2)
- local current = getnext(start)
+ local current = start.next
local startnext = nil
local startprev = nil
- local startfont = getfont(start)
- local startattr = getattr(start,a_syllabe)
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getattr(current,a_syllabe) == startattr do --step 2
- if halant[getchar(current)] and not getattr(current,a_state) then
- local next = getnext(current)
- if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getattr(next,a_syllabe) == startattr and zw_char[getchar(next)] then
+ local startfont = start.font
+ local startattr = start[a_syllabe]
+ while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do --step 2
+ if halant[current.char] and not current[a_state] then
+ local next = current.next
+ if next and next.id == glyph_code and next.subtype<256 and next.font == startfont and next[a_syllabe] == startattr and zw_char[next.char] then
current = next
end
- startnext = getnext(start)
+ startnext = start.next
head = remove_node(head,start)
- local next = getnext(current)
+ local next = current.next
if next then
- setfield(next,"prev",start)
+ next.prev = start
end
- setfield(start,"next",next)
- setfield(current,"next",start)
- setfield(start,"prev",current)
+ start.next = next
+ current.next = start
+ start.prev = current
start = startnext
- startattr = getattr(start,a_syllabe)
+ startattr = start[a_syllabe]
break
end
- current = getnext(current)
+ current = current.next
end
if not startnext then
- current = getnext(start)
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getattr(current,a_syllabe) == startattr do --step 4
- if getattr(current,a_state) == s_pstf then --post-base
- startnext = getnext(start)
+ current = start.next
+ while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do --step 4
+ if current[a_state] == s_pstf then --post-base
+ startnext = start.next
head = remove_node(head,start)
- local prev = getprev(current)
- setfield(start,"prev",prev)
- setfield(prev,"next",start)
- setfield(start,"next",current)
- setfield(current,"prev",start)
+ local prev = current.prev
+ start.prev = prev
+ prev.next = start
+ start.next = current
+ current.prev = start
start = startnext
- startattr = getattr(start,a_syllabe)
+ startattr = start[a_syllabe]
break
end
- current = getnext(current)
+ current = current.next
end
end
-- ToDo: determine position for reph with reordering position other than 'before postscript'
-- (required for scripts other than dev2)
-- leaks
if not startnext then
- current = getnext(start)
+ current = start.next
local c = nil
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getattr(current,a_syllabe) == startattr do --step 5
+ while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do --step 5
if not c then
- local char = getchar(current)
+ local char = current.char
-- todo: combine in one
if mark_above_below_post[char] and reorder_class[char] ~= "after subscript" then
c = current
end
end
- current = getnext(current)
+ current = current.next
end
-- here we can lose the old start node: maybe best split cases
if c then
- startnext = getnext(start)
+ startnext = start.next
head = remove_node(head,start)
- local prev = getprev(c)
- setfield(start,"prev",prev)
- setfield(prev,"next",start)
- setfield(start,"next",c)
- setfield(c,"prev",start)
+ local prev = c.prev
+ start.prev = prev
+ prev.next = start
+ start.next = c
+ c.prev = start
-- end
start = startnext
- startattr = getattr(start,a_syllabe)
+ startattr = start[a_syllabe]
end
end
-- leaks
if not startnext then
current = start
- local next = getnext(current)
- while next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getattr(next,a_syllabe) == startattr do --step 6
+ local next = current.next
+ while next and next.id == glyph_code and next.subtype<256 and next.font == startfont and next[a_syllabe] == startattr do --step 6
current = next
- next = getnext(current)
+ next = current.next
end
if start ~= current then
- startnext = getnext(start)
+ startnext = start.next
head = remove_node(head,start)
- local next = getnext(current)
+ local next = current.next
if next then
- setfield(next,"prev",start)
+ next.prev = start
end
- setfield(start,"next",next)
- setfield(current,"next",start)
- setfield(start,"prev",current)
+ start.next = next
+ current.next = start
+ start.prev = current
start = startnext
end
end
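
All of these reordering handlers stop as soon as the next node no longer belongs to the same syllable: a glyph node (subtype below 256) in the same font with the same a_syllabe value. A small sketch of that guard as a predicate, again on plain tables rather than real nodes; the glyph_code value is an assumption of the sketch:

local glyph_code = 29                      -- assumed glyph node id, only for this sketch

local function same_syllable(n,font,syllable)
    return n
       and n.id == glyph_code
       and n.subtype < 256
       and n.font == font
       and n.syllable == syllable          -- stands in for the a_syllabe attribute
end

-- walk to the last glyph of the current syllable (mirrors "step 6" above)
local function syllable_end(start,font,syllable)
    local current = start
    local nxt     = current.next
    while same_syllable(nxt,font,syllable) do
        current = nxt
        nxt     = current.next
    end
    return current
end

local a = { id = glyph_code, subtype = 0, font = 1, syllable = 5 }
local b = { id = glyph_code, subtype = 0, font = 1, syllable = 5 }
local c = { id = glyph_code, subtype = 0, font = 1, syllable = 6 }
a.next, b.next = b, c
assert(syllable_end(a,1,5) == b)
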
@@ -1164,71 +1086,71 @@ function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start,k
local current = start
local startnext = nil
local startprev = nil
- local startfont = getfont(start)
- local startattr = getattr(start,a_syllabe)
+ local startfont = start.font
+ local startattr = start[a_syllabe]
-- can be fast for loop + caching state
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getattr(current,a_syllabe) == startattr do
- local next = getnext(current)
- if halant[getchar(current)] and not getattr(current,a_state) then
- if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == font and getattr(next,a_syllabe) == startattr then
- local char = getchar(next)
+ while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do
+ local next = current.next
+ if halant[current.char] and not current[a_state] then
+ if next and next.id == glyph_code and next.subtype<256 and next.font == font and next[a_syllabe] == startattr then
+ local char = next.char
if char == c_zwnj or char == c_zwj then
current = next
end
end
- startnext = getnext(start)
+ startnext = start.next
removenode(start,start)
- local next = getnext(current)
+ local next = current.next
if next then
- setfield(next,"prev",start)
+ next.prev = start
end
- setfield(start,"next",next)
- setfield(current,"next",start)
- setfield(start,"prev",current)
+ start.next = next
+ current.next = start
+ start.prev = current
start = startnext
break
end
current = next
end
if not startnext then
- current = getnext(start)
- startattr = getattr(start,a_syllabe)
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getattr(current,a_syllabe) == startattr do
- if not consonant[getchar(current)] and getattr(current,a_state) then --main
- startnext = getnext(start)
+ current = start.next
+ startattr = start[a_syllabe]
+ while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do
+ if not consonant[current.char] and current[a_state] then --main
+ startnext = start.next
removenode(start,start)
- local prev = getprev(current)
- setfield(start,"prev",prev)
- setfield(prev,"next",start)
- setfield(start,"next",current)
- setfield(current,"prev",start)
+ local prev = current.prev
+ start.prev = prev
+ prev.next = start
+ start.next = current
+ current.prev = start
start = startnext
break
end
- current = getnext(current)
+ current = current.next
end
end
return head, start, true
end
function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replacement)
- local stop = getnext(start)
- local startfont = getfont(start)
- while stop and getid(stop) == glyph_code and getsubtype(stop) < 256 and getfont(stop) == startfont do
- local char = getchar(stop)
+ local stop = start.next
+ local startfont = start.font
+ while stop and stop.id == glyph_code and stop.subtype<256 and stop.font == startfont do
+ local char = stop.char
if char == c_zwnj or char == c_zwj then
- stop = getnext(stop)
+ stop = stop.next
else
break
end
end
if stop then
- setfield(getfield(stop,"prev"),"next",nil)
- setfield(stop,"prev",getprev(start))
+ stop.prev.next = nil
+ stop.prev = start.prev
end
- local prev = getprev(start)
+ local prev = start.prev
if prev then
- setfield(prev,"next",stop)
+ prev.next = stop
end
if head == start then
head = stop
@@ -1238,15 +1160,11 @@ function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replaceme
end
local valid = {
- akhn = true, -- malayalam
rphf = true,
pref = true,
half = true,
blwf = true,
pstf = true,
- pres = true, -- malayalam
- blws = true, -- malayalam
- psts = true, -- malayalam
}
local function dev2_initialize(font,attr)
@@ -1288,25 +1206,16 @@ local function dev2_initialize(font,attr)
local reph = false
local chain = dataset[3]
if chain ~= 0 then --rphf is result of chain
- -- rphf might be result of other handler/chainproc
+ --ToDo: rphf might be result of other handler/chainproc
else
- -- rphf acts on consonant + halant
- for k, v in next, ra do
- local r = lookupcache[k]
- if r then
- local h = false
- for k, v in next, halant do
- local h = r[k]
- if h then
- reph = h.ligature or false
- break
- end
- end
- if reph then
- break
- end
+ reph = lookupcache[0x0930]
+ if reph then
+ reph = reph[0x094D]
+ if reph then
+ reph = reph["ligature"]
end
end
+ --ToDo: rphf actually acts on consonant + halant. This consonant might not necessarily be 0x0930 ... (but for dev2 it is)
end
seqsubset[#seqsubset+1] = { kind, lookupcache, reph }
end
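
The rewritten rphf branch above reads the reph ligature straight from the lookup cache as lookupcache[0x0930][0x094D], i.e. RA followed by the virama. A guess at the shape of such a cache and the guarded lookup, purely illustrative; the real cache is whatever the font loader built:

-- hypothetical cache fragment: first char -> second char -> ligature record
local lookupcache = {
    [0x0930] = {                           -- DEVANAGARI LETTER RA
        [0x094D] = { ligature = 0xE000 },  -- virama; the ligature slot is made up here
    },
}

local reph = false
local first = lookupcache[0x0930]
if first then
    local second = first[0x094D]
    if second then
        reph = second.ligature or false
    end
end
assert(reph == 0xE000)
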
@@ -1347,37 +1256,32 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
local kind = subset[1]
local lookupcache = subset[2]
if kind == "rphf" then
- for k, v in next, ra do
- local r = lookupcache[k]
- if r then
- for k, v in next, halant do
- local h = r[k]
- if h then
- reph = h.ligature or false
- break
- end
- end
- if reph then
- break
- end
+ -- todo: rphf might be result of other handler/chainproc
+ -- todo: rphf actually acts on consonant + halant.
+ -- todo: the consonant might not necessarily be 0x0930 ... (but for devanagari it is)
+ local lookup = lookupcache[0x0930]
+ if lookup then
+ local hit = lookup[0x094D]
+ if hit then
+ reph = hit["ligature"]
end
end
local current = start
- local last = getnext(stop)
+ local last = stop.next
while current ~= last do
if current ~= stop then
- local c = locl[current] or getchar(current)
+ local c = locl[current] or current.char
local found = lookupcache[c]
if found then
- local next = getnext(current)
- local n = locl[next] or getchar(next)
+ local next = current.next
+ local n = locl[next] or next.char
if found[n] then --above-base: rphf Consonant + Halant
- local afternext = next ~= stop and getnext(next)
- if afternext and zw_char[getchar(afternext)] then -- ZWJ and ZWNJ prevent creation of reph
+ local afternext = next ~= stop and next.next
+ if afternext and zw_char[afternext.char] then -- ZWJ and ZWNJ prevent creation of reph
current = next
- current = getnext(current)
+ current = current.next
elseif current == start then
- setattr(current,a_state,s_rphf)
+ current[a_state] = s_rphf
current = next
else
current = next
@@ -1385,111 +1289,98 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
end
end
end
- current = getnext(current)
+ current = current.next
end
elseif kind == "pref" then
-- why not global? pretty inefficient this way
-- this will move to the initializer and we will store the hash in dataset
-- todo: reph might also be result of chain
- for k, v in next, halant do
- local h = lookupcache[k]
- if h then
- local found = false
- for k, v in next, h do
- found = v and v.ligature
- if found then
- pre_base_reordering_consonants[k] = found
- break
- end
- end
- if found then
- break
- end
- end
+ for k, v in next, lookupcache[0x094D] do
+ pre_base_reordering_consonants[k] = v and v["ligature"] --ToDo: reph might also be result of chain
end
--
local current = start
- local last = getnext(stop)
+ local last = stop.next
while current ~= last do
if current ~= stop then
- local c = locl[current] or getchar(current)
+ local c = locl[current] or current.char
local found = lookupcache[c]
if found then
- local next = getnext(current)
- local n = locl[next] or getchar(next)
+ local next = current.next
+ local n = locl[next] or next.char
if found[n] then
- setattr(current,a_state,s_pref)
- setattr(next,a_state,s_pref)
+ current[a_state] = s_pref
+ next[a_state] = s_pref
current = next
end
end
end
- current = getnext(current)
+ current = current.next
end
elseif kind == "half" then -- half forms: half / Consonant + Halant
local current = start
- local last = getnext(stop)
+ local last = stop.next
while current ~= last do
if current ~= stop then
- local c = locl[current] or getchar(current)
+ local c = locl[current] or current.char
local found = lookupcache[c]
if found then
- local next = getnext(current)
- local n = locl[next] or getchar(next)
+ local next = current.next
+ local n = locl[next] or next.char
if found[n] then
- if next ~= stop and getchar(getnext(next)) == c_zwnj then -- zwnj prevent creation of half
+ if next ~= stop and next.next.char == c_zwnj then -- zwnj prevent creation of half
current = next
else
- setattr(current,a_state,s_half)
+ current[a_state] = s_half
if not halfpos then
halfpos = current
end
end
- current = getnext(current)
+ current = current.next
end
end
end
- current = getnext(current)
+ current = current.next
end
elseif kind == "blwf" then -- below-base: blwf / Halant + Consonant
local current = start
- local last = getnext(stop)
+ local last = stop.next
while current ~= last do
if current ~= stop then
- local c = locl[current] or getchar(current)
+ local c = locl[current] or current.char
local found = lookupcache[c]
if found then
- local next = getnext(current)
- local n = locl[next] or getchar(next)
+ local next = current.next
+ local n = locl[next] or next.char
if found[n] then
- setattr(current,a_state,s_blwf)
- setattr(next,a_state,s_blwf)
+ current[a_state] = s_blwf
+ next[a_state] = s_blwf
current = next
subpos = current
end
end
end
- current = getnext(current)
+ current = current.next
end
elseif kind == "pstf" then -- post-base: pstf / Halant + Consonant
local current = start
- local last = getnext(stop)
+ local last = stop.next
while current ~= last do
if current ~= stop then
- local c = locl[current] or getchar(current)
+ local c = locl[current] or current.char
local found = lookupcache[c]
if found then
- local next = getnext(current)
- local n = locl[next] or getchar(next)
+ local next = current.next
+ local n = locl[next] or next.char
if found[n] then
- setattr(current,a_state,s_pstf)
- setattr(next,a_state,s_pstf)
+ current[a_state] = s_pstf
+ next[a_state] = s_pstf
current = next
postpos = current
end
end
end
- current = getnext(current)
+ current = current.next
end
end
end
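
The pref branch above walks the subtable stored under the halant (0x094D) with Lua's generic for; the iterator function next comes first and the table to iterate second, otherwise Lua tries to call the table as the iterator. A quick reminder sketch with made-up data:

local ligatures = { [0x0930] = { ligature = 0xE001 } }  -- invented entry

local collected = { }
for k, v in next, ligatures do      -- next first, then the table
    collected[k] = v and v.ligature
end
assert(collected[0x0930] == 0xE001)

-- "for k, v in ligatures, next do" would instead try to call the table as
-- the iterator function and fail at run time
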
@@ -1501,14 +1392,14 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
local current, base, firstcons = start, nil, nil
- if getattr(start,a_state) == s_rphf then
+ if start[a_state] == s_rphf then
-- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
- current = getnext(getnext(start))
+ current = start.next.next
end
local function stand_alone(is_nbsp)
if current == stop then
- stop = getprev(stop)
+ stop = stop.prev
head = remove_node(head,current)
free_node(current)
return head, stop, nbspaces
@@ -1516,36 +1407,36 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
if is_nbsp then
nbspaces = nbspaces + 1
end
- base = current
- current = getnext(current)
+ base = current
+ current = current.next
if current ~= stop then
- local char = getchar(current)
+ local char = current.char
if nukta[char] then
- current = getnext(current)
- char = getchar(current)
+ current = current.next
+ char = current.char
end
if char == c_zwj then
- local next = getnext(current)
- if current ~= stop and next ~= stop and halant[getchar(next)] then
+ local next = current.next
+ if current ~= stop and next ~= stop and halant[next.char] then
current = next
- next = getnext(current)
- local tmp = getnext(next)
+ next = current.next
+ local tmp = next.next
local changestop = next == stop
- setfield(next,"next",nil)
- setattr(current,a_state,s_pref)
+ next.next = nil
+ current[a_state] = s_pref
current = processcharacters(current,font)
- setattr(current,a_state,s_blwf)
+ current[a_state] = s_blwf
current = processcharacters(current,font)
- setattr(current,a_state,s_pstf)
+ current[a_state] = s_pstf
current = processcharacters(current,font)
- setattr(current,a_state,unsetvalue)
- if halant[getchar(current)] then
- setfield(getnext(current),"next",tmp)
+ current[a_state] = unsetvalue
+ if halant[current.char] then
+ current.next.next = tmp
local nc = copy_node(current)
- setfield(current,"char",dotted_circle)
+ current.char = dotted_circle
head = insert_node_after(head,current,nc)
else
- setfield(current,"next",tmp) -- assumes that result of pref, blwf, or pstf consists of one node
+ current.next = tmp -- assumes that result of pref, blwf, or pstf consists of one node
if changestop then
stop = current
end
@@ -1556,23 +1447,23 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
end
end
- if current ~= getnext(stop) then
+ if current ~= stop.next then
-- Stand Alone cluster
stand_alone()
- elseif getchar(current) == c_nbsp then
+ elseif current.char == c_nbsp then
-- Stand Alone cluster
stand_alone(true)
else -- not Stand Alone cluster
- local last = getnext(stop)
+ local last = stop.next
while current ~= last do -- find base consonant
- local next = getnext(current)
- if consonant[getchar(current)] then
- if not (current ~= stop and next ~= stop and halant[getchar(next)] and getchar(getnext(next)) == c_zwj) then
+ local next = current.next
+ if consonant[current.char] then
+ if not (current ~= stop and next ~= stop and halant[next.char] and next.next.char == c_zwj) then
if not firstcons then
firstcons = current
end
-- check whether consonant has below-base or post-base form or is pre-base reordering Ra
- local a = getattr(current,a_state)
+ local a = current[a_state]
if not (a == s_pref or a == s_blwf or a == s_pstf) then
base = current
end
@@ -1586,13 +1477,13 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
end
if not base then
- if getattr(start,a_state) == s_rphf then
- setattr(start,a_state,unsetvalue)
+ if start[a_state] == s_rphf then
+ start[a_state] = unsetvalue
end
return head, stop, nbspaces
else
- if getattr(base,a_state) then
- setattr(base,a_state,unsetvalue)
+ if base[a_state] then
+ base[a_state] = unsetvalue
end
basepos = base
end
@@ -1610,32 +1501,22 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
local moved = { }
local current = start
- local last = getnext(stop)
+ local last = stop.next
while current ~= last do
- local char, target, cn = locl[current] or getchar(current), nil, getnext(current)
--- not so efficient (needed for malayalam)
-local tpm = twopart_mark[char]
-if tpm then
- local extra = copy_node(current)
- char = tpm[1]
- setfield(current,"char",char)
- setfield(extra,"char",tpm[2])
- head = insert_node_after(head,current,extra)
-end
---
+ local char, target, cn = locl[current] or current.char, nil, current.next
if not moved[current] and dependent_vowel[char] then
if pre_mark[char] then -- Before first half form in the syllable
moved[current] = true
- local prev = getprev(current)
- local next = getnext(current)
+ local prev = current.prev
+ local next = current.next
if prev then
- setfield(prev,"next",next)
+ prev.next = next
end
if next then
- setfield(next,"prev",prev)
+ next.prev = prev
end
if current == stop then
- stop = getprev(current)
+ stop = current.prev
end
if halfpos == start then
if head == start then
@@ -1643,13 +1524,13 @@ end
end
start = current
end
- local prev = getprev(halfpos)
+ local prev = halfpos.prev
if prev then
- setfield(prev,"next",current)
+ prev.next = current
end
- setfield(current,"prev",prev)
- setfield(halfpos,"prev",current)
- setfield(current,"next",halfpos)
+ current.prev = prev
+ halfpos.prev = current
+ current.next = halfpos
halfpos = current
elseif above_mark[char] then -- After main consonant
target = basepos
@@ -1671,25 +1552,25 @@ end
postpos = current
end
if mark_above_below_post[char] then
- local prev = getprev(current)
+ local prev = current.prev
if prev ~= target then
- local next = getnext(current)
+ local next = current.next
if prev then -- not needed, already tested with target
- setfield(prev,"next",next)
+ prev.next = next
end
if next then
- setfield(next,"prev",prev)
+ next.prev = prev
end
if current == stop then
stop = prev
end
- local next = getnext(target)
+ local next = target.next
if next then
- setfield(next,"prev",current)
+ next.prev = current
end
- setfield(current,"next",next)
- setfield(target,"next",current)
- setfield(current,"prev",target)
+ current.next = next
+ target.next = current
+ current.prev = target
end
end
end
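
Dependent vowels are routed above by their position class: pre-base marks go before the first half form, above-base marks after the base consonant, below-base marks after the subjoined position and post-base marks after the post-base position. A compact sketch of that dispatch; the class tables are invented for the example:

local pre_mark   = { [0x093F] = true }   -- vowel sign I
local above_mark = { [0x0947] = true }   -- vowel sign E
local below_mark = { [0x0941] = true }   -- vowel sign U
local post_mark  = { [0x093E] = true }   -- vowel sign AA

local function matra_target(char,halfpos,basepos,subpos,postpos)
    if pre_mark[char] then
        return halfpos, "before first half form"
    elseif above_mark[char] then
        return basepos, "after base consonant"
    elseif below_mark[char] then
        return subpos, "after below-base position"
    elseif post_mark[char] then
        return postpos, "after post-base position"
    end
end

print(matra_target(0x0947,"half","base","sub","post"))  -- base   after base consonant
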
@@ -1700,7 +1581,7 @@ end
local current, c = start, nil
while current ~= stop do
- local char = getchar(current)
+ local char = current.char
if halant[char] or stress_tone_mark[char] then
if not c then
c = current
@@ -1708,33 +1589,33 @@ end
else
c = nil
end
- local next = getnext(current)
- if c and nukta[getchar(next)] then
+ local next = current.next
+ if c and nukta[next.char] then
if head == c then
head = next
end
if stop == next then
stop = current
end
- local prev = getprev(c)
+ local prev = c.prev
if prev then
- setfield(prev,"next",next)
+ prev.next = next
end
- setfield(next,"prev",prev)
- local nextnext = getnext(next)
- setfield(current,"next",nextnext)
- local nextnextnext = getnext(nextnext)
+ next.prev = prev
+ local nextnext = next.next
+ current.next = nextnext
+ local nextnextnext = nextnext.next
if nextnextnext then
- setfield(nextnextnext,"prev",current)
+ nextnextnext.prev = current
end
- setfield(c,"prev",nextnext)
- setfield(nextnext,"next",c)
+ c.prev = nextnext
+ nextnext.next = c
end
if stop == current then break end
- current = getnext(current)
+ current = current.next
end
- if getchar(base) == c_nbsp then
+ if base.char == c_nbsp then
nbspaces = nbspaces - 1
head = remove_node(head, base)
free_node(base)
@@ -1758,30 +1639,30 @@ for k, v in next, halant do separator[k] = true end
local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowel
-- why two variants ... the comment suggests that it's the same ruleset
- local n = getnext(c)
+ local n = c.next
if not n then
return c
end
if variant == 1 then
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if v and nukta[getchar(n)] then
- n = getnext(n)
+ local v = n.id == glyph_code and n.subtype<256 and n.font == font
+ if v and nukta[n.char] then
+ n = n.next
if n then
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
end
end
if n and v then
- local nn = getnext(n)
- if nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font then
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and getsubtype(nnn) < 256 and getfont(nnn) == font then
- local nnc = getchar(nn)
- local nnnc = getchar(nnn)
+ local nn = n.next
+ if nn and nn.id == glyph_code and nn.subtype<256 and nn.font == font then
+ local nnn = nn.next
+ if nnn and nnn.id == glyph_code and nnn.subtype<256 and nnn.font == font then
+ local nnc = nn.char
+ local nnnc = nnn.char
if nnc == c_zwj and consonant[nnnc] then
c = nnn
elseif (nnc == c_zwnj or nnc == c_zwj) and halant[nnnc] then
- local nnnn = getnext(nnn)
- if nnnn and getid(nnnn) == glyph_code and consonant[getchar(nnnn)] and getsubtype(nnnn) < 256 and getfont(nnnn) == font then
+ local nnnn = nnn.next
+ if nnnn and nnnn.id == glyph_code and consonant[nnnn.char] and nnnn.subtype<256 and nnnn.font == font then
c = nnnn
end
end
@@ -1789,94 +1670,94 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe
end
end
elseif variant == 2 then
- if getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ if n.id == glyph_code and nukta[n.char] and n.subtype<256 and n.font == font then
c = n
end
- n = getnext(c)
- if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
- local nn = getnext(n)
+ n = c.next
+ if n and n.id == glyph_code and n.subtype<256 and n.font == font then
+ local nn = n.next
if nn then
- local nv = getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font
- if nv and zw_char[getchar(n)] then
+ local nv = nn.id == glyph_code and nn.subtype<256 and nn.font == font
+ if nv and zw_char[n.char] then
n = nn
- nn = getnext(nn)
- nv = nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font
+ nn = nn.next
+ nv = nn and nn.id == glyph_code and nn.subtype<256 and nn.font == font
end
- if nv and halant[getchar(n)] and consonant[getchar(nn)] then
+ if nv and halant[n.char] and consonant[nn.char] then
c = nn
end
end
end
end
-- c = ms_matra(c)
- local n = getnext(c)
+ local n = c.next
if not n then
return c
end
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ local v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- local char = getchar(n)
+ local char = n.char
if dependent_vowel[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
if nukta[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
if halant[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
if vowel_modifier[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
if stress_tone_mark[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
if stress_tone_mark[char] then
return n
@@ -1886,38 +1767,38 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe
end
local function analyze_next_chars_two(c,font)
- local n = getnext(c)
+ local n = c.next
if not n then
return c
end
- if getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ if n.id == glyph_code and nukta[n.char] and n.subtype<256 and n.font == font then
c = n
end
n = c
while true do
- local nn = getnext(n)
- if nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font then
- local char = getchar(nn)
+ local nn = n.next
+ if nn and nn.id == glyph_code and nn.subtype<256 and nn.font == font then
+ local char = nn.char
if halant[char] then
n = nn
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and zw_char[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
+ local nnn = nn.next
+ if nnn and nnn.id == glyph_code and zw_char[nnn.char] and nnn.subtype<256 and nnn.font == font then
n = nnn
end
elseif char == c_zwnj or char == c_zwj then
-- n = nn -- not here (?)
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and halant[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
+ local nnn = nn.next
+ if nnn and nnn.id == glyph_code and halant[nnn.char] and nnn.subtype<256 and nnn.font == font then
n = nnn
end
else
break
end
- local nn = getnext(n)
- if nn and getid(nn) == glyph_code and consonant[getchar(nn)] and getsubtype(nn) < 256 and getfont(nn) == font then
+ local nn = n.next
+ if nn and nn.id == glyph_code and consonant[nn.char] and nn.subtype<256 and nn.font == font then
n = nn
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and nukta[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
+ local nnn = nn.next
+ if nnn and nnn.id == glyph_code and nukta[nnn.char] and nnn.subtype<256 and nnn.font == font then
n = nnn
end
c = n
@@ -1933,114 +1814,114 @@ local function analyze_next_chars_two(c,font)
-- This shouldn't happen I guess.
return
end
- local n = getnext(c)
+ local n = c.next
if not n then
return c
end
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ local v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- local char = getchar(n)
+ local char = n.char
if char == c_anudatta then
c = n
- n = getnext(c)
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
if halant[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
if char == c_zwnj or char == c_zwj then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
else
-- c = ms_matra(c)
-- same as one
if dependent_vowel[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
if nukta[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
if halant[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
end
-- same as one
if vowel_modifier[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
if stress_tone_mark[char] then
- c = getnext(c)
- n = getnext(c)
+ c = c.next
+ n = c.next
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
return c
end
- char = getchar(n)
+ char = n.char
end
if stress_tone_mark[char] then
return n
@@ -2052,9 +1933,9 @@ end
local function inject_syntax_error(head,current,mark)
local signal = copy_node(current)
if mark == pre_mark then
- setfield(signal,"char",dotted_circle)
+ signal.char = dotted_circle
else
- setfield(current,"char",dotted_circle)
+ current.char = dotted_circle
end
return insert_node_after(head,current,signal)
end
@@ -2063,32 +1944,31 @@ end
-- a lot. Common code has been synced.
function methods.deva(head,font,attr)
- head = tonut(head)
local current = head
local start = true
local done = false
local nbspaces = 0
while current do
- if getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font then
+ if current.id == glyph_code and current.subtype<256 and current.font == font then
done = true
local syllablestart = current
local syllableend = nil
local c = current
- local n = getnext(c)
- if n and ra[getchar(c)] and getid(n) == glyph_code and halant[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- local n = getnext(n)
- if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
+ local n = c.next
+ if n and c.char == c_ra and n.id == glyph_code and halant[n.char] and n.subtype<256 and n.font == font then
+ local n = n.next
+ if n and n.id == glyph_code and n.subtype<256 and n.font == font then
c = n
end
end
- local standalone = getchar(c) == c_nbsp
+ local standalone = c.char == c_nbsp
if standalone then
- local prev = getprev(current)
+ local prev = current.prev
if not prev then
-- begin of paragraph or box
- elseif getid(prev) ~= glyph_code or getsubtype(prev) >= 256 or getfont(prev) ~= font then
+ elseif prev.id ~= glyph_code or prev.subtype>=256 or prev.font ~= font then
-- different font or language so quite certainly a different word
- elseif not separator[getchar(prev)] then
+ elseif not separator[prev.char] then
-- something that separates words
else
standalone = false
@@ -2097,61 +1977,61 @@ function methods.deva(head,font,attr)
if standalone then
-- stand alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
local syllableend = analyze_next_chars_one(c,font,2)
- current = getnext(syllableend)
+ current = syllableend.next
if syllablestart ~= syllableend then
head, current, nbspaces = deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
- current = getnext(current)
+ current = current.next
end
else
- -- we can delay the getsubtype(n) and getfont(n) and test for say halant first
+ -- we can delay the n.subtype and n.font and test for say halant first
-- as a table access is faster than two function calls (subtype and font are
-- pseudo fields) but the code becomes messy (unless we make it a function)
- local char = getchar(current)
+ local char = current.char
if consonant[char] then
-- syllable containing consonant
local prevc = true
while prevc do
prevc = false
- local n = getnext(current)
+ local n = current.next
if not n then
break
end
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ local v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
break
end
- local c = getchar(n)
+ local c = n.char
if nukta[c] then
- n = getnext(n)
+ n = n.next
if not n then
break
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
break
end
- c = getchar(n)
+ c = n.char
end
if halant[c] then
- n = getnext(n)
+ n = n.next
if not n then
break
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
break
end
- c = getchar(n)
+ c = n.char
if c == c_zwnj or c == c_zwj then
- n = getnext(n)
+ n = n.next
if not n then
break
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = n.id == glyph_code and n.subtype<256 and n.font == font
if not v then
break
end
- c = getchar(n)
+ c = n.char
end
if consonant[c] then
prevc = true
@@ -2159,77 +2039,77 @@ function methods.deva(head,font,attr)
end
end
end
- local n = getnext(current)
- if n and getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ local n = current.next
+ if n and n.id == glyph_code and nukta[n.char] and n.subtype<256 and n.font == font then
-- nukta (not specified in Microsoft Devanagari OpenType specification)
current = n
- n = getnext(current)
+ n = current.next
end
syllableend = current
current = n
if current then
- local v = getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ local v = current.id == glyph_code and current.subtype<256 and current.font == font
if v then
- if halant[getchar(current)] then
+ if halant[current.char] then
-- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
- local n = getnext(current)
- if n and getid(n) == glyph_code and zw_char[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ local n = current.next
+ if n and n.id == glyph_code and zw_char[n.char] and n.subtype<256 and n.font == font then
-- code collapsed, probably needs checking with intention
syllableend = n
- current = getnext(n)
+ current = n.next
else
syllableend = current
current = n
end
else
-- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
- local c = getchar(current)
+ local c = current.char
if dependent_vowel[c] then
syllableend = current
- current = getnext(current)
- v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ current = current.next
+ v = current and current.id == glyph_code and current.subtype<256 and current.font == font
if v then
- c = getchar(current)
+ c = current.char
end
end
if v and vowel_modifier[c] then
syllableend = current
- current = getnext(current)
- v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ current = current.next
+ v = current and current.id == glyph_code and current.subtype<256 and current.font == font
if v then
- c = getchar(current)
+ c = current.char
end
end
if v and stress_tone_mark[c] then
syllableend = current
- current = getnext(current)
+ current = current.next
end
end
end
end
if syllablestart ~= syllableend then
head, current, nbspaces = deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
- current = getnext(current)
+ current = current.next
end
elseif independent_vowel[char] then
-- syllable without consonants: VO + [VM] + [SM]
syllableend = current
- current = getnext(current)
+ current = current.next
if current then
- local v = getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ local v = current.id == glyph_code and current.subtype<256 and current.font == font
if v then
- local c = getchar(current)
+ local c = current.char
if vowel_modifier[c] then
syllableend = current
- current = getnext(current)
- v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ current = current.next
+ v = current and current.id == glyph_code and current.subtype<256 and current.font == font
if v then
- c = getchar(current)
+ c = current.char
end
end
if v and stress_tone_mark[c] then
syllableend = current
- current = getnext(current)
+ current = current.next
end
end
end
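
methods.deva above recognizes consonant syllables of the shape {C + [Nukta] + H} + C + [M] + [VM] + [SM] before handing them to deva_reorder. The tail of that pattern (optional matra, vowel modifier and stress/tone mark) reduces to three ordered optional steps; a stripped-down scanner over an array of codepoints, with placeholder classification sets:

local dependent_vowel  = { [0x093F] = true, [0x0947] = true }  -- placeholders
local vowel_modifier   = { [0x0902] = true }
local stress_tone_mark = { [0x0951] = true }

-- given the index of the base consonant, extend the syllable over
-- [M] + [VM] + [SM] and return the index of its last codepoint
local function syllable_tail(t,i)
    local last = i
    if dependent_vowel[t[last+1]] then last = last + 1 end
    if vowel_modifier[t[last+1]] then last = last + 1 end
    if stress_tone_mark[t[last+1]] then last = last + 1 end
    return last
end

-- KA + vowel sign I + anusvara
print(syllable_tail({ 0x0915, 0x093F, 0x0902 }, 1))  --> 3
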
@@ -2238,11 +2118,11 @@ function methods.deva(head,font,attr)
if mark then
head, current = inject_syntax_error(head,current,mark)
end
- current = getnext(current)
+ current = current.next
end
end
else
- current = getnext(current)
+ current = current.next
end
start = false
end
@@ -2251,7 +2131,7 @@ function methods.deva(head,font,attr)
head = replace_all_nbsp(head)
end
- head = tonode(head)
+ head = typesetters.characters.handler(head)
return head, done
end
@@ -2262,7 +2142,6 @@ end
-- handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
function methods.dev2(head,font,attr)
- head = tonut(head)
local current = head
local start = true
local done = false
@@ -2270,18 +2149,18 @@ function methods.dev2(head,font,attr)
local nbspaces = 0
while current do
local syllablestart, syllableend = nil, nil
- if getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font then
+ if current.id == glyph_code and current.subtype<256 and current.font == font then
done = true
syllablestart = current
local c = current
- local n = getnext(current)
- if n and ra[getchar(c)] and getid(n) == glyph_code and halant[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- local n = getnext(n)
- if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
+ local n = current.next
+ if n and c.char == c_ra and n.id == glyph_code and halant[n.char] and n.subtype<256 and n.font == font then
+ local n = n.next
+ if n and n.id == glyph_code and n.subtype<256 and n.font == font then
c = n
end
end
- local char = getchar(c)
+ local char = c.char
if independent_vowel[char] then
-- vowel-based syllable: [Ra+H]+V+[N]+[<[<ZWJ|ZWNJ>]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
current = analyze_next_chars_one(c,font,1)
@@ -2290,12 +2169,12 @@ function methods.dev2(head,font,attr)
local standalone = char == c_nbsp
if standalone then
nbspaces = nbspaces + 1
- local p = getprev(current)
+ local p = current.prev
if not p then
-- begin of paragraph or box
- elseif getid(p) ~= glyph_code or getsubtype(p) >= 256 or getfont(p) ~= font then
+ elseif p.id ~= glyph_code or p.subtype>=256 or p.font ~= font then
-- different font or language so quite certainly a different word
- elseif not separator[getchar(p)] then
+ elseif not separator[p.char] then
-- something that separates words
else
standalone = false
@@ -2305,7 +2184,7 @@ function methods.dev2(head,font,attr)
-- Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
current = analyze_next_chars_one(c,font,2)
syllableend = current
- elseif consonant[getchar(current)] then
+ elseif consonant[current.char] then
-- WHY current INSTEAD OF c ?
-- Consonant syllable: {C+[N]+<H+[<ZWNJ|ZWJ>]|<ZWNJ|ZWJ>+H>} + C+[N]+[A] + [< H+[<ZWNJ|ZWJ>] | {M}+[N]+[H]>]+[SM]+[(VD)]
@@ -2317,33 +2196,28 @@ function methods.dev2(head,font,attr)
if syllableend then
syllabe = syllabe + 1
local c = syllablestart
- local n = getnext(syllableend)
+ local n = syllableend.next
while c ~= n do
- setattr(c,a_syllabe,syllabe)
- c = getnext(c)
+ c[a_syllabe] = syllabe
+ c = c.next
end
end
if syllableend and syllablestart ~= syllableend then
head, current, nbspaces = dev2_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
end
- if not syllableend and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font and not getattr(current,a_state) then
- local mark = mark_four[getchar(current)]
+ if not syllableend and current.id == glyph_code and current.subtype<256 and current.font == font and not current[a_state] then
+ local mark = mark_four[current.char]
if mark then
head, current = inject_syntax_error(head,current,mark)
end
end
start = false
- current = getnext(current)
+ current = current.next
end
if nbspaces > 0 then
head = replace_all_nbsp(head)
end
- head = tonode(head)
-
return head, done
end
-
-methods.mlym = methods.deva
-methods.mlm2 = methods.dev2
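
Before calling dev2_reorder, methods.dev2 above stamps every node of a recognized syllable with a running syllable number so that the later handlers can tell where a syllable ends. The same idea on plain tables, with a .syllable field standing in for the a_syllabe attribute:

local function tag_syllable(first,last,number)
    local c = first
    local n = last.next
    while c ~= n do
        c.syllable = number     -- stand-in for setting the a_syllabe attribute
        c = c.next
    end
end

local a, b = { }, { }
a.next, b.prev = b, a
tag_syllable(a,b,1)
assert(a.syllable == 1 and b.syllable == 1)
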
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index eb28bc368..51c2af00f 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -20,7 +20,7 @@ local type, next, tonumber, tostring = type, next, tonumber, tostring
local abs = math.abs
local insert = table.insert
local lpegmatch = lpeg.match
-local reversed, concat, remove, sortedkeys = table.reversed, table.concat, table.remove, table.sortedkeys
+local reversed, concat, remove = table.reversed, table.concat, table.remove
local ioflush = io.flush
local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive
local formatters = string.formatters
@@ -48,7 +48,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.751 -- beware: also sync font-mis.lua
+otf.version = 2.749 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
local fontdata = fonts.hashes.identifiers
@@ -106,8 +106,6 @@ function otf.fileformat(filename)
return formats.otf, suffix == "otf"
elseif leader == "ttcf" then
return formats.ttc, suffix == "ttc"
- -- elseif leader == "true" then
- -- return formats.ttf, suffix == "ttf"
elseif suffix == "ttc" then
return formats.ttc, true
elseif suffix == "dfont" then
@@ -239,7 +237,7 @@ local valid_fields = table.tohash {
"upos",
"use_typo_metrics",
"uwidth",
- "validation_state",
+ -- "validation_state",
"version",
"vert_base",
"weight",
@@ -770,7 +768,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
}
local altuni = glyph.altuni
if altuni then
- -- local d
+ local d
for i=1,#altuni do
local a = altuni[i]
local u = a.unicode
@@ -785,15 +783,15 @@ actions["prepare glyphs"] = function(data,filename,raw)
vv = { [u] = unicode }
variants[v] = vv
end
- -- elseif d then
- -- d[#d+1] = u
- -- else
- -- d = { u }
+ elseif d then
+ d[#d+1] = u
+ else
+ d = { u }
end
end
- -- if d then
- -- duplicates[unicode] = d -- is this needed ?
- -- end
+ if d then
+ duplicates[unicode] = d
+ end
end
else
report_otf("potential problem: glyph %U is used but empty",index)
@@ -821,7 +819,6 @@ actions["check encoding"] = function(data,filename,raw)
local mapdata = raw.map or { }
local unicodetoindex = mapdata and mapdata.map or { }
- local indextounicode = mapdata and mapdata.backmap or { }
-- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "")
local encname = lower(data.enc_name or mapdata.enc_name or "")
local criterium = 0xFFFF -- for instance cambria has a lot of mess up there
@@ -832,81 +829,42 @@ actions["check encoding"] = function(data,filename,raw)
if trace_loading then
report_otf("checking embedded unicode map %a",encname)
end
- -- if false then
- -- for unicode, index in next, unicodetoindex do -- altuni already covers this
- -- if unicode <= criterium and not descriptions[unicode] then
- -- local parent = indices[index] -- why nil?
- -- if not parent then
- -- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
- -- else
- -- local parentdescription = descriptions[parent]
- -- if parentdescription then
- -- local altuni = parentdescription.altuni
- -- if not altuni then
- -- altuni = { { unicode = unicode } }
- -- parentdescription.altuni = altuni
- -- duplicates[parent] = { unicode }
- -- else
- -- local done = false
- -- for i=1,#altuni do
- -- if altuni[i].unicode == unicode then
- -- done = true
- -- break
- -- end
- -- end
- -- if not done then
- -- -- let's assume simple cjk reuse
- -- insert(altuni,{ unicode = unicode })
- -- insert(duplicates[parent],unicode)
- -- end
- -- end
- -- -- if trace_loading then
- -- -- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
- -- -- end
- -- else
- -- report_otf("weird, unicode %U points to %U with index %H",unicode,index)
- -- end
- -- end
- -- end
- -- end
- -- else
- local hash = { }
- for index, unicode in next, indices do -- indextounicode
- hash[index] = descriptions[unicode]
- end
- local reported = { }
- for unicode, index in next, unicodetoindex do
- if not descriptions[unicode] then
- local d = hash[index]
- if d then
- if d.unicode ~= unicode then
- local c = d.copies
- if c then
- c[unicode] = true
- else
- d.copies = { [unicode] = true }
+ for unicode, index in next, unicodetoindex do -- altuni already covers this
+ if unicode <= criterium and not descriptions[unicode] then
+ local parent = indices[index] -- why nil?
+ if not parent then
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ else
+ local parentdescription = descriptions[parent]
+ if parentdescription then
+ local altuni = parentdescription.altuni
+ if not altuni then
+ altuni = { { unicode = unicode } }
+ parentdescription.altuni = altuni
+ duplicates[parent] = { unicode }
+ else
+ local done = false
+ for i=1,#altuni do
+ if altuni[i].unicode == unicode then
+ done = true
+ break
+ end
end
+ if not done then
+ -- let's assume simple cjk reuse
+ insert(altuni,{ unicode = unicode })
+ insert(duplicates[parent],unicode)
+ end
+ end
+ if trace_loading then
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
end
- elseif not reported[i] then
- report_otf("missing index %i",index)
- reported[i] = true
+ else
+ report_otf("weird, unicode %U points to %U with index %H",unicode,index)
end
end
end
- for index, data in next, hash do -- indextounicode
- data.copies = sortedkeys(data.copies)
- end
- for index, unicode in next, indices do -- indextounicode
- local description = hash[index]
- local copies = description.copies
- if copies then
- duplicates[unicode] = copies
- description.copies = nil
- else
- report_otf("copies but no unicode parent %U",unicode)
- end
- end
- -- end
+ end
elseif properties.cidinfo then
report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
else
@@ -914,15 +872,12 @@ actions["check encoding"] = function(data,filename,raw)
end
if mapdata then
- mapdata.map = { } -- clear some memory
- mapdata.backmap = { } -- clear some memory
+ mapdata.map = { } -- clear some memory
end
end
-- for the moment we assume that a font with lookups will not use
--- altuni so we stick to kerns only .. alternatively we can always
--- do an indirect lookup uni_to_uni . but then we need that in
--- all lookups
+-- altuni so we stick to kerns only
actions["add duplicates"] = function(data,filename,raw)
local descriptions = data.descriptions
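
The restored "check encoding" loop walks the font's embedded unicode-to-index map and, for codepoints that have no description of their own, attaches them as altuni entries on the parent glyph and records them in `duplicates`. Below is a condensed, self-contained sketch of that bookkeeping (the "done" de-duplication check is omitted and the sample data is invented).

    -- Condensed sketch of the restored check-encoding bookkeeping.
    local descriptions   = { [0x0041] = { } }      -- only A has a real description
    local indices        = { [36] = 0x0041 }       -- glyph index -> parent unicode
    local unicodetoindex = { [0xFF21] = 36 }       -- fullwidth A maps to the same index
    local duplicates     = { }

    for unicode, index in next, unicodetoindex do
        if not descriptions[unicode] then
            local parent = indices[index]
            if parent then
                local pd     = descriptions[parent]
                local altuni = pd.altuni
                if not altuni then
                    pd.altuni          = { { unicode = unicode } }
                    duplicates[parent] = { unicode }
                else
                    altuni[#altuni+1]  = { unicode = unicode }
                    duplicates[parent][#duplicates[parent]+1] = unicode
                end
            end
        end
    end

    print(duplicates[0x0041][1] == 0xFF21) -- true
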
@@ -933,38 +888,29 @@ actions["add duplicates"] = function(data,filename,raw)
local duplicates = resources.duplicates
for unicode, d in next, duplicates do
- local nofduplicates = #d
- if nofduplicates > 4 then
- if trace_loading then
- report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
- end
- else
- for i=1,nofduplicates do
- local u = d[i]
- if not descriptions[u] then
- local description = descriptions[unicode]
- local n = 0
- for _, description in next, descriptions do
- if kerns then
- local kerns = description.kerns
- for _, k in next, kerns do
- local ku = k[unicode]
- if ku then
- k[u] = ku
- n = n + 1
- end
+ for i=1,#d do
+ local u = d[i]
+ if not descriptions[u] then
+ local description = descriptions[unicode]
+ local duplicate = table.copy(description) -- else packing problem
+ duplicate.comment = format("copy of U+%05X", unicode)
+ descriptions[u] = duplicate
+ local n = 0
+ for _, description in next, descriptions do
+ if kerns then
+ local kerns = description.kerns
+ for _, k in next, kerns do
+ local ku = k[unicode]
+ if ku then
+ k[u] = ku
+ n = n + 1
end
end
- -- todo: lookups etc
- end
- if u > 0 then
- local duplicate = table.copy(description) -- else packing problem
- duplicate.comment = format("copy of U+%05X", unicode)
- descriptions[u] = duplicate
- if trace_loading then
- report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
- end
end
+ -- todo: lookups etc
+ end
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
end
end
end
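
The reinstated body above copies the parent description to every duplicate slot and mirrors any kern pairs that target the parent so they also apply to the copy. The sketch below isolates that kern propagation on plain tables; a small shallow copy stands in for ConTeXt's table.copy, and the codepoints are examples.

    -- Sketch of the reinstated duplicate handling (plain tables only).
    local function shallowcopy(t)
        local r = { }
        for k, v in next, t do r[k] = v end
        return r
    end

    local parent, dup = 0x0041, 0xFF21
    local descriptions = {
        [parent] = { index = 36, kerns = { } },
        [0x0056] = { kerns = { somelookup = { [parent] = -30 } } }, -- V kerns against A
    }
    local duplicates = { [parent] = { dup } }

    for unicode, d in next, duplicates do
        for i = 1, #d do
            local u = d[i]
            if not descriptions[u] then
                local description = descriptions[unicode]
                local duplicate   = shallowcopy(description)
                duplicate.comment = string.format("copy of U+%05X", unicode)
                descriptions[u]   = duplicate
                -- mirror kern pairs that point at the parent so they hit the copy too
                for _, other in next, descriptions do
                    local kerns = other.kerns
                    if kerns then
                        for _, k in next, kerns do
                            local ku = k[unicode]
                            if ku then k[u] = ku end
                        end
                    end
                end
            end
        end
    end

    print(descriptions[0x0056].kerns.somelookup[dup]) -- -30
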
@@ -1772,13 +1718,6 @@ actions["check metadata"] = function(data,filename,raw)
ttftables[i].data = "deleted"
end
end
- --
- if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
- local name = file.nameonly(filename)
- metadata.fontname = "bad-fontname-" .. name
- metadata.fullname = "bad-fullname-" .. name
- end
- --
end
actions["cleanup tables"] = function(data,filename,raw)
@@ -2145,24 +2084,6 @@ local function otftotfm(specification)
local features = specification.features.normal
local rawdata = otf.load(filename,sub,features and features.featurefile)
if rawdata and next(rawdata) then
- local descriptions = rawdata.descriptions
- local duplicates = rawdata.resources.duplicates
- if duplicates then
- local nofduplicates, nofduplicated = 0, 0
- for parent, list in next, duplicates do
- for i=1,#list do
- local unicode = list[i]
- if not descriptions[unicode] then
- descriptions[unicode] = descriptions[parent] -- or copy
- nofduplicated = nofduplicated + 1
- end
- end
- nofduplicates = nofduplicates + #list
- end
- if trace_otf and nofduplicated ~= nofduplicates then
- report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
- end
- end
rawdata.lookuphash = { }
tfmdata = copytotfm(rawdata,cache_id)
if tfmdata and next(tfmdata) then
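
For reference, the block removed above copied duplicate descriptions at tfm-conversion time (descriptions[unicode] = descriptions[parent]) and counted how many copies were actually needed; with the "add duplicates" action restored earlier in this commit, that late sharing step becomes redundant, which is presumably why it is dropped here.
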
diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua
index 75e95749c..c57be5f02 100644
--- a/tex/context/base/font-otn.lua
+++ b/tex/context/base/font-otn.lua
@@ -6,14 +6,8 @@ if not modules then modules = { } end modules ['font-otn'] = {
license = "see context related readme files",
}
--- this is a context version which can contain experimental code, but when we
--- have serious patches we also need to change the other two font-otn files
-
-- preprocessors = { "nodes" }
--- anchor class : mark, mkmk, curs, mklg (todo)
--- anchor type : mark, basechar, baselig, basemark, centry, cexit, max (todo)
-
-- this is still somewhat preliminary and it will get better in due time;
-- much functionality could only be implemented thanks to the husayni font
-- of Idris Samawi Hamid to whom we dedicate this module.
@@ -177,28 +171,12 @@ registertracker("otf.injections","nodes.injections")
registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local insert_node_after = nuts.insert_after
-local delete_node = nuts.delete
-local copy_node = nuts.copy
-local find_node_tail = nuts.tail
-local flush_node_list = nuts.flush_list
-local end_of_math = nuts.end_of_math
+local insert_node_after = node.insert_after
+local delete_node = nodes.delete
+local copy_node = node.copy
+local find_node_tail = node.tail or node.slide
+local flush_node_list = node.flush_list
+local end_of_math = node.end_of_math
local setmetatableindex = table.setmetatableindex
@@ -354,11 +332,11 @@ end
-- and indices.
local function copy_glyph(g) -- next and prev are untouched !
- local components = getfield(g,"components")
+ local components = g.components
if components then
- setfield(g,"components",nil)
+ g.components = nil
local n = copy_node(g)
- setfield(g,"components",components)
+ g.components = components
return n
else
return copy_node(g)
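
copy_glyph above temporarily unhooks the components field before calling copy_node, so the copy does not drag the (shared) component list along, and then reattaches it on the original. A tiny table-based illustration, with a naive deep copy standing in for LuaTeX's node.copy.

    -- Table-based illustration of copy_glyph; deepcopy stands in for node.copy,
    -- which would otherwise also copy the attached component list.
    local function deepcopy(t)
        local r = { }
        for k, v in next, t do
            r[k] = type(v) == "table" and deepcopy(v) or v
        end
        return r
    end

    local function copy_glyph(g)
        local components = g.components
        if components then
            g.components = nil            -- detach so the copy stays bare
            local n = deepcopy(g)
            g.components = components     -- reattach on the original
            return n
        else
            return deepcopy(g)
        end
    end

    local lig  = { char = 0xFB01, components = { { char = 0x66 }, { char = 0x69 } } }
    local bare = copy_glyph(lig)
    print(bare.components == nil, lig.components ~= nil) -- true  true
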
@@ -368,28 +346,28 @@ end
-- start is a mark and we need to keep that one
local function markstoligature(kind,lookupname,head,start,stop,char)
- if start == stop and getchar(start) == char then
+ if start == stop and start.char == char then
return head, start
else
- local prev = getprev(start)
- local next = getnext(stop)
- setfield(start,"prev",nil)
- setfield(stop,"next",nil)
+ local prev = start.prev
+ local next = stop.next
+ start.prev = nil
+ stop.next = nil
local base = copy_glyph(start)
if head == start then
head = base
end
- setfield(base,"char",char)
- setfield(base,"subtype",ligature_code)
- setfield(base,"components",start)
+ base.char = char
+ base.subtype = ligature_code
+ base.components = start
if prev then
- setfield(prev,"next",base)
+ prev.next = base
end
if next then
- setfield(next,"prev",base)
+ next.prev = base
end
- setfield(base,"next",next)
- setfield(base,"prev",prev)
+ base.next = next
+ base.prev = prev
return head, base
end
end
@@ -402,17 +380,17 @@ end
-- third component.
local function getcomponentindex(start)
- if getid(start) ~= glyph_code then
+ if start.id ~= glyph_code then
return 0
- elseif getsubtype(start) == ligature_code then
+ elseif start.subtype == ligature_code then
local i = 0
- local components = getfield(start,"components")
+ local components = start.components
while components do
i = i + getcomponentindex(components)
- components = getnext(components)
+ components = components.next
end
return i
- elseif not marks[getchar(start)] then
+ elseif not marks[start.char] then
return 1
else
return 0
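
getcomponentindex counts how many "real" (non-mark) glyphs a node contributes: ligatures recurse into their components, marks count as zero. A runnable table-based version of the same recursion; the id/subtype constants and the marks set are placeholders.

    -- Table-based version of the recursion above (constants and marks are placeholders).
    local glyph_code, ligature_code = 37, 2
    local marks = { [0x0301] = true } -- combining acute, say

    local function getcomponentindex(start)
        if start.id ~= glyph_code then
            return 0
        elseif start.subtype == ligature_code then
            local i, components = 0, start.components
            while components do
                i = i + getcomponentindex(components)
                components = components.next
            end
            return i
        elseif not marks[start.char] then
            return 1
        else
            return 0
        end
    end

    -- an "ffi"-like ligature made of three base glyphs
    local c3  = { id = glyph_code, subtype = 0, char = 0x69 }
    local c2  = { id = glyph_code, subtype = 0, char = 0x66, next = c3 }
    local c1  = { id = glyph_code, subtype = 0, char = 0x66, next = c2 }
    local lig = { id = glyph_code, subtype = ligature_code, char = 0xFB03, components = c1 }
    print(getcomponentindex(lig)) -- 3
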
@@ -422,29 +400,29 @@ end
-- eventually we will do positioning in another way (needs additional w/h/d fields)
local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
- if start == stop and getchar(start) == char then
- setfield(start,"char",char)
+ if start == stop and start.char == char then
+ start.char = char
return head, start
end
- local prev = getprev(start)
- local next = getnext(stop)
- setfield(start,"prev",nil)
- setfield(stop,"next",nil)
+ local prev = start.prev
+ local next = stop.next
+ start.prev = nil
+ stop.next = nil
local base = copy_glyph(start)
if start == head then
head = base
end
- setfield(base,"char",char)
- setfield(base,"subtype",ligature_code)
- setfield(base,"components",start) -- start can have components
+ base.char = char
+ base.subtype = ligature_code
+ base.components = start -- start can have components
if prev then
- setfield(prev,"next",base)
+ prev.next = base
end
if next then
- setfield(next,"prev",base)
+ next.prev = base
end
- setfield(base,"next",next)
- setfield(base,"prev",prev)
+ base.next = next
+ base.prev = prev
if not discfound then
local deletemarks = markflag ~= "mark"
local components = start
@@ -454,35 +432,35 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
local current = base
-- first we loop over the glyphs in start .. stop
while start do
- local char = getchar(start)
+ local char = start.char
if not marks[char] then
baseindex = baseindex + componentindex
componentindex = getcomponentindex(start)
elseif not deletemarks then -- quite fishy
- setattr(start,a_ligacomp,baseindex + (getattr(start,a_ligacomp) or componentindex))
+ start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp))
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
end
head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
elseif trace_marks then
logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
end
- start = getnext(start)
+ start = start.next
end
-- we can have one accent as part of a lookup and another following
-- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added)
- local start = getnext(current)
- while start and getid(start) == glyph_code do
- local char = getchar(start)
+ local start = current.next
+ while start and start.id == glyph_code do
+ local char = start.char
if marks[char] then
- setattr(start,a_ligacomp,baseindex + (getattr(start,a_ligacomp) or componentindex))
+ start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp))
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
end
else
break
end
- start = getnext(start)
+ start = start.next
end
end
return head, base
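
The core of toligature is a doubly linked list splice: the run start..stop is detached, a copy of start becomes the ligature base with the original run as its components, and the base is re-linked between prev and next. The sketch below performs just that splice on table nodes; mark handling and a_ligacomp indexing are left out.

    -- Just the list splice from toligature, on table nodes (marks/indices omitted).
    local function splice_ligature(head, start, stop, base)
        local prev, nxt = start.prev, stop.next
        start.prev, stop.next = nil, nil      -- detach the run
        base.components = start               -- the run lives on as components
        if prev then prev.next = base end
        if nxt  then nxt.prev  = base end
        base.prev, base.next = prev, nxt
        if head == start then head = base end
        return head, base
    end

    local a = { char = 0x66 }                 -- f
    local b = { char = 0x69, prev = a }       -- i
    a.next = b
    local head = a
    local base = { char = 0xFB01 }            -- fi ligature glyph
    head = splice_ligature(head, a, b, base)
    print(head.char == 0xFB01, head.components.char == 0x66) -- true  true
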
@@ -490,9 +468,9 @@ end
function handlers.gsub_single(head,start,kind,lookupname,replacement)
if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
end
- setfield(start,"char",replacement)
+ start.char = replacement
return head, start, true
end
@@ -519,7 +497,7 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
end
elseif value == 0 then
- return getchar(start), trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
elseif value < 1 then
return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
else
@@ -531,30 +509,30 @@ end
local function multiple_glyphs(head,start,multiple,ignoremarks)
local nofmultiples = #multiple
if nofmultiples > 0 then
- setfield(start,"char",multiple[1])
+ start.char = multiple[1]
if nofmultiples > 1 then
- local sn = getnext(start)
+ local sn = start.next
for k=2,nofmultiples do -- todo: use insert_node
-- untested:
--
--- while ignoremarks and marks[getchar(sn)] then
--- local sn = getnext(sn)
+-- while ignoremarks and marks[sn.char] then
+-- local sn = sn.next
-- end
local n = copy_node(start) -- ignore components
- setfield(n,"char",multiple[k])
- setfield(n,"next",sn)
- setfield(n,"prev",start)
+ n.char = multiple[k]
+ n.next = sn
+ n.prev = start
if sn then
- setfield(sn,"prev",n)
+ sn.prev = n
end
- setfield(start,"next",n)
+ start.next = n
start = n
end
end
return head, start, true
else
if trace_multiples then
- logprocess("no multiple for %s",gref(getchar(start)))
+ logprocess("no multiple for %s",gref(start.char))
end
return head, start, false
end
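
multiple_glyphs implements one-to-many substitution: the first replacement overwrites the start glyph, every further one is a copy of start linked in after it, up to the saved successor. Below is the same insertion on table nodes, with a trivial copy helper instead of node.copy; the decomposition chosen is just an example.

    -- One-to-many substitution on table nodes; copynode stands in for node.copy.
    local function copynode(n) return { char = n.char } end

    local function multiple_glyphs(start, multiple)
        start.char = multiple[1]
        local sn = start.next
        for k = 2, #multiple do
            local n = copynode(start)
            n.char, n.next, n.prev = multiple[k], sn, start
            if sn then sn.prev = n end
            start.next = n
            start = n
        end
        return start
    end

    local tail = { char = 0x21 }                 -- "!"
    local g    = { char = 0x01F3, next = tail }  -- a glyph to be decomposed
    tail.prev  = g
    multiple_glyphs(g, { 0x64, 0x7A })           -- d z
    print(string.char(g.char, g.next.char, g.next.next.char)) -- dz!
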
@@ -565,12 +543,12 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence
local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
if choice then
if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
end
- setfield(start,"char",choice)
+ start.char = choice
else
if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
end
end
return head, start, true
@@ -578,23 +556,23 @@ end
function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
end
return multiple_glyphs(head,start,multiple,sequence.flags[1])
end
function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s, stop, discfound = getnext(start), nil, false
- local startchar = getchar(start)
+ local s, stop, discfound = start.next, nil, false
+ local startchar = start.char
if marks[startchar] then
while s do
- local id = getid(s)
- if id == glyph_code and getfont(s) == currentfont and getsubtype(s)<256 then
- local lg = ligature[getchar(s)]
+ local id = s.id
+ if id == glyph_code and s.font == currentfont and s.subtype<256 then
+ local lg = ligature[s.char]
if lg then
stop = s
ligature = lg
- s = getnext(s)
+ s = s.next
else
break
end
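
gsub_ligature walks forward through the glyph run, descending a nested ligature table: each consumed character selects a deeper table, and a `.ligature` field at the current level names the replacement found so far. A free-standing sketch of that walk over a plain character array; disc nodes and mark skipping are left out and the sample table is invented.

    -- Free-standing sketch of the ligature-tree walk (no discs, no mark skipping).
    -- tree[c1][c2]... with a .ligature field wherever a complete ligature ends.
    local ligatures = {
        [0x66] = {                                    -- f
            [0x66] = { ligature = 0xFB00,             -- ff
                [0x69] = { ligature = 0xFB03 } },     -- ffi
            [0x69] = { ligature = 0xFB01 } },         -- fi
    }

    local function longest_ligature(chars, i)
        local tree = ligatures[chars[i]]
        if not tree then return nil, i end
        local lig, last = tree.ligature, i
        local j = i + 1
        while chars[j] do
            local deeper = tree[chars[j]]
            if not deeper then break end
            tree = deeper
            if tree.ligature then lig, last = tree.ligature, j end
            j = j + 1
        end
        return lig, last
    end

    local word = { 0x66, 0x66, 0x69, 0x78 } -- "ffix"
    print(longest_ligature(word, 1))        -- 64259  3  (ffi, consumed up to index 3)
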
@@ -606,9 +584,9 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
local lig = ligature.ligature
if lig then
if trace_ligatures then
- local stopchar = getchar(stop)
+ local stopchar = stop.char
head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
else
head, start = markstoligature(kind,lookupname,head,start,stop,lig)
end
@@ -620,18 +598,18 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
else
local skipmark = sequence.flags[1]
while s do
- local id = getid(s)
- if id == glyph_code and getsubtype(s)<256 then
- if getfont(s) == currentfont then
- local char = getchar(s)
+ local id = s.id
+ if id == glyph_code and s.subtype<256 then
+ if s.font == currentfont then
+ local char = s.char
if skipmark and marks[char] then
- s = getnext(s)
+ s = s.next
else
local lg = ligature[char]
if lg then
stop = s
ligature = lg
- s = getnext(s)
+ s = s.next
else
break
end
@@ -641,7 +619,7 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
end
elseif id == disc_code then
discfound = true
- s = getnext(s)
+ s = s.next
else
break
end
@@ -650,20 +628,21 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
if lig then
if stop then
if trace_ligatures then
- local stopchar = getchar(stop)
+ local stopchar = stop.char
head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
else
head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
end
+ return head, start, true
else
-- weird but happens (in some arabic font)
- setfield(start,"char",lig)
+ start.char = lig
if trace_ligatures then
logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
end
+ return head, start, true
end
- return head, start, true
else
-- weird but happens
end
@@ -677,16 +656,16 @@ we need to explicitly test for basechar, baselig and basemark entries.</p>
--ldx]]--
function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
- local base = getprev(start) -- [glyph] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
+ local base = start.prev -- [glyph] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
if marks[basechar] then
while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
if not marks[basechar] then
break
end
@@ -738,16 +717,16 @@ end
function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
-- check chainpos variant
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
- local base = getprev(start) -- [glyph] [optional marks] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
+ local base = start.prev -- [glyph] [optional marks] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
if marks[basechar] then
while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
if not marks[basechar] then
break
end
@@ -759,7 +738,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
end
end
- local index = getattr(start,a_ligacomp)
+ local index = start[a_ligacomp]
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -806,22 +785,22 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
- local base = getprev(start) -- [glyph] [basemark] [start=mark]
- local slc = getattr(start,a_ligacomp)
+ local base = start.prev -- [glyph] [basemark] [start=mark]
+ local slc = start[a_ligacomp]
if slc then -- a rather messy loop ... needs checking with husayni
while base do
- local blc = getattr(base,a_ligacomp)
+ local blc = base[a_ligacomp]
if blc and blc ~= slc then
- base = getprev(base)
+ base = base.prev
else
break
end
end
end
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
- local basechar = getchar(base)
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
+ local basechar = base.char
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -861,21 +840,21 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
end
function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and getattr(start,a_cursbase)
+ local alreadydone = cursonce and start[a_cursbase]
if not alreadydone then
local done = false
- local startchar = getchar(start)
+ local startchar = start.char
if marks[startchar] then
if trace_cursive then
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt = getnext(start)
- while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
- local nextchar = getchar(nxt)
+ local nxt = start.next
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
+ local nextchar = nxt.char
if marks[nextchar] then
-- should not happen (maybe warning)
- nxt = getnext(nxt)
+ nxt = nxt.next
else
local entryanchors = descriptions[nextchar]
if entryanchors then
@@ -910,14 +889,14 @@ function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
return head, start, done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
end
return head, start, false
end
end
function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
- local startchar = getchar(start)
+ local startchar = start.char
local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
@@ -928,19 +907,19 @@ end
function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
-- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
-- todo: kerns in components of ligatures
- local snext = getnext(start)
+ local snext = start.next
if not snext then
return head, start, false
else
local prev, done = start, false
local factor = tfmdata.parameters.factor
local lookuptype = lookuptypes[lookupname]
- while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
- local nextchar = getchar(snext)
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
+ local nextchar = snext.char
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
prev = snext
- snext = getnext(snext)
+ snext = snext.next
else
if not krn then
-- skip
@@ -948,14 +927,14 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
if lookuptype == "pair" then -- probably not needed
local a, b = krn[2], krn[3]
if a and #a > 0 then
- local startchar = getchar(start)
+ local startchar = start.char
local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
- local startchar = getchar(start)
+ local startchar = start.char
local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
@@ -967,7 +946,7 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
-- if a and a ~= 0 then
-- local k = setkern(snext,factor,rlmode,a)
-- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
-- end
-- end
-- if b and b ~= 0 then
@@ -978,7 +957,7 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
elseif krn ~= 0 then
local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
end
done = true
end
@@ -1033,13 +1012,13 @@ end
-- itself. It is meant mostly for dealing with Urdu.
function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = getchar(start)
+ local char = start.char
local replacement = replacements[char]
if replacement then
if trace_singles then
logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
end
- setfield(start,"char",replacement)
+ start.char = replacement
return head, start, true
else
return head, start, false
@@ -1068,9 +1047,9 @@ as less as needed but that would also make the code even more messy.</p>
-- -- done
-- elseif ignoremarks then
-- repeat -- start x x m x x stop => start m
--- local next = getnext(start)
--- if not marks[getchar(next)] then
--- local components = getfield(next,"components")
+-- local next = start.next
+-- if not marks[next.char] then
+-- local components = next.components
-- if components then -- probably not needed
-- flush_node_list(components)
-- end
@@ -1080,8 +1059,8 @@ as less as needed but that would also make the code even more messy.</p>
-- until next == stop
-- else -- start x x x stop => start
-- repeat
--- local next = getnext(start)
--- local components = getfield(next,"components")
+-- local next = start.next
+-- local components = next.components
-- if components then -- probably not needed
-- flush_node_list(components)
-- end
@@ -1105,8 +1084,8 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
end
while current do
- if getid(current) == glyph_code then
- local currentchar = getchar(current)
+ if current.id == glyph_code then
+ local currentchar = current.char
local lookupname = subtables[1] -- only 1
local replacement = lookuphash[lookupname]
if not replacement then
@@ -1123,14 +1102,14 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
if trace_singles then
logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
end
- setfield(current,"char",replacement)
+ current.char = replacement
end
end
return head, start, true
elseif current == stop then
break
else
- current = getnext(current)
+ current = current.next
end
end
return head, start, false
@@ -1145,7 +1124,7 @@ the match.</p>
function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
-- local head, n = delete_till_stop(head,start,stop)
- local startchar = getchar(start)
+ local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local replacements = lookuphash[lookupname]
@@ -1188,8 +1167,8 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
local subtables = currentlookup.subtables
local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
while current do
- if getid(current) == glyph_code then -- is this check needed?
- local currentchar = getchar(current)
+ if current.id == glyph_code then -- is this check needed?
+ local currentchar = current.char
local lookupname = subtables[1]
local alternatives = lookuphash[lookupname]
if not alternatives then
@@ -1204,7 +1183,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
end
- setfield(start,"char",choice)
+ start.char = choice
else
if trace_alternatives then
logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
@@ -1218,7 +1197,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
elseif current == stop then
break
else
- current = getnext(current)
+ current = current.next
end
end
return head, start, false
@@ -1233,7 +1212,7 @@ assume rather stupid ligatures (no complex disc nodes).</p>
--ldx]]--
function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = getchar(start)
+ local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local ligatures = lookuphash[lookupname]
@@ -1248,20 +1227,20 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
end
else
- local s = getnext(start)
+ local s = start.next
local discfound = false
local last = stop
local nofreplacements = 0
local skipmark = currentlookup.flags[1]
while s do
- local id = getid(s)
+ local id = s.id
if id == disc_code then
- s = getnext(s)
+ s = s.next
discfound = true
else
- local schar = getchar(s)
+ local schar = s.char
if skipmark and marks[schar] then -- marks
- s = getnext(s)
+ s = s.next
else
local lg = ligatures[schar]
if lg then
@@ -1269,7 +1248,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if s == stop then
break
else
- s = getnext(s)
+ s = s.next
end
else
break
@@ -1286,7 +1265,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start == stop then
logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
end
end
head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
@@ -1295,7 +1274,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start == stop then
logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
end
end
end
@@ -1306,7 +1285,7 @@ end
chainmores.gsub_ligature = chainprocs.gsub_ligature
function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1315,14 +1294,14 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
markanchors = markanchors[markchar]
end
if markanchors then
- local base = getprev(start) -- [glyph] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
+ local base = start.prev -- [glyph] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
if marks[basechar] then
while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
if not marks[basechar] then
break
end
@@ -1370,7 +1349,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
end
function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1379,14 +1358,14 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
markanchors = markanchors[markchar]
end
if markanchors then
- local base = getprev(start) -- [glyph] [optional marks] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
+ local base = start.prev -- [glyph] [optional marks] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
if marks[basechar] then
while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
if not marks[basechar] then
break
end
@@ -1399,7 +1378,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
end
-- todo: like marks a ligatures hash
- local index = getattr(start,a_ligacomp)
+ local index = start[a_ligacomp]
local baseanchors = descriptions[basechar].anchors
if baseanchors then
local baseanchors = baseanchors['baselig']
@@ -1439,9 +1418,9 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
+ local markchar = start.char
if marks[markchar] then
- -- local alreadydone = markonce and getattr(start,a_markmark)
+ -- local alreadydone = markonce and start[a_markmark]
-- if not alreadydone then
-- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
local subtables = currentlookup.subtables
@@ -1451,20 +1430,20 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
markanchors = markanchors[markchar]
end
if markanchors then
- local base = getprev(start) -- [glyph] [basemark] [start=mark]
- local slc = getattr(start,a_ligacomp)
+ local base = start.prev -- [glyph] [basemark] [start=mark]
+ local slc = start[a_ligacomp]
if slc then -- a rather messy loop ... needs checking with husayni
while base do
- local blc = getattr(base,a_ligacomp)
+ local blc = base[a_ligacomp]
if blc and blc ~= slc then
- base = getprev(base)
+ base = base.prev
else
break
end
end
end
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
- local basechar = getchar(base)
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
+ local basechar = base.char
local baseanchors = descriptions[basechar].anchors
if baseanchors then
baseanchors = baseanchors['basemark']
@@ -1504,9 +1483,9 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
end
function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and getattr(start,a_cursbase)
+ local alreadydone = cursonce and start[a_cursbase]
if not alreadydone then
- local startchar = getchar(start)
+ local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local exitanchors = lookuphash[lookupname]
@@ -1520,12 +1499,12 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt = getnext(start)
- while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
- local nextchar = getchar(nxt)
+ local nxt = start.next
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
+ local nextchar = nxt.char
if marks[nextchar] then
-- should not happen (maybe warning)
- nxt = getnext(nxt)
+ nxt = nxt.next
else
local entryanchors = descriptions[nextchar]
if entryanchors then
@@ -1560,7 +1539,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
return head, start, done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
end
return head, start, false
end
@@ -1570,7 +1549,7 @@ end
function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
-- untested .. needs checking for the new model
- local startchar = getchar(start)
+ local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local kerns = lookuphash[lookupname]
@@ -1591,9 +1570,9 @@ chainmores.gpos_single = chainprocs.gpos_single -- okay?
-- when machines become faster i will make a shared function
function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local snext = getnext(start)
+ local snext = start.next
if snext then
- local startchar = getchar(start)
+ local startchar = start.char
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local kerns = lookuphash[lookupname]
@@ -1603,12 +1582,12 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
local lookuptype = lookuptypes[lookupname]
local prev, done = start, false
local factor = tfmdata.parameters.factor
- while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
- local nextchar = getchar(snext)
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
+ local nextchar = snext.char
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
prev = snext
- snext = getnext(snext)
+ snext = snext.next
else
if not krn then
-- skip
@@ -1616,14 +1595,14 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
if lookuptype == "pair" then
local a, b = krn[2], krn[3]
if a and #a > 0 then
- local startchar = getchar(start)
+ local startchar = start.char
local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
- local startchar = getchar(start)
+ local startchar = start.char
local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
@@ -1635,7 +1614,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
if a and a ~= 0 then
local k = setkern(snext,factor,rlmode,a)
if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
end
end
if b and b ~= 0 then
@@ -1646,7 +1625,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
elseif krn ~= 0 then
local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
end
done = true
end
@@ -1678,12 +1657,6 @@ local function show_skip(kind,chainname,char,ck,class)
end
end
-local quit_on_no_replacement = true
-
-directives.register("otf.chain.quitonnoreplacement",function(value) -- maybe per font
- quit_on_no_replacement = value
-end)
-
local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
-- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
local flags = sequence.flags
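
The hunk above drops the otf.chain.quitonnoreplacement directive and hard-wires done = true again when a matched rule has no replacement. For readers unfamiliar with that pattern, here is a minimal registry-plus-flag sketch in plain Lua; it only mimics the shape of the removed code and is not ConTeXt's actual directives module.

    -- Shape of the removed toggle, mimicked with a tiny registry.
    local registry = { }
    local function register(name, action) registry[name] = action end
    local function set(name, value)
        local action = registry[name]
        if action then action(value) end
    end

    local quit_on_no_replacement = true
    register("otf.chain.quitonnoreplacement", function(value)
        quit_on_no_replacement = value
    end)

    set("otf.chain.quitonnoreplacement", false)
    -- a matched context rule without replacement would now report done = false
    print(quit_on_no_replacement) -- false
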
@@ -1704,7 +1677,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- f..l = mid string
if s == 1 then
-- never happens
- match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
+ match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
else
-- maybe we need a better space check (maybe check for glue or category or combination)
-- we cannot optimize for n=2 because there can be disc nodes
@@ -1719,13 +1692,13 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- match = true
else
local n = f + 1
- last = getnext(last)
+ last = last.next
while n <= l do
if last then
- local id = getid(last)
+ local id = last.id
if id == glyph_code then
- if getfont(last) == currentfont and getsubtype(last)<256 then
- local char = getchar(last)
+ if last.font == currentfont and last.subtype<256 then
+ local char = last.char
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1734,10 +1707,10 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if trace_skips then
show_skip(kind,chainname,char,ck,class)
end
- last = getnext(last)
+ last = last.next
elseif seq[n][char] then
if n < l then
- last = getnext(last)
+ last = last.next
end
n = n + 1
else
@@ -1753,7 +1726,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
elseif id == disc_code then
- last = getnext(last)
+ last = last.next
else
match = false
break
@@ -1767,15 +1740,15 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
-- before
if match and f > 1 then
- local prev = getprev(start)
+ local prev = start.prev
if prev then
local n = f-1
while n >= 1 do
if prev then
- local id = getid(prev)
+ local id = prev.id
if id == glyph_code then
- if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char
- local char = getchar(prev)
+ if prev.font == currentfont and prev.subtype<256 then -- normal char
+ local char = prev.char
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1806,7 +1779,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
match = false
break
end
- prev = getprev(prev)
+ prev = prev.prev
elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
n = n -1
else
@@ -1827,16 +1800,16 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
-- after
if match and s > l then
- local current = last and getnext(last)
+ local current = last and last.next
if current then
-- removed optimization for s-l == 1, we have to deal with marks anyway
local n = l + 1
while n <= s do
if current then
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char
- local char = getchar(current)
+ if current.font == currentfont and current.subtype<256 then -- normal char
+ local char = current.char
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1867,7 +1840,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
match = false
break
end
- current = getnext(current)
+ current = current.next
elseif seq[n][32] then
n = n + 1
else
@@ -1891,7 +1864,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- ck == currentcontext
if trace_contexts then
local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = getchar(start)
+ local char = start.char
if ck[9] then
logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
@@ -1926,12 +1899,12 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
repeat
if skipped then
while true do
- local char = getchar(start)
+ local char = start.char
local ccd = descriptions[char]
if ccd then
local class = ccd.class
if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = getnext(start)
+ start = start.next
else
break
end
@@ -1965,7 +1938,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
if start then
- start = getnext(start)
+ start = start.next
else
-- weird
end
@@ -1976,7 +1949,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if replacements then
head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
else
- done = quit_on_no_replacement -- can be meant to be skipped / quite inconsistent in fonts
+ done = true -- can be meant to be skipped
if trace_contexts then
logprocess("%s: skipping match",cref(kind,chainname))
end
@@ -2126,12 +2099,12 @@ end
-- if ok then
-- done = true
-- end
--- if start then start = getnext(start) end
+-- if start then start = start.next end
-- else
--- start = getnext(start)
+-- start = start.next
-- end
-- else
--- start = getnext(start)
+-- start = start.next
-- end
-- there will be a new direction parser (pre-parsed etc)
@@ -2153,8 +2126,6 @@ local function featuresprocessor(head,font,attr)
return head, false
end
- head = tonut(head)
-
if trace_steps then
checkstep(head)
end
@@ -2186,8 +2157,6 @@ local function featuresprocessor(head,font,attr)
-- Keeping track of the headnode is needed for devanagari (I generalized it a bit
-- so that multiple cases are also covered.)
- -- todo: retain prev
-
for s=1,#datasets do
local dataset = datasets[s]
featurevalue = dataset[1] -- todo: pass to function instead of using a global
@@ -2206,10 +2175,10 @@ local function featuresprocessor(head,font,attr)
-- we need to get rid of this slide! probably no longer needed in latest luatex
local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
a = a == attr
else
@@ -2220,7 +2189,7 @@ local function featuresprocessor(head,font,attr)
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start.char]
if lookupmatch then
head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
if success then
@@ -2231,15 +2200,15 @@ local function featuresprocessor(head,font,attr)
report_missing_cache(typ,lookupname)
end
end
- if start then start = getprev(start) end
+ if start then start = start.prev end
else
- start = getprev(start)
+ start = start.prev
end
else
- start = getprev(start)
+ start = start.prev
end
else
- start = getprev(start)
+ start = start.prev
end
end
else
@@ -2259,16 +2228,16 @@ local function featuresprocessor(head,font,attr)
local head = start
local done = false
while start do
- local id = getid(start)
- if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
+ local id = start.id
+ if id == glyph_code and start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
- a = (a == attr) and (not attribute or getattr(start,a_state) == attribute)
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
else
- a = not attribute or getattr(start,a_state) == attribute
+ a = not attribute or start[a_state] == attribute
end
if a then
- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start.char]
if lookupmatch then
-- sequence can be removed
local ok
@@ -2277,12 +2246,12 @@ local function featuresprocessor(head,font,attr)
done = true
end
end
- if start then start = getnext(start) end
+ if start then start = start.next end
else
- start = getnext(start)
+ start = start.next
end
else
- start = getnext(start)
+ start = start.next
end
end
if done then
@@ -2292,19 +2261,19 @@ local function featuresprocessor(head,font,attr)
end
local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = getprev(disc)
- local next = getnext(disc)
+ local prev = disc.prev
+ local next = disc.next
if prev and next then
- setfield(prev,"next",next)
- -- setfield(next,"prev",prev)
- local a = getattr(prev,0)
+ prev.next = next
+ -- next.prev = prev
+ local a = prev[0]
if a then
- a = (a == attr) and (not attribute or getattr(prev,a_state) == attribute)
+ a = (a == attr) and (not attribute or prev[a_state] == attribute)
else
- a = not attribute or getattr(prev,a_state) == attribute
+ a = not attribute or prev[a_state] == attribute
end
if a then
- local lookupmatch = lookupcache[getchar(prev)]
+ local lookupmatch = lookupcache[prev.char]
if lookupmatch then
-- sequence can be removed
local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
@@ -2314,24 +2283,24 @@ local function featuresprocessor(head,font,attr)
end
end
end
- setfield(prev,"next",disc)
- -- setfield(next,"prev",disc)
+ prev.next = disc
+ -- next.prev = disc
end
return next
end
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
- a = (a == attr) and (not attribute or getattr(start,a_state) == attribute)
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
else
- a = not attribute or getattr(start,a_state) == attribute
+ a = not attribute or start[a_state] == attribute
end
if a then
- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start.char]
if lookupmatch then
-- sequence can be removed
local ok
@@ -2340,39 +2309,39 @@ local function featuresprocessor(head,font,attr)
success = true
end
end
- if start then start = getnext(start) end
+ if start then start = start.next end
else
- start = getnext(start)
+ start = start.next
end
else
- start = getnext(start)
+ start = start.next
end
elseif id == disc_code then
-- mostly for gsub
- if getsubtype(start) == discretionary_code then
- local pre = getfield(start,"pre")
+ if start.subtype == discretionary_code then
+ local pre = start.pre
if pre then
local new = subrun(pre)
- if new then setfield(start,"pre",new) end
+ if new then start.pre = new end
end
- local post = getfield(start,"post")
+ local post = start.post
if post then
local new = subrun(post)
- if new then setfield(start,"post",new) end
+ if new then start.post = new end
end
- local replace = getfield(start,"replace")
+ local replace = start.replace
if replace then
local new = subrun(replace)
- if new then setfield(start,"replace",new) end
+ if new then start.replace = new end
end
elseif typ == "gpos_single" or typ == "gpos_pair" then
kerndisc(start)
end
- start = getnext(start)
+ start = start.next
elseif id == whatsit_code then -- will be function
- local subtype = getsubtype(start)
+ local subtype = start.subtype
if subtype == dir_code then
- local dir = getfield(start,"dir")
+ local dir = start.dir
if dir == "+TRT" or dir == "+TLT" then
topstack = topstack + 1
dirstack[topstack] = dir
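
The whatsit branch above maintains a small stack of text directions: a "+TRT"/"+TLT" dir node pushes, and the matching pop (handled a few lines further on, outside this hunk) restores the mode below it or falls back to the paragraph mode. A compact, runnable model of that stack; the rlmode encoding (-1 right-to-left, 1 left-to-right) follows the surrounding code, the push_dir/pop_dir names and the simplified pop logic are my own.

    -- Compact model of the dir stack handling in the whatsit branch (simplified).
    local dirstack, topstack = { }, 0
    local rlparmode, rlmode  = 0, 0

    local function push_dir(dir)                 -- "+TRT" or "+TLT"
        topstack = topstack + 1
        dirstack[topstack] = dir
        rlmode = dir == "+TRT" and -1 or 1
    end

    local function pop_dir()                     -- "-TRT" or "-TLT"
        topstack = topstack - 1
        local below = dirstack[topstack]
        rlmode = below == "+TRT" and -1 or below == "+TLT" and 1 or rlparmode
    end

    push_dir("+TRT")  print(rlmode) -- -1
    push_dir("+TLT")  print(rlmode) --  1
    pop_dir()         print(rlmode) -- -1
    pop_dir()         print(rlmode) --  0  (back to paragraph mode)
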
@@ -2391,7 +2360,7 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype == localpar_code then
- local dir = getfield(start,"dir")
+ local dir = start.dir
if dir == "TRT" then
rlparmode = -1
elseif dir == "TLT" then
@@ -2405,11 +2374,11 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
- start = getnext(start)
+ start = start.next
elseif id == math_code then
- start = getnext(end_of_math(start))
+ start = end_of_math(start).next
else
- start = getnext(start)
+ start = start.next
end
end
end
@@ -2420,20 +2389,20 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
local head = start
local done = false
while start do
- local id = getid(start)
- if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
+ local id = start.id
+ if id == glyph_code and start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
- a = (a == attr) and (not attribute or getattr(start,a_state) == attribute)
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
else
- a = not attribute or getattr(start,a_state) == attribute
+ a = not attribute or start[a_state] == attribute
end
if a then
for i=1,ns do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start.char]
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local ok
@@ -2450,12 +2419,12 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_missing_cache(typ,lookupname)
end
end
- if start then start = getnext(start) end
+ if start then start = start.next end
else
- start = getnext(start)
+ start = start.next
end
else
- start = getnext(start)
+ start = start.next
end
end
if done then
@@ -2465,23 +2434,23 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
end
local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = getprev(disc)
- local next = getnext(disc)
+ local prev = disc.prev
+ local next = disc.next
if prev and next then
- setfield(prev,"next",next)
- -- setfield(next,"prev",prev)
- local a = getattr(prev,0)
+ prev.next = next
+ -- next.prev = prev
+ local a = prev[0]
if a then
- a = (a == attr) and (not attribute or getattr(prev,a_state) == attribute)
+ a = (a == attr) and (not attribute or prev[a_state] == attribute)
else
- a = not attribute or getattr(prev,a_state) == attribute
+ a = not attribute or prev[a_state] == attribute
end
if a then
for i=1,ns do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[getchar(prev)]
+ local lookupmatch = lookupcache[prev.char]
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
@@ -2495,28 +2464,28 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
end
end
end
- setfield(prev,"next",disc)
- -- setfield(next,"prev",disc)
+ prev.next = disc
+ -- next.prev = disc
end
return next
end
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
if a then
- a = (a == attr) and (not attribute or getattr(start,a_state) == attribute)
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
else
- a = not attribute or getattr(start,a_state) == attribute
+ a = not attribute or start[a_state] == attribute
end
if a then
for i=1,ns do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start.char]
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local ok
@@ -2533,39 +2502,39 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_missing_cache(typ,lookupname)
end
end
- if start then start = getnext(start) end
+ if start then start = start.next end
else
- start = getnext(start)
+ start = start.next
end
else
- start = getnext(start)
+ start = start.next
end
elseif id == disc_code then
-- mostly for gsub
- if getsubtype(start) == discretionary_code then
- local pre = getfield(start,"pre")
+ if start.subtype == discretionary_code then
+ local pre = start.pre
if pre then
local new = subrun(pre)
- if new then setfield(start,"pre",new) end
+ if new then start.pre = new end
end
- local post = getfield(start,"post")
+ local post = start.post
if post then
local new = subrun(post)
- if new then setfield(start,"post",new) end
+ if new then start.post = new end
end
- local replace = getfield(start,"replace")
+ local replace = start.replace
if replace then
local new = subrun(replace)
- if new then setfield(start,"replace",new) end
+ if new then start.replace = new end
end
elseif typ == "gpos_single" or typ == "gpos_pair" then
kerndisc(start)
end
- start = getnext(start)
+ start = start.next
elseif id == whatsit_code then
- local subtype = getsubtype(start)
+ local subtype = start.subtype
if subtype == dir_code then
- local dir = getfield(start,"dir")
+ local dir = start.dir
if dir == "+TRT" or dir == "+TLT" then
topstack = topstack + 1
dirstack[topstack] = dir
@@ -2584,7 +2553,7 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype == localpar_code then
- local dir = getfield(start,"dir")
+ local dir = start.dir
if dir == "TRT" then
rlparmode = -1
elseif dir == "TLT" then
@@ -2597,11 +2566,11 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
- start = getnext(start)
+ start = start.next
elseif id == math_code then
- start = getnext(end_of_math(start))
+ start = end_of_math(start).next
else
- start = getnext(start)
+ start = start.next
end
end
end
@@ -2613,9 +2582,6 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
registerstep(head)
end
end
-
- head = tonode(head)
-
return head, done
end
diff --git a/tex/context/base/font-ott.lua b/tex/context/base/font-ott.lua
index 1e2309056..e3aacd0d1 100644
--- a/tex/context/base/font-ott.lua
+++ b/tex/context/base/font-ott.lua
@@ -42,7 +42,6 @@ local scripts = allocate {
['cprt'] = 'cypriot syllabary',
['cyrl'] = 'cyrillic',
['deva'] = 'devanagari',
- ['dev2'] = 'devanagari variant 2',
['dsrt'] = 'deseret',
['ethi'] = 'ethiopic',
['geor'] = 'georgian',
@@ -68,7 +67,6 @@ local scripts = allocate {
['linb'] = 'linear b',
['math'] = 'mathematical alphanumeric symbols',
['mlym'] = 'malayalam',
- ['mlm2'] = 'malayalam variant 2',
['mong'] = 'mongolian',
['musc'] = 'musical symbols',
['mymr'] = 'myanmar',
@@ -633,7 +631,6 @@ local features = allocate {
['js..'] = 'justification ..',
["dv.."] = "devanagari ..",
- ["ml.."] = "malayalam ..",
}
local baselines = allocate {
diff --git a/tex/context/base/font-otx.lua b/tex/context/base/font-otx.lua
index b7d2ae0bc..f39045223 100644
--- a/tex/context/base/font-otx.lua
+++ b/tex/context/base/font-otx.lua
@@ -30,29 +30,15 @@ analyzers.methods = methods
local a_state = attributes.private('state')
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local setattr = nuts.setattr
-
-local traverse_id = nuts.traverse_id
-local traverse_node_list = nuts.traverse
-local end_of_math = nuts.end_of_math
-
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local math_code = nodecodes.math
+local traverse_id = node.traverse_id
+local traverse_node_list = node.traverse
+local end_of_math = node.end_of_math
+
local fontdata = fonts.hashes.identifiers
local categories = characters and characters.categories or { } -- sorry, only in context
local chardata = characters and characters.data
@@ -109,61 +95,60 @@ analyzers.useunicodemarks = false
-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
-function analyzers.setstate(head,font) -- we can skip math
+function analyzers.setstate(head,font)
local useunicodemarks = analyzers.useunicodemarks
local tfmdata = fontdata[font]
local descriptions = tfmdata.descriptions
local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
- current = tonut(current)
while current do
- local id = getid(current)
- if id == glyph_code and getfont(current) == font then
+ local id = current.id
+ if id == glyph_code and current.font == font then
done = true
- local char = getchar(current)
+ local char = current.char
local d = descriptions[char]
if d then
if d.class == "mark" then
done = true
- setattr(current,a_state,s_mark)
+ current[a_state] = s_mark
elseif useunicodemarks and categories[char] == "mn" then
done = true
- setattr(current,a_state,s_mark)
+ current[a_state] = s_mark
elseif n == 0 then
first, last, n = current, current, 1
- setattr(current,a_state,s_init)
+ current[a_state] = s_init
else
last, n = current, n+1
- setattr(current,a_state,s_medi)
+ current[a_state] = s_medi
end
else -- finish
if first and first == last then
- setattr(last,a_state,s_isol)
+ last[a_state] = s_isol
elseif last then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
end
first, last, n = nil, nil, 0
end
elseif id == disc_code then
-- always in the middle
- setattr(current,a_state,s_medi)
+ current[a_state] = s_medi
last = current
else -- finish
if first and first == last then
- setattr(last,a_state,s_isol)
+ last[a_state] = s_isol
elseif last then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
end
first, last, n = nil, nil, 0
if id == math_code then
current = end_of_math(current)
end
end
- current = getnext(current)
+ current = current.next
end
if first and first == last then
- setattr(last,a_state,s_isol)
+ last[a_state] = s_isol
elseif last then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
end
return head, done
end
@@ -224,7 +209,7 @@ methods.latn = analyzers.setstate
local arab_warned = { }
local function warning(current,what)
- local char = getchar(current)
+ local char = current.char
if not arab_warned[char] then
log.report("analyze","arab: character %C has no %a class",char,what)
arab_warned[char] = true
@@ -276,95 +261,94 @@ function methods.arab(head,font,attr)
local first, last = nil, nil
local c_first, c_last = nil, nil
local current, done = head, false
- current = tonut(current)
while current do
- local id = getid(current)
- if id == glyph_code and getfont(current) == font and getsubtype(current)<256 and not getattr(current,a_state) then
+ local id = current.id
+ if id == glyph_code and current.font == font and current.subtype<256 and not current[a_state] then
done = true
- local char = getchar(current)
+ local char = current.char
local classifier = classifiers[char]
if not classifier then
if last then
if c_last == s_medi or c_last == s_fina then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
else
warning(last,"fina")
- setattr(last,a_state,s_error)
+ last[a_state] = s_error
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- setattr(first,a_state,s_isol)
+ first[a_state] = s_isol
else
warning(first,"isol")
- setattr(first,a_state,s_error)
+ first[a_state] = s_error
end
first = nil
end
elseif classifier == s_mark then
- setattr(current,a_state,s_mark)
+ current[a_state] = s_mark
elseif classifier == s_isol then
if last then
if c_last == s_medi or c_last == s_fina then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
else
warning(last,"fina")
- setattr(last,a_state,s_error)
+ last[a_state] = s_error
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- setattr(first,a_state,s_isol)
+ first[a_state] = s_isol
else
warning(first,"isol")
- setattr(first,a_state,s_error)
+ first[a_state] = s_error
end
first = nil
end
- setattr(current,a_state,s_isol)
+ current[a_state] = s_isol
elseif classifier == s_medi then
if first then
last = current
c_last = classifier
- setattr(current,a_state,s_medi)
+ current[a_state] = s_medi
else
- setattr(current,a_state,s_init)
+ current[a_state] = s_init
first = current
c_first = classifier
end
elseif classifier == s_fina then
if last then
- if getattr(last,a_state) ~= s_init then
- setattr(last,a_state,s_medi)
+ if last[a_state] ~= s_init then
+ last[a_state] = s_medi
end
- setattr(current,a_state,s_fina)
+ current[a_state] = s_fina
first, last = nil, nil
elseif first then
- -- if getattr(first,a_state) ~= s_init then
+ -- if first[a_state] ~= s_init then
-- -- needs checking
- -- setattr(first,a_state,s_medi)
+ -- first[a_state] = s_medi
-- end
- setattr(current,a_state,s_fina)
+ current[a_state] = s_fina
first = nil
else
- setattr(current,a_state,s_isol)
+ current[a_state] = s_isol
end
else -- classifier == s_rest
- setattr(current,a_state,s_rest)
+ current[a_state] = s_rest
if last then
if c_last == s_medi or c_last == s_fina then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
else
warning(last,"fina")
- setattr(last,a_state,s_error)
+ last[a_state] = s_error
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- setattr(first,a_state,s_isol)
+ first[a_state] = s_isol
else
warning(first,"isol")
- setattr(first,a_state,s_error)
+ first[a_state] = s_error
end
first = nil
end
@@ -372,18 +356,18 @@ function methods.arab(head,font,attr)
else
if last then
if c_last == s_medi or c_last == s_fina then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
else
warning(last,"fina")
- setattr(last,a_state,s_error)
+ last[a_state] = s_error
end
first, last = nil, nil
elseif first then
if c_first == s_medi or c_first == s_fina then
- setattr(first,a_state,s_isol)
+ first[a_state] = s_isol
else
warning(first,"isol")
- setattr(first,a_state,s_error)
+ first[a_state] = s_error
end
first = nil
end
@@ -391,21 +375,21 @@ function methods.arab(head,font,attr)
current = end_of_math(current)
end
end
- current = getnext(current)
+ current = current.next
end
if last then
if c_last == s_medi or c_last == s_fina then
- setattr(last,a_state,s_fina)
+ last[a_state] = s_fina
else
warning(last,"fina")
- setattr(last,a_state,s_error)
+ last[a_state] = s_error
end
elseif first then
if c_first == s_medi or c_first == s_fina then
- setattr(first,a_state,s_isol)
+ first[a_state] = s_isol
else
warning(first,"isol")
- setattr(first,a_state,s_error)
+ first[a_state] = s_error
end
end
return head, done
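
A note on the font-otx.lua hunks above: analyzers.setstate walks the glyph list and tags each joining run with init/medi/fina states, falling back to isol for runs of length one. The following standalone Lua sketch is illustrative only: it works on a plain array instead of a node list, assumes an isjoiner predicate, and leaves marks and discretionaries out.

local s_init, s_medi, s_fina, s_isol = 1, 2, 3, 4

local function classify(glyphs, isjoiner)
    local first, last, n = nil, nil, 0
    local function finish()                  -- close the current run
        if first and first == last then
            last.state = s_isol              -- run of exactly one glyph
        elseif last then
            last.state = s_fina
        end
        first, last, n = nil, nil, 0
    end
    for i=1,#glyphs do
        local g = glyphs[i]
        if isjoiner(g.char) then
            if n == 0 then
                first, last, n = g, g, 1
                g.state = s_init
            else
                last, n = g, n + 1
                g.state = s_medi
            end
        else
            finish()
        end
    end
    finish()                                 -- flush a trailing run
    return glyphs
end
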
diff --git a/tex/context/base/font-pat.lua b/tex/context/base/font-pat.lua
index 049853796..9733c9ada 100644
--- a/tex/context/base/font-pat.lua
+++ b/tex/context/base/font-pat.lua
@@ -38,7 +38,7 @@ local report = patches.report
-- library) the palatino arabic fonts don't have the mkmk features properly
-- set up.
-register("after","rehash features","^palatino.*arabic", function (data,filename)
+register("after","rehash features","^palatino.*arabic", function(data,filename)
local gpos = data.gpos
if gpos then
for k=1,#gpos do
diff --git a/tex/context/base/font-pre.mkiv b/tex/context/base/font-pre.mkiv
index fc6eb289e..c404771fd 100644
--- a/tex/context/base/font-pre.mkiv
+++ b/tex/context/base/font-pre.mkiv
@@ -170,24 +170,6 @@
kern=yes]
\definefontfeature
- [malayalam-one]
- [mode=node,
- language=dflt,
- script=mlym,
- akhn=yes,
- blwf=yes,
- half=yes,
- pres=yes,
- blws=yes,
- psts=yes,
- haln=no]
-
-\definefontfeature
- [malayalam-two]
- [malayalam-one]
- [script=mlm2]
-
-\definefontfeature
[jamoforms]
[ljmo=yes,
tjmo=yes,
diff --git a/tex/context/base/font-sol.lua b/tex/context/base/font-sol.lua
index a41e4a679..9ccfd0588 100644
--- a/tex/context/base/font-sol.lua
+++ b/tex/context/base/font-sol.lua
@@ -48,41 +48,19 @@ local v_split = variables.split
local settings_to_array = utilities.parsers.settings_to_array
local settings_to_hash = utilities.parsers.settings_to_hash
-local tasks = nodes.tasks
-
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getlist = nuts.getlist
-
-local find_node_tail = nuts.tail
-local free_node = nuts.free
-local free_nodelist = nuts.flush_list
-local copy_nodelist = nuts.copy_list
-local traverse_nodes = nuts.traverse
-local traverse_ids = nuts.traverse_id
-local hpack_nodes = nuts.hpack
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local protect_glyphs = nuts.protect_glyphs
-
-local repack_hlist = nuts.repackhlist
-
+local find_node_tail = node.tail or node.slide
+local free_node = node.free
+local free_nodelist = node.flush_list
+local copy_nodelist = node.copy_list
+local traverse_nodes = node.traverse
+local traverse_ids = node.traverse_id
+local protect_glyphs = nodes.handlers.protectglyphs or node.protect_glyphs
+local hpack_nodes = node.hpack
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local repack_hlist = nodes.repackhlist
local nodes_to_utf = nodes.listtoutf
------ protect_glyphs = nodes.handlers.protectglyphs
-
local setnodecolor = nodes.tracers.colors.set
local nodecodes = nodes.nodecodes
@@ -101,7 +79,8 @@ local localpar_code = whatsitcodes.localpar
local dir_code = whatsitcodes.dir
local userdefined_code = whatsitcodes.userdefined
-local nodepool = nuts.pool
+local nodepool = nodes.pool
+local tasks = nodes.tasks
local usernodeids = nodepool.userids
local new_textdir = nodepool.textdir
@@ -111,7 +90,7 @@ local new_leftskip = nodepool.leftskip
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
------ process_characters = nodes.handlers.characters
+local process_characters = nodes.handlers.characters
local inject_kerns = nodes.injections.handler
local fonthashes = fonts.hashes
@@ -338,12 +317,11 @@ end)
function splitters.split(head)
-- quite fast
- head = tonut(head)
local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0
cache, max_less, max_more = { }, 0, 0
local function flush() -- we can move this
- local font = getfont(start)
- local last = getnext(stop)
+ local font = start.font
+ local last = stop.next
local list = last and copy_nodelist(start,last) or copy_nodelist(start)
local n = #cache + 1
if encapsulate then
@@ -354,18 +332,18 @@ function splitters.split(head)
else
local current = start
while true do
- setattr(current,a_word,n)
+ current[a_word] = n
if current == stop then
break
else
- current = getnext(current)
+ current = current.next
end
end
end
if rlmode == "TRT" or rlmode == "+TRT" then
local dirnode = new_textdir("+TRT")
- setfield(list,"prev",dirnode)
- setfield(dirnode,"next",list)
+ list.prev = dirnode
+ dirnode.next = list
list = dirnode
end
local c = {
@@ -386,11 +364,11 @@ function splitters.split(head)
start, stop, done = nil, nil, true
end
while current do -- also nextid
- local next = getnext(current)
- local id = getid(current)
+ local next = current.next
+ local id = current.id
if id == glyph_code then
- if getsubtype(current) < 256 then
- local a = getattr(current,a_split)
+ if current.subtype < 256 then
+ local a = current[a_split]
if not a then
start, stop = nil, nil
elseif not start then
@@ -406,7 +384,7 @@ function splitters.split(head)
if start then
flush()
end
- elseif start and next and getid(next) == glyph_code and getsubtype(next) < 256 then
+ elseif start and next and next.id == glyph_code and next.subtype < 256 then
-- beware: we can cross future lines
stop = next
else
@@ -416,9 +394,9 @@ function splitters.split(head)
if start then
flush()
end
- local subtype = getsubtype(current)
+ local subtype = current.subtype
if subtype == dir_code or subtype == localpar_code then
- rlmode = getfield(current,"dir")
+ rlmode = current.dir
end
else
if start then
@@ -432,17 +410,17 @@ function splitters.split(head)
end
nofparagraphs = nofparagraphs + 1
nofwords = nofwords + #cache
- return tonode(head), done
+ return head, done
end
local function collect_words(list) -- can be made faster for attributes
local words, w, word = { }, 0, nil
if encapsulate then
for current in traverse_ids(whatsit_code,list) do
- if getsubtype(current) == userdefined_code then -- hm
- local user_id = getfield(current,"user_id")
+ if current.subtype == userdefined_code then -- hm
+ local user_id = current.user_id
if user_id == splitter_one then
- word = { getfield(current,"value"), current, current }
+ word = { current.value, current, current }
w = w + 1
words[w] = word
elseif user_id == splitter_two then
@@ -458,9 +436,9 @@ local function collect_words(list) -- can be made faster for attributes
local current, first, last, index = list, nil, nil, nil
while current do
-- todo: disc and kern
- local id = getid(current)
+ local id = current.id
if id == glyph_code or id == disc_code then
- local a = getattr(current,a_word)
+ local a = current[a_word]
if a then
if a == index then
-- same word
@@ -493,7 +471,7 @@ local function collect_words(list) -- can be made faster for attributes
report_splitters("skipped: %C",current.char)
end
end
- elseif id == kern_code and (getsubtype(current) == fontkern_code or getattr(current,a_fontkern)) then
+ elseif id == kern_code and (current.subtype == fontkern_code or current[a_fontkern]) then
if first then
last = current
else
@@ -511,7 +489,7 @@ local function collect_words(list) -- can be made faster for attributes
end
end
end
- current = getnext(current)
+ current = current.next
end
if index then
w = w + 1
@@ -542,8 +520,8 @@ local function doit(word,list,best,width,badness,line,set,listdir)
if found then
local h, t
if encapsulate then
- h = getnext(word[2]) -- head of current word
- t = getprev(word[3]) -- tail of current word
+ h = word[2].next -- head of current word
+ t = word[3].prev -- tail of current word
else
h = word[2]
t = word[3]
@@ -558,7 +536,7 @@ local function doit(word,list,best,width,badness,line,set,listdir)
ok = true
break
else
- c = getnext(c)
+ c = c.next
end
end
if not ok then
@@ -577,20 +555,19 @@ local function doit(word,list,best,width,badness,line,set,listdir)
local first = copy_nodelist(original)
if not trace_colors then
for n in traverse_nodes(first) do -- maybe fast force so no attr needed
- setattr(n,0,featurenumber) -- this forces dynamics
+ n[0] = featurenumber -- this forces dynamics
end
elseif set == "less" then
for n in traverse_nodes(first) do
setnodecolor(n,"font:isol") -- yellow
- setattr(n,0,featurenumber)
+ n[0] = featurenumber
end
else
for n in traverse_nodes(first) do
setnodecolor(n,"font:medi") -- green
- setattr(n,0,featurenumber)
+ n[0] = featurenumber
end
end
-first = tonode(first)
local font = found.font
local setdynamics = setfontdynamics[font]
if setdynamics then
@@ -602,21 +579,20 @@ first = tonode(first)
report_solutions("fatal error, no dynamics for font %a",font)
end
first = inject_kerns(first)
-first = tonut(first)
- if getid(first) == whatsit_code then
+ if first.id == whatsit_code then
local temp = first
- first = getnext(first)
+ first = first.next
free_node(temp)
end
local last = find_node_tail(first)
-- replace [u]h->t by [u]first->last
- local prev = getprev(h)
- local next = getnext(t)
- setfield(prev,"next",first)
- setfield(first,"prev",prev)
+ local prev = h.prev
+ local next = t.next
+ prev.next = first
+ first.prev = prev
if next then
- setfield(last,"next",next)
- setfield(next,"prev",last)
+ last.next = next
+ next.prev = last
end
-- check new pack
local temp, b = repack_hlist(list,width,'exactly',listdir)
@@ -625,22 +601,22 @@ first = tonut(first)
report_optimizers("line %a, badness before %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"quit")
end
-- remove last insert
- setfield(prev,"next",h)
- setfield(h,"prev",prev)
+ prev.next = h
+ h.prev = prev
if next then
- setfield(t,"next",next)
- setfield(next,"prev",t)
+ t.next = next
+ next.prev = t
else
- setfield(t,"next",nil)
+ t.next = nil
end
- setfield(last,"next",nil)
+ last.next = nil
free_nodelist(first)
else
if trace_optimize then
report_optimizers("line %a, badness before: %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"continue")
end
-- free old h->t
- setfield(t,"next",nil)
+ t.next = nil
free_nodelist(h) -- somehow fails
if not encapsulate then
word[2] = first
@@ -721,9 +697,9 @@ variants[v_random] = function(words,list,best,width,badness,line,set,listdir)
end
local function show_quality(current,what,line)
- local set = getfield(current,"glue_set")
- local sign = getfield(current,"glue_sign")
- local order = getfield(current,"glue_order")
+ local set = current.glue_set
+ local sign = current.glue_sign
+ local order = current.glue_order
local amount = set * ((sign == 2 and -1) or 1)
report_optimizers("line %a, category %a, amount %a, set %a, sign %a, how %a, order %a",line,what,amount,set,sign,how,order)
end
@@ -743,25 +719,20 @@ function splitters.optimize(head)
math.setrandomseedi(randomseed)
randomseed = nil
end
- local line = 0
- local tex_hbadness = tex.hbadness
- local tex_hfuzz = tex.hfuzz
- tex.hbadness = 10000
- tex.hfuzz = number.maxdimen
+ local line = 0
+ local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz
+ tex.hbadness, tex.hfuzz = 10000, number.maxdimen
if trace_optimize then
report_optimizers("preroll %a, variant %a, criterium %a, cache size %a",preroll,variant,criterium,nc)
end
- for current in traverse_ids(hlist_code,tonut(head)) do
+ for current in traverse_ids(hlist_code,head) do
+ -- report_splitters("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil))
line = line + 1
- local sign = getfield(current,"glue_sign")
- local dir = getfield(current,"dir")
- local width = getfield(current,"width")
- local list = getlist(current)
- if not encapsulate and getid(list) == glyph_code then
+ local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width
+ if not encapsulate and list.id == glyph_code then
-- nasty .. we always assume a prev being there .. future luatex will always have a leftskip set
- -- is this assignment ok ? .. needs checking
- list = insert_node_before(list,list,new_leftskip(0)) -- new_glue(0)
- setfield(current,"list",list)
+ -- current.list, list = insert_node_before(list,list,new_glue(0))
+ current.list, list = insert_node_before(list,list,new_leftskip(0))
end
local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node
if badness > 0 then
@@ -821,7 +792,7 @@ function splitters.optimize(head)
local words = collect_words(list)
for best=lastbest or 1,max do
local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir)
- setfield(current,"list",temp)
+ current.list = temp
if trace_optimize then
report_optimizers("line %a, alternative %a, changes %a, badness %a",line,best,changes,b)
end
@@ -839,16 +810,15 @@ function splitters.optimize(head)
end
end
-- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus
- local list = hpack_nodes(getlist(current),width,'exactly',listdir)
- setfield(current,"list",list)
+ current.list = hpack_nodes(current.list,width,'exactly',listdir)
+ -- report_splitters("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil))
end
for i=1,nc do
local ci = cache[i]
free_nodelist(ci.original)
end
cache = { }
- tex.hbadness = tex_hbadness
- tex.hfuzz = tex_hfuzz
+ tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz
stoptiming(splitters)
end
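
The splitters.optimize hunk above relies on a small save/raise/restore dance around tex.hbadness and tex.hfuzz so that the trial repacking does not spam the log with overfull/underfull messages. In isolation it looks like the sketch below; this only runs inside LuaTeX with ConTeXt loaded, and number.maxdimen is the same helper the hunk itself uses.

local saved_hbadness, saved_hfuzz = tex.hbadness, tex.hfuzz
tex.hbadness, tex.hfuzz = 10000, number.maxdimen   -- silence badness reporting
-- ... repack hlists and inspect the returned badness ...
tex.hbadness, tex.hfuzz = saved_hbadness, saved_hfuzz
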
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index 6296f088e..5b50ac75f 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -81,33 +81,7 @@ directives.register("fonts.usesystemfonts", function(v) usesystemfonts = toboole
local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs
--- -- what to do with these -- --
---
--- thin -> thin
---
--- regu -> regular -> normal
--- norm -> normal -> normal
--- stan -> standard -> normal
--- medi -> medium
--- ultr -> ultra
--- ligh -> light
--- heav -> heavy
--- blac -> black
--- thin
--- book
--- verylight
---
--- buch -> book
--- buchschrift -> book
--- halb -> demi
--- halbfett -> demi
--- mitt -> medium
--- mittel -> medium
--- fett -> bold
--- mage -> light
--- mager -> light
--- nord -> normal
--- gras -> normal
+-- what to do with 'thin'
local weights = Cs ( -- not extra
P("demibold")
@@ -116,7 +90,6 @@ local weights = Cs ( -- not extra
+ P("ultrabold")
+ P("extrabold")
+ P("ultralight")
- + P("extralight")
+ P("bold")
+ P("demi")
+ P("semi")
@@ -130,17 +103,6 @@ local weights = Cs ( -- not extra
+ P("regular") / "normal"
)
--- numeric_weights = {
--- 200 = "extralight",
--- 300 = "light",
--- 400 = "book",
--- 500 = "medium",
--- 600 = "demi",
--- 700 = "bold",
--- 800 = "heavy",
--- 900 = "black",
--- }
-
local normalized_weights = sparse {
regular = "normal",
}
@@ -154,7 +116,6 @@ local styles = Cs (
+ P("roman") / "normal"
+ P("ital") / "italic" -- might be tricky
+ P("ita") / "italic" -- might be tricky
---+ P("obli") / "oblique"
)
local normalized_styles = sparse {
@@ -168,7 +129,6 @@ local widths = Cs(
+ P("thin")
+ P("expanded")
+ P("cond") / "condensed"
---+ P("expa") / "expanded"
+ P("normal")
+ P("book") / "normal"
)
@@ -308,9 +268,6 @@ filters.dfont = fontloader.info
-- glyphs so here we first load and then discard which is a waste. In the past it did
-- free memory because a full load was done. One of these things that goes unnoticed.
--
--- missing: names, units_per_em, design_range_bottom, design_range_top, design_size,
--- pfminfo, top_side_bearing
-
-- function fontloader.fullinfo(...) -- check with taco what we get / could get
-- local ff = fontloader.open(...)
-- if ff then
@@ -326,7 +283,7 @@ filters.dfont = fontloader.info
-- Phillip suggested this faster variant but it's still a hack as fontloader.info should
-- return these keys/values (and maybe some more) but at least we close the loader which
-- might save some memory in the end.
-
+--
-- function fontloader.fullinfo(name)
-- local ff = fontloader.open(name)
-- if ff then
@@ -344,9 +301,8 @@ filters.dfont = fontloader.info
-- design_size = fields.design_size and ff.design_size,
-- italicangle = fields.italicangle and ff.italicangle,
-- pfminfo = fields.pfminfo and ff.pfminfo,
--- top_side_bearing = fields.top_side_bearing and ff.top_side_bearing,
-- }
--- setmetatableindex(d,function(t,k)
+-- table.setmetatableindex(d,function(t,k)
-- report_names("warning, trying to access field %a in font table of %a",k,name)
-- end)
-- fontloader.close(ff)
@@ -357,26 +313,19 @@ filters.dfont = fontloader.info
-- end
-- As we have lazy loading anyway, this one still is full and with less code than
--- the previous one. But this depends on the garbage collector to kick in.
+-- the previous one.
function fontloader.fullinfo(...)
local ff = fontloader.open(...)
if ff then
local d = { } -- ff is userdata so [1] or # fails on it
- setmetatableindex(d,ff)
+ table.setmetatableindex(d,ff)
return d
else
return nil, "error in loading font"
end
end
--- We don't get the design_* values here as for that the fontloader has to load feature
--- info and therefore we're not much better off than using 'open'.
---
--- if tonumber(status.luatex_version) > 78 or (tonumber(status.luatex_version) == 78 and tonumber(status.luatex_revision) > 0) then
--- fontloader.fullinfo = fontloader.info
--- end
-
filters.otf = fontloader.fullinfo
filters.ttf = fontloader.fullinfo
@@ -598,7 +547,7 @@ local function check_name(data,result,filename,modification,suffix,subfont)
fullname = fullname or fontname
familyname = familyname or fontname
-- we do these sparse
- local units = result.units_per_em or 1000 -- can be zero too
+ local units = result.units_per_em or 1000
local minsize = result.design_range_bottom or 0
local maxsize = result.design_range_top or 0
local designsize = result.design_size or 0
@@ -622,7 +571,7 @@ local function check_name(data,result,filename,modification,suffix,subfont)
style = style,
width = width,
variant = variant,
- units = units ~= 1000 and units or nil,
+ units = units ~= 1000 and units or nil,
pfmwidth = pfmwidth ~= 0 and pfmwidth or nil,
pfmweight = pfmweight ~= 0 and pfmweight or nil,
angle = angle ~= 0 and angle or nil,
@@ -631,9 +580,6 @@ local function check_name(data,result,filename,modification,suffix,subfont)
designsize = designsize ~= 0 and designsize or nil,
modification = modification ~= 0 and modification or nil,
}
--- inspect(filename)
--- inspect(result)
--- inspect(specifications[#specifications])
end
local function cleanupkeywords()
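
The fontloader.fullinfo change above keeps the returned table empty and lets a metatable __index fetch fields from the loader object only when they are actually read. A minimal sketch of that idea, with a plain table standing in for the fontloader userdata and a local stand-in for ConTeXt's table.setmetatableindex:

local function setmetatableindex(t,f)
    setmetatable(t, { __index = f })        -- delegate missing keys to f
    return t
end

local fakeloader = { fontname = "TestFont", units_per_em = 1000 }

local function fullinfo(ff)
    local d = { }                           -- stays empty until a key is read
    setmetatableindex(d, ff)
    return d
end

local info = fullinfo(fakeloader)
print(info.fontname, info.units_per_em)     -- fetched on demand
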
diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua
index b658b7c75..40081cc3b 100644
--- a/tex/context/base/l-dir.lua
+++ b/tex/context/base/l-dir.lua
@@ -25,7 +25,6 @@ local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
local chdir = lfs.chdir
-local mkdir = lfs.mkdir
local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
@@ -285,28 +284,17 @@ local make_indeed = true -- false
if onwindows then
function dir.mkdirs(...)
- local n = select("#",...)
- local str
- if n == 1 then
- str = select(1,...)
- if isdir(str) then
- return str, true
- end
- else
- str = ""
- for i=1,n do
- local s = select(i,...)
- local s = select(i,...)
- if s == "" then
- -- skip
- elseif str == "" then
- str = s
- else
- str = str .. "/" .. s
- end
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s == "" then
+ -- skip
+ elseif str == "" then
+ str = s
+ else
+ str = str .. "/" .. s
end
end
- local pth = ""
local drive = false
local first, middle, last = match(str,"^(//)(//*)(.*)$")
if first then
@@ -342,7 +330,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -363,23 +351,14 @@ if onwindows then
else
function dir.mkdirs(...)
- local n = select("#",...)
- local str, pth
- if n == 1 then
- str = select(1,...)
- if isdir(str) then
- return str, true
- end
- else
- str = ""
- for i=1,n do
- local s = select(i,...)
- if s and s ~= "" then -- we catch nil and false
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s and s ~= "" then -- we catch nil and false
+ if str ~= "" then
+ str = str .. "/" .. s
+ else
+ str = s
end
end
end
@@ -394,7 +373,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -402,7 +381,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
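
Both branches of dir.mkdirs above do the same two things: join the arguments into one path string and then create any missing components one at a time. A rough, relative-paths-only sketch of that logic (drive letters and leading slashes are handled in the real code but skipped here; requires LuaFileSystem):

local lfs = require("lfs")

local function mkdirs(...)
    local str = ""
    for i=1,select("#",...) do
        local s = select(i,...)
        if s and s ~= "" then
            str = str ~= "" and (str .. "/" .. s) or s
        end
    end
    local pth = ""
    for s in string.gmatch(str,"[^/]+") do
        pth = pth == "" and s or (pth .. "/" .. s)
        if not lfs.isdir(pth) then
            lfs.mkdir(pth)                   -- create each missing component
        end
    end
    return pth, lfs.isdir(pth) == true
end

-- mkdirs("tmp","fonts","cache") creates tmp, tmp/fonts and tmp/fonts/cache
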
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
index 6feb7089c..399b3ad65 100644
--- a/tex/context/base/l-lpeg.lua
+++ b/tex/context/base/l-lpeg.lua
@@ -6,10 +6,6 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
--- lpeg 12 vs lpeg 10: slower compilation, similar parsing speed (i need to check
--- if i can use new features like capture / 2 and .B (at first sight the xml
--- parser is some 5% slower)
-
-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-- move utf -> l-unicode
@@ -19,15 +15,14 @@ lpeg = require("lpeg")
-- The latest lpeg doesn't have print any more, and even the new ones are not
-- available by default (only when debug mode is enabled), which is a pity as
--- as it helps nailign down bottlenecks. Performance seems comparable: some 10%
--- slower pattern compilation, same parsing speed, although,
+-- as it helps nailing down bottlenecks. Performance seems comparable, although
--
-- local p = lpeg.C(lpeg.P(1)^0 * lpeg.P(-1))
--- local a = string.rep("123",100)
+-- local a = string.rep("123",10)
-- lpeg.match(p,a)
--
--- seems slower and is also still suboptimal (i.e. a match that runs from begin
--- to end, one of the cases where string matchers win).
+-- is nearly 20% slower and also still suboptimal (i.e. a match that runs from
+-- begin to end, one of the cases where string matchers win).
if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
@@ -79,9 +74,7 @@ local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print
-- let's start with an inspector:
-if setinspector then
- setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-end
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-- Beware, we predefine a bunch of patterns here and one reason for doing so
-- is that we get consistent behaviour in some of the visualizers.
@@ -176,14 +169,12 @@ patterns.whitespace = whitespace
patterns.nonspacer = nonspacer
patterns.nonwhitespace = nonwhitespace
-local stripper = spacer ^0 * C((spacer ^0 * nonspacer ^1)^0) -- from example by roberto
-local fullstripper = whitespace^0 * C((whitespace^0 * nonwhitespace^1)^0)
+local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto
----- collapser = Cs(spacer^0/"" * ((spacer^1 * endofstring / "") + (spacer^1/" ") + P(1))^0)
local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0))
patterns.stripper = stripper
-patterns.fullstripper = fullstripper
patterns.collapser = collapser
patterns.lowercase = lowercase
@@ -478,7 +469,7 @@ end
-- local pattern1 = P(1-P(pattern))^0 * P(pattern) : test for not nil
-- local pattern2 = (P(pattern) * Cc(true) + P(1))^0 : test for true (could be faster, but not much)
-function lpeg.finder(lst,makefunction,isutf) -- beware: slower than find with 'patternless finds'
+function lpeg.finder(lst,makefunction) -- beware: slower than find with 'patternless finds'
local pattern
if type(lst) == "table" then
pattern = P(false)
@@ -494,12 +485,7 @@ function lpeg.finder(lst,makefunction,isutf) -- beware: slower than find with 'p
else
pattern = P(lst)
end
- if isutf then
--- pattern = ((utf8char or 1)-pattern)^0 * pattern
- pattern = ((utf8char or 1)-pattern)^0 * pattern
- else
- pattern = (1-pattern)^0 * pattern
- end
+ pattern = (1-pattern)^0 * pattern
if makefunction then
return function(str)
return lpegmatch(pattern,str)
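
For the lpeg.finder hunk above, the essential idiom is (1 - pattern)^0 * pattern: skip input until the first place where pattern matches. A small self-contained demo follows; a capture is added here so the match is visible, whereas the real finder returns positions or a matching function instead. Requires the lpeg module.

local lpeg = require("lpeg")
local P, C = lpeg.P, lpeg.C

local function finder(lst)
    local pattern = P(false)
    if type(lst) == "table" then
        for i=1,#lst do
            pattern = pattern + P(lst[i])
        end
    else
        pattern = P(lst)
    end
    return (1 - pattern)^0 * C(pattern)   -- skip, then capture the first hit
end

print(lpeg.match(finder({ "foo", "bar" }), "xxbarxx"))  -- prints "bar"
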
diff --git a/tex/context/base/l-lua.lua b/tex/context/base/l-lua.lua
index 4a96b0b1d..fc05afa67 100644
--- a/tex/context/base/l-lua.lua
+++ b/tex/context/base/l-lua.lua
@@ -148,9 +148,3 @@ function optionalrequire(...)
return result
end
end
-
--- nice for non ascii scripts (this might move):
-
-if lua then
- lua.mask = load([[τεχ = 1]]) and "utf" or "ascii"
-end
diff --git a/tex/context/base/l-string.lua b/tex/context/base/l-string.lua
index 3b1a0003f..9b079b00a 100644
--- a/tex/context/base/l-string.lua
+++ b/tex/context/base/l-string.lua
@@ -70,7 +70,6 @@ function string.limit(str,n,sentinel) -- not utf proof
end
local stripper = patterns.stripper
-local fullstripper = patterns.fullstripper
local collapser = patterns.collapser
local longtostring = patterns.longtostring
@@ -78,10 +77,6 @@ function string.strip(str)
return lpegmatch(stripper,str) or ""
end
-function string.fullstrip(str)
- return lpegmatch(fullstripper,str) or ""
-end
-
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua
index c318c57bb..f361f3d20 100644
--- a/tex/context/base/l-table.lua
+++ b/tex/context/base/l-table.lua
@@ -1006,9 +1006,7 @@ function table.print(t,...)
end
end
-if setinspector then
- setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
-end
+setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
-- -- -- obsolete but we keep them for a while and might comment them later -- -- --
diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua
index 7bb731254..7b7910fa7 100644
--- a/tex/context/base/l-url.lua
+++ b/tex/context/base/l-url.lua
@@ -26,8 +26,6 @@ local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replac
-- | ___________|____________ |
-- / \ / \ |
-- urn:example:animal:ferret:nose interpretable as extension
---
--- also nice: http://url.spec.whatwg.org/ (maybe some day ...)
url = url or { }
local url = url
@@ -45,7 +43,7 @@ local hexdigit = R("09","AF","af")
local plus = P("+")
local nothing = Cc("")
local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
-local escaped = (plus / " ") + escapedchar -- so no loc://foo++.tex
+local escaped = (plus / " ") + escapedchar
local noslash = P("/") / ""
@@ -191,11 +189,7 @@ local function hashed(str) -- not yet ok (/test?test)
return s
end
--- inspect(hashed("template:///test"))
--- inspect(hashed("template:///test++.whatever"))
--- inspect(hashed("template:///test%2B%2B.whatever"))
--- inspect(hashed("template:///test%x.whatever"))
--- inspect(hashed("tem%2Bplate:///test%x.whatever"))
+-- inspect(hashed("template://test"))
-- Here we assume:
--
diff --git a/tex/context/base/lang-lab.mkiv b/tex/context/base/lang-lab.mkiv
index 14d9d8594..1ddb44cbb 100644
--- a/tex/context/base/lang-lab.mkiv
+++ b/tex/context/base/lang-lab.mkiv
@@ -94,10 +94,8 @@
\csname\??label\currentlabelcategory#1:##1:##2\endcsname
\else\ifcsname\??label#1:##1:##2\endcsname
\csname\??label#1:##1:##2\endcsname
-% \else\ifcsname\??language#4\s!default\endcsname
-% \expandafter#5\csname\??language#4\s!default\endcsname{##2}%
- \else\ifcsname\??language##1\s!default\endcsname
- \expandafter#5\csname\??language##1\s!default\endcsname{##2}%
+ \else\ifcsname\??language#4\s!default\endcsname
+ \expandafter#5\csname\??language#4\s!default\endcsname{##2}%
\else\ifcsname\??label\currentlabelcategory#1:##2\endcsname
\csname\??label\currentlabelcategory#1:##2\endcsname
\else\ifcsname\??label#1:##2\endcsname
diff --git a/tex/context/base/lang-rep.lua b/tex/context/base/lang-rep.lua
index be74d597a..31ae36e6d 100644
--- a/tex/context/base/lang-rep.lua
+++ b/tex/context/base/lang-rep.lua
@@ -7,21 +7,9 @@ if not modules then modules = { } end modules ['lang-rep'] = {
}
-- A BachoTeX 2013 experiment, probably not that useful. Eventually I used a simpler
--- more generic example. I'm sure no one ever notices of even needs this code.
---
--- As a follow up on a question by Alan about special treatment of dropped caps I wonder
--- if I can make this one more clever (probably in a few more dev steps). For instance
--- injecting nodes or replacing nodes. It's a prelude to a kind of lpeg for nodes,
--- although (given experiences so far) we don't really need that. After all, each problem
--- is somewhat unique.
+-- more generic example.
-local type = type
local utfbyte, utfsplit = utf.byte, utf.split
-local P, C, U, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.patterns.utf8character, lpeg.Cc, lpeg.Ct, lpeg.match
-local find = string.find
-
-local grouped = P("{") * ( Ct((U/utfbyte-P("}"))^1) + Cc(false) ) * P("}")-- grouped
-local splitter = Ct((Ct(Cc("discretionary") * grouped * grouped * grouped) + U/utfbyte)^1)
local trace_replacements = false trackers.register("languages.replacements", function(v) trace_replacements = v end)
local trace_detail = false trackers.register("languages.replacements.detail", function(v) trace_detail = v end)
@@ -30,26 +18,9 @@ local report_replacement = logs.reporter("languages","replacements")
local glyph_code = nodes.nodecodes.glyph
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getattr = nuts.getattr
-local getid = nuts.getid
-local getchar = nuts.getchar
-
-local insert_node_before = nuts.insert_before
-local remove_node = nuts.remove
-local copy_node = nuts.copy
-local flush_list = nuts.flush_list
-local insert_after = nuts.insert_after
-
-local nodepool = nuts.pool
-local new_glyph = nodepool.glyph
-local new_disc = nodepool.disc
+local insert_node_before = nodes.insert_before
+local remove_node = nodes.remove
+local copy_node = nodes.copy
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
@@ -75,30 +46,23 @@ table.setmetatableindex(lists,function(lists,name)
return data
end)
--- todo: glue kern
-
local function add(root,word,replacement)
local list = utfsplit(word,true)
- local size = #list
- for i=1,size do
+ for i=1,#list do
local l = utfbyte(list[i])
if not root[l] then
root[l] = { }
end
- if i == size then
- -- local newlist = utfsplit(replacement,true)
- -- for i=1,#newlist do
- -- newlist[i] = utfbyte(newlist[i])
- -- end
- local special = find(replacement,"{")
- local newlist = lpegmatch(splitter,replacement)
- --
+ if i == #list then
+ local newlist = utfsplit(replacement,true)
+ for i=1,#newlist do
+ newlist[i] = utfbyte(newlist[i])
+ end
root[l].final = {
word = word,
replacement = replacement,
- oldlength = size,
+ oldlength = #list,
newcodes = newlist,
- special = special,
}
end
root = root[l]
@@ -119,13 +83,13 @@ end
local function hit(a,head)
local tree = trees[a]
if tree then
- local root = tree[getchar(head)]
+ local root = tree[head.char]
if root then
- local current = getnext(head)
+ local current = head.next
local lastrun = false
local lastfinal = false
- while current and getid(current) == glyph_code do
- local newroot = root[getchar(current)]
+ while current and current.id == glyph_code do
+ local newroot = root[current.char]
if not newroot then
return lastrun, lastfinal
else
@@ -140,7 +104,7 @@ local function hit(a,head)
root = newroot
end
end
- current = getnext(current)
+ current = current.next
end
if lastrun then
return lastrun, lastfinal
@@ -149,27 +113,11 @@ local function hit(a,head)
end
end
-local function tonodes(list,template)
- local head, current
- for i=1,#list do
- local new = copy_node(template)
- setfield(new,"char",list[i])
- if head then
- head, current = insert_after(head,current,new)
- else
- head, current = new, new
- end
- end
- return head
-end
-
-
function replacements.handler(head)
- head = tonut(head)
local current = head
local done = false
while current do
- if getid(current) == glyph_code then
+ if current.id == glyph_code then
local a = getattr(current,a_replacements)
if a then
local last, final = hit(a,current)
@@ -177,85 +125,41 @@ function replacements.handler(head)
local oldlength = final.oldlength
local newcodes = final.newcodes
local newlength = #newcodes
- if trace_replacement then
+ if trace_replacement then
report_replacement("replacing word %a by %a",final.word,final.replacement)
end
- if final.special then
- -- easier is to delete and insert (a simple callout to tex would be more efficient)
- -- maybe just walk over a replacement string instead
- local prev = getprev(current)
- local next = getnext(last)
- local list = current
- setfield(last,"next",nil)
- setfield(prev,"next",next)
- if next then
- setfield(next,"prev",prev)
- end
- current = prev
- if not current then
- head = nil
- end
- for i=1,newlength do
- local codes = newcodes[i]
- local new = nil
- if type(codes) == "table" then
- local method = codes[1]
- if method == "discretionary" then
- local pre, post, replace = codes[2], codes[3], codes[4]
- new = new_disc()
- if pre then
- setfield(new,"pre",tonodes(pre,last))
- end
- if post then
- setfield(new,"post",tonodes(post,last))
- end
- if replace then
- setfield(new,"replace",tonodes(replace,last))
- end
- else
- -- todo
- end
- else
- new = copy_node(last)
- setfield(new,"char",codes)
- end
- if new then
- head, current = insert_after(head,current,new)
- end
- end
- flush_list(list)
- elseif oldlength == newlength then -- #old == #new
+ if oldlength == newlength then -- #old == #new
for i=1,newlength do
- setfield(current,"char",newcodes[i])
- current = getnext(current)
+ current.char = newcodes[i]
+ current = current.next
end
elseif oldlength < newlength then -- #old < #new
for i=1,newlength-oldlength do
local n = copy_node(current)
- setfield(n,"char",newcodes[i])
+ n.char = newcodes[i]
head, current = insert_node_before(head,current,n)
- current = getnext(current)
+ current = current.next
end
for i=newlength-oldlength+1,newlength do
- setfield(current,"char",newcodes[i])
- current = getnext(current)
+ current.char = newcodes[i]
+ current = current.next
end
else -- #old > #new
for i=1,oldlength-newlength do
head, current = remove_node(head,current,true)
end
for i=1,newlength do
- setfield(current,"char",newcodes[i])
- current = getnext(current)
+ current.char = newcodes[i]
+ current = current.next
end
end
done = true
end
end
end
- current = getnext(current)
+ current = current.next
end
- return tonode(head), done
+ return head, done
end
local enabled = false
@@ -280,3 +184,6 @@ end
commands.setreplacements = replacements.set
commands.addreplacements = replacements.add
+
+nodes.tasks.prependaction("processors","words","languages.replacements.handler")
+nodes.tasks.disableaction("processors","languages.replacements.handler")
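
The add/hit pair in the lang-rep.lua hunks builds and walks a per-character tree of replacement words. A toy version over plain ASCII strings may make the data structure easier to see; the real code keys on utf codepoints and walks glyph nodes instead.

local function add(root, word, replacement)
    for i=1,#word do
        local c = string.byte(word, i)
        root[c] = root[c] or { }
        root = root[c]
        if i == #word then
            root.final = { word = word, replacement = replacement }
        end
    end
end

local function lookup(root, str, start)
    local node, final = root, nil
    for i=start,#str do
        node = node[string.byte(str, i)]
        if not node then break end
        if node.final then final = node.final end   -- remember longest hit
    end
    return final
end

local tree = { }
add(tree, "colour", "color")
print(lookup(tree, "colourful", 1).replacement)      -- color
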
diff --git a/tex/context/base/lang-rep.mkiv b/tex/context/base/lang-rep.mkiv
deleted file mode 100644
index b3f21f22a..000000000
--- a/tex/context/base/lang-rep.mkiv
+++ /dev/null
@@ -1,75 +0,0 @@
-%D \module
-%D [ file=lang-rep,
-%D version=2013.04.28,
-%D title=\CONTEXT\ Language Macros,
-%D subtitle=Substitution,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D As I needed an example of messing with nodes for the bacho\TEX\ tutorial
-%D I cooked up this. In the end I decided to stick to a simpler example and
-%D just finished this off in case someone really needs it.
-
-\writestatus{loading}{ConTeXt Language Macros / Replacements}
-
-\unprotect
-
-\registerctxluafile{lang-rep}{1.001}
-
-\definesystemattribute[replacements][public]
-
-%D \startluacode
-%D
-%D -- todo: other nodes (prelude to more experiments with auto dropped caps)
-%D
-%D languages.replacements.add("basics", {
-%D ["aap"] = "monkey",
-%D ["noot"] = "nut",
-%D ["never"] = "forever",
-%D ["newer"] = "cooler",
-%D ["new"] = "cool",
-%D -- ["special"] = "veryspe{>>>}{<<<}{=}cial",
-%D })
-%D
-%D \stopluacode
-%D
-%D \replaceword[more][this][that]
-%D \replaceword[more][crap][support]
-%D \replaceword[more][---][—]
-%D \replaceword[basics][special][veryspe{>>>}{<<<}{=}cial]
-%D
-%D \starttyping
-%D \start \setreplacements[basics] What the heck, it's now or never, isn't it new? \par \stop
-%D \start \setreplacements[more] Do we --- {\it really} --- need this kind of crap? \par \stop
-%D \start \setreplacements[basics] All kinds of special thingies! \par \stop
-%D \start \setreplacements[basics] \hsize1mm special \par \stop
-%D \stoptyping
-
-\unexpanded\def\setreplacements[#1]%
- {\ctxcommand{setreplacements("#1")}}
-
-\unexpanded\def\resetreplacements
- {\attribute\replacementsattribute\attributeunsetvalue}
-
-\unexpanded\def\replaceword
- {\dotripleargument\languages_replacements_replace}
-
-\unexpanded\def\languages_replacements_replace[#1][#2][#3]%
- {\ifthirdargument
- \ctxcommand{addreplacements("#1",\!!bs#2\!!es,\!!bs#3\!!es)}%
- \fi}
-
-\appendtoks
- \resetreplacements
-\to \everyresettypesetting
-
-\appendtoks
- \resetreplacements
-\to \everyinitializeverbatim
-
-\protect \endinput
diff --git a/tex/context/base/lang-wrd.lua b/tex/context/base/lang-wrd.lua
index 5fc23757e..bf066fc09 100644
--- a/tex/context/base/lang-wrd.lua
+++ b/tex/context/base/lang-wrd.lua
@@ -26,18 +26,7 @@ words.threshold = 4
local numbers = languages.numbers
local registered = languages.registered
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local setattr = nuts.setattr
-
-local traverse_nodes = nuts.traverse
-
+local traverse_nodes = node.traverse
local wordsdata = words.data
local chardata = characters.data
local tasks = nodes.tasks
@@ -107,7 +96,7 @@ end
-- there is an n=1 problem somewhere in nested boxes
local function mark_words(head,whenfound) -- can be optimized and shared
- local current, language, done = tonut(head), nil, nil, 0, false
+ local current, language, done = head, nil, nil, 0, false
local str, s, nds, n = { }, 0, { }, 0 -- n could also be a table, saves calls
local function action()
if s > 0 then
@@ -123,9 +112,9 @@ local function mark_words(head,whenfound) -- can be optimized and shared
n, s = 0, 0
end
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local a = getfield(current,"lang")
+ local a = current.lang
if a then
if a ~= language then
if s > 0 then
@@ -137,16 +126,16 @@ local function mark_words(head,whenfound) -- can be optimized and shared
action()
language = a
end
- local components = getfield(current,"components")
+ local components = current.components
if components then
n = n + 1
nds[n] = current
for g in traverse_nodes(components) do
s = s + 1
- str[s] = utfchar(getchar(g))
+ str[s] = utfchar(g.char)
end
else
- local code = getchar(current)
+ local code = current.char
local data = chardata[code]
if is_letter[data.category] then
n = n + 1
@@ -162,12 +151,12 @@ local function mark_words(head,whenfound) -- can be optimized and shared
n = n + 1
nds[n] = current
end
- elseif id == kern_code and getsubtype(current) == kerning_code and s > 0 then
+ elseif id == kern_code and current.subtype == kerning_code and s > 0 then
-- ok
elseif s > 0 then
action()
end
- current = getnext(current)
+ current = current.next
end
if s > 0 then
action()
@@ -187,8 +176,6 @@ local enabled = false
function words.check(head)
if enabled then
return methods[wordmethod](head)
- elseif not head then
- return head, false
else
return head, false
end
@@ -220,7 +207,7 @@ table.setmetatableindex(cache, function(t,k) -- k == language, numbers[k] == tag
else
c = colist["word:" .. (numbers[k] or "unset")] or colist["word:unknown"]
end
- local v = c and function(n) setattr(n,a_color,c) end or false
+ local v = c and function(n) n[a_color] = c end or false
t[k] = v
return v
end)
@@ -239,7 +226,7 @@ end
methods[1] = function(head)
for n in traverse_nodes(head) do
- setattr(n,a_color,unsetvalue) -- hm, not that selective (reset color)
+ n[a_color] = unsetvalue -- hm, not that selective (reset color)
end
return mark_words(head,sweep)
end
@@ -340,7 +327,7 @@ end
methods[3] = function(head)
for n in traverse_nodes(head) do
- setattr(n,a_color,unsetvalue)
+ n[a_color] = unsetvalue
end
return mark_words(head,sweep)
end
diff --git a/tex/context/base/lpdf-mis.lua b/tex/context/base/lpdf-mis.lua
index 43f6cb7e1..174d17427 100644
--- a/tex/context/base/lpdf-mis.lua
+++ b/tex/context/base/lpdf-mis.lua
@@ -43,7 +43,6 @@ local pdfflushobject = lpdf.flushobject
local pdfflushstreamobject = lpdf.flushstreamobject
local variables = interfaces.variables
-local v_stop = variables.stop
local positive = register(pdfliteral("/GSpositive gs"))
local negative = register(pdfliteral("/GSnegative gs"))
@@ -338,82 +337,31 @@ local map = {
characters = "a",
}
--- local function featurecreep()
--- local pages, lastconversion, list = structures.pages.tobesaved, nil, pdfarray()
--- local getstructureset = structures.sets.get
--- for i=1,#pages do
--- local p = pages[i]
--- if not p then
--- return -- fatal error
--- else
--- local numberdata = p.numberdata
--- if numberdata then
--- local conversionset = numberdata.conversionset
--- if conversionset then
--- local conversion = getstructureset("structure:conversions",p.block,conversionset,1,"numbers")
--- if conversion ~= lastconversion then
--- lastconversion = conversion
--- list[#list+1] = i - 1 -- pdf starts numbering at 0
--- list[#list+1] = pdfdictionary { S = pdfconstant(map[conversion] or map.numbers) }
--- end
--- end
--- end
--- if not lastconversion then
--- lastconversion = "numbers"
--- list[#list+1] = i - 1 -- pdf starts numbering at 0
--- list[#list+1] = pdfdictionary { S = pdfconstant(map.numbers) }
--- end
--- end
--- end
--- lpdf.addtocatalog("PageLabels", pdfdictionary { Nums = list })
--- end
-
local function featurecreep()
- local pages = structures.pages.tobesaved
- local list = pdfarray()
- local getset = structures.sets.get
- local stopped = false
- local oldlabel = nil
- local olconversion = nil
+ local pages, lastconversion, list = structures.pages.tobesaved, nil, pdfarray()
+ local getstructureset = structures.sets.get
for i=1,#pages do
local p = pages[i]
if not p then
return -- fatal error
- end
- local label = p.viewerprefix or ""
- if p.status == v_stop then
- if not stopped then
- list[#list+1] = i - 1 -- pdf starts numbering at 0
- list[#list+1] = pdfdictionary {
- P = pdfunicode(label),
- }
- stopped = true
- end
- oldlabel = nil
- oldconversion = nil
- stopped = false
else
local numberdata = p.numberdata
- local conversion = nil
- local number = p.number
if numberdata then
local conversionset = numberdata.conversionset
if conversionset then
- conversion = getset("structure:conversions",p.block,conversionset,1,"numbers")
+ local conversion = getstructureset("structure:conversions",p.block,conversionset,1,"numbers")
+ if conversion ~= lastconversion then
+ lastconversion = conversion
+ list[#list+1] = i - 1 -- pdf starts numbering at 0
+ list[#list+1] = pdfdictionary { S = pdfconstant(map[conversion] or map.numbers) }
+ end
end
end
- conversion = conversion and map[conversion] or map.numbers
- if number == 1 or oldlabel ~= label or oldconversion ~= conversion then
+ if not lastconversion then
+ lastconversion = "numbers"
list[#list+1] = i - 1 -- pdf starts numbering at 0
- list[#list+1] = pdfdictionary {
- S = pdfconstant(conversion),
- St = number,
- P = label ~= "" and pdfunicode(label) or nil,
- }
+ list[#list+1] = pdfdictionary { S = pdfconstant(map.numbers) }
end
- oldlabel = label
- oldconversion = conversion
- stopped = false
end
end
lpdf.addtocatalog("PageLabels", pdfdictionary { Nums = list })
diff --git a/tex/context/base/lpdf-nod.lua b/tex/context/base/lpdf-nod.lua
index 68d7fca90..6b104d2fa 100644
--- a/tex/context/base/lpdf-nod.lua
+++ b/tex/context/base/lpdf-nod.lua
@@ -6,29 +6,21 @@ if not modules then modules = { } end modules ['lpdf-nod'] = {
license = "see context related readme files"
}
-local type = type
-
local formatters = string.formatters
-local whatsitcodes = nodes.whatsitcodes
-local nodeinjections = backends.nodeinjections
-
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local setfield = nuts.setfield
-
-local copy_node = nuts.copy
-local new_node = nuts.new
+local copy_node = node.copy
+local new_node = node.new
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local register = nodepool.register
+local whatsitcodes = nodes.whatsitcodes
+local nodeinjections = backends.nodeinjections
-local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfliteral,"mode",1)
+local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) pdfliteral.mode = 1
local pdfsave = register(new_node("whatsit", whatsitcodes.pdfsave))
local pdfrestore = register(new_node("whatsit", whatsitcodes.pdfrestore))
local pdfsetmatrix = register(new_node("whatsit", whatsitcodes.pdfsetmatrix))
-local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) setfield(pdfdest,"named_id",1) -- xyz_zoom untouched
+local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) pdfdest.named_id = 1 -- xyz_zoom untouched
local pdfannot = register(new_node("whatsit", whatsitcodes.pdfannot))
local variables = interfaces.variables
@@ -46,14 +38,14 @@ local views = { -- beware, we do support the pdf keys but this is *not* official
function nodepool.pdfliteral(str)
local t = copy_node(pdfliteral)
- setfield(t,"data",str)
+ t.data = str
return t
end
function nodepool.pdfdirect(str)
local t = copy_node(pdfliteral)
- setfield(t,"data",str)
- setfield(t,"mode",1)
+ t.data = str
+ t.mode = 1
return t
end
@@ -65,10 +57,16 @@ function nodepool.pdfrestore()
return copy_node(pdfrestore)
end
-function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) -- todo: tx ty
+function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
+ local t = copy_node(pdfsetmatrix)
+ t.data = formatters["%s %s %s %s"](rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty
+ return t
+end
+
+function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
local t = copy_node(pdfsetmatrix)
if type(rx) == "string" then
- setfield(t,"data",rx)
+ t.data = rx
else
if not rx then
rx = 1
@@ -88,12 +86,12 @@ function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) -- todo: tx ty
end
if sx == 0 and sy == 0 then
if rx == 1 and ry == 1 then
- setfield(t,"data","1 0 0 1")
+ t.data = "1 0 0 1"
else
- setfield(t,"data",formatters["%0.6f 0 0 %0.6f"](rx,ry))
+ t.data = formatters["%0.6f 0 0 %0.6f"](rx,ry)
end
else
- setfield(t,"data",formatters["%0.6f %0.6f %0.6f %0.6f"](rx,sx,sy,ry))
+ t.data = formatters["%0.6f %0.6f %0.6f %0.6f"](rx,sx,sy,ry)
end
end
return t
@@ -106,19 +104,19 @@ nodeinjections.transform = nodepool.pdfsetmatrix
function nodepool.pdfannotation(w,h,d,data,n)
local t = copy_node(pdfannot)
if w and w ~= 0 then
- setfield(t,"width",w)
+ t.width = w
end
if h and h ~= 0 then
- setfield(t,"height",h)
+ t.height = h
end
if d and d ~= 0 then
- setfield(t,"depth",d)
+ t.depth = d
end
if n then
- setfield(t,"objnum",n)
+ t.objnum = n
end
if data and data ~= "" then
- setfield(t,"data",data)
+ t.data = data
end
return t
end
@@ -140,36 +138,35 @@ function nodepool.pdfdestination(w,h,d,name,view,n)
local t = copy_node(pdfdest)
local hasdimensions = false
if w and w ~= 0 then
- setfield(t,"width",w)
+ t.width = w
hasdimensions = true
end
if h and h ~= 0 then
- setfield(t,"height",h)
+ t.height = h
hasdimensions = true
end
if d and d ~= 0 then
- setfield(t,"depth",d)
+ t.depth = d
hasdimensions = true
end
if n then
- setfield(t,"objnum",n)
+ t.objnum = n
end
view = views[view] or view or 1 -- fit is default
- setfield(t,"dest_id",name)
- setfield(t,"dest_type",view)
+ t.dest_id = name
+ t.dest_type = view
if hasdimensions and view == 0 then -- xyz
-- see (!) s -> m -> t -> r
- -- linked
local s = copy_node(pdfsave)
local m = copy_node(pdfsetmatrix)
local r = copy_node(pdfrestore)
- setfield(m,"data","1 0 0 1")
- setfield(s,"next",m)
- setfield(m,"next",t)
- setfield(t,"next",r)
- setfield(m,"prev",s)
- setfield(t,"prev",m)
- setfield(r,"prev",t)
+ m.data = "1 0 0 1"
+ s.next = m
+ m.next = t
+ t.next = r
+ m.prev = s
+ t.prev = m
+ r.prev = t
return s -- a list
else
return t
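(Editorial note, not part of the patch: nodepool.pdfdestination above hand-links the save/matrix/dest/restore whatsits into a doubly linked list. A sketch of the same chain built with node.insert_after, which takes care of the prev/next bookkeeping itself; t stands for the pdfdest copy made in the function.)

-- editorial sketch (not in the patch): the s <-> m <-> t <-> r chain via node.insert_after
local s = node.copy(pdfsave)
local m = node.copy(pdfsetmatrix)
local r = node.copy(pdfrestore)
m.data = "1 0 0 1"
node.insert_after(s,s,m) -- s <-> m
node.insert_after(s,m,t) -- s <-> m <-> t
node.insert_after(s,t,r) -- s <-> m <-> t <-> r
-- returning s yields the same four node list as the code above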
diff --git a/tex/context/base/lpdf-tag.lua b/tex/context/base/lpdf-tag.lua
index afddec345..29ffcd207 100644
--- a/tex/context/base/lpdf-tag.lua
+++ b/tex/context/base/lpdf-tag.lua
@@ -6,7 +6,6 @@ if not modules then modules = { } end modules ['lpdf-tag'] = {
license = "see context related readme files"
}
-local next = next
local format, match, concat = string.format, string.match, table.concat
local lpegmatch = lpeg.match
local utfchar = utf.char
@@ -15,9 +14,7 @@ local trace_tags = false trackers.register("structures.tags", function(v) trace
local report_tags = logs.reporter("backend","tags")
-local backends = backends
-local lpdf = lpdf
-local nodes = nodes
+local backends, lpdf, nodes = backends, lpdf, nodes
local nodeinjections = backends.pdf.nodeinjections
local codeinjections = backends.pdf.codeinjections
@@ -37,6 +34,10 @@ local pdfpagereference = lpdf.pagereference
local texgetcount = tex.getcount
+local nodepool = nodes.pool
+
+local pdfliteral = nodepool.pdfliteral
+
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
@@ -46,26 +47,11 @@ local glyph_code = nodecodes.glyph
local a_tagged = attributes.private('tagged')
local a_image = attributes.private('image')
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local nodepool = nuts.pool
-local pdfliteral = nodepool.pdfliteral
-
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getprev = nuts.getprev
-local getnext = nuts.getnext
-local getlist = nuts.getlist
-local setfield = nuts.setfield
-
-local traverse_nodes = nuts.traverse
-local tosequence = nuts.tosequence
-local copy_node = nuts.copy
-local slide_nodelist = nuts.slide
-local insert_before = nuts.insert_before
-local insert_after = nuts.insert_after
+local traverse_nodes = node.traverse
+local traverse_id = node.traverse_id
+local tosequence = nodes.tosequence
+local copy_node = node.copy
+local slide_nodelist = node.slide
local structure_stack = { }
local structure_kids = pdfarray()
@@ -189,8 +175,7 @@ local function makeelement(fulltag,parent)
end
local function makecontent(parent,start,stop,slist,id)
- local tag = parent.tag
- local kids = parent.kids
+ local tag, kids = parent.tag, parent.kids
local last = index
if id == "image" then
local d = pdfdictionary {
@@ -213,29 +198,24 @@ local function makecontent(parent,start,stop,slist,id)
end
--
local bliteral = pdfliteral(format("/%s <</MCID %s>>BDC",tag,last))
- local eliteral = pdfliteral("EMC")
- -- use insert instead:
- local prev = getprev(start)
+ local prev = start.prev
if prev then
- setfield(prev,"next",bliteral)
- setfield(bliteral,"prev",prev)
+ prev.next, bliteral.prev = bliteral, prev
end
- setfield(start,"prev",bliteral)
- setfield(bliteral,"next",start)
- -- use insert instead:
- local next = getnext(stop)
- if next then
- setfield(next,"prev",eliteral)
- setfield(eliteral,"next",next)
+ start.prev, bliteral.next = bliteral, start
+ if slist and slist.list == start then
+ slist.list = bliteral
+ elseif not prev then
+ report_tags("this can't happen: injection in front of nothing")
end
- setfield(stop,"next",eliteral)
- setfield(eliteral,"prev",stop)
--
- if slist and getlist(slist) == start then
- setfield(slist,"list",bliteral)
- elseif not getprev(start) then
- report_tags("this can't happen: injection in front of nothing")
+ local eliteral = pdfliteral("EMC")
+ local next = stop.next
+ if next then
+ next.prev, eliteral.next = eliteral, next
end
+ stop.next, eliteral.prev = eliteral, stop
+ --
index = index + 1
list[index] = parent.pref
return bliteral, eliteral
@@ -247,9 +227,9 @@ local level, last, ranges, range = 0, nil, { }, nil
local function collectranges(head,list)
for n in traverse_nodes(head) do
- local id = getid(n) -- 14: image, 8: literal (mp)
+ local id = n.id -- 14: image, 8: literal (mp)
if id == glyph_code then
- local at = getattr(n,a_tagged)
+ local at = n[a_tagged]
if not at then
range = nil
elseif last ~= at then
@@ -260,9 +240,9 @@ local function collectranges(head,list)
range[4] = n -- stop
end
elseif id == hlist_code or id == vlist_code then
- local at = getattr(n,a_image)
+ local at = n[a_image]
if at then
- local at = getattr(n,a_tagged)
+ local at = n[a_tagged]
if not at then
range = nil
else
@@ -270,7 +250,7 @@ local function collectranges(head,list)
end
last = nil
else
- local nl = getlist(n)
+ local nl = n.list
slide_nodelist(nl) -- temporary hack till math gets slided (tracker item)
collectranges(nl,n)
end
@@ -282,7 +262,6 @@ function nodeinjections.addtags(head)
-- no need to adapt head, as we always operate on lists
level, last, ranges, range = 0, nil, { }, nil
initializepage()
- head = tonut(head)
collectranges(head)
if trace_tags then
for i=1,#ranges do
@@ -316,9 +295,8 @@ function nodeinjections.addtags(head)
finishpage()
-- can be separate feature
--
- -- injectspans(tonut(head)) -- does to work yet
+ -- injectspans(head) -- does not work yet
--
- head = tonode(head)
return head, true
end
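(Editorial note, not part of the patch: the makecontent hunk above splices a /tag <</MCID n>>BDC ... EMC literal pair around the range start..stop by setting prev/next directly; the removed lines even carried "use insert instead" reminders. A sketch of the same splice with the node helpers; head is a hypothetical name for the list that contains the range.)

-- editorial sketch (not in the patch): equivalent splice using the node helpers
local bliteral = pdfliteral(format("/%s <</MCID %s>>BDC",tag,last)) -- opens the marked content
local eliteral = pdfliteral("EMC")                                  -- closes it
head = node.insert_before(head,start,bliteral) -- BDC literal in front of the range
head = node.insert_after (head,stop ,eliteral) -- EMC literal after it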
diff --git a/tex/context/base/luat-cnf.lua b/tex/context/base/luat-cnf.lua
index 4ad6cd69d..3672c603e 100644
--- a/tex/context/base/luat-cnf.lua
+++ b/tex/context/base/luat-cnf.lua
@@ -23,7 +23,7 @@ texconfig.half_error_line = 50 -- 50 -- obsolete
texconfig.expand_depth = 10000 -- 10000
texconfig.hash_extra = 100000 -- 0
texconfig.nest_size = 1000 -- 50
-texconfig.max_in_open = 500 -- 15 -- in fact it's limited to 127
+texconfig.max_in_open = 500 -- 15
texconfig.max_print_line = 10000 -- 79
texconfig.max_strings = 500000 -- 15000
texconfig.param_size = 25000 -- 60
diff --git a/tex/context/base/luat-sto.lua b/tex/context/base/luat-sto.lua
index 041050fb8..7a11b7f5e 100644
--- a/tex/context/base/luat-sto.lua
+++ b/tex/context/base/luat-sto.lua
@@ -163,7 +163,6 @@ storage.register("storage/shared", storage.shared, "storage.shared")
local mark = storage.mark
if string.patterns then mark(string.patterns) end
-if string.formatters then mark(string.formatters) end
if lpeg.patterns then mark(lpeg.patterns) end
if os.env then mark(os.env) end
if number.dimenfactors then mark(number.dimenfactors) end
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index 8b34a96a3..3e10eb96d 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -746,11 +746,8 @@ local function _xmlconvert_(data, settings)
end
if errorstr and errorstr ~= "" then
result.error = true
- else
- errorstr = nil
end
result.statistics = {
- errormessage = errorstr,
entities = {
decimals = dcache,
hexadecimals = hcache,
@@ -1019,28 +1016,26 @@ local function verbose_document(e,handlers)
end
local function serialize(e,handlers,...)
- if e then
- local initialize = handlers.initialize
- local finalize = handlers.finalize
- local functions = handlers.functions
- if initialize then
- local state = initialize(...)
- if not state == true then
- return state
- end
- end
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers) -- dc ?
- end
- if finalize then
- return finalize()
+ local initialize = handlers.initialize
+ local finalize = handlers.finalize
+ local functions = handlers.functions
+ if initialize then
+ local state = initialize(...)
+ if not state == true then
+ return state
end
end
+ local etg = e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ -- elseif type(e) == "string" then
+ -- functions["@tx@"](e,handlers)
+ else
+ functions["@dc@"](e,handlers) -- dc ?
+ end
+ if finalize then
+ return finalize()
+ end
end
local function xserialize(e,handlers)
diff --git a/tex/context/base/m-oldbibtex.mkiv b/tex/context/base/m-oldbibtex.mkiv
deleted file mode 100644
index 08c23e7cc..000000000
--- a/tex/context/base/m-oldbibtex.mkiv
+++ /dev/null
@@ -1,16 +0,0 @@
-%D \module
-%D [ file=m-oldbibtex,
-%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
-%D title=Falback on old method,
-%D subtitle=Publications,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
-%D by \PRAGMA. See mreadme.pdf for details.
-
-\loadmarkfile{bibl-bib}
-\loadmarkfile{bibl-tra}
-
-\endinput
diff --git a/tex/context/base/math-dir.lua b/tex/context/base/math-dir.lua
index 525d07831..507a24e41 100644
--- a/tex/context/base/math-dir.lua
+++ b/tex/context/base/math-dir.lua
@@ -23,19 +23,8 @@ local trace_directions = false trackers.register("typesetters.directions.math
local report_directions = logs.reporter("typesetting","math directions")
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getlist = nuts.getlist
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
+local insert_node_before = nodes.insert_before
+local insert_node_after = nodes.insert_after
local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
@@ -44,7 +33,7 @@ local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_textdir = nodepool.textdir
@@ -72,9 +61,9 @@ local function processmath(head)
stop = nil
end
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local char = getchar(current)
+ local char = current.char
local cdir = chardirections[char]
if cdir == "en" or cdir == "an" then -- we could check for mathclass punctuation
if not start then
@@ -94,7 +83,7 @@ local function processmath(head)
if mirror then
local class = charclasses[char]
if class == "open" or class == "close" then
- setfield(current,"char",mirror)
+ current.char = mirror
if trace_directions then
report_directions("mirrored: %C to %C",char,mirror)
end
@@ -105,13 +94,6 @@ local function processmath(head)
end
elseif not start then
-- nothing
-if id == hlist_code or id == vlist_code then
- local list, d = processmath(getlist(current))
- setfield(current,"list",list)
- if d then
- done = true
- end
-end
elseif start == stop then
start = nil
else
@@ -119,14 +101,14 @@ end
-- math can pack things into hlists .. we need to make sure we don't process
-- too often: needs checking
if id == hlist_code or id == vlist_code then
- local list, d = processmath(getlist(current))
- setfield(current,"list",list)
+ local list, d = processmath(current.list)
+ current.list = list
if d then
done = true
end
end
end
- current = getnext(current)
+ current = current.next
end
if not start then
-- nothing
@@ -142,11 +124,9 @@ local enabled = false
function directions.processmath(head) -- style, penalties
if enabled then
- local h = tonut(head)
- local a = getattr(h,a_mathbidi)
+ local a = head[a_mathbidi]
if a and a > 0 then
- local head, done = processmath(h)
- return tonode(head), done
+ return processmath(head)
end
end
return head, false
diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua
index f4bd1348a..bd9a1d315 100644
--- a/tex/context/base/math-fbk.lua
+++ b/tex/context/base/math-fbk.lua
@@ -133,8 +133,10 @@ function fallbacks.apply(target,original)
else
-- something else
end
- if trace_fallbacks and characters[k] then
- report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
+ if trace_fallbacks then
+ if characters[k] then
+ report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
+ end
end
end
end
@@ -332,10 +334,9 @@ end
local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset)
local characters = target.characters
+ local addprivate = fonts.helpers.addprivate
local olddata = characters[oldchr]
- -- brrr ... pagella has only next
- if olddata and not olddata.commands and olddata.width > 0 then
- local addprivate = fonts.helpers.addprivate
+ if olddata and not olddata.commands then
if swap then
swap = characters[swap]
height = swap.depth
@@ -398,9 +399,9 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
end
end
end
- return glyphdata, true
+ return glyphdata
else
- return olddata, false
+ return olddata
end
end
@@ -444,9 +445,9 @@ addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mat
addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h" } )
addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h" } )
-virtualcharacters[0xFE3DF] = function(data) return data.target.characters[0x23DF] end
-virtualcharacters[0xFE3DD] = function(data) return data.target.characters[0x23DD] end
-virtualcharacters[0xFE3B5] = function(data) return data.target.characters[0x23B5] end
+virtualcharacters[0xFE3DF] = function(data) return data.original.characters[0x23DF] end
+virtualcharacters[0xFE3DD] = function(data) return data.original.characters[0x23DD] end
+virtualcharacters[0xFE3B5] = function(data) return data.original.characters[0x23B5] end
-- todo: add some more .. numbers might change
@@ -456,10 +457,8 @@ addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mat
local function smashed(data,unicode,private)
local target = data.target
local height = target.parameters.xheight / 2
- local c, done = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height)
- if done then
- c.top_accent = nil -- or maybe also all the others
- end
+ local c = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height)
+ c.top_accent = nil
return c
end
diff --git a/tex/context/base/math-fen.mkiv b/tex/context/base/math-fen.mkiv
index fe959cc1e..94d93e4af 100644
--- a/tex/context/base/math-fen.mkiv
+++ b/tex/context/base/math-fen.mkiv
@@ -144,7 +144,7 @@
\definemathfence [mirroredangle] [mirrored] [\c!right="27E8,\c!left="27E9]
\definemathfence [mirroreddoubleangle] [mirrored] [\c!right="27EA,\c!left="27EB]
\definemathfence [mirroredsolidus] [mirrored] [\c!right="2044,\c!left="2044]
-\definemathfence [mirrorednothing] [mirrored]
+\definemathfence [mirrorednothing] [mirrored]
%D A bonus:
diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua
index 1351559a0..6be06e634 100644
--- a/tex/context/base/math-ini.lua
+++ b/tex/context/base/math-ini.lua
@@ -350,12 +350,10 @@ local utf8byte = lpeg.patterns.utf8byte * lpeg.P(-1)
local somechar = { }
table.setmetatableindex(somechar,function(t,k)
- if k then
- local b = lpegmatch(utf8byte,k)
- local v = b and chardata[b] or false
- t[k] = v
- return v
- end
+ local b = lpegmatch(utf8byte,k)
+ local v = b and chardata[b] or false
+ t[k] = v
+ return v
end)
local function utfmathclass(chr, default)
@@ -472,7 +470,6 @@ mathematics.utfmathclass = utfmathclass
mathematics.utfmathstretch = utfmathstretch
mathematics.utfmathcommand = utfmathcommand
mathematics.utfmathfiller = utfmathfiller
-mathematics.utfmathaccent = utfmathaccent
-- interfaced
diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua
index 4e25fe206..f3987c12f 100644
--- a/tex/context/base/math-noa.lua
+++ b/tex/context/base/math-noa.lua
@@ -54,35 +54,16 @@ local report_families = logs.reporter("mathematics","families")
local a_mathrendering = attributes.private("mathrendering")
local a_exportstatus = attributes.private("exportstatus")
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-local tonut = nuts.tonut
-local nutstring = nuts.tostring
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local free_node = nuts.free
-local new_node = nuts.new -- todo: pool: math_noad math_sub
-local copy_node = nuts.copy
-
-local mlist_to_hlist = nodes.mlist_to_hlist
-
+local mlist_to_hlist = node.mlist_to_hlist
local font_of_family = node.family_font
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
+local free_node = node.free
+local new_node = node.new -- todo: pool: math_noad math_sub
+local copy_node = node.copy
-local new_kern = nodepool.kern
-local new_rule = nodepool.rule
+local new_kern = nodes.pool.kern
+local new_rule = nodes.pool.rule
local topoints = number.points
@@ -145,23 +126,23 @@ local function process(start,what,n,parent)
if n then n = n + 1 else n = 0 end
local prev = nil
while start do
- local id = getid(start)
+ local id = start.id
if trace_processing then
if id == math_noad then
- report_processing("%w%S, class %a",n*2,nutstring(start),noadcodes[getsubtype(start)])
+ report_processing("%w%S, class %a",n*2,start,noadcodes[start.subtype])
elseif id == math_char then
- local char = getchar(start)
- local fam = getfield(start,"fam")
+ local char = start.char
+ local fam = start.fam
local font = font_of_family(fam)
- report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,nutstring(start),fam,font,char,char)
+ report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,start,fam,font,char,char)
else
- report_processing("%w%S",n*2,nutstring(start))
+ report_processing("%w%S",n*2,start)
end
end
local proc = what[id]
if proc then
-- report_processing("start processing")
- local done, newstart = proc(start,what,n,parent) -- prev is bugged: or getprev(start)
+ local done, newstart = proc(start,what,n,parent) -- prev is bugged: or start.prev
if newstart then
start = newstart
-- report_processing("stop processing (new start)")
@@ -173,55 +154,55 @@ local function process(start,what,n,parent)
elseif id == math_noad then
if prev then
-- we have no proper prev in math nodes yet
- setfield(start,"prev",prev)
+ start.prev = prev
end
-
- local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
elseif id == math_box or id == math_sub then
- local noad = getfield(start,"list") if noad then process(noad,what,n,start) end -- list (not getlist !)
+ -- local noad = start.list if noad then process(noad,what,n,start) end -- list
+ local noad = start.head if noad then process(noad,what,n,start) end -- list
elseif id == math_fraction then
- local noad = getfield(start,"num") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"denom") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"left") if noad then process(noad,what,n,start) end -- delimiter
- noad = getfield(start,"right") if noad then process(noad,what,n,start) end -- delimiter
+ local noad = start.num if noad then process(noad,what,n,start) end -- list
+ noad = start.denom if noad then process(noad,what,n,start) end -- list
+ noad = start.left if noad then process(noad,what,n,start) end -- delimiter
+ noad = start.right if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_choice then
- local noad = getfield(start,"display") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"text") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"script") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"scriptscript") if noad then process(noad,what,n,start) end -- list
+ local noad = start.display if noad then process(noad,what,n,start) end -- list
+ noad = start.text if noad then process(noad,what,n,start) end -- list
+ noad = start.script if noad then process(noad,what,n,start) end -- list
+ noad = start.scriptscript if noad then process(noad,what,n,start) end -- list
elseif id == math_fence then
- local noad = getfield(start,"delim") if noad then process(noad,what,n,start) end -- delimiter
+ local noad = start.delim if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_radical then
- local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"left") if noad then process(noad,what,n,start) end -- delimiter
- noad = getfield(start,"degree") if noad then process(noad,what,n,start) end -- list
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
+ noad = start.left if noad then process(noad,what,n,start) end -- delimiter
+ noad = start.degree if noad then process(noad,what,n,start) end -- list
elseif id == math_accent then
- local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"accent") if noad then process(noad,what,n,start) end -- list
- noad = getfield(start,"bot_accent") if noad then process(noad,what,n,start) end -- list
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
+ noad = start.accent if noad then process(noad,what,n,start) end -- list
+ noad = start.bot_accent if noad then process(noad,what,n,start) end -- list
elseif id == math_style then
-- has a next
else
-- glue, penalty, etc
end
prev = start
- start = getnext(start)
+ start = start.next
end
end
local function processnoads(head,actions,banner)
if trace_processing then
report_processing("start %a",banner)
- process(tonut(head),actions)
+ process(head,actions)
report_processing("stop %a",banner)
else
- process(tonut(head),actions)
+ process(head,actions)
end
end
@@ -252,71 +233,37 @@ local familymap = { [0] =
"pseudobold",
}
--- families[math_char] = function(pointer)
--- if getfield(pointer,"fam") == 0 then
--- local a = getattr(pointer,a_mathfamily)
--- if a and a > 0 then
--- setattr(pointer,a_mathfamily,0)
--- if a > 5 then
--- local char = getchar(pointer)
--- local bold = boldmap[char]
--- local newa = a - 3
--- if bold then
--- setattr(pointer,a_exportstatus,char)
--- setfield(pointer,"char",bold)
--- if trace_families then
--- report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
--- end
--- else
--- if trace_families then
--- report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
--- end
--- end
--- setfield(pointer,"fam",newa)
--- else
--- if trace_families then
--- local char = getchar(pointer)
--- report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
--- end
--- setfield(pointer,"fam",a)
--- end
--- else
--- -- pointer.fam = 0
--- end
--- end
--- end
-
families[math_char] = function(pointer)
- if getfield(pointer,"fam") == 0 then
- local a = getattr(pointer,a_mathfamily)
+ if pointer.fam == 0 then
+ local a = pointer[a_mathfamily]
if a and a > 0 then
- setattr(pointer,a_mathfamily,0)
+ pointer[a_mathfamily] = 0
if a > 5 then
- local char = getchar(pointer)
+ local char = pointer.char
local bold = boldmap[char]
local newa = a - 3
if not bold then
if trace_families then
report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
end
- setfield(pointer,"fam",newa)
- elseif not fontcharacters[font_of_family(newa)][bold] then
+ pointer.fam = newa
+ elseif not fontcharacters[font_of_family(newa)][bold] then
if trace_families then
report_families("no bold character for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
end
if newa > 3 then
- setfield(pointer,"fam",newa-3)
+ pointer.fam = newa - 3
end
else
- setattr(pointer,a_exportstatus,char)
- setfield(pointer,"char",bold)
+ pointer[a_exportstatus] = char
+ pointer.char = bold
if trace_families then
report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
end
- setfield(pointer,"fam",newa)
+ pointer.fam = newa
end
else
- local char = getchar(pointer)
+ local char = pointer.char
if not fontcharacters[font_of_family(a)][char] then
if trace_families then
report_families("no bold replacement for %C",char)
@@ -325,7 +272,7 @@ families[math_char] = function(pointer)
if trace_families then
report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
end
- setfield(pointer,"fam",a)
+ pointer.fam = a
end
end
end
@@ -333,31 +280,31 @@ families[math_char] = function(pointer)
end
families[math_delim] = function(pointer)
- if getfield(pointer,"small_fam") == 0 then
- local a = getattr(pointer,a_mathfamily)
+ if pointer.small_fam == 0 then
+ local a = pointer[a_mathfamily]
if a and a > 0 then
- setattr(pointer,a_mathfamily,0)
+ pointer[a_mathfamily] = 0
if a > 5 then
-- no bold delimiters in unicode
a = a - 3
end
- local char = getfield(pointer,"small_char")
+ local char = pointer.small_char
local okay = fontcharacters[font_of_family(a)][char]
if okay then
- setfield(pointer,"small_fam",a)
+ pointer.small_fam = a
elseif a > 2 then
- setfield(pointer,"small_fam",a-3)
+ pointer.small_fam = a - 3
end
- local char = getfield(pointer,"large_char")
+ local char = pointer.large_char
local okay = fontcharacters[font_of_family(a)][char]
if okay then
- setfield(pointer,"large_fam",a)
+ pointer.large_fam = a
elseif a > 2 then
- setfield(pointer,"large_fam",a-3)
+ pointer.large_fam = a - 3
end
else
- setfield(pointer,"small_fam",0)
- setfield(pointer,"large_fam",0)
+ pointer.small_fam = 0
+ pointer.large_fam = 0
end
end
end
@@ -385,8 +332,8 @@ local fallbackstyleattr = mathematics.fallbackstyleattr
local setnodecolor = nodes.tracers.colors.set
local function checked(pointer)
- local char = getchar(pointer)
- local fam = getfield(pointer,"fam")
+ local char = pointer.char
+ local fam = pointer.fam
local id = font_of_family(fam)
local tc = fontcharacters[id]
if not tc[char] then
@@ -399,27 +346,27 @@ local function checked(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- setattr(pointer,a_exportstatus,char) -- testcase: exponentiale
- setfield(pointer,"char",newchar)
+ pointer[a_exportstatus] = char -- testcase: exponentiale
+ pointer.char = newchar
return true
end
end
end
processors.relocate[math_char] = function(pointer)
- local g = getattr(pointer,a_mathgreek) or 0
- local a = getattr(pointer,a_mathalphabet) or 0
+ local g = pointer[a_mathgreek] or 0
+ local a = pointer[a_mathalphabet] or 0
if a > 0 or g > 0 then
if a > 0 then
- setattr(pointer,a_mathgreek,0)
+ pointer[a_mathgreek] = 0
end
if g > 0 then
- setattr(pointer,a_mathalphabet,0)
+ pointer[a_mathalphabet] = 0
end
- local char = getchar(pointer)
+ local char = pointer.char
local newchar = remapalphabets(char,a,g)
if newchar then
- local fam = getfield(pointer,"fam")
+ local fam = pointer.fam
local id = font_of_family(fam)
local characters = fontcharacters[id]
if characters[newchar] then
@@ -429,7 +376,7 @@ processors.relocate[math_char] = function(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- setfield(pointer,"char",newchar)
+ pointer.char = newchar
return true
else
local fallback = fallbackstyleattr(a)
@@ -443,7 +390,7 @@ processors.relocate[math_char] = function(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- setfield(pointer,"char",newchar)
+ pointer.char = newchar
return true
elseif trace_remapping then
report_remap("char",id,char,newchar," fails (no fallback character)")
@@ -489,19 +436,19 @@ processors.render = { }
local rendersets = mathematics.renderings.numbers or { } -- store
processors.render[math_char] = function(pointer)
- local attr = getattr(pointer,a_mathrendering)
+ local attr = pointer[a_mathrendering]
if attr and attr > 0 then
- local char = getchar(pointer)
+ local char = pointer.char
local renderset = rendersets[attr]
if renderset then
local newchar = renderset[char]
if newchar then
- local fam = getfield(pointer,"fam")
+ local fam = pointer.fam
local id = font_of_family(fam)
local characters = fontcharacters[id]
if characters and characters[newchar] then
- setfield(pointer,"char",newchar)
- setattr(pointer,a_exportstatus,char)
+ pointer.char = newchar
+ pointer[a_exportstatus] = char
end
end
end
@@ -528,19 +475,19 @@ local mathsize = attributes.private("mathsize")
local resize = { } processors.resize = resize
resize[math_fence] = function(pointer)
- local subtype = getsubtype(pointer)
+ local subtype = pointer.subtype
if subtype == left_fence_code or subtype == right_fence_code then
- local a = getattr(pointer,mathsize)
+ local a = pointer[mathsize]
if a and a > 0 then
local method, size = div(a,100), a % 100
- setattr(pointer,mathsize,0)
- local delimiter = getfield(pointer,"delim")
- local chr = getfield(delimiter,"small_char")
+ pointer[mathsize] = 0
+ local delimiter = pointer.delim
+ local chr = delimiter.small_char
if chr > 0 then
- local fam = getfield(delimiter,"small_fam")
+ local fam = delimiter.small_fam
local id = font_of_family(fam)
if id > 0 then
- setfield(delimiter,"small_char",mathematics.big(fontdata[id],chr,size,method))
+ delimiter.small_char = mathematics.big(fontdata[id],chr,size,method)
end
end
end
@@ -552,6 +499,7 @@ function handlers.resize(head,style,penalties)
return true
end
+
local collapse = { } processors.collapse = collapse
local mathpairs = characters.mathpairs
@@ -590,20 +538,20 @@ local validpair = {
}
local function movesubscript(parent,current_nucleus,current_char)
- local prev = getfield(parent,"prev")
- if prev and getid(prev) == math_noad then
- if not getfield(prev,"sup") and not getfield(prev,"sub") then
- setfield(current_nucleus,"char",movesub[current_char or getchar(current_nucleus)])
+ local prev = parent.prev
+ if prev and prev.id == math_noad then
+ if not prev.sup and not prev.sub then
+ current_nucleus.char = movesub[current_char or current_nucleus.char]
-- {f} {'}_n => f_n^'
- local nucleus = getfield(parent,"nucleus")
- local sub = getfield(parent,"sub")
- local sup = getfield(parent,"sup")
- setfield(prev,"sup",nucleus)
- setfield(prev,"sub",sub)
+ local nucleus = parent.nucleus
+ local sub = parent.sub
+ local sup = parent.sup
+ prev.sup = nucleus
+ prev.sub = sub
local dummy = copy_node(nucleus)
- setfield(dummy,"char",0)
- setfield(parent,"nucleus",dummy)
- setfield(parent,"sub",nil)
+ dummy.char = 0
+ parent.nucleus = dummy
+ parent.sub = nil
if trace_collapsing then
report_collapsing("fixing subscript")
end
@@ -613,40 +561,40 @@ end
local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to turn in on and off
if parent then
- if validpair[getsubtype(parent)] then
- local current_nucleus = getfield(parent,"nucleus")
- if getid(current_nucleus) == math_char then
- local current_char = getchar(current_nucleus)
- if not getfield(parent,"sub") and not getfield(parent,"sup") then
+ if validpair[parent.subtype] then
+ local current_nucleus = parent.nucleus
+ if current_nucleus.id == math_char then
+ local current_char = current_nucleus.char
+ if not parent.sub and not parent.sup then
local mathpair = mathpairs[current_char]
if mathpair then
- local next_noad = getnext(parent)
- if next_noad and getid(next_noad) == math_noad then
- if validpair[getsubtype(next_noad)] then
- local next_nucleus = getfield(next_noad,"nucleus")
- if getid(next_nucleus) == math_char then
- local next_char = getchar(next_nucleus)
+ local next_noad = parent.next
+ if next_noad and next_noad.id == math_noad then
+ if validpair[next_noad.subtype] then
+ local next_nucleus = next_noad.nucleus
+ if next_nucleus.id == math_char then
+ local next_char = next_nucleus.char
local newchar = mathpair[next_char]
if newchar then
- local fam = getfield(current_nucleus,"fam")
+ local fam = current_nucleus.fam
local id = font_of_family(fam)
local characters = fontcharacters[id]
if characters and characters[newchar] then
if trace_collapsing then
report_collapsing("%U + %U => %U",current_char,next_char,newchar)
end
- setfield(current_nucleus,"char",newchar)
- local next_next_noad = getnext(next_noad)
+ current_nucleus.char = newchar
+ local next_next_noad = next_noad.next
if next_next_noad then
- setfield(parent,"next",next_next_noad)
- setfield(next_next_noad,"prev",parent)
+ parent.next = next_next_noad
+ next_next_noad.prev = parent
else
- setfield(parent,"next",nil)
+ parent.next = nil
end
- setfield(parent,"sup",getfield(next_noad,"sup"))
- setfield(parent,"sub",getfield(next_noad,"sub"))
- setfield(next_noad,"sup",nil)
- setfield(next_noad,"sub",nil)
+ parent.sup = next_noad.sup
+ parent.sub = next_noad.sub
+ next_noad.sup = nil
+ next_noad.sub = nil
free_node(next_noad)
collapsepair(pointer,what,n,parent,true)
if not nested and movesub[current_char] then
@@ -686,13 +634,13 @@ local replaced = { }
local function replace(pointer,what,n,parent)
pointer = parent -- we're following the parent list (chars trigger this)
- local next = getnext(pointer)
+ local next = pointer.next
local start_super, stop_super, start_sub, stop_sub
local mode = "unset"
- while next and getid(next) == math_noad do
- local nextnucleus = getfield(next,"nucleus")
- if nextnucleus and getid(nextnucleus) == math_char and not getfield(next,"sub") and not getfield(next,"sup") then
- local char = getchar(nextnucleus)
+ while next and next.id == math_noad do
+ local nextnucleus = next.nucleus
+ if nextnucleus and nextnucleus.id == math_char and not next.sub and not next.sup then
+ local char = nextnucleus.char
local s = superscripts[char]
if s then
if not start_super then
@@ -702,8 +650,8 @@ local function replace(pointer,what,n,parent)
break
end
stop_super = next
- next = getnext(next)
- setfield(nextnucleus,"char",s)
+ next = next.next
+ nextnucleus.char = s
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
report_normalizing("superscript %C becomes %C",char,s)
@@ -718,8 +666,8 @@ local function replace(pointer,what,n,parent)
break
end
stop_sub = next
- next = getnext(next)
- setfield(nextnucleus,"char",s)
+ next = next.next
+ nextnucleus.char = s
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
report_normalizing("subscript %C becomes %C",char,s)
@@ -734,29 +682,29 @@ local function replace(pointer,what,n,parent)
end
if start_super then
if start_super == stop_super then
- setfield(pointer,"sup",getfield(start_super,"nucleus"))
+ pointer.sup = start_super.nucleus
else
local list = new_node(math_sub) -- todo attr
- setfield(list,"list",start_super)
- setfield(pointer,"sup",list)
+ list.head = start_super
+ pointer.sup = list
end
if mode == "super" then
- setfield(pointer,"next",getnext(stop_super))
+ pointer.next = stop_super.next
end
- setfield(stop_super,"next",nil)
+ stop_super.next = nil
end
if start_sub then
if start_sub == stop_sub then
- setfield(pointer,"sub",getfield(start_sub,"nucleus"))
+ pointer.sub = start_sub.nucleus
else
local list = new_node(math_sub) -- todo attr
- setfield(list,"list",start_sub)
- setfield(pointer,"sub",list)
+ list.head = start_sub
+ pointer.sub = list
end
if mode == "sub" then
- setfield(pointer,"next",getnext(stop_sub))
+ pointer.next = stop_sub.next
end
- setfield(stop_sub,"next",nil)
+ stop_sub.next = nil
end
-- we could return stop
end
@@ -837,20 +785,20 @@ function mathematics.setalternate(fam,tag)
end
alternate[math_char] = function(pointer)
- local a = getattr(pointer,a_mathalternate)
+ local a = pointer[a_mathalternate]
if a and a > 0 then
- setattr(pointer,a_mathalternate,0)
- local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
+ pointer[a_mathalternate] = 0
+ local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
local mathalternatesattributes = tfmdata.shared.mathalternatesattributes
if mathalternatesattributes then
local what = mathalternatesattributes[a]
- local alt = getalternate(tfmdata,getchar(pointer),what.feature,what.value)
+ local alt = getalternate(tfmdata,pointer.char,what.feature,what.value)
if alt then
if trace_alternates then
report_alternates("alternate %a, value %a, replacing glyph %U by glyph %U",
- tostring(what.feature),tostring(what.value),getchar(pointer),alt)
+ tostring(what.feature),tostring(what.value),pointer.char,alt)
end
- setfield(pointer,"char",alt)
+ pointer.char = alt
end
end
end
@@ -937,14 +885,13 @@ end
local function insert_kern(current,kern)
local sub = new_node(math_sub) -- todo: pool
local noad = new_node(math_noad) -- todo: pool
- setfield(sub,"list",kern)
- setfield(kern,"next",noad)
- setfield(noad,"nucleus",current)
+ sub.head = kern
+ kern.next = noad
+ noad.nucleus = current
return sub
end
local setcolor = nodes.tracers.colors.set
-local resetcolor = nodes.tracers.colors.reset
local italic_kern = new_kern
local c_positive_d = "trace:db"
local c_negative_d = "trace:dr"
@@ -966,44 +913,44 @@ trackers.register("math.italics", function(v)
end)
italics[math_char] = function(pointer,what,n,parent)
- local method = getattr(pointer,a_mathitalics)
+ local method = pointer[a_mathitalics]
if method and method > 0 then
- local char = getchar(pointer)
- local font = font_of_family(getfield(pointer,"fam")) -- todo: table
+ local char = pointer.char
+ local font = font_of_family(pointer.fam) -- todo: table
local correction, visual = getcorrection(method,font,char)
if correction then
- local pid = getid(parent)
+ local pid = parent.id
local sub, sup
if pid == math_noad then
- sup = getfield(parent,"sup")
- sub = getfield(parent,"sub")
+ sup = parent.sup
+ sub = parent.sub
end
if sup or sub then
- local subtype = getsubtype(parent)
+ local subtype = parent.subtype
if subtype == noad_oplimits then
if sup then
- setfield(parent,"sup",insert_kern(sup,italic_kern(correction,font)))
+ parent.sup = insert_kern(sup,italic_kern(correction,font))
if trace_italics then
report_italics("method %a, adding %p italic correction for upper limit of %C",method,correction,char)
end
end
if sub then
local correction = - correction
- setfield(parent,"sub",insert_kern(sub,italic_kern(correction,font)))
+ parent.sub = insert_kern(sub,italic_kern(correction,font))
if trace_italics then
report_italics("method %a, adding %p italic correction for lower limit of %C",method,correction,char)
end
end
else
if sup then
- setfield(parent,"sup",insert_kern(sup,italic_kern(correction,font)))
+ parent.sup = insert_kern(sup,italic_kern(correction,font))
if trace_italics then
report_italics("method %a, adding %p italic correction before superscript after %C",method,correction,char)
end
end
end
else
- local next_noad = getnext(parent)
+ local next_noad = parent.next
if not next_noad then
if n== 1 then -- only at the outer level .. will become an option (always,endonly,none)
if trace_italics then
@@ -1011,12 +958,12 @@ italics[math_char] = function(pointer,what,n,parent)
end
insert_node_after(parent,parent,italic_kern(correction,font))
end
- elseif getid(next_noad) == math_noad then
- local next_subtype = getsubtype(next_noad)
+ elseif next_noad.id == math_noad then
+ local next_subtype = next_noad.subtype
if next_subtype == noad_punct or next_subtype == noad_ord then
- local next_nucleus = getfield(next_noad,"nucleus")
- if getid(next_nucleus) == math_char then
- local next_char = getchar(next_nucleus)
+ local next_nucleus = next_noad.nucleus
+ if next_nucleus.id == math_char then
+ local next_char = next_nucleus.char
local next_data = chardata[next_char]
local visual = next_data.visual
if visual == "it" or visual == "bi" then
@@ -1100,15 +1047,15 @@ local validvariants = { -- fast check on valid
}
variants[math_char] = function(pointer,what,n,parent) -- also set export value
- local char = getchar(pointer)
+ local char = pointer.char
local selector = validvariants[char]
if selector then
- local next = getnext(parent)
- if next and getid(next) == math_noad then
- local nucleus = getfield(next,"nucleus")
- if nucleus and getid(nucleus) == math_char and getchar(nucleus) == selector then
+ local next = parent.next
+ if next and next.id == math_noad then
+ local nucleus = next.nucleus
+ if nucleus and nucleus.id == math_char and nucleus.char == selector then
local variant
- local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
+ local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
local mathvariants = tfmdata.resources.variants -- and variantdata
if mathvariants then
mathvariants = mathvariants[selector]
@@ -1117,8 +1064,8 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
end
end
if variant then
- setfield(pointer,"char",variant)
- setattr(pointer,a_exportstatus,char) -- we don't export the variant as it's visual markup
+ pointer.char = variant
+ pointer[a_exportstatus] = char -- we don't export the variant as it's visual markup
if trace_variants then
report_variants("variant (%U,%U) replaced by %U",char,selector,variant)
end
@@ -1127,8 +1074,8 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
report_variants("no variant (%U,%U)",char,selector)
end
end
- setfield(next,"prev",pointer)
- setfield(parent,"next",getnext(next))
+ next.prev = pointer
+ parent.next = next.next
free_node(next)
end
end
@@ -1161,7 +1108,7 @@ local colors = {
}
classes[math_char] = function(pointer,what,n,parent)
- local color = colors[getsubtype(parent)]
+ local color = colors[parent.subtype]
if color then
setcolor(pointer,color)
else
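(Editorial note, not part of the patch: the math-noa handlers above all follow the same shape: read a private attribute on a math_char noad, reset it, then rewrite the char while keeping the original around for the export. A condensed sketch of that pattern with a made-up attribute and mapping; a_exportstatus is the file's own local, everything else here is illustrative.)

-- editorial sketch (not in the patch): the read-attribute / reset / rewrite-char pattern
local a_demo  = attributes.private("mathdemo")  -- hypothetical attribute
local demomap = { [0x003D] = 0x2261 }           -- illustrative: turn = into ≡

local function demo(pointer)
    local a = pointer[a_demo]
    if a and a > 0 then
        pointer[a_demo] = 0                        -- consume the attribute
        local newchar = demomap[pointer.char]
        if newchar then
            pointer[a_exportstatus] = pointer.char -- keep the original char for export/tagging
            pointer.char = newchar
        end
    end
end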
diff --git a/tex/context/base/math-tag.lua b/tex/context/base/math-tag.lua
index 3cd4cae16..ab5902dd4 100644
--- a/tex/context/base/math-tag.lua
+++ b/tex/context/base/math-tag.lua
@@ -11,22 +11,10 @@ if not modules then modules = { } end modules ['math-tag'] = {
local find, match = string.find, string.match
local insert, remove = table.insert, table.remove
-local attributes = attributes
-local nodes = nodes
+local attributes, nodes = attributes, nodes
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getchar = nuts.getchar
-local getlist = nuts.getlist
-local getfield = nuts.getfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-
-local set_attributes = nuts.setattributes
-local traverse_nodes = nuts.traverse
+local set_attributes = nodes.setattributes
+local traverse_nodes = node.traverse
local nodecodes = nodes.nodecodes
@@ -73,24 +61,22 @@ local function processsubsup(start)
-- At some point we might need to add an attribute signaling the
-- super- and subscripts because TeX and MathML use a different
-- order.
- local nucleus = getfield(start,"nucleus")
- local sup = getfield(start,"sup")
- local sub = getfield(start,"sub")
+ local nucleus, sup, sub = start.nucleus, start.sup, start.sub
if sub then
if sup then
- setattr(start,a_tagged,start_tagged("msubsup"))
+ start[a_tagged] = start_tagged("msubsup")
process(nucleus)
process(sub)
process(sup)
stop_tagged()
else
- setattr(start,a_tagged,start_tagged("msub"))
+ start[a_tagged] = start_tagged("msub")
process(nucleus)
process(sub)
stop_tagged()
end
elseif sup then
- setattr(start,a_tagged,start_tagged("msup"))
+ start[a_tagged] = start_tagged("msup")
process(nucleus)
process(sup)
stop_tagged()
@@ -107,11 +93,11 @@ local actionstack = { }
process = function(start) -- we cannot use the processor as we have no finalizers (yet)
while start do
- local id = getid(start)
+ local id = start.id
if id == math_char_code then
- local char = getchar(start)
+ local char = start.char
-- check for code
- local a = getattr(start,a_mathcategory)
+ local a = start[a_mathcategory]
if a then
a = { detail = a }
end
@@ -133,22 +119,22 @@ process = function(start) -- we cannot use the processor as we have no finalizer
else
tag = "mo"
end
- setattr(start,a_tagged,start_tagged(tag,a))
+ start[a_tagged] = start_tagged(tag,a)
stop_tagged()
break -- okay?
elseif id == math_textchar_code then
-- check for code
- local a = getattr(start,a_mathcategory)
+ local a = start[a_mathcategory]
if a then
- setattr(start,a_tagged,start_tagged("ms",{ detail = a }))
+ start[a_tagged] = start_tagged("ms",{ detail = a })
else
- setattr(start,a_tagged,start_tagged("ms"))
+ start[a_tagged] = start_tagged("ms")
end
stop_tagged()
break
elseif id == math_delim_code then
-- check for code
- setattr(start,a_tagged,start_tagged("mo"))
+ start[a_tagged] = start_tagged("mo")
stop_tagged()
break
elseif id == math_style_code then
@@ -157,14 +143,14 @@ process = function(start) -- we cannot use the processor as we have no finalizer
processsubsup(start)
elseif id == math_box_code or id == hlist_code or id == vlist_code then
-- keep an eye on math_box_code and see what ends up in there
- local attr = getattr(start,a_tagged)
+ local attr = start[a_tagged]
local last = attr and taglist[attr]
if last and find(last[#last],"formulacaption[:%-]") then
-- leave alone, will nicely move to the outer level
else
local text = start_tagged("mtext")
- setattr(start,a_tagged,text)
- local list = getfield(start,"list")
+ start[a_tagged] = text
+ local list = start.list
if not list then
-- empty list
elseif not attr then
@@ -180,8 +166,8 @@ process = function(start) -- we cannot use the processor as we have no finalizer
local function runner(list) -- quite inefficient
local cache = { } -- we can have nested unboxed mess so best local to runner
for n in traverse_nodes(list) do
- local id = getid(n)
- local aa = getattr(n,a_tagged)
+ local id = n.id
+ local aa = n[a_tagged]
if aa then
local ac = cache[aa]
if not ac then
@@ -199,12 +185,12 @@ process = function(start) -- we cannot use the processor as we have no finalizer
end
cache[aa] = ac
end
- setattr(n,a_tagged,ac)
+ n[a_tagged] = ac
else
- setattr(n,a_tagged,text)
+ n[a_tagged] = text
end
if id == hlist_code or id == vlist_code then
- runner(getlist(n))
+ runner(n.list)
end
end
end
@@ -213,53 +199,47 @@ process = function(start) -- we cannot use the processor as we have no finalizer
stop_tagged()
end
elseif id == math_sub_code then
- local list = getfield(start,"list")
+ local list = start.list
if list then
- local attr = getattr(start,a_tagged)
+ local attr = start[a_tagged]
local last = attr and taglist[attr]
local action = last and match(last[#last],"maction:(.-)%-")
if action and action ~= "" then
if actionstack[#actionstack] == action then
- setattr(start,a_tagged,start_tagged("mrow"))
+ start[a_tagged] = start_tagged("mrow")
process(list)
stop_tagged()
else
insert(actionstack,action)
- setattr(start,a_tagged,start_tagged("mrow",{ detail = action }))
+ start[a_tagged] = start_tagged("mrow",{ detail = action })
process(list)
stop_tagged()
remove(actionstack)
end
else
- setattr(start,a_tagged,start_tagged("mrow"))
+ start[a_tagged] = start_tagged("mrow")
process(list)
stop_tagged()
end
end
elseif id == math_fraction_code then
- local num = getfield(start,"num")
- local denom = getfield(start,"denom")
- local left = getfield(start,"left")
- local right = getfield(start,"right")
+ local num, denom, left, right = start.num, start.denom, start.left, start.right
if left then
- setattr(left,a_tagged,start_tagged("mo"))
+ left[a_tagged] = start_tagged("mo")
process(left)
stop_tagged()
end
- setattr(start,a_tagged,start_tagged("mfrac"))
+ start[a_tagged] = start_tagged("mfrac")
process(num)
process(denom)
stop_tagged()
if right then
- setattr(right,a_tagged,start_tagged("mo"))
+ right[a_tagged] = start_tagged("mo")
process(right)
stop_tagged()
end
elseif id == math_choice_code then
- local display = getfield(start,"display")
- local text = getfield(start,"text")
- local script = getfield(start,"script")
- local scriptscript = getfield(start,"scriptscript")
+ local display, text, script, scriptscript = start.display, start.text, start.script, start.scriptscript
if display then
process(display)
end
@@ -273,69 +253,67 @@ process = function(start) -- we cannot use the processor as we have no finalizer
process(scriptscript)
end
elseif id == math_fence_code then
- local delim = getfield(start,"delim")
- local subtype = getfield(start,"subtype")
- -- setattr(start,a_tagged,start_tagged("mfenced")) -- needs checking
+ local delim = start.delim
+ local subtype = start.subtype
if subtype == 1 then
-- left
+ start[a_tagged] = start_tagged("mfenced")
if delim then
- setattr(start,a_tagged,start_tagged("mleft"))
+ start[a_tagged] = start_tagged("mleft")
process(delim)
stop_tagged()
end
elseif subtype == 2 then
-- middle
if delim then
- setattr(start,a_tagged,start_tagged("mmiddle"))
+ start[a_tagged] = start_tagged("mmiddle")
process(delim)
stop_tagged()
end
elseif subtype == 3 then
if delim then
- setattr(start,a_tagged,start_tagged("mright"))
+ start[a_tagged] = start_tagged("mright")
process(delim)
stop_tagged()
end
+ stop_tagged()
else
-- can't happen
end
- -- stop_tagged()
elseif id == math_radical_code then
- local left = getfield(start,"left")
- local degree = getfield(start,"degree")
+ local left, degree = start.left, start.degree
if left then
start_tagged("")
process(left) -- root symbol, ignored
stop_tagged()
end
if degree then -- not good enough, can be empty mlist
- setattr(start,a_tagged,start_tagged("mroot"))
+ start[a_tagged] = start_tagged("mroot")
processsubsup(start)
process(degree)
stop_tagged()
else
- setattr(start,a_tagged,start_tagged("msqrt"))
+ start[a_tagged] = start_tagged("msqrt")
processsubsup(start)
stop_tagged()
end
elseif id == math_accent_code then
- local accent = getfield(start,"accent")
- local bot_accent = getfield(start,"bot_accent")
+ local accent, bot_accent = start.accent, start.bot_accent
if bot_accent then
if accent then
- setattr(start,a_tagged,start_tagged("munderover",{ detail = "accent" }))
+ start[a_tagged] = start_tagged("munderover",{ detail = "accent" })
processsubsup(start)
process(bot_accent)
process(accent)
stop_tagged()
else
- setattr(start,a_tagged,start_tagged("munder",{ detail = "accent" }))
+ start[a_tagged] = start_tagged("munder",{ detail = "accent" })
processsubsup(start)
process(bot_accent)
stop_tagged()
end
elseif accent then
- setattr(start,a_tagged,start_tagged("mover",{ detail = "accent" }))
+ start[a_tagged] = start_tagged("mover",{ detail = "accent" })
processsubsup(start)
process(accent)
stop_tagged()
@@ -343,23 +321,22 @@ process = function(start) -- we cannot use the processor as we have no finalizer
processsubsup(start)
end
elseif id == glue_code then
- setattr(start,a_tagged,start_tagged("mspace"))
+ start[a_tagged] = start_tagged("mspace")
stop_tagged()
else
- setattr(start,a_tagged,start_tagged("merror", { detail = nodecodes[i] }))
+ start[a_tagged] = start_tagged("merror", { detail = nodecodes[i] })
stop_tagged()
end
- start = getnext(start)
+ start = start.next
end
end
function noads.handlers.tags(head,style,penalties)
- head = tonut(head)
local v_math = start_tagged("math")
local v_mrow = start_tagged("mrow")
- local v_mode = getattr(head,a_mathmode)
- -- setattr(head,a_tagged,v_math)
- setattr(head,a_tagged,v_mrow)
+ local v_mode = head[a_mathmode]
+ head[a_tagged] = v_math
+ head[a_tagged] = v_mrow
tags.setattributehash(v_math,"mode",v_mode == 1 and "display" or "inline")
process(head)
stop_tagged()
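
-- Illustration only, not part of the patch: in the plain node style restored in
-- the hunks above, an attribute is read and written by indexing the node with
-- its attribute number; node.set_attribute/node.has_attribute do the same job.
-- A minimal sketch, with a hypothetical attribute name:
local a_demo = attributes.private("demo")

local function markandread(n,value)
    n[a_demo] = value                  -- like node.set_attribute(n,a_demo,value)
    return n[a_demo]                   -- like node.has_attribute(n,a_demo)
end
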
diff --git a/tex/context/base/mult-de.mkii b/tex/context/base/mult-de.mkii
index 0c4dae5b6..5f2714ce6 100644
--- a/tex/context/base/mult-de.mkii
+++ b/tex/context/base/mult-de.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-def.lua b/tex/context/base/mult-def.lua
index 65db8fd5e..afd466531 100644
--- a/tex/context/base/mult-def.lua
+++ b/tex/context/base/mult-def.lua
@@ -6613,9 +6613,6 @@ return {
["firstnamesep"]={
["en"]="firstnamesep",
},
- ["surnamefirstnamesep"]={
- ["en"]="surnamefirstnamesep",
- },
["vonsep"]={
["en"]="vonsep",
},
@@ -6625,9 +6622,6 @@ return {
["surnamesep"]={
["en"]="surnamesep",
},
- ["surnameinitialsep"]={
- ["en"]="surnameinitialsep",
- },
["lastnamesep"]={
["en"]="lastnamesep",
},
diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv
index 9206743f4..192a380ee 100644
--- a/tex/context/base/mult-def.mkiv
+++ b/tex/context/base/mult-def.mkiv
@@ -36,7 +36,6 @@
\def\c!fences {fences}
\def\c!keeptogether {keeptogether}
-\def\c!viewerprefix {viewerprefix}
\def\c!dataset {dataset}
\def\c!sectionblock {sectionblock}
@@ -54,7 +53,6 @@
\def\c!comma {comma}
\def\c!period {period}
\def\c!monthconversion {monthconversion}
-\def\c!authorconversion {authorconversion}
\def\c!comment {comment}
\def\c!textalign {textalign}
\def\c!up {up}
@@ -64,7 +62,6 @@
\def\c!group {group}
\def\c!groupsuffix {groupsuffix}
-\def\v!dataset {dataset}
\def\v!compressseparator{compressseparator}
\def\v!notation {notation}
\def\v!endnote {endnote}
@@ -81,7 +78,6 @@
\def\v!combination {combination}
\def\v!norepeat {norepeat}
\def\v!mixed {mixed}
-\def\v!centerlast {centerlast}
\def\s!lcgreek {lcgreek}
\def\s!ucgreek {ucgreek}
diff --git a/tex/context/base/mult-en.mkii b/tex/context/base/mult-en.mkii
index 00861c3be..97732dab7 100644
--- a/tex/context/base/mult-en.mkii
+++ b/tex/context/base/mult-en.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-fr.mkii b/tex/context/base/mult-fr.mkii
index 9afe371c2..520f8e1a6 100644
--- a/tex/context/base/mult-fr.mkii
+++ b/tex/context/base/mult-fr.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-fun.lua b/tex/context/base/mult-fun.lua
index 0f5bd8ace..2101b95e9 100644
--- a/tex/context/base/mult-fun.lua
+++ b/tex/context/base/mult-fun.lua
@@ -17,7 +17,7 @@ return {
--
"sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
"tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
- "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh",
+ "invsin", "invcos", "acosh", "asinh", "sinh", "cosh",
"paired", "tripled",
"unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
-- "halfcircle", "quartercircle",
diff --git a/tex/context/base/mult-it.mkii b/tex/context/base/mult-it.mkii
index 802cb840c..2b31e8e10 100644
--- a/tex/context/base/mult-it.mkii
+++ b/tex/context/base/mult-it.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-low.lua b/tex/context/base/mult-low.lua
index 250b20c22..f82be039c 100644
--- a/tex/context/base/mult-low.lua
+++ b/tex/context/base/mult-low.lua
@@ -279,9 +279,7 @@ return {
--
"dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith",
--
- "newconstant", "setnewconstant", "setconstant", "setconstantvalue",
- "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue",
- --
+ "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant",
"newmacro", "setnewmacro", "newfraction",
"newsignal",
--
@@ -367,7 +365,5 @@ return {
--
"lesshyphens", "morehyphens", "nohyphens", "dohyphens",
--
- "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath",
- --
}
}
diff --git a/tex/context/base/mult-nl.mkii b/tex/context/base/mult-nl.mkii
index a1f9742f1..9f91515cb 100644
--- a/tex/context/base/mult-nl.mkii
+++ b/tex/context/base/mult-nl.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixscheider}
\setinterfaceconstant{suffixstopper}{suffixafsluiter}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-pe.mkii b/tex/context/base/mult-pe.mkii
index 999b16cf5..240130cdf 100644
--- a/tex/context/base/mult-pe.mkii
+++ b/tex/context/base/mult-pe.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{پسوند}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/mult-ro.mkii b/tex/context/base/mult-ro.mkii
index f577eabda..3b7206e44 100644
--- a/tex/context/base/mult-ro.mkii
+++ b/tex/context/base/mult-ro.mkii
@@ -1045,8 +1045,6 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
-\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
-\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
diff --git a/tex/context/base/node-acc.lua b/tex/context/base/node-acc.lua
index 59fa031bf..81ae496b2 100644
--- a/tex/context/base/node-acc.lua
+++ b/tex/context/base/node-acc.lua
@@ -11,27 +11,10 @@ local nodes, node = nodes, node
local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local tonut = nodes.tonut
-local tonode = nodes.tonode
-
-local getid = nuts.getid
-local getfield = nuts.getfield
-local getattr = nuts.getattr
-local getlist = nuts.getlist
-local getchar = nuts.getchar
-local getnext = nuts.getnext
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local traverse_nodes = nuts.traverse
-local traverse_id = nuts.traverse_id
-local copy_node = nuts.copy
-local free_nodelist = nuts.flush_list
-local insert_after = nuts.insert_after
-
-local new_gluespec = nuts.pool.gluespec -- temp hack
+local traverse_nodes = node.traverse
+local traverse_id = node.traverse_id
+local copy_node = node.copy
+local free_nodelist = node.flush_list
local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
@@ -46,72 +29,57 @@ local threshold = 65536
-- todo: nbsp etc
-- todo: collapse kerns
--- p_id
-
local function injectspaces(head)
- local p, p_id
+ local p
local n = head
while n do
- local id = getid(n)
+ local id = n.id
if id == glue_code then -- todo: check for subtype related to spacing (13/14 but most seems to be 0)
- -- if getfield(getfield(n,"spec"),"width") > 0 then -- threshold
--- if p and p_id == glyph_code then
- if p and getid(p) == glyph_code then
+ -- if n.spec.width > 0 then -- threshold
+ if p and p.id == glyph_code then
local g = copy_node(p)
- local c = getfield(g,"components")
+ local c = g.components
if c then -- it happens that we copied a ligature
free_nodelist(c)
- setfield(g,"components",nil)
- setfield(g,"subtype",256)
+ g.components = nil
+ g.subtype = 256
end
- local a = getattr(n,a_characters)
- -- local s = copy_node(getfield(n,"spec"))
- -- this will be fixed in luatex but for now a temp hack (zero test)
- local s = getfield(n,"spec")
- s = s == 0 and new_gluespec(0) or copy_node(s)
- --
- setfield(g,"char",32)
- setfield(n,"spec",s)
- -- insert_after(p,p,g)
- setfield(p,"next",g)
- setfield(g,"prev",p)
- setfield(g,"next",n)
- setfield(n,"prev",g)
- setfield(s,"width",getfield(s,"width") - getfield(g,"width"))
+ local a = n[a_characters]
+ local s = copy_node(n.spec)
+ g.char, n.spec = 32, s
+ p.next, g.prev = g, p
+ g.next, n.prev = n, g
+ s.width = s.width - g.width
if a then
- setattr(g,a_characters,a)
+ g[a_characters] = a
end
- setattr(s,a_characters,0)
- setattr(n,a_characters,0)
+ s[a_characters] = 0
+ n[a_characters] = 0
end
-- end
elseif id == hlist_code or id == vlist_code then
- injectspaces(getlist(n),attribute)
+ injectspaces(n.list,attribute)
-- elseif id == kern_code then -- the backend already collapses
-- local first = n
-- while true do
- -- local nn = getnext(n)
- -- if nn and getid(nn) == kern_code then
+ -- local nn = n.next
+ -- if nn and nn.id == kern_code then
-- -- maybe we should delete kerns but who cares at this stage
- -- setfield(first,"kern",getfield(first,"kern") + getfield(nn,"kern")
- -- setfield(nn,"kern",0)
+ -- first.kern = first.kern + nn.kern
+ -- nn.kern = 0
-- n = nn
-- else
-- break
-- end
-- end
end
- p_id = id
p = n
- n = getnext(n)
+ n = n.next
end
- return head, true -- always done anyway
+ return head, true
end
-nodes.handlers.accessibility = function(head)
- local head, done = injectspaces(tonut(head))
- return tonode(head), done
-end
+nodes.handlers.accessibility = injectspaces
-- todo:
@@ -122,18 +90,16 @@ end
-- local function compact(n)
-- local t = { }
-- for n in traverse_id(glyph_code,n) do
--- t[#t+1] = utfchar(getchar(n)) -- check for unicode
+-- t[#t+1] = utfchar(n.char) -- check for unicode
-- end
-- return concat(t,"")
-- end
--
-- local function injectspans(head)
--- local done = false
--- for n in traverse_nodes(tonuts(head)) do
--- local id = getid(n)
+-- for n in traverse_nodes(head) do
+-- local id = n.id
-- if id == disc then
--- local r = getfield(n,"replace")
--- local p = getfield(n,"pre")
+-- local r, p = n.replace, n.pre
-- if r and p then
-- local str = compact(r)
-- local hsh = hyphenated[str]
@@ -142,14 +108,13 @@ end
-- hyphenated[str] = hsh
-- codes[hsh] = str
-- end
--- setattr(n,a_hyphenated,hsh)
--- done = true
+-- n[a_hyphenated] = hsh
-- end
-- elseif id == hlist_code or id == vlist_code then
--- injectspans(getlist(n))
+-- injectspans(n.list)
-- end
-- end
--- return tonodes(head), done
+-- return head, true
-- end
--
-- nodes.injectspans = injectspans
@@ -157,22 +122,19 @@ end
-- tasks.appendaction("processors", "words", "nodes.injectspans")
--
-- local function injectspans(head)
--- local done = false
--- for n in traverse_nodes(tonut(head)) do
--- local id = getid(n)
+-- for n in traverse_nodes(head) do
+-- local id = n.id
-- if id == disc then
--- local a = getattr(n,a_hyphenated)
+-- local a = n[a_hyphenated]
-- if a then
-- local str = codes[a]
-- local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
-- local e = new_pdfliteral("EMC")
--- insert_before(head,n,b)
--- insert_after(head,n,e)
--- done = true
+-- node.insert_before(head,n,b)
+-- node.insert_after(head,n,e)
-- end
-- elseif id == hlist_code or id == vlist_code then
--- injectspans(getlist(n))
+-- injectspans(n.list)
-- end
-- end
--- return tonodes(head), done
-- end
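
-- Illustration only, not part of the patch: the same trivial helper written in
-- the two styles these hunks switch between. The first uses plain nodes and
-- direct field access (the style restored here); the second shows the "nuts"
-- accessor style being removed, which reaches direct nodes through tonut and
-- assumes the nodes.nuts helpers are loaded.
local glyph_code = nodes.nodecodes.glyph

local function firstchar(head)                     -- plain node style
    for g in node.traverse_id(glyph_code,head) do
        return g.char
    end
end

local function firstchar_nuts(head)                -- nuts accessor style
    local nuts = nodes.nuts
    for g in nuts.traverse_id(glyph_code,nuts.tonut(head)) do
        return nuts.getchar(g)
    end
end
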
diff --git a/tex/context/base/node-aux.lua b/tex/context/base/node-aux.lua
index 7f4b0342a..443c78547 100644
--- a/tex/context/base/node-aux.lua
+++ b/tex/context/base/node-aux.lua
@@ -22,108 +22,82 @@ local vlist_code = nodecodes.vlist
local attributelist_code = nodecodes.attributelist -- temporary
local math_code = nodecodes.math
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-local vianuts = nuts.vianuts
-
-local getbox = nuts.getbox
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local traverse_nodes = nuts.traverse
-local traverse_id = nuts.traverse_id
-local free_node = nuts.free
-local hpack_nodes = nuts.hpack
-local unset_attribute = nuts.unset_attribute
-local first_glyph = nuts.first_glyph
-local copy_node = nuts.copy
-local copy_node_list = nuts.copy_list
-local find_tail = nuts.tail
-local insert_node_after = nuts.insert_after
-local isnode = nuts.is_node
-
-local nodes_traverse_id = nodes.traverse_id
-local nodes_first_glyph = nodes.first_glyph
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
+
local new_glue = nodepool.glue
local new_glyph = nodepool.glyph
+local traverse_nodes = node.traverse
+local traverse_id = node.traverse_id
+local free_node = node.free
+local hpack_nodes = node.hpack
+local unset_attribute = node.unset_attribute
+local first_glyph = node.first_glyph or node.first_character
+local copy_node = node.copy
+local copy_node_list = node.copy_list
+local slide_nodes = node.slide
+local insert_node_after = node.insert_after
+local isnode = node.is_node
+
local unsetvalue = attributes.unsetvalue
local current_font = font.current
+local texgetbox = tex.getbox
+
local report_error = logs.reporter("node-aux:error")
-local function repackhlist(list,...)
+function nodes.repackhlist(list,...)
+--~ nodes.showsimplelist(list)
local temp, b = hpack_nodes(list,...)
- list = getlist(temp)
- setfield(temp,"list",nil)
+ list = temp.list
+ temp.list = nil
free_node(temp)
return list, b
end
-nuts.repackhlist = repackhlist
-
-function nodes.repackhlist(list,...)
- local list, b = repackhlist(tonut(list),...)
- return tonode(list), b
-end
-
local function set_attributes(head,attr,value)
for n in traverse_nodes(head) do
- setattr(n,attr,value)
- local id = getid(n)
+ n[attr] = value
+ local id = n.id
if id == hlist_node or id == vlist_node then
- set_attributes(getlist(n),attr,value)
+ set_attributes(n.list,attr,value)
end
end
end
local function set_unset_attributes(head,attr,value)
for n in traverse_nodes(head) do
- if not getattr(n,attr) then
- setattr(n,attr,value)
+ if not n[attr] then
+ n[attr] = value
end
- local id = getid(n)
+ local id = n.id
if id == hlist_code or id == vlist_code then
- set_unset_attributes(getlist(n),attr,value)
+ set_unset_attributes(n.list,attr,value)
end
end
end
local function unset_attributes(head,attr)
for n in traverse_nodes(head) do
- setattr(n,attr,unsetvalue)
- local id = getid(n)
+ n[attr] = unsetvalue
+ local id = n.id
if id == hlist_code or id == vlist_code then
- unset_attributes(getlist(n),attr)
+ unset_attributes(n.list,attr)
end
end
end
--- for old times sake
+nodes.setattribute = node.set_attribute
+nodes.getattribute = node.has_attribute
+nodes.unsetattribute = node.unset_attribute
+nodes.has_attribute = node.has_attribute
-nuts.setattribute = nuts.setattr nodes.setattribute = nodes.setattr
-nuts.getattribute = nuts.getattr nodes.getattribute = nodes.getattr
-nuts.unsetattribute = nuts.unset_attribute nodes.unsetattribute = nodes.unset_attribute
-nuts.has_attribute = nuts.has_attribute nodes.has_attribute = nodes.has_attribute
-nuts.firstglyph = nuts.first_glyph nodes.firstglyph = nodes.first_glyph
+nodes.firstglyph = first_glyph
+nodes.setattributes = set_attributes
+nodes.setunsetattributes = set_unset_attributes
+nodes.unsetattributes = unset_attributes
-nuts.setattributes = set_attributes nodes.setattributes = vianuts(set_attributes)
-nuts.setunsetattributes = set_unset_attributes nodes.setunsetattributes = vianuts(set_unset_attributes)
-nuts.unsetattributes = unset_attributes nodes.unsetattributes = vianuts(unset_attributes)
-
--- history:
---
-- function nodes.is_skipable(a,id) -- skipable nodes at the margins during character protrusion
-- return (
-- id ~= glyph_node
@@ -132,26 +106,29 @@ nuts.unsetattributes = unset_attributes nodes.unsetattribut
-- or id == adjust_node
-- or id == penalty_node
-- or (id == glue_node and a.spec.writable)
--- or (id == disc_node and getfield(a,"pre") == nil and getfield(a,"post") == nil and getfield(a,"replace") == nil)
--- or (id == math_node and getfield(a,"surround") == 0)
--- or (id == kern_node and (getfield(a,"kern") == 0 or getsubtype(subtype) == NORMAL))
--- or (id == hlist_node and getfield(a,"width") == 0 and getfield(a,"height") == 0 and getfield(a,"depth") == 0 and getlist(a) == nil)
--- or (id == whatsit_node and getsubtype(a) ~= pdf_refximage_node and getsubtype(a) ~= pdf_refxform_node)
+-- or (id == disc_node and a.pre == nil and a.post == nil and a.replace == nil)
+-- or (id == math_node and a.surround == 0)
+-- or (id == kern_node and (a.kern == 0 or a.subtype == NORMAL))
+-- or (id == hlist_node and a.width == 0 and a.height == 0 and a.depth == 0 and a.list == nil)
+-- or (id == whatsit_node and a.subtype ~= pdf_refximage_node and a.subtype ~= pdf_refxform_node)
-- )
-- end
+
+-- history:
+--
--
-- local function glyph_width(a)
--- local ch = chardata[getfont(a)][getchar(a)]
+-- local ch = chardata[a.font][a.char]
-- return (ch and ch.width) or 0
-- end
--
-- local function glyph_total(a)
--- local ch = chardata[getfont(a)][getchar(a)]
+-- local ch = chardata[a.font][a.char]
-- return (ch and (ch.height+ch.depth)) or 0
-- end
--
-- local function non_discardable(a) -- inline
--- return getid(id) < math_node -- brrrr
+-- return a.id < math_node -- brrrr
-- end
--
-- local function calculate_badness(t,s)
@@ -206,36 +183,8 @@ nuts.unsetattributes = unset_attributes nodes.unsetattribut
-- return -u
-- end
-- end
---
--- if not node.end_of_math then
--- function node.end_of_math(n)
--- for n in traverse_id(math_code,getnext(next)) do
--- return n
--- end
--- end
--- end
---
--- nodes.endofmath = node.end_of_math
---
--- local function firstline(n)
--- while n do
--- local id = getid(n)
--- if id == hlist_code then
--- if getsubtype(n) == line_code then
--- return n
--- else
--- return firstline(getlist(n))
--- end
--- elseif id == vlist_code then
--- return firstline(getlist(n))
--- end
--- n = getnext(n)
--- end
--- end
---
--- nodes.firstline = firstline
-function nuts.firstcharacter(n,untagged) -- tagged == subtype > 255
+function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
if untagged then
return first_glyph(n)
else
@@ -245,38 +194,44 @@ function nuts.firstcharacter(n,untagged) -- tagged == subtype > 255
end
end
--- function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
--- if untagged then
--- return nodes_first_glyph(n)
--- else
--- for g in nodes_traverse_id(glyph_code,n) do
--- return g
--- end
--- end
--- end
-
-local function firstcharinbox(n)
- local l = getlist(getbox(n))
+function nodes.firstcharinbox(n)
+ local l = texgetbox(n).list
if l then
for g in traverse_id(glyph_code,l) do
- return getchar(g)
+ return g.char
end
end
return 0
end
-nuts .firstcharinbox = firstcharinbox
-nodes.firstcharinbox = firstcharinbox
-nodes.firstcharacter = vianuts(firstcharacter)
-
-function commands.buildtextaccent(n)
- local char = firstcharinbox(n)
- if char > 0 then
- -- context.accent(false,char)
- context([[\accent%s\relax]],char)
+if not node.end_of_math then
+ function node.end_of_math(n)
+ for n in traverse_id(math_code,n.next) do
+ return n
+ end
end
end
+nodes.endofmath = node.end_of_math
+
+-- local function firstline(n)
+-- while n do
+-- local id = n.id
+-- if id == hlist_code then
+-- if n.subtype == line_code then
+-- return n
+-- else
+-- return firstline(n.list)
+-- end
+-- elseif id == vlist_code then
+-- return firstline(n.list)
+-- end
+-- n = n.next
+-- end
+-- end
+
+-- nodes.firstline = firstline
+
-- this depends on fonts, so we have a funny dependency ... will be
-- sorted out .. we could make tonodes a plugin into this
@@ -287,8 +242,10 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
local head, tail, space, fnt, template = nil, nil, nil, nil, nil
if not fnt then
fnt = current_font()
- elseif type(fnt) ~= "number" and getid(fnt) == glyph_code then -- so it has to be a real node
- fnt, template = nil, tonut(fnt)
+ elseif type(fnt) ~= "number" and fnt.id == "glyph" then
+ fnt, template = nil, fnt
+ -- else
+ -- already a number
end
for s in utfvalues(str) do
local n
@@ -302,12 +259,12 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
end
elseif template then
n = copy_node(template)
- setvalue(n,"char",s)
+ n.char = s
else
n = new_glyph(fnt,s)
end
if attr then -- normally false when template
- setfield(n,"attr",copy_node_list(attr))
+ n.attr = copy_node_list(attr)
end
if head then
insert_node_after(head,tail,n)
@@ -319,130 +276,69 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
return head, tail
end
-nuts.tonodes = tonodes
+nodes.tonodes = tonodes
-nodes.tonodes = function(str,fnt,attr)
- local head, tail = tonodes(str,fnt,attr)
- return tonode(head), tonode(tail)
-end
-
--- local function link(list,currentfont,currentattr,head,tail)
--- for i=1,#list do
--- local n = list[i]
--- if n then
--- local tn = isnode(n)
--- if not tn then
--- local tn = type(n)
--- if tn == "number" then
--- if not currentfont then
--- currentfont = current_font()
--- end
--- local h, t = tonodes(tostring(n),currentfont,currentattr)
--- if not h then
--- -- skip
--- elseif not head then
--- head = h
--- tail = t
--- else
--- setfield(tail,"next",h)
--- setfield(h,"prev",t)
--- tail = t
--- end
--- elseif tn == "string" then
--- if #tn > 0 then
--- if not currentfont then
--- currentfont = current_font()
--- end
--- local h, t = tonodes(n,currentfont,currentattr)
--- if not h then
--- -- skip
--- elseif not head then
--- head, tail = h, t
--- else
--- setfield(tail,"next",h)
--- setfield(h,"prev",t)
--- tail = t
--- end
--- end
--- elseif tn == "table" then
--- if #tn > 0 then
--- if not currentfont then
--- currentfont = current_font()
--- end
--- head, tail = link(n,currentfont,currentattr,head,tail)
--- end
--- end
--- elseif not head then
--- head = n
--- tail = find_tail(n)
--- elseif getid(n) == attributelist_code then
--- -- weird case
--- report_error("weird node type in list at index %s:",i)
--- for i=1,#list do
--- local l = list[i]
--- report_error("%3i: %s %S",i,getid(l) == attributelist_code and "!" or ">",l)
--- end
--- os.exit()
--- else
--- setfield(tail,"next",n)
--- setfield(n,"prev",tail)
--- if getnext(n) then
--- tail = find_tail(n)
--- else
--- tail = n
--- end
--- end
--- else
--- -- permitting nil is convenient
--- end
--- end
--- return head, tail
--- end
-
-local function link(list,currentfont,currentattr,head,tail) -- an oldie, might be replaced
+local function link(list,currentfont,currentattr,head,tail)
for i=1,#list do
local n = list[i]
if n then
- local tn = type(n)
- if tn == "string" then
- if #tn > 0 then
+ local tn = isnode(n)
+ if not tn then
+ local tn = type(n)
+ if tn == "number" then
if not currentfont then
currentfont = current_font()
end
- local h, t = tonodes(n,currentfont,currentattr)
+ local h, t = tonodes(tostring(n),currentfont,currentattr)
if not h then
-- skip
elseif not head then
head, tail = h, t
else
- setfield(tail,"next",h)
- setfield(h,"prev",t)
- tail = t
+ tail.next, h.prev, tail = h, t, t
end
- end
- elseif tn == "table" then
- if #tn > 0 then
- if not currentfont then
- currentfont = current_font()
+ elseif tn == "string" then
+ if #tn > 0 then
+ if not currentfont then
+ currentfont = current_font()
+ end
+ local h, t = tonodes(n,currentfont,currentattr)
+ if not h then
+ -- skip
+ elseif not head then
+ head, tail = h, t
+ else
+ tail.next, h.prev, tail = h, t, t
+ end
+ end
+ elseif tn == "table" then
+ if #tn > 0 then
+ if not currentfont then
+ currentfont = current_font()
+ end
+ head, tail = link(n,currentfont,currentattr,head,tail)
end
- head, tail = link(n,currentfont,currentattr,head,tail)
end
elseif not head then
head = n
- tail = find_tail(n)
- elseif getid(n) == attributelist_code then
+ if n.next then
+ tail = slide_nodes(n)
+ else
+ tail = n
+ end
+ elseif n.id == attributelist_code then
-- weird case
report_error("weird node type in list at index %s:",i)
for i=1,#list do
local l = list[i]
- report_error("%3i: %s %S",i,getid(l) == attributelist_code and "!" or ">",l)
+ report_error("%3i: %s %S",i,l.id == attributelist_code and "!" or ">",l)
end
os.exit()
else
- setfield(tail,"next",n)
- setfield(n,"prev",tail)
- if getnext(n) then
- tail = find_tail(n)
+ tail.next = n
+ n.prev = tail
+ if n.next then
+ tail = slide_nodes(n)
else
tail = n
end
@@ -454,22 +350,17 @@ local function link(list,currentfont,currentattr,head,tail) -- an oldie, might b
return head, tail
end
-nuts.link = link
-
-nodes.link = function(list,currentfont,currentattr,head,tail)
- local head, tail = link(list,currentfont,currentattr,tonut(head),tonut(tail))
- return tonode(head), tonode(tail)
-end
+nodes.link = link
local function locate(start,wantedid,wantedsubtype)
for n in traverse_nodes(start) do
- local id = getid(n)
+ local id = n.id
if id == wantedid then
- if not wantedsubtype or getsubtype(n) == wantedsubtype then
+ if not wantedsubtype or n.subtype == wantedsubtype then
return n
end
elseif id == hlist_code or id == vlist_code then
- local found = locate(getlist(n),wantedid,wantedsubtype)
+ local found = locate(n.list,wantedid,wantedsubtype)
if found then
return found
end
@@ -477,12 +368,7 @@ local function locate(start,wantedid,wantedsubtype)
end
end
-nuts.locate = locate
-
-nodes.locate = function(start,wantedid,wantedsubtype)
- local found = locate(tonut(start),wantedid,wantedsubtype)
- return found and tonode(found)
-end
+nodes.locate = locate
-- I have no use for this yet:
--
@@ -495,12 +381,10 @@ end
-- return (badness/100)^(1/3)
-- end
--
--- function tex.stretch_amount(skip,badness) -- node no nut
+-- function tex.stretch_amount(skip,badness)
-- if skip.id == gluespec_code then
-- return skip.width + (badness and (badness/100)^(1/3) or 1) * skip.stretch
-- else
-- return 0
-- end
-- end
-
-
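
-- Illustration only, not part of the patch: nodes.tonodes, as restored above,
-- builds a node list from a string for the given font and returns its head and
-- tail. A minimal use, passing no extra attribute list:
local function stringtolist(str)
    local head, tail = nodes.tonodes(str,font.current(),nil)
    return head, tail
end
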
diff --git a/tex/context/base/node-bck.lua b/tex/context/base/node-bck.lua
index 4b7b4a064..feaa2c684 100644
--- a/tex/context/base/node-bck.lua
+++ b/tex/context/base/node-bck.lua
@@ -11,8 +11,6 @@ if not modules then modules = { } end modules ['node-bck'] = {
local attributes, nodes, node = attributes, nodes, node
-local tasks = nodes.tasks
-
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -21,25 +19,11 @@ local vlist_code = nodecodes.vlist
local glyph_code = nodecodes.glyph
local cell_code = listcodes.cell
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getsubtype = nuts.getsubtype
-
-local traverse = nuts.traverse
-local traverse_id = nuts.traverse_id
+local traverse = node.traverse
+local traverse_id = node.traverse_id
+local nodepool = nodes.pool
+local tasks = nodes.tasks
local new_rule = nodepool.rule
local new_glue = nodepool.glue
@@ -53,50 +37,50 @@ local a_alignbackground = attributes.private('alignbackground')
local function add_backgrounds(head) -- rather old code .. to be redone
local current = head
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code or id == vlist_code then
- local list = getlist(current)
+ local list = current.list
if list then
local head = add_backgrounds(list)
if head then
- setfield(current,"list",head)
+ current.list = head
list = head
end
end
- local width = getfield(current,"width")
+ local width = current.width
if width > 0 then
- local background = getattr(current,a_background)
+ local background = current[a_background]
if background then
-- direct to hbox
-- colorspace is already set so we can omit that and stick to color
- local mode = getattr(current,a_colorspace)
+ local mode = current[a_colorspace]
if mode then
- local height = getfield(current,"height")
- local depth = getfield(current,"depth")
+ local height = current.height
+ local depth = current.depth
local skip = id == hlist_code and width or (height + depth)
local glue = new_glue(-skip)
local rule = new_rule(width,height,depth)
- local color = getattr(current,a_color)
- local transparency = getattr(current,a_transparency)
- setattr(rule,a_colorspace,mode)
+ local color = current[a_color]
+ local transparency = current[a_transparency]
+ rule[a_colorspace] = mode
if color then
- setattr(rule,a_color,color)
+ rule[a_color] = color
end
if transparency then
- setattr(rule,a_transparency,transparency)
+ rule[a_transparency] = transparency
end
- setfield(rule,"next",glue)
- setfield(glue,"prev",rule)
+ rule.next = glue
+ glue.prev = rule
if list then
- setfield(glue,"next",list)
- setfield(list,"prev",glue)
+ glue.next = list
+ list.prev = glue
end
- setfield(current,"list",rule)
+ current.list = rule
end
end
end
end
- current = getnext(current)
+ current = current.next
end
return head, true
end
@@ -104,16 +88,16 @@ end
local function add_alignbackgrounds(head)
local current = head
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code then
- local list = getlist(current)
+ local list = current.list
if not list then
-- no need to look
- elseif getsubtype(current) == cell_code then
+ elseif current.subtype == cell_code then
local background = nil
local found = nil
-- for l in traverse(list) do
- -- background = getattr(l,a_alignbackground)
+ -- background = l[a_alignbackground]
-- if background then
-- found = l
-- break
@@ -122,7 +106,7 @@ local function add_alignbackgrounds(head)
-- we know that it's a fake hlist (could be user node)
-- but we cannot store tables in user nodes yet
for l in traverse_id(hpack_code,list) do
- background = getattr(l,a_alignbackground)
+ background = l[a_alignbackground]
if background then
found = l
end
@@ -131,28 +115,28 @@ local function add_alignbackgrounds(head)
--
if background then
-- current has subtype 5 (cell)
- local width = getfield(current,"width")
+ local width = current.width
if width > 0 then
- local mode = getattr(found,a_colorspace)
+ local mode = found[a_colorspace]
if mode then
local glue = new_glue(-width)
- local rule = new_rule(width,getfield(current,"height"),getfield(current,"depth"))
- local color = getattr(found,a_color)
- local transparency = getattr(found,a_transparency)
- setattr(rule,a_colorspace,mode)
+ local rule = new_rule(width,current.height,current.depth)
+ local color = found[a_color]
+ local transparency = found[a_transparency]
+ rule[a_colorspace] = mode
if color then
- setattr(rule,a_color,color)
+ rule[a_color] = color
end
if transparency then
- setattr(rule,a_transparency,transparency)
+ rule[a_transparency] = transparency
end
- setfield(rule,"next",glue)
- setfield(glue,"prev",rule)
+ rule.next = glue
+ glue.prev = rule
if list then
- setfield(glue,"next",list)
- setfield(list,"prev",glue)
+ glue.next = list
+ list.prev = glue
end
- setfield(current,"list",rule)
+ current.list = rule
end
end
end
@@ -160,23 +144,18 @@ local function add_alignbackgrounds(head)
add_alignbackgrounds(list)
end
elseif id == vlist_code then
- local list = getlist(current)
+ local list = current.list
if list then
add_alignbackgrounds(list)
end
end
- current = getnext(current)
+ current = current.next
end
return head, true
end
--- nodes.handlers.backgrounds = add_backgrounds
--- nodes.handlers.alignbackgrounds = add_alignbackgrounds
-
-nodes.handlers.backgrounds = function(head) local head, done = add_backgrounds (tonut(head)) return tonode(head), done end
-nodes.handlers.alignbackgrounds = function(head) local head, done = add_alignbackgrounds(tonut(head)) return tonode(head), done end
-
--- elsewhere: needs checking
+nodes.handlers.backgrounds = add_backgrounds
+nodes.handlers.alignbackgrounds = add_alignbackgrounds
tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds")
tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds")
diff --git a/tex/context/base/node-fin.lua b/tex/context/base/node-fin.lua
index 8476b47a6..63a5ef83e 100644
--- a/tex/context/base/node-fin.lua
+++ b/tex/context/base/node-fin.lua
@@ -8,54 +8,36 @@ if not modules then modules = { } end modules ['node-fin'] = {
-- this module is being reconstructed
-- local functions, only slightly slower
---
--- leaders are also triggers
local next, type, format = next, type, string.format
local attributes, nodes, node = attributes, nodes, node
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getleader = nuts.getleader
-local getattr = nuts.getattr
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local copy_node = nuts.copy
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
+local copy_node = node.copy
+local find_tail = node.slide
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
-local pdfliteral_code = whatcodes.pdfliteral
+local pdfliteral_code = whatcodes.pdfliteral
-local states = attributes.states
-local numbers = attributes.numbers
-local a_trigger = attributes.private('trigger')
-local triggering = false
+local states = attributes.states
+local numbers = attributes.numbers
+local a_trigger = attributes.private('trigger')
+local triggering = false
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local loadstripped = utilities.lua.loadstripped
-local unsetvalue = attributes.unsetvalue
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local loadstripped = utilities.lua.loadstripped
+local unsetvalue = attributes.unsetvalue
-- these two will be like trackers
@@ -120,14 +102,11 @@ function nodes.installattributehandler(plugin)
return loadstripped(template)()
end
--- for the moment:
-
-local function copied(n)
- return copy_node(tonut(n))
-end
-
-- the injectors
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger
local current, current_selector, done = 0, 0, false -- nb, stack has a local current !
local nsbegin, nsend
@@ -153,25 +132,23 @@ end
function states.finalize(namespace,attribute,head) -- is this one ok?
if current > 0 and nsnone then
- head = tonut(head)
- local id = getid(head)
+ local id = head.id
if id == hlist_code or id == vlist_code then
- local list = getlist(head)
+ local list = head.list
if list then
- list = insert_node_before(list,list,copied(nsnone)) -- two return values
- setfield(head,"list",list)
+ head.list = insert_node_before(list,list,copy_node(nsnone))
end
else
- head = insert_node_before(head,head,copied(nsnone))
+ head = insert_node_before(head,head,copy_node(nsnone))
end
- return tonode(head), true, true
+ return head, true, true
end
return head, false, false
end
-- disc nodes can be ignored
-- we need to deal with literals too (reset as well as oval)
--- if id == glyph_code or (id == whatsit_code and getsubtype(stack) == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
+-- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
local function process(namespace,attribute,head,inheritance,default) -- one attribute
local stack = head
@@ -179,57 +156,53 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
local check = false
local leader = nil
while stack do
- local id = getid(stack)
+ local id = stack.id
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = getleader(stack)
+ leader = stack.leader
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = getlist(stack)
+ local content = stack.list
if content then
-- begin nested --
- if nstrigger and getattr(stack,nstrigger) then
- local outer = getattr(stack,attribute)
+ local ok
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
if outer ~= inheritance then
- local list, ok = process(namespace,attribute,content,inheritance,outer)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = process(namespace,attribute,content,inheritance,outer)
else
- local list, ok = process(namespace,attribute,content,inheritance,default)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = process(namespace,attribute,content,inheritance,default)
end
else
- local list, ok = process(namespace,attribute,content,inheritance,default)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = process(namespace,attribute,content,inheritance,default)
end
-- end nested --
+ done = done or ok
end
elseif id == rule_code then
- check = getfield(stack,"width") ~= 0
+ check = stack.width ~= 0
end
-- much faster this way than using a check() and nested() function
if check then
- local c = getattr(stack,attribute)
+ local c = stack[attribute]
if c then
if default and c == inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copied(nsdata[default]))
+ head = insert_node_before(head,stack,copy_node(nsdata[default]))
current = default
done = true
end
elseif current ~= c then
- head = insert_node_before(head,stack,copied(nsdata[c]))
+ head = insert_node_before(head,stack,copy_node(nsdata[c]))
current = c
done = true
end
if leader then
local savedcurrent = current
- local ci = getid(leader)
+ local ci = leader.id
if ci == hlist_code or ci == vlist_code then
-- else we reset inside a box unneeded, okay, the downside is
-- that we trigger color in each repeated box, so there is room
@@ -237,48 +210,41 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
current = 0
end
-- begin nested --
- if nstrigger and getattr(stack,nstrigger) then
- local outer = getattr(stack,attribute)
+ local ok = false
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
if outer ~= inheritance then
- local list, ok = process(namespace,attribute,leader,inheritance,outer)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,outer)
else
- local list, ok = process(namespace,attribute,leader,inheritance,default)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
end
else
- local list, ok = process(namespace,attribute,leader,inheritance,default)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
end
-- end nested --
+ done = done or ok
current = savedcurrent
leader = false
end
elseif default and inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copied(nsdata[default]))
+ head = insert_node_before(head,stack,copy_node(nsdata[default]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copied(nsnone))
+ head = insert_node_before(head,stack,copy_node(nsnone))
current = 0
done = true
end
check = false
end
- stack = getnext(stack)
+ stack = stack.next
end
return head, done
end
-states.process = function(namespace,attribute,head,default)
- local head, done = process(namespace,attribute,tonut(head),default)
- return tonode(head), done
-end
+states.process = process
-- we can force a selector, e.g. document wide color spaces, saves a little
-- watch out, we need to check both the selector state (like colorspace) and
@@ -292,103 +258,93 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
local check = false
local leader = nil
while stack do
- local id = getid(stack)
+ local id = stack.id
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = getleader(stack)
+ leader = stack.leader
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = getlist(stack)
+ local content = stack.list
if content then
+ local ok = false
-- begin nested
- if nstrigger and getattr(stack,nstrigger) then
- local outer = getattr(stack,attribute)
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
if outer ~= inheritance then
- local list, ok = selective(namespace,attribute,content,inheritance,outer)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
else
- local list, ok = selective(namespace,attribute,content,inheritance,default)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = selective(namespace,attribute,content,inheritance,default)
end
else
- local list, ok = selective(namespace,attribute,content,inheritance,default)
- setfield(stack,"list",list)
- done = done or ok
+ stack.list, ok = selective(namespace,attribute,content,inheritance,default)
end
-- end nested
+ done = done or ok
end
elseif id == rule_code then
- check = getfield(stack,"width") ~= 0
+ check = stack.width ~= 0
end
if check then
- local c = getattr(stack,attribute)
+ local c = stack[attribute]
if c then
if default and c == inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
current = default
done = true
end
else
- local s = getattr(stack,nsselector)
+ local s = stack[nsselector]
if current ~= c or current_selector ~= s then
local data = nsdata[c]
- head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
current = c
current_selector = s
done = true
end
end
if leader then
+ local ok = false
-- begin nested
- if nstrigger and getattr(stack,nstrigger) then
- local outer = getatribute(stack,attribute)
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
if outer ~= inheritance then
- local list, ok = selective(namespace,attribute,leader,inheritance,outer)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,outer)
else
- local list, ok = selective(namespace,attribute,leader,inheritance,default)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
end
else
- local list, ok = selective(namespace,attribute,leader,inheritance,default)
- setfield(stack,"leader",list)
- done = done or ok
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
end
-- end nested
- leader = false
+ done = done or ok
+ leader = false
end
elseif default and inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copied(nsnone))
+ head = insert_node_before(head,stack,copy_node(nsnone))
current, current_selector, done = 0, 0, true
end
check = false
end
- stack = getnext(stack)
+
+ stack = stack.next
end
return head, done
end
-states.selective = function(namespace,attribute,head,default)
- local head, done = selective(namespace,attribute,tonut(head),default)
- return tonode(head), done
-end
+states.selective = selective
-- Ideally the next one should be merged with the previous but keeping it separate is
-- safer. We deal with two situations: efficient boxwise (layoutareas) and mixed layers
@@ -407,80 +363,76 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
local check = false
local leader = false
while stack do
- local id = getid(stack)
+ local id = stack.id
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = getleader(stack)
+ leader = stack.leader
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = getlist(stack)
+ local content = stack.list
if content then
-- the problem is that broken lines gets the attribute which can be a later one
if nslistwise then
- local a = getattr(stack,attribute)
+ local a = stack[attribute]
if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
local p = current
- current = a
- head = insert_node_before(head,stack,copied(nsdata[a]))
- local list = stacked(namespace,attribute,content,current) -- two return values
- setfield(stack,"list",list)
- done = true
- head, stack = insert_node_after(head,stack,copied(nsnone))
+ current, done = a, true
+ head = insert_node_before(head,stack,copy_node(nsdata[a]))
+ stack.list = stacked(namespace,attribute,content,current)
+ head, stack = insert_node_after(head,stack,copy_node(nsnone))
current = p
else
- local list, ok = stacked(namespace,attribute,content,current)
- setfield(stack,"list",list) -- only if ok
+ local ok = false
+ stack.list, ok = stacked(namespace,attribute,content,current)
done = done or ok
end
else
- local list, ok = stacked(namespace,attribute,content,current)
- setfield(stack,"list",list) -- only if ok
+ local ok = false
+ stack.list, ok = stacked(namespace,attribute,content,current)
done = done or ok
end
end
elseif id == rule_code then
- check = getfield(stack,"width") ~= 0
+ check = stack.width ~= 0
end
if check then
- local a = getattr(stack,attribute)
+ local a = stack[attribute]
if a then
if current ~= a then
- head = insert_node_before(head,stack,copied(nsdata[a]))
+ head = insert_node_before(head,stack,copy_node(nsdata[a]))
depth = depth + 1
current, done = a, true
end
if leader then
- local list, ok = stacked(namespace,attribute,content,current)
- setfield(stack,"leader",list) -- only if ok
+ local ok = false
+ stack.leader, ok = stacked(namespace,attribute,content,current)
done = done or ok
leader = false
end
elseif default > 0 then
--
elseif current > 0 then
- head = insert_node_before(head,stack,copied(nsnone))
+ head = insert_node_before(head,stack,copy_node(nsnone))
depth = depth - 1
current, done = 0, true
end
check = false
end
- stack = getnext(stack)
+
+ stack = stack.next
end
while depth > 0 do
- head = insert_node_after(head,stack,copied(nsnone))
+ head = insert_node_after(head,stack,copy_node(nsnone))
depth = depth - 1
end
return head, done
end
-states.stacked = function(namespace,attribute,head,default)
- local head, done = stacked(namespace,attribute,tonut(head),default)
- return tonode(head), done
-end
+states.stacked = stacked
-- experimental
@@ -494,53 +446,52 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
local check = false
local leader = false
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = getleader(current)
+ leader = current.leader
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = getlist(current)
+ local content = current.list
if not content then
-- skip
elseif nslistwise then
- local a = getattr(current,attribute)
+ local a = current[attribute]
if a and attrib ~= a and nslistwise[a] then -- viewerlayer
- head = insert_node_before(head,current,copied(nsdata[a]))
- local list = stacker(namespace,attribute,content,a)
- setfield(current,"list",list)
done = true
- head, current = insert_node_after(head,current,copied(nsnone))
+ head = insert_node_before(head,current,copy_node(nsdata[a]))
+ current.list = stacker(namespace,attribute,content,a)
+ head, current = insert_node_after(head,current,copy_node(nsnone))
else
- local list, ok = stacker(namespace,attribute,content,attrib)
- setfield(current,"list",list)
+ local ok = false
+ current.list, ok = stacker(namespace,attribute,content,attrib)
done = done or ok
end
else
- local list, ok = stacker(namespace,attribute,content,default)
- setfield(current,"list",list)
+ local ok = false
+ current.list, ok = stacker(namespace,attribute,content,default)
done = done or ok
end
elseif id == rule_code then
- check = getfield(current,"width") ~= 0
+ check = current.width ~= 0
end
if check then
- local a = getattr(current,attribute) or unsetvalue
+ local a = current[attribute] or unsetvalue
if a ~= attrib then
local n = nsstep(a)
if n then
-- !!!! TEST CODE !!!!
- -- head = insert_node_before(head,current,copied(nsdata[tonumber(n)])) -- a
- head = insert_node_before(head,current,tonut(n)) -- a
+ -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
+ head = insert_node_before(head,current,n) -- a
end
attrib, done, okay = a, true, true
if leader then
-- tricky as a leader has to be a list so we cannot inject before
- local list, ok = stacker(namespace,attribute,leader,attrib)
+ local _, ok = stacker(namespace,attribute,leader,attrib)
done = done or ok
leader = false
end
@@ -549,23 +500,20 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
end
previous = current
- current = getnext(current)
+ current = current.next
end
if okay then
local n = nsend()
if n then
-- !!!! TEST CODE !!!!
- -- head = insert_node_after(head,previous,copied(nsdata[tostring(n)]))
- head = insert_node_after(head,previous,tonut(n))
+ -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
+ head = insert_node_after(head,previous,n)
end
end
return head, done
end
-states.stacker = function(namespace,attribute,head,default)
- local head, done = stacker(namespace,attribute,tonut(head),default)
- return tonode(head), done
-end
+states.stacker = stacker
-- -- --
diff --git a/tex/context/base/node-fnt.lua b/tex/context/base/node-fnt.lua
index 7000c4fd7..2f59d513c 100644
--- a/tex/context/base/node-fnt.lua
+++ b/tex/context/base/node-fnt.lua
@@ -23,24 +23,12 @@ local fontdata = fonthashes.identifiers
local otf = fonts.handlers.otf
+local traverse_id = node.traverse_id
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
-
local nodecodes = nodes.nodecodes
local handlers = nodes.handlers
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getattr = nuts.getattr
-local getid = nuts.getid
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getnext = nuts.getnext
-
-local traverse_id = nuts.traverse_id
-
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
@@ -121,25 +109,25 @@ function handlers.characters(head)
report_fonts()
report_fonts("checking node list, run %s",run)
report_fonts()
- local n = tonut(head)
+ local n = head
while n do
- local id = getid(n)
+ local id = n.id
if id == glyph_code then
- local font = getfont(n)
- local attr = getattr(n,0) or 0
- report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,getchar(n))
+ local font = n.font
+ local attr = n[0] or 0
+ report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,n.char)
elseif id == disc_code then
report_fonts("[disc] %s",nodes.listtoutf(n,true,false,n))
else
report_fonts("[%s]",nodecodes[id])
end
- n = getnext(n)
+ n = n.next
end
end
- for n in traverse_id(glyph_code,tonut(head)) do
- -- if getsubtype(n) <256 then -- all are 1
- local font = getfont(n)
- local attr = getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
+ for n in traverse_id(glyph_code,head) do
+ -- if n.subtype<256 then -- all are 1
+ local font = n.font
+ local attr = n[0] or 0 -- zero attribute is reserved for fonts in context
if font ~= prevfont or attr ~= prevattr then
if attr > 0 then
local used = attrfonts[font]
@@ -403,8 +391,5 @@ end
-- return head, true
-- end
-local d_protect_glyphs = nuts.protect_glyphs
-local d_unprotect_glyphs = nuts.unprotect_glyphs
-
-handlers.protectglyphs = function(n) return d_protect_glyphs (tonut(n)) end
-handlers.unprotectglyphs = function(n) return d_unprotect_glyphs(tonut(n)) end
+handlers.protectglyphs = node.protect_glyphs
+handlers.unprotectglyphs = node.unprotect_glyphs
diff --git a/tex/context/base/node-inj.lua b/tex/context/base/node-inj.lua
index f30070e9e..ae48150a6 100644
--- a/tex/context/base/node-inj.lua
+++ b/tex/context/base/node-inj.lua
@@ -11,7 +11,7 @@ if not modules then modules = { } end modules ['node-inj'] = {
-- test fonts. Btw, future versions of luatex will have extended glyph properties
-- that can be of help. Some optimizations can go away when we have faster machines.
--- todo: ignore kerns between disc and glyph
+-- todo: make a special one for context
local next = next
local utfchar = utf.char
@@ -30,32 +30,13 @@ local injections = nodes.injections
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
-
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
+local nodepool = nodes.pool
local newkern = nodepool.kern
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local traverse_id = nuts.traverse_id
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
local a_kernpair = attributes.private('kernpair')
local a_ligacomp = attributes.private('ligacomp')
@@ -90,8 +71,8 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
local ws, wn = tfmstart.width, tfmnext.width
local bound = #cursives + 1
- setattr(start,a_cursbase,bound)
- setattr(nxt,a_curscurs,bound)
+ start[a_cursbase] = bound
+ nxt[a_curscurs] = bound
cursives[bound] = { rlmode, dx, dy, ws, wn }
return dx, dy, bound
end
@@ -100,14 +81,14 @@ function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
-- dy = y - h
if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = getattr(current,a_kernpair)
+ local bound = current[a_kernpair]
if bound then
local kb = kerns[bound]
-- inefficient but singles have less, but weird anyway, needs checking
kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
else
bound = #kerns + 1
- setattr(current,a_kernpair,bound)
+ current[a_kernpair] = bound
kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
end
return x, y, w, h, bound
@@ -119,7 +100,7 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx = factor*x
if dx ~= 0 then
local bound = #kerns + 1
- setattr(current,a_kernpair,bound)
+ current[a_kernpair] = bound
kerns[bound] = { rlmode, dx }
return dx, bound
else
@@ -129,7 +110,7 @@ end
function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor
local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = getattr(base,a_markbase) -- fails again we should pass it
+ local bound = base[a_markbase] -- fails again we should pass it
local index = 1
if bound then
local mb = marks[bound]
@@ -137,19 +118,19 @@ function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) --
-- if not index then index = #mb + 1 end
index = #mb + 1
mb[index] = { dx, dy, rlmode }
- setattr(start,a_markmark,bound)
- setattr(start,a_markdone,index)
+ start[a_markmark] = bound
+ start[a_markdone] = index
return dx, dy, bound
else
- report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound)
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
end
end
-- index = index or 1
index = index or 1
bound = #marks + 1
- setattr(base,a_markbase,bound)
- setattr(start,a_markmark,bound)
- setattr(start,a_markdone,index)
+ base[a_markbase] = bound
+ start[a_markmark] = bound
+ start[a_markdone] = index
marks[bound] = { [index] = { dx, dy, rlmode, baseismark } }
return dx, dy, bound
end
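-- A minimal illustrative sketch (editorial addition, not part of the patch):
-- setkern, setpair, setcursive and setmark all share the bookkeeping pattern
-- shown above -- the real data goes into a Lua table and only its index is
-- parked on the node in a private attribute. With hypothetical names and
-- assuming the context attributes module:

local a_sample  = attributes.private('sample-injection') -- hypothetical attribute
local registry  = { }

local function setdata(n, data)
    local bound = #registry + 1
    registry[bound] = data
    n[a_sample] = bound -- only the index travels with the node
    return bound
end

local function getdata(n)
    local bound = n[a_sample]
    return bound and registry[bound]
end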
@@ -161,15 +142,15 @@ end
local function trace(head)
report_injections("begin run")
for n in traverse_id(glyph_code,head) do
- if getsubtype(n) < 256 then
- local kp = getattr(n,a_kernpair)
- local mb = getattr(n,a_markbase)
- local mm = getattr(n,a_markmark)
- local md = getattr(n,a_markdone)
- local cb = getattr(n,a_cursbase)
- local cc = getattr(n,a_curscurs)
- local char = getchar(n)
- report_injections("font %s, char %U, glyph %c",getfont(n),char,char)
+ if n.subtype < 256 then
+ local kp = n[a_kernpair]
+ local mb = n[a_markbase]
+ local mm = n[a_markmark]
+ local md = n[a_markdone]
+ local cb = n[a_cursbase]
+ local cc = n[a_curscurs]
+ local char = n.char
+ report_injections("font %s, char %U, glyph %c",n.font,char,char)
if kp then
local k = kerns[kp]
if k[3] then
@@ -217,24 +198,22 @@ local function show_result(head)
local current = head
local skipping = false
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",
- getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
skipping = false
elseif id == kern_code then
- report_injections("kern: %p",getfield(current,"kern"))
+ report_injections("kern: %p",current.kern)
skipping = false
elseif not skipping then
report_injections()
skipping = true
end
- current = getnext(current)
+ current = current.next
end
end
function injections.handler(head,where,keep)
- head = tonut(head)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
if has_marks or has_cursives then
if trace_injections then
@@ -245,18 +224,17 @@ function injections.handler(head,where,keep)
if has_kerns then -- move outside loop
local nf, tm = nil, nil
for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if getsubtype(n) < 256 then
+ if n.subtype < 256 then
nofvalid = nofvalid + 1
valid[nofvalid] = n
- local f = getfont(n)
- if f ~= nf then
- nf = f
- tm = fontdata[nf].resources.marks -- other hash in ctx
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
end
if tm then
- mk[n] = tm[getchar(n)]
+ mk[n] = tm[n.char]
end
- local k = getattr(n,a_kernpair)
+ local k = n[a_kernpair]
if k then
local kk = kerns[k]
if kk then
@@ -276,16 +254,15 @@ function injections.handler(head,where,keep)
else
local nf, tm = nil, nil
for n in traverse_id(glyph_code,head) do
- if getsubtype(n) < 256 then
+ if n.subtype < 256 then
nofvalid = nofvalid + 1
valid[nofvalid] = n
- local f = getfont(n)
- if f ~= nf then
- nf = f
- tm = fontdata[nf].resources.marks -- other hash in ctx
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
end
if tm then
- mk[n] = tm[getchar(n)]
+ mk[n] = tm[n.char]
end
end
end
@@ -295,7 +272,7 @@ function injections.handler(head,where,keep)
local cx = { }
if has_kerns and next(ky) then
for n, k in next, ky do
- setfield(n,"yoffset",k)
+ n.yoffset = k
end
end
-- todo: reuse t and use maxt
@@ -306,9 +283,9 @@ function injections.handler(head,where,keep)
for i=1,nofvalid do -- valid == glyphs
local n = valid[i]
if not mk[n] then
- local n_cursbase = getattr(n,a_cursbase)
+ local n_cursbase = n[a_cursbase]
if p_cursbase then
- local n_curscurs = getattr(n,a_curscurs)
+ local n_curscurs = n[a_curscurs]
if p_cursbase == n_curscurs then
local c = cursives[n_curscurs]
if c then
@@ -333,20 +310,20 @@ function injections.handler(head,where,keep)
end
end
elseif maxt > 0 then
- local ny = getfield(n,"yoffset")
+ local ny = n.yoffset
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- setfield(ti,"yoffset",getfield(ti,"yoffset") + ny)
+ ti.yoffset = ti.yoffset + ny
end
maxt = 0
end
if not n_cursbase and maxt > 0 then
- local ny = getfield(n,"yoffset")
+ local ny = n.yoffset
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- setfield(ti,"yoffset",ny)
+ ti.yoffset = ny
end
maxt = 0
end
@@ -354,11 +331,11 @@ function injections.handler(head,where,keep)
end
end
if maxt > 0 then
- local ny = getfield(n,"yoffset")
+ local ny = n.yoffset
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- setfield(ti,"yoffset",ny)
+ ti.yoffset = ny
end
maxt = 0
end
@@ -369,83 +346,57 @@ function injections.handler(head,where,keep)
if has_marks then
for i=1,nofvalid do
local p = valid[i]
- local p_markbase = getattr(p,a_markbase)
+ local p_markbase = p[a_markbase]
if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,getnext(p)) do
- local n_markmark = getattr(n,a_markmark)
+ local mrks = marks[p_markbase]
+ local nofmarks = #mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark = n[a_markmark]
if p_markbase == n_markmark then
- local index = getattr(n,a_markdone) or 1
+ local index = n[a_markdone] or 1
local d = mrks[index]
if d then
local rlmode = d[3]
--
local k = wx[p]
- local px = getfield(p,"xoffset")
- local ox = 0
if k then
local x = k[2]
local w = k[4]
if w then
if rlmode and rlmode >= 0 then
-- kern(x) glyph(p) kern(w-x) mark(n)
- ox = px - getfield(p,"width") + d[1] - (w-x)
- -- report_injections("l2r case 1: %p",ox)
+ n.xoffset = p.xoffset - p.width + d[1] - (w-x)
else
-- kern(w-x) glyph(p) kern(x) mark(n)
- ox = px - d[1] - x
- -- report_injections("r2l case 1: %p",ox)
+ n.xoffset = p.xoffset - d[1] - x
end
else
if rlmode and rlmode >= 0 then
-- okay for husayni
- ox = px - getfield(p,"width") + d[1]
- -- report_injections("r2l case 2: %p",ox)
+ n.xoffset = p.xoffset - p.width + d[1]
else
-- needs checking: is x ok here?
- ox = px - d[1] - x
- -- report_injections("r2l case 2: %p",ox)
+ n.xoffset = p.xoffset - d[1] - x
end
end
else
- -- if rlmode and rlmode >= 0 then
- -- ox = px - getfield(p,"width") + d[1]
- -- -- report_injections("l2r case 3: %p",ox)
- -- else
- -- ox = px - d[1]
- -- -- report_injections("r2l case 3: %p",ox)
- -- end
- --
- -- we need to deal with fonts that have marks with width
- --
- local wp = getfield(p,"width")
- local wn = getfield(n,"width") -- in arial marks have widths
if rlmode and rlmode >= 0 then
- ox = px - wp + d[1]
- -- report_injections("l2r case 3: %p",ox)
+ n.xoffset = p.xoffset - p.width + d[1]
else
- ox = px - d[1]
- -- report_injections("r2l case 3: %p",ox)
+ n.xoffset = p.xoffset - d[1]
end
- if wn ~= 0 then
- -- bad: we should center
- insert_node_before(head,n,newkern(-wn/2))
- insert_node_after(head,n,newkern(-wn/2))
- -- wx[n] = { 0, -wn/2, 0, -wn }
+ local w = n.width
+ if w ~= 0 then
+ insert_node_before(head,n,newkern(-w/2))
+ insert_node_after(head,n,newkern(-w/2))
end
- -- so far
end
- setfield(n,"xoffset",ox)
- --
- local py = getfield(p,"yoffset")
- local oy = 0
+ -- --
if mk[p] then
- oy = py + d[2]
+ n.yoffset = p.yoffset + d[2]
else
- oy = getfield(n,"yoffset") + py + d[2]
+ n.yoffset = n.yoffset + p.yoffset + d[2]
end
- setfield(n,"yoffset",oy)
--
if nofmarks == 1 then
break
@@ -453,8 +404,6 @@ function injections.handler(head,where,keep)
nofmarks = nofmarks - 1
end
end
- elseif not n_markmark then
- break -- HH: added 2013-09-12: no need to deal with non marks
else
-- KE: there can be <mark> <mkmk> <mark> sequences in ligatures
end
@@ -516,7 +465,6 @@ function injections.handler(head,where,keep)
-- if trace_injections then
-- show_result(head)
-- end
-head = tonode(head)
return head, true
elseif not keep then
kerns, cursives, marks = { }, { }, { }
@@ -526,14 +474,14 @@ head = tonode(head)
trace(head)
end
for n in traverse_id(glyph_code,head) do
- if getsubtype(n) < 256 then
- local k = getattr(n,a_kernpair)
+ if n.subtype < 256 then
+ local k = n[a_kernpair]
if k then
local kk = kerns[k]
if kk then
local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
if y and y ~= 0 then
- setfield(n,"yoffset",y) -- todo: h ?
+ n.yoffset = y -- todo: h ?
end
if w then
-- copied from above
@@ -570,9 +518,9 @@ head = tonode(head)
-- if trace_injections then
-- show_result(head)
-- end
- return tonode(head), true
+ return head, true
else
-- no tracing needed
end
- return tonode(head), false
+ return head, false
end
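-- A minimal illustrative sketch (editorial addition, not part of the patch):
-- with the tonut/tonode wrapping gone, the handler above receives the node
-- list as-is and hands it back together with a done flag, which is the shape
-- a LuaTeX node processing callback expects. A handler with that contract,
-- stripped of the actual injection work:

local glyph_code = node.id("glyph")

local function handler(head)
    local done = false
    for n in node.traverse_id(glyph_code, head) do
        -- a real handler applies the collected kern, cursive and mark data here
        done = true
    end
    return head, done
end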
diff --git a/tex/context/base/node-ltp.lua b/tex/context/base/node-ltp.lua
index 9f2491cfa..c52e001df 100644
--- a/tex/context/base/node-ltp.lua
+++ b/tex/context/base/node-ltp.lua
@@ -18,6 +18,7 @@ if not modules then modules = { } end modules ['node-par'] = {
-- todo: add a couple of plugin hooks
-- todo: maybe split expansion code paths
-- todo: fix line numbers (cur_list.pg_field needed)
+-- todo: make kerns stretch an option and disable it by default (definitely not shrink)
-- todo: check and improve protrusion
-- todo: arabic etc (we could use pretty large scales there) .. marks and cursive
@@ -72,8 +73,7 @@ if not modules then modules = { } end modules ['node-par'] = {
To be honest, I slowly start to grasp the magic here as normally I start from scratch when implementing
something (as it's the only way I can understand things). This time I had a recently acquired stack of
- Porcupine Tree disks to get me through, although I must admit that watching their dvd's is more fun
- than coding.
+ Porcupine Tree disks to get me through.
Picking up this effort was inspired by discussions between Luigi Scarso and me about efficiency of Lua
code and we needed some stress tests to compare regular LuaTeX and LuajitTeX. One of the tests was
@@ -121,13 +121,6 @@ if not modules then modules = { } end modules ['node-par'] = {
is enabled, but in the Lua variant the extra overhead is way less significant. This means that when we
retrofit the same approach into the core, the overhead of expansion can be sort of nilled.
- In 2013 the expansion factor method became also used at the TeX end so then I could complete the code
- here, and indeed, expansions works quite well now (not compatible of course because we use floats at the
- Lua end. The Lua base variant is still slower but quite ok, especially if we go nuts.
-
- A next iteration will provide plug-ins and more control. I will also explore the possibility to avoid the
- redundant hpack calculations (easier now, although I've only done some quick and dirty experiments.)
-
]]--
local utfchar = utf.char
@@ -187,38 +180,22 @@ local chardata = fonthashes.characters
local quaddata = fonthashes.quads
local parameters = fonthashes.parameters
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getlist = nuts.getlist
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-
-local slide_nodelist = nuts.slide -- get rid of this, probably ok > 78.2
-local find_tail = nuts.tail
-local new_node = nuts.new
-local copy_node = nuts.copy
-local copy_nodelist = nuts.copy_list
-local flush_node = nuts.free
-local flush_nodelist = nuts.flush_list
-local hpack_nodes = nuts.hpack
-local xpack_nodes = nuts.hpack
-local replace_node = nuts.replace
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local traverse_by_id = nuts.traverse_id
+local slide_nodes = node.slide
+local new_node = node.new
+local copy_node = node.copy
+local copy_node_list = node.copy_list
+local flush_node = node.free
+local flush_node_list = node.flush_list
+local hpack_nodes = node.hpack
+local xpack_nodes = node.hpack
+local replace_node = nodes.replace
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
+local traverse_by_id = node.traverse_id
local setnodecolor = nodes.tracers.colors.set
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -310,8 +287,7 @@ local glyphdir_is_equal = nodes.glyphdir_is_equal
local dir_pops = nodes.dir_is_pop
local dir_negations = nodes.dir_negation
-local is_skipable = nuts.protrusion_skippable
-
+local is_skipable = node.protrusion_skippable
local a_fontkern = attributes.private('fontkern')
-- helpers --
@@ -332,12 +308,12 @@ local function checked_line_dir(stack,current)
local n = stack.n + 1
stack.n = n
stack[n] = current
- return getfield(current,"dir")
+ return current.dir
elseif n > 0 then
local n = stack.n
local dirnode = stack[n]
dirstack.n = n - 1
- return getfield(dirnode,"dir")
+ return dirnode.dir
else
report_parbuilders("warning: missing pop node (%a)",1) -- in line ...
end
@@ -352,8 +328,8 @@ local function inject_dirs_at_end_of_line(stack,current,start,stop)
local n = stack.n
local h = nil
while start and start ~= stop do
- if getid(start) == whatsit_code and getsubtype(start) == dir_code then
- if not dir_pops[getfield(start,"dir")] then -- weird, what is this #
+ if start.id == whatsit_code and start.subtype == dir_code then
+ if not dir_pops[start.dir] then
n = n + 1
stack[n] = start
elseif n > 0 then
@@ -362,10 +338,10 @@ local function inject_dirs_at_end_of_line(stack,current,start,stop)
report_parbuilders("warning: missing pop node (%a)",2) -- in line ...
end
end
- start = getnext(start)
+ start = start.next
end
for i=n,1,-1 do
- h, current = insert_node_after(current,current,new_dir(dir_negations[getfield(stack[i],"dir")]))
+ h, current = insert_node_after(current,current,new_dir(dir_negations[stack[i].dir]))
end
stack.n = n
return current
@@ -414,8 +390,8 @@ local whatsiters = {
local get_whatsit_width = whatsiters.get_width
local get_whatsit_dimensions = whatsiters.get_dimensions
-local function get_width (n,dir) return getfield(n,"width") end
-local function get_dimensions(n,dir) return getfield(n,"width"), getfield(n,"height"), getfield(n,"depth") end
+local function get_width (n) return n.width end
+local function get_dimensions(n) return n.width, n.height, n.depth end
get_whatsit_width[pdfrefximage_code] = get_width
get_whatsit_width[pdfrefxform_code ] = get_width
@@ -438,13 +414,13 @@ end
local function check_shrinkage(par,n)
-- called often, so maybe move inline -- use NORMAL
- if getfield(n,"shrink_order") ~= 0 and getfield(n,"shrink") ~= 0 then
+ if n.shrink_order ~= 0 and n.shrink ~= 0 then
if par.no_shrink_error_yet then
par.no_shrink_error_yet = false
report_parbuilders("infinite glue shrinkage found in a paragraph and removed")
end
n = copy_node(n)
- setfield(n,"shrink_order",0)
+ n.shrink_order = 0
end
return n
end
@@ -491,10 +467,48 @@ setmetatableindex(expansions,function(t,font) -- we can store this in tfmdata if
end
end)
+-- local function char_stretch_shrink(p)
+-- local data = expansions[p.font][p.char]
+-- if data then
+-- return data.glyphstretch, data.glyphshrink
+-- else
+-- return 0, 0
+-- end
+-- end
+--
+-- local cal_margin_kern_var = char_stretch_shrink
+
+-- local function kern_stretch_shrink(p,d)
+-- local l = p.prev
+-- if l and l.id == glyph_code then -- how about disc nodes?
+-- local r = p.next
+-- if r and r.id == glyph_code then
+-- local lf, rf = l.font, r.font
+-- if lf == rf then
+-- local data = expansions[lf][l.char]
+-- if data then
+-- local stretch = data.stretch
+-- local shrink = data.shrink
+-- if stretch ~= 0 then
+-- -- stretch = data.factor * (d * stretch - d)
+-- stretch = data.factor * d * (stretch - 1)
+-- end
+-- if shrink ~= 0 then
+-- -- shrink = data.factor * (d * shrink - d)
+-- shrink = data.factor * d * (shrink - 1)
+-- end
+-- return stretch, shrink
+-- end
+-- end
+-- end
+-- end
+-- return 0, 0
+-- end
+
local function kern_stretch_shrink(p,d)
- local left = getprev(p)
- if left and getid(left) == glyph_code then -- how about disc nodes?
- local data = expansions[getfont(left)][getchar(left)]
+ local left = p.prev
+ if left and left.id == glyph_code then -- how about disc nodes?
+ local data = expansions[left.font][left.char]
if data then
local stretch = data.stretch
local shrink = data.shrink
@@ -512,8 +526,14 @@ local function kern_stretch_shrink(p,d)
return 0, 0
end
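-- A minimal illustrative sketch (editorial addition, not part of the patch):
-- the formula above lets a kern of width d inherit the expansion of the glyph
-- to its left, but only for the part of the expansion ratio that deviates
-- from one. Written out as a pure function over the expansion record:

local function scaled_kern_expansion(data, d)
    -- data.stretch / data.shrink are expansion ratios, data.factor the font's
    -- expansion step factor, d the kern amount
    local stretch = data.stretch == 0 and 0 or data.factor * d * (data.stretch - 1)
    local shrink  = data.shrink  == 0 and 0 or data.factor * d * (data.shrink  - 1)
    return stretch, shrink
end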
+-- local function kern_stretch_shrink(p,d)
+-- -- maybe make it an option in luatex where we also need to check for attribute fontkern but in general
+-- -- it makes no sense to scale kerns
+-- return 0, 0
+-- end
+
local expand_kerns = false
------ expand_kerns = "both"
+-- local expand_kerns = "both"
directives.register("builders.paragraphs.adjusting.kerns",function(v)
if not v then
@@ -603,18 +623,18 @@ end
local function find(head) -- do we really want to recurse into an hlist?
while head do
- local id = getid(head)
+ local id = head.id
if id == glyph_code then
return head
elseif id == hlist_code then
- local found = find(getlist(head))
+ local found = find(head.list)
if found then
return found
else
- head = getnext(head)
+ head = head.next
end
elseif is_skipable(head) then
- head = getnext(head)
+ head = head.next
else
return head
end
@@ -623,38 +643,38 @@ local function find(head) -- do we really want to recurse into an hlist?
end
local function find_protchar_left(l) -- weird function
- local ln = getnext(l)
- if ln and getid(ln) == hlist_code and not getlist(ln) and getfield(ln,"width") == 0 and getfield(ln,"height") == 0 and getfield(ln,"depth") == 0 then
- l = getnext(l)
+ local ln = l.next
+ if ln and ln.id == hlist_code and not ln.list and ln.width == 0 and ln.height == 0 and ln.depth == 0 then
+ l = l.next
else -- if d then -- was always true
- local id = getid(l)
+ local id = l.id
while ln and not (id == glyph_code or id < math_code) do -- is there always a glyph?
l = ln
- ln = getnext(l)
- id = getid(ln)
+ ln = l.next
+ id = ln.id
end
end
- -- if getid(l) == glyph_code then
+ -- if l.id == glyph_code then
-- return l
-- end
return find(l) or l
end
local function find(head,tail)
- local tail = tail or find_tail(head)
+ local tail = tail or slide_nodes(head)
while tail do
- local id = getid(tail)
+ local id = tail.id
if id == glyph_code then
return tail
elseif id == hlist_code then
- local found = find(getlist(tail))
+ local found = find(tail.list)
if found then
return found
else
- tail = getprev(tail)
+ tail = tail.prev
end
elseif is_skipable(tail) then
- tail = getprev(tail)
+ tail = tail.prev
else
return tail
end
@@ -667,8 +687,8 @@ local function find_protchar_right(l,r)
end
local function left_pw(p)
- local font = getfont(p)
- local prot = chardata[font][getchar(p)].left_protruding
+ local font = p.font
+ local prot = chardata[font][p.char].left_protruding
if not prot or prot == 0 then
return 0
end
@@ -676,8 +696,8 @@ local function left_pw(p)
end
local function right_pw(p)
- local font = getfont(p)
- local prot = chardata[font][getchar(p)].right_protruding
+ local font = p.font
+ local prot = chardata[font][p.char].right_protruding
if not prot or prot == 0 then
return 0
end
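-- A minimal illustrative sketch (editorial addition, not part of the patch):
-- both protrusion helpers only differ in the field they read from the font's
-- character table and both bail out early when no protrusion is defined; the
-- scaling that follows falls outside this hunk. The shared lookup, assuming
-- the chardata table declared earlier in this file:

local function protrusion_factor(p, field) -- field: "left_protruding" or "right_protruding"
    local prot = chardata[p.font][p.char][field]
    if not prot or prot == 0 then
        return 0
    end
    return prot -- still an unscaled factor at this point
end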
@@ -701,17 +721,17 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
local adjust_stretch = 0
local adjust_shrink = 0
while s do
- local id = getid(s)
+ local id = s.id
if id == glyph_code then
if is_rotated[line_break_dir] then -- can be shared
- size = size + getfield(s,"height") + getfield(s,"depth")
+ size = size + s.height + s.depth
else
- size = size + getfield(s,"width")
+ size = size + s.width
end
if checked_expansion then
- local data = checked_expansion[getfont(s)]
+ local data = checked_expansion[s.font]
if data then
- data = data[getchar(s)]
+ data = data[s.char]
if data then
adjust_stretch = adjust_stretch + data.glyphstretch
adjust_shrink = adjust_shrink + data.glyphshrink
@@ -719,16 +739,16 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
end
end
elseif id == hlist_code or id == vlist_code then
- if is_parallel[getfield(s,"dir")][line_break_dir] then
- size = size + getfield(s,"width")
+ if is_parallel[s.dir][line_break_dir] then
+ size = size + s.width
else
- size = size + getfield(s,"height") + getfield(s,"depth")
+ size = size + s.depth + s.height
end
elseif id == kern_code then
- local kern = getfield(s,"kern")
- if kern ~= 0 then
- if checked_expansion and expand_kerns and (getsubtype(s) == kerning_code or getattr(a_fontkern)) then
- local stretch, shrink = kern_stretch_shrink(s,kern)
+ local d = s.kern
+ if d ~= 0 then
+ if checked_expansion and expand_kerns and (s.subtype == kerning_code or s[a_fontkern]) then
+ local stretch, shrink = kern_stretch_shrink(s,d)
if expand_kerns == "stretch" then
adjust_stretch = adjust_stretch + stretch
elseif expand_kerns == "shrink" then
@@ -738,14 +758,14 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
adjust_shrink = adjust_shrink + shrink
end
end
- size = size + kern
+ size = size + d
end
elseif id == rule_code then
- size = size + getfield(s,"width")
- elseif trace_unsupported then
+ size = size + s.width
+ else
report_parbuilders("unsupported node at location %a",6)
end
- s = getnext(s)
+ s = s.next
end
return size, adjust_stretch, adjust_shrink
end
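-- A minimal illustrative sketch (editorial addition, not part of the patch):
-- in a rotated line direction a glyph or box advances by its height plus
-- depth instead of its width, which is why the accumulation above switches
-- on is_rotated / is_parallel. The size rule on its own:

local function natural_advance(n, rotated)
    if rotated then
        return n.height + n.depth
    else
        return n.width
    end
end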
@@ -759,14 +779,14 @@ local function compute_break_width(par,break_type,p) -- split in two
local break_size = break_width.size + disc_width.size
local break_adjust_stretch = break_width.adjust_stretch + disc_width.adjust_stretch
local break_adjust_shrink = break_width.adjust_shrink + disc_width.adjust_shrink
- local replace = getfield(p,"replace")
+ local replace = p.replace
if replace then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
break_size = break_size - size
break_adjust_stretch = break_adjust_stretch - adjust_stretch
break_adjust_shrink = break_adjust_shrink - adjust_shrink
end
- local post = getfield(p,"post")
+ local post = p.post
if post then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,post)
break_size = break_size + size
@@ -777,56 +797,56 @@ local function compute_break_width(par,break_type,p) -- split in two
break_width.adjust_stretch = break_adjust_stretch
break_width.adjust_shrink = break_adjust_shrink
if not post then
- p = getnext(p)
+ p = p.next
else
return
end
end
while p do -- skip spacing etc
- local id = getid(p)
+ local id = p.id
if id == glyph_code then
return -- happens often
elseif id == glue_code then
- local spec = getfield(p,"spec")
- local order = stretch_orders[getfield(spec,"stretch_order")]
- break_width.size = break_width.size - getfield(spec,"width")
- break_width[order] = break_width[order] - getfield(spec,"stretch")
- break_width.shrink = break_width.shrink - getfield(spec,"shrink")
+ local spec = p.spec
+ local order = stretch_orders[spec.stretch_order]
+ break_width.size = break_width.size - spec.width
+ break_width[order] = break_width[order] - spec.stretch
+ break_width.shrink = break_width.shrink - spec.shrink
elseif id == penalty_code then
-- do nothing
elseif id == kern_code then
- if getsubtype(p) == userkern_code then
- break_width.size = break_width.size - getfield(p,"kern")
+ if p.subtype == userkern_code then
+ break_width.size = break_width.size - p.kern
else
return
end
elseif id == math_code then
- break_width.size = break_width.size - getfield(p,"surround")
+ break_width.size = break_width.size - p.surround
else
return
end
- p = getnext(p)
+ p = p.next
end
end
local function append_to_vlist(par, b)
local prev_depth = par.prev_depth
if prev_depth > par.ignored_dimen then
- if getid(b) == hlist_code then
- local d = getfield(par.baseline_skip,"width") - prev_depth - getfield(b,"height") -- deficiency of space between baselines
- local s = d < par.line_skip_limit and new_lineskip(par.lineskip) or new_baselineskip(d)
+ if b.id == hlist_code then
+ local d = par.baseline_skip.width - prev_depth - b.height -- deficiency of space between baselines
+ local s = d < par.line_skip_limit and new_lineskip(tex.lineskip) or new_baselineskip(d)
-- local s = d < par.line_skip_limit
-- if s then
-- s = new_lineskip()
- -- setfield(s,"spec",tex.lineskip)
+ -- s.spec = tex.lineskip
-- else
-- s = new_baselineskip(d)
-- end
local head_field = par.head_field
if head_field then
- local n = slide_nodelist(head_field) -- todo: find_tail
- setfield(n,"next",s)
- setfield(s,"prev",n)
+ local n = slide_nodes(head_field)
+ n.next = s
+ s.prev = n
else
par.head_field = s
end
@@ -834,14 +854,14 @@ local function append_to_vlist(par, b)
end
local head_field = par.head_field
if head_field then
- local n = slide_nodelist(head_field) -- todo: find_tail
- setfield(n,"next",b)
- setfield(b,"prev",n)
+ local n = slide_nodes(head_field)
+ n.next = b
+ b.prev = n
else
par.head_field = b
end
- if getid(b) == hlist_code then
- local pd = getfield(b,"depth")
+ if b.id == hlist_code then
+ local pd = b.depth
par.prev_depth = pd
texnest[texnest.ptr].prevdepth = pd
end
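-- A minimal illustrative sketch (editorial addition, not part of the patch):
-- the interline glue chosen above follows the classic TeX rule -- keep
-- baselines a baselineskip apart unless the gap left after subtracting the
-- previous depth and the new box height drops below lineskiplimit, in which
-- case a plain lineskip is used. As a small decision helper:

local function interline_glue(baselineskip, prevdepth, height, lineskiplimit)
    local d = baselineskip - prevdepth - height -- deficiency between baselines
    if d < lineskiplimit then
        return "lineskip", 0
    else
        return "baselineskip", d
    end
end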
@@ -850,9 +870,9 @@ end
local function append_list(par, b)
local head_field = par.head_field
if head_field then
- local n = slide_nodelist(head_field) -- todo: find_tail
- setfield(n,"next",b)
- setfield(b,"prev",n)
+ local n = slide_nodes(head_field)
+ n.next = b
+ b.prev = n
else
par.head_field = b
end
@@ -864,18 +884,14 @@ end
local hztolerance = 2500
local hzwarned = false
-local function used_skip(s)
- return s and (getfield(s,"width") ~= 0 or getfield(s,"stretch") ~= 0 or getfield(s,"shrink") ~= 0) and s or nil
-end
-
local function initialize_line_break(head,display)
local hang_indent = tex.hangindent or 0
local hsize = tex.hsize or 0
local hang_after = tex.hangafter or 0
local par_shape_ptr = tex.parshape
- local left_skip = tonut(tex.leftskip) -- nodes
- local right_skip = tonut(tex.rightskip) -- nodes
+ local left_skip = tex.leftskip -- nodes
+ local right_skip = tex.rightskip -- nodes
local pretolerance = tex.pretolerance
local tolerance = tex.tolerance
local adjust_spacing = tex.pdfadjustspacing
@@ -883,7 +899,7 @@ local function initialize_line_break(head,display)
local last_line_fit = tex.lastlinefit
local newhead = new_temp()
- setfield(newhead,"next",head)
+ newhead.next = head
local adjust_spacing_status = adjust_spacing > 1 and -1 or 0
@@ -950,13 +966,13 @@ local function initialize_line_break(head,display)
last_line_depth = tex.pdflastlinedepth or 0, -- this will go away
ignored_dimen = tex.pdfignoreddimen or 0, -- this will go away
- baseline_skip = tonut(tex.baselineskip),
- lineskip = tonut(tex.lineskip),
- line_skip_limit = tex.lineskiplimit,
+ baseline_skip = tex.baselineskip or 0,
+ lineskip = tex.lineskip or 0,
+ line_skip_limit = tex.lineskiplimit or 0,
prev_depth = texnest[texnest.ptr].prevdepth,
- final_par_glue = slide_nodelist(head), -- todo: we know tail already, slow
+ final_par_glue = slide_nodes(head), -- todo: we know tail already, slow
par_break_dir = tex.pardir,
line_break_dir = tex.pardir,
@@ -1025,13 +1041,6 @@ local function initialize_line_break(head,display)
}
- -- optimizers
-
- par.used_left_skip = used_skip(par.left_skip)
- par.used_right_skip = used_skip(par.right_skip)
-
- -- so far
-
if adjust_spacing > 1 then
local checked_expansion = { par = par }
setmetatableindex(checked_expansion,check_expand_pars)
@@ -1053,13 +1062,13 @@ local function initialize_line_break(head,display)
local l = check_shrinkage(par,left_skip)
local r = check_shrinkage(par,right_skip)
- local l_order = stretch_orders[getfield(l,"stretch_order")]
- local r_order = stretch_orders[getfield(r,"stretch_order")]
+ local l_order = stretch_orders[l.stretch_order]
+ local r_order = stretch_orders[r.stretch_order]
- background.size = getfield(l,"width") + getfield(r,"width")
- background.shrink = getfield(l,"shrink") + getfield(r,"shrink")
- background[l_order] = getfield(l,"stretch")
- background[r_order] = getfield(r,"stretch") + background[r_order]
+ background.size = l.width + r.width
+ background.shrink = l.shrink + r.shrink
+ background[l_order] = l.stretch
+ background[r_order] = r.stretch + background[r_order]
-- this will move up so that we can assign the whole par table
@@ -1139,192 +1148,185 @@ local function initialize_line_break(head,display)
return par
end
--- there are still all kind of artefacts in here (a side effect I guess of pdftex,
--- etex, omega and other extensions that got obscured by patching)
-
local function post_line_break(par)
local prevgraf = texnest[texnest.ptr].prevgraf
- local current_line = prevgraf + 1 -- the current line number being justified
+ local cur_line = prevgraf + 1 -- the current line number being justified
+ local cur_p = nil
local adjust_spacing = par.adjust_spacing
local protrude_chars = par.protrude_chars
local statistics = par.statistics
- local stack = new_dir_stack()
-
- local leftskip = par.used_left_skip -- used or normal ?
- local rightskip = par.right_skip
- local parshape = par.par_shape_ptr
- local ignored_dimen = par.ignored_dimen
-
- local adapt_width = par.adapt_width
+ local p, s, k, w -- check when local
- -- reverse the links of the relevant passive nodes, goto first breakpoint
+ local q = par.best_bet.break_node
+ repeat -- goto first breakpoint
+ local r = q
+ q = q.prev_break
+ r.prev_break = cur_p
+ cur_p = r
+ until not q
- local current_break = nil
+ local stack = new_dir_stack()
- local break_node = par.best_bet.break_node
repeat
- local first_break = break_node
- break_node = break_node.prev_break
- first_break.prev_break = current_break
- current_break = first_break
- until not break_node
-
- local head = par.head
-
- -- maybe : each_...
- while current_break do
+ inject_dirs_at_begin_of_line(stack,par.head)
- inject_dirs_at_begin_of_line(stack,head)
+ local q = nil
+ local r = cur_p.cur_break
local disc_break = false
local post_disc_break = false
local glue_break = false
- local lineend = nil -- q lineend refers to the last node of the line (and paragraph)
- local lastnode = current_break.cur_break -- r lastnode refers to the node after which the dir nodes should be closed
-
- if not lastnode then
- -- only at the end
- lastnode = slide_nodelist(head) -- todo: find_tail
- if lastnode == par.final_par_glue then
- lineend = lastnode
- lastnode = getprev(lastnode)
+ if not r then
+ r = slide_nodes(par.head)
+ if r == par.final_par_glue then
+ q = r -- q refers to the last node of the line (and paragraph)
+ r = r.prev -- r refers to the node after which the dir nodes should be closed
end
- else -- todo: use insert_list_after
- local id = getid(lastnode)
+ else
+ local id = r.id
if id == glue_code then
- -- lastnode is normal skip
- lastnode = replace_node(lastnode,new_rightskip(rightskip))
+ -- r is normal skip
+ r = replace_node(r,new_rightskip(par.right_skip))
glue_break = true
- lineend = lastnode
- lastnode = getprev(r)
+ q = r -- q refers to the last node of the line
+ r = r.prev -- r refers to the node after which the dir nodes should be closed
elseif id == disc_code then
- local prevlast = getprev(lastnode)
- local nextlast = getnext(lastnode)
- local subtype = getsubtype(lastnode)
- local pre = getfield(lastnode,"pre")
- local post = getfield(lastnode,"post")
- local replace = getfield(lastnode,"replace")
+ -- todo: use insert_before/after
+ local prev_r = r.prev
+ local next_r = r.next
+ local subtype = r.subtype
+ local pre = r.pre
+ local post = r.post
+ local replace = r.replace
if subtype == second_disc_code then
- if not (getid(prevlast) == disc_code and getsubtype(prevlast) == first_disc_code) then
+ if not (prev_r.id == disc_code and prev_r.subtype == first_disc_code) then
report_parbuilders('unsupported disc at location %a',3)
end
if pre then
- flush_nodelist(pre)
- setfield(lastnode,"pre",nil)
- pre = nil -- signal
+ flush_node_list(pre)
+ r.pre = nil
+ pre = nil -- signal
end
if replace then
- local n = find_tail(replace)
- setfield(prevlast,"next",replace)
- setfield(replace,"prev",prevlast)
- setfield(n,"next",lastnode)
- setfield(lastnode,"prev",n)
- setfield(lastnode,"replace",nil)
- replace = nil -- signal
+ local n = slide_nodes(replace)
+ prev_r.next = replace
+ replace.prev = prev_r
+ n.next = r
+ r.prev = n
+ r.replace = nil
+ replace = nil -- signal
end
- local pre = getfield(prevlast,"pre")
- local post = getfield(prevlast,"post")
- local replace = getfield(prevlast,"replace")
+ local pre = prev_r.pre
+ local post = prev_r.post
+ local replace = prev_r.replace
if pre then
- flush_nodelist(pre)
- setfield(prevlast,"pre",nil)
+ flush_node_list(pre)
+ prev_r.pre = nil
end
if replace then
- flush_nodelist(replace)
- setfield(prevlast,"replace",nil)
+ flush_node_list(replace)
+ prev_r.replace = nil
end
if post then
- flush_nodelist(post)
- setfield(prevlast,"post",nil)
+ flush_node_list(post)
+ prev_r.post = nil
end
elseif subtype == first_disc_code then
- if not (getid(v) == disc_code and getsubtype(v) == second_disc_code) then
+ if not (next_r.id == disc_code and next_r.subtype == second_disc_code) then
report_parbuilders('unsupported disc at location %a',4)
end
- setfield(nextlast,"subtype",regular_disc_code)
- setfield(nextlast,"replace",post)
- setfield(lastnode,"post",nil)
+ next_r.subtype = regular_disc_code
+ next_r.replace = post
+ r.post = nil
end
if replace then
- setfield(lastnode,"replace",nil) -- free
- flush_nodelist(replace)
+ r.replace = nil -- free
+ flush_node_list(replace)
end
if pre then
- local n = find_tail(pre)
- setfield(prevlast,"next",pre)
- setfield(pre,"prev",prevlast)
- setfield(n,"next",lastnode)
- setfield(lastnode,"prev",n)
- setfield(lastnode,"pre",nil)
+ local n = slide_nodes(pre)
+ prev_r.next = pre
+ pre.prev = prev_r
+ n.next = r
+ r.prev = n
+ r.pre = nil
end
if post then
- local n = find_tail(post)
- setfield(lastnode,"next",post)
- setfield(post,"prev",lastnode)
- setfield(n,"next",nextlast)
- setfield(nextlast,"prev",n)
- setfield(lastnode,"post",nil)
+ local n = slide_nodes(post)
+ r.next = post
+ post.prev = r
+ n.next = next_r
+ next_r.prev = n
+ r.post = nil
post_disc_break = true
end
disc_break = true
elseif id == kern_code then
- setfield(lastnode,"kern",0)
- elseif getid(lastnode) == math_code then
- setfield(lastnode,"surround",0)
+ r.kern = 0
+ elseif r.id == math_code then
+ r.surround = 0
end
end
- lastnode = inject_dirs_at_end_of_line(stack,lastnode,getnext(head),current_break.cur_break)
- local rightbox = current_break.passive_right_box
- if rightbox then
- lastnode = insert_node_after(lastnode,lastnode,copy_node(rightbox))
+ r = inject_dirs_at_end_of_line(stack,r,par.head.next,cur_p.cur_break)
+ local crb = cur_p.passive_right_box
+ if crb then
+ local s = copy_node(crb)
+ local e = r.next
+ r.next = s
+ s.prev = r
+ s.next = e
+ if e then
+ e.prev = s
+ end
+ r = s
end
- if not lineend then
- lineend = lastnode
+ if not q then
+ q = r
end
- if lineend and lineend ~= head and protrude_chars > 0 then
- local id = getid(lineend)
- local c = (disc_break and (id == glyph_code or id ~= disc_code) and lineend) or getprev(lineend)
- local p = find_protchar_right(getnext(head),c)
- if p and getid(p) == glyph_code then
+ if q and q ~= par.head and protrude_chars > 0 then
+ local id = q.id
+ local c = (disc_break and (id == glyph_code or id ~= disc_code) and q) or q.prev
+ local p = find_protchar_right(par.head.next,c)
+ if p and p.id == glyph_code then
local w, last_rightmost_char = right_pw(p)
if last_rightmost_char and w ~= 0 then
- -- so we inherit attributes, lineend is new pseudo head
- lineend, c = insert_node_after(lineend,c,new_rightmarginkern(copy_node(last_rightmost_char),-w))
+ -- so we inherit attributes, q is new pseudo head
+ q, c = insert_node_after(q,c,new_rightmarginkern(copy_node(last_rightmost_char),-w))
end
end
end
- -- we finish the line
- local r = getnext(lineend)
- setfield(lineend,"next",nil)
if not glue_break then
- if rightskip then
- insert_node_after(lineend,lineend,new_rightskip(right_skip)) -- lineend moves on as pseudo head
- end
- end
- -- each time ?
- local q = getnext(head)
- setfield(head,"next",r)
+ local h
+ h, q = insert_node_after(q,q,new_rightskip(par.right_skip)) -- q moves on as pseudo head
+ end
+ r = q.next
+ q.next = nil
+ local phead = par.head
+ q = phead.next
+ phead.next = r
if r then
- setfield(r,"prev",head)
- end
- -- insert leftbox (if needed after parindent)
- local leftbox = current_break.passive_left_box
- if leftbox then
- local first = getnext(q)
- if first and current_line == (par.first_line + 1) and getid(first) == hlist_code and not getlist(first) then
- insert_node_after(q,q,copy_node(leftbox))
- else
- q = insert_node_before(q,q,copy_node(leftbox))
+ r.prev = phead
+ end
+ local clb = cur_p.passive_left_box
+ if clb then -- here we miss some prev links
+ local s = copy_node(clb)
+ s = q.next
+ r.next = q
+ q = r
+ if s and cur_line == (par.first_line + 1) and s.id == hlist_code and not s.list then
+ q = q.next
+ r.next = s.next
+ s.next = r
end
end
if protrude_chars > 0 then
local p = find_protchar_left(q)
- if p and getid(p) == glyph_code then
+ if p and p.id == glyph_code then
local w, last_leftmost_char = left_pw(p)
if last_leftmost_char and w ~= 0 then
-- so we inherit attributes, q is pseudo head and moves back
@@ -1332,35 +1334,32 @@ local function post_line_break(par)
end
end
end
- if leftskip then
- q = insert_node_before(q,q,new_leftskip(leftskip))
+ local ls = par.left_skip
+ if ls and (ls.width ~= 0 or ls.stretch ~= 0 or ls.shrink ~= 0) then
+ q = insert_node_before(q,q,new_leftskip(ls))
end
- local cur_width, cur_indent
- if current_line > par.last_special_line then
+ local curwidth, cur_indent
+ if cur_line > par.last_special_line then
cur_indent = par.second_indent
cur_width = par.second_width
- elseif parshape then
- local shape = parshape[current_line]
- cur_indent = shape[1]
- cur_width = shape[2]
else
- cur_indent = par.first_indent
- cur_width = par.first_width
- end
-
- if adapt_width then -- extension
- local l, r = adapt_width(par,current_line)
- cur_indent = cur_indent + l
- cur_width = cur_width - l - r
+ local psp = par.par_shape_ptr
+ if psp then
+ cur_indent = psp[cur_line][1]
+ cur_width = psp[cur_line][2]
+ else
+ cur_indent = par.first_indent
+ cur_width = par.first_width
+ end
end
-
statistics.noflines = statistics.noflines + 1
- local finished_line = nil
if adjust_spacing > 0 then
statistics.nofadjustedlines = statistics.nofadjustedlines + 1
- finished_line = xpack_nodes(q,cur_width,"cal_expand_ratio",par.par_break_dir,par.first_line,current_line) -- ,current_break.analysis)
+ -- in the built-in hpack cal_expand_ratio will later on call subst_ext_font
+ -- in the alternative approach we can do both in one run
+ just_box = xpack_nodes(q,cur_width,"cal_expand_ratio",par.par_break_dir) -- ,cur_p.analysis)
else
- finished_line = xpack_nodes(q,cur_width,"exactly",par.par_break_dir,par.first_line,current_line) -- ,current_break.analysis)
+ just_box = xpack_nodes(q,cur_width,"exactly",par.par_break_dir) -- ,cur_p.analysis)
end
if protrude_chars > 0 then
statistics.nofprotrudedlines = statistics.nofprotrudedlines + 1
@@ -1369,42 +1368,39 @@ local function post_line_break(par)
local adjust_head = texlists.adjust_head
local pre_adjust_head = texlists.pre_adjust_head
--
- setfield(finished_line,"shift",cur_indent)
- -- this will probably go away:
- if par.each_line_height ~= ignored_dimen then
- setfield(finished_line,"height",par.each_line_height)
+ just_box.shift = cur_indent
+ if par.each_line_height ~= par.ignored_dimen then
+ just_box.height = par.each_line_height
end
- if par.each_line_depth ~= ignored_dimen then
- setfield(finished_line,"depth",par.each_line_depth)
+ if par.each_line_depth ~= par.ignored_dimen then
+ just_box.depth = par.each_line_depth
end
- if par.first_line_height ~= ignored_dimen and (current_line == par.first_line + 1) then
- setfield(finished_line,"height",par.first_line_height)
+ if par.first_line_height ~= par.ignored_dimen and (cur_line == par.first_line + 1) then
+ just_box.height = par.first_line_height
end
- if par.last_line_depth ~= ignored_dimen and current_line + 1 == par.best_line then
- setfield(finished_line,"depth",par.last_line_depth)
+ if par.last_line_depth ~= par.ignored_dimen and cur_line + 1 == par.best_line then
+ just_box.depth = par.last_line_depth
end
- --
if texlists.pre_adjust_head ~= pre_adjust_head then
append_list(par, texlists.pre_adjust_head)
texlists.pre_adjust_head = pre_adjust_head
end
- append_to_vlist(par,finished_line)
+ append_to_vlist(par, just_box)
if texlists.adjust_head ~= adjust_head then
append_list(par, texlists.adjust_head)
texlists.adjust_head = adjust_head
end
- --
local pen
- if current_line + 1 ~= par.best_line then
- if current_break.passive_pen_inter then
- pen = current_break.passive_pen_inter
+ if cur_line + 1 ~= par.best_line then
+ if cur_p.passive_pen_inter then
+ pen = cur_p.passive_pen_inter
else
pen = par.inter_line_penalty
end
- if current_line == prevgraf + 1 then
+ if cur_line == prevgraf + 1 then
pen = pen + par.club_penalty
end
- if current_line + 2 == par.best_line then
+ if cur_line + 2 == par.best_line then
if par.display then
pen = pen + par.display_widow_penalty
else
@@ -1412,58 +1408,56 @@ local function post_line_break(par)
end
end
if disc_break then
- if current_break.passive_pen_broken ~= 0 then
- pen = pen + current_break.passive_pen_broken
+ if cur_p.passive_pen_broken ~= 0 then
+ pen = pen + cur_p.passive_pen_broken
else
pen = pen + par.broken_penalty
end
end
if pen ~= 0 then
append_to_vlist(par,new_penalty(pen))
- end
+ end
end
- current_line = current_line + 1
- current_break = current_break.prev_break
- if current_break and not post_disc_break then
- local current = head
- local next = nil
+ cur_line = cur_line + 1
+ cur_p = cur_p.prev_break
+ if cur_p and not post_disc_break then
+ local phead = par.head
+ local r = phead
while true do
- next = getnext(current)
- if next == current_break.cur_break or getid(next) == glyph_code then
+ q = r.next
+ if q == cur_p.cur_break or q.id == glyph_code then
break
end
- local id = getid(next)
- local subtype = getsubtype(next)
- if id == whatsit_code and subtype == localpar_code then
- -- nothing
- elseif id < math_code then
- -- messy criterium
- break
- elseif id == kern_code and (subtype ~= userkern_code and not getattr(next,a_fontkern)) then
- -- fontkerns and accent kerns as well as otf injections
- break
+ local id = q.id
+ if not (id == whatsit_code and q.subtype == localpar_code) then
+ if id < math_code or (id == kern_code and q.subtype ~= userkern_code) then
+ break
+ end
end
- current = next
+ r = q
end
- if current ~= head then
- setfield(current,"next",nil)
- flush_nodelist(getnext(head))
- setfield(head,"next",next)
- if next then
- setfield(next,"prev",head)
+ if r ~= phead then
+ r.next = nil
+ flush_node_list(phead.next)
+ phead.next = q
+ if q then
+ q.prev = phead
end
end
end
+ until not cur_p
+ if cur_line ~= par.best_line then -- or not par.head.next then
+ report_parbuilders("line breaking")
end
- -- if current_line ~= par.best_line then
- -- report_parbuilders("line breaking")
- -- end
- par.head = nil -- needs checking
- current_line = current_line - 1
+ if par.head then -- added
+-- flush_node(par.head) -- the localpar_code whatsit
+ par.head = nil
+ end
+ cur_line = cur_line - 1
if trace_basic then
- report_parbuilders("paragraph broken into %a lines",current_line)
+ report_parbuilders("paragraph broken into %a lines",cur_line)
end
- texnest[texnest.ptr].prevgraf = current_line
+ texnest[texnest.ptr].prevgraf = cur_line
end
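-- A minimal illustrative sketch (editorial addition, not part of the patch):
-- the penalty assembled between lines above is additive -- club, (display)
-- widow and broken penalties are stacked on top of the inter-line penalty
-- that either the passive node or the paragraph provides. As a pure function
-- with hypothetical argument names:

local function interline_penalty(base, firstline, prelastline, display, discbreak, club, widow, displaywidow, broken)
    local pen = base
    if firstline   then pen = pen + club end
    if prelastline then pen = pen + (display and displaywidow or widow) end
    if discbreak   then pen = pen + broken end
    return pen
end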
local function wrap_up(par)
@@ -1481,11 +1475,11 @@ local function wrap_up(par)
par.do_last_line_fit = false
else
local glue = par.final_par_glue
- local spec = copy_node(getfield(glue,"spec"))
- setfield(spec,"width",getfield(spec,"width") + active_short - active_glue)
- setfield(spec,"stretch",0)
- -- flush_node(getfield(glue,"spec")) -- brrr, when we do this we can get an "invalid id stretch message", maybe dec refcount
- setfield(glue,"spec",spec)
+ local spec = copy_node(glue.spec)
+ spec.width = spec.width + active_short - active_glue
+ spec.stretch = 0
+ -- flush_node(glue.spec) -- brrr, when we do this we can get an "invalid id stretch message", maybe dec refcount
+ glue.spec = spec
if trace_lastlinefit then
report_parbuilders("applying last line fit, short %a, glue %p",active_short,active_glue)
end
@@ -1493,8 +1487,8 @@ local function wrap_up(par)
end
-- we have a bunch of glue and and temp nodes not freed
local head = par.head
- if getid(head) == temp_code then
- par.head = getnext(head)
+ if head.id == temp_code then
+ par.head = head.next
flush_node(head)
end
post_line_break(par)
@@ -1504,8 +1498,7 @@ local function wrap_up(par)
end
-- we could do active nodes differently ... table instead of linked list or a list
--- with prev nodes but it doesn't save much (as we still need to keep indices then
--- in next)
+-- with prev nodes
local function deactivate_node(par,prev_prev_r,prev_r,r,cur_active_width,checked_expansion) -- no need for adjust if disabled
local active = par.active
@@ -1623,26 +1616,18 @@ local function lastlinecrap(shortfall,active_short,active_glue,cur_active_width,
end
end
--- todo: statistics .. count tries and so
-
-local trialcount = 0
-
-local function try_break(pi, break_type, par, first_p, current, checked_expansion)
-
--- trialcount = trialcount + 1
--- print(trialcount,pi,break_type,current,nuts.tostring(current))
+local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
- if pi >= infinite_penalty then -- this breakpoint is inhibited by infinite penalty
- local p_active = par.active
- return p_active, p_active and p_active.next
- elseif pi <= -infinite_penalty then -- this breakpoint will be forced
- pi = eject_penalty
+ if pi >= infinite_penalty then
+ return -- this breakpoint is inhibited by infinite penalty
+ elseif pi <= -infinite_penalty then
+ pi = eject_penalty -- this breakpoint will be forced
end
local prev_prev_r = nil -- a step behind prev_r, if type(prev_r)=delta_code
local prev_r = par.active -- stays a step behind r
local r = nil -- runs through the active list
- local no_break_yet = true -- have we found a feasible break at current?
+ local no_break_yet = true -- have we found a feasible break at cur_p?
local node_r_stays_active = false -- should node r remain in the active list?
local line_width = 0 -- the current line will be justified to this width
local line_number = 0 -- line number of current active node
@@ -1663,10 +1648,6 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
local tracing_paragraphs = par.tracing_paragraphs
-- local par_active = par.active
- local adapt_width = par.adapt_width
-
- local parshape = par.par_shape_ptr
-
local cur_active_width = checked_expansion and { -- distance from current active node
size = active_width.size,
stretch = active_width.stretch,
@@ -1721,8 +1702,8 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
break_width.adjust_stretch = 0
break_width.adjust_shrink = 0
end
- if current then
- compute_break_width(par,break_type,current)
+ if cur_p then
+ compute_break_width(par,break_type,cur_p)
end
end
if prev_r.id == delta_code then
@@ -1788,14 +1769,14 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
end
for fit_class = fit_very_loose_class, fit_tight_class do
if minimal_demerits[fit_class] <= minimum_demerits then
- -- insert a new active node from best_place[fit_class] to current
+ -- insert a new active node from best_place[fit_class] to cur_p
par.pass_number = par.pass_number + 1
local prev_break = best_place[fit_class]
local passive = {
id = passive_code,
subtype = nosubtype_code,
next = par.passive,
- cur_break = current,
+ cur_break = cur_p,
serial = par.pass_number,
prev_break = prev_break,
passive_pen_inter = par.internal_pen_inter,
@@ -1830,7 +1811,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
prev_r.next = q
prev_r = q
if tracing_paragraphs then
- diagnostics.break_node(par,q,fit_class,break_type,current)
+ diagnostics.break_node(par,q,fit_class,break_type,cur_p)
end
end
minimal_demerits[fit_class] = awful_badness
@@ -1869,7 +1850,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
end
end
if r == par.active then
- return r, r and r.next -- p_active, n_active
+ return
end
if line_number > par.easy_line then
old_line_number = max_halfword - 1
@@ -1878,16 +1859,12 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
old_line_number = line_number
if line_number > par.last_special_line then
line_width = par.second_width
- elseif parshape then
- line_width = parshape[line_number][2]
+ elseif par.par_shape_ptr then
+ line_width = par.par_shape_ptr[line_number][2]
else
line_width = par.first_width
end
end
- if adapt_width then
- local l, r = adapt_width(par,line_number)
- line_width = line_width - l - r
- end
end
local artificial_demerits = false -- has d been forced to zero
local shortfall = line_width - cur_active_width.size - par.internal_right_box_width -- used in badness calculations
@@ -1901,17 +1878,17 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
-- this is quite time consuming
local b = r.break_node
local l = b and b.cur_break or first_p
- local o = current and getprev(current)
- if current and getid(current) == disc_code and getfield(current,"pre") then
- o = find_tail(getfield(current,"pre"))
+ local o = cur_p and cur_p.prev
+ if cur_p and cur_p.id == disc_code and cur_p.pre then
+ o = slide_nodes(cur_p.pre)
else
o = find_protchar_right(l,o)
end
- if o and getid(o) == glyph_code then
+ if o and o.id == glyph_code then
pw, rp = right_pw(o)
shortfall = shortfall + pw
end
- local id = getid(l)
+ local id = l.id
if id == glyph_code then
-- ok ?
elseif id == disc_code and l.post then
@@ -1919,7 +1896,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
else
l = find_protchar_left(l)
end
- if l and getid(l) == glyph_code then
+ if l and l.id == glyph_code then
pw, lp = left_pw(l)
shortfall = shortfall + pw
end
@@ -1929,23 +1906,27 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
local margin_kern_shrink = 0
if protrude_chars > 1 then
if lp then
- local data = expansions[getfont(lp)][getchar(lp)]
- if data then
- margin_kern_stretch, margin_kern_shrink = data.glyphstretch, data.glyphshrink
- end
+-- margin_kern_stretch, margin_kern_shrink = cal_margin_kern_var(lp)
+local data = expansions[lp.font][lp.char]
+if data then
+ margin_kern_stretch, margin_kern_shrink = data.glyphstretch, data.glyphshrink
+end
end
if rp then
- local data = expansions[getfont(lp)][getchar(lp)]
- if data then
- margin_kern_stretch = margin_kern_stretch + data.glyphstretch
- margin_kern_shrink = margin_kern_shrink + data.glyphshrink
- end
+-- local mka, mkb = cal_margin_kern_var(rp)
+-- margin_kern_stretch = margin_kern_stretch + mka
+-- margin_kern_shrink = margin_kern_shrink + mkb
+local data = expansions[rp.font][rp.char]
+if data then
+ margin_kern_stretch = margin_kern_stretch + data.glyphstretch
+ margin_kern_shrink = margin_kern_shrink + data.glyphshrink
+end
end
end
local total = cur_active_width.adjust_stretch + margin_kern_stretch
if shortfall > 0 and total > 0 then
if total > shortfall then
- shortfall = total / (par.max_stretch_ratio / par.cur_font_step) / 2
+ shortfall = total / (par.max_stretch_ratio / par.cur_font_step) / 2 -- to be adapted
else
shortfall = shortfall - total
end
@@ -1953,7 +1934,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
total = cur_active_width.adjust_shrink + margin_kern_shrink
if shortfall < 0 and total > 0 then
if total > - shortfall then
- shortfall = - total / (par.max_shrink_ratio / par.cur_font_step) / 2
+ shortfall = - total / (par.max_shrink_ratio / par.cur_font_step) / 2 -- to be adapted
else
shortfall = shortfall + total
end
@@ -1968,7 +1949,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
if cur_active_width.fi ~= 0 or cur_active_width.fil ~= 0 or cur_active_width.fill ~= 0 or cur_active_width.filll ~= 0 then
if not do_last_line_fit then
-- okay
- elseif not current then
+ elseif not cur_p then
found, shortfall, fit_class, g, b = lastlinecrap(shortfall,r.active_short,r.active_glue,cur_active_width,par.fill_width,par.last_line_fit)
else
shortfall = 0
@@ -2003,7 +1984,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
end
end
if do_last_line_fit and not found then
- if not current then
+ if not cur_p then
-- g = 0
shortfall = 0
elseif shortfall > 0 then
@@ -2051,7 +2032,7 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
d = d - pi * pi
end
if break_type == hyphenated_code and r.id == hyphenated_code then
- if current then
+ if cur_p then
d = d + par.double_hyphen_demerits
else
d = d + par.final_hyphen_demerits
@@ -2063,9 +2044,9 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
end
end
if tracing_paragraphs then
- diagnostics.feasible_break(par,current,r,b,pi,d,artificial_demerits)
+ diagnostics.feasible_break(par,cur_p,r,b,pi,d,artificial_demerits)
end
- d = d + r.total_demerits -- this is the minimum total demerits from the beginning to current via r
+ d = d + r.total_demerits -- this is the minimum total demerits from the beginning to cur_p via r
if d <= minimal_demerits[fit_class] then
minimal_demerits[fit_class] = d
best_place [fit_class] = r.break_node
@@ -2089,16 +2070,25 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
end
end
+local function kern_break(par, cur_p, first_p, checked_expansion) -- move inline if needed
+ local v = cur_p.next
+ if par.auto_breaking and v.id == glue_code then
+ try_break(0, unhyphenated_code, par, first_p, cur_p, checked_expansion)
+ end
+ local active_width = par.active_width
+ if cur_p.id ~= math_code then
+ active_width.size = active_width.size + cur_p.kern
+ else
+ active_width.size = active_width.size + cur_p.surround
+ end
+end
+
-- we can call the normal one for simple box building in the otr so we need
-- frequent enabling/disabling
-local dcolor = { [0] = "red", "green", "blue", "magenta", "cyan", "gray" }
-
local temp_head = new_temp()
function constructors.methods.basic(head,d)
- head = tonut(head)
-
if trace_basic then
report_parbuilders("starting at %a",head)
end
@@ -2150,27 +2140,24 @@ function constructors.methods.basic(head,d)
par.passive = nil -- = 0
par.printed_node = temp_head -- only when tracing, shared
+ par.printed_node.next = head
par.pass_number = 0
--- par.auto_breaking = true
-
- setfield(temp_head,"next",head)
-
- local current = head
- local first_p = current
+ par.auto_breaking = true
- local auto_breaking = true
+ local cur_p = head
+ local first_p = cur_p
par.font_in_short_display = 0
- if current and getid(current) == whatsit_code and getsubtype(current) == localpar_code then
- par.init_internal_left_box = getfield(current,"box_left")
- par.init_internal_left_box_width = getfield(current,"box_left_width")
- par.internal_pen_inter = getfield(current,"pen_inter")
- par.internal_pen_broken = getfield(current,"pen_broken")
+ if cur_p and cur_p.id == whatsit_code and cur_p.subtype == localpar_code then
+ par.init_internal_left_box = cur_p.box_left
+ par.init_internal_left_box_width = cur_p.box_left_width
+ par.internal_pen_inter = cur_p.pen_inter
+ par.internal_pen_broken = cur_p.pen_broken
par.internal_left_box = par.init_internal_left_box
par.internal_left_box_width = par.init_internal_left_box_width
- par.internal_right_box = getfield(current,"box_right")
- par.internal_right_box_width = getfield(current,"box_right_width")
+ par.internal_right_box = cur_p.box_right
+ par.internal_right_box_width = cur_p.box_right_width
end
-- all passes are combined in this loop so maybe we should split this into
@@ -2182,34 +2169,23 @@ function constructors.methods.basic(head,d)
local fontexp, lastfont -- we can pass fontexp to calculate width if needed
- -- i flattened the inner loop over glyphs .. it looks nicer and the extra p_active ~= n_active
- -- test is fast enough (and try_break now returns the updated values); the kern helper has been
- -- inlined as it did a double check on id so in fact we had hardly any code to share
-
- local p_active = par.active
- local n_active = p_active and p_active.next
- local second_pass = par.second_pass
-
- trialcount = 0
-
- while current and p_active ~= n_active do
- local id = getid(current)
- if id == glyph_code then
+ while cur_p and par.active.next ~= par.active do
+ while cur_p and cur_p.id == glyph_code do
if is_rotated[par.line_break_dir] then
- active_width.size = active_width.size + getfield(current,"height") + getfield(current,"depth")
+ active_width.size = active_width.size + cur_p.height + cur_p.depth
else
- active_width.size = active_width.size + getfield(current,"width")
+ active_width.size = active_width.size + cur_p.width
end
if checked_expansion then
- local currentfont = getfont(current)
- local data= checked_expansion[currentfont]
+ local data = checked_expansion[cur_p.font]
if data then
+ local currentfont = cur_p.font
if currentfont ~= lastfont then
fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
lastfont = currentfont
end
if fontexps then
- local expansion = fontexps[getchar(current)]
+ local expansion = fontexps[cur_p.char]
if expansion then
active_width.adjust_stretch = active_width.adjust_stretch + expansion.glyphstretch
active_width.adjust_shrink = active_width.adjust_shrink + expansion.glyphshrink
@@ -2217,45 +2193,51 @@ function constructors.methods.basic(head,d)
end
end
end
- elseif id == hlist_code or id == vlist_code then
- if is_parallel[getfield(current,"dir")][par.line_break_dir] then
- active_width.size = active_width.size + getfield(current,"width")
+ cur_p = cur_p.next
+ end
+ if not cur_p then -- TODO
+ report_parbuilders("problems with linebreak_tail")
+ os.exit()
+ end
+ local id = cur_p.id
+ if id == hlist_code or id == vlist_code then
+ if is_parallel[cur_p.dir][par.line_break_dir] then
+ active_width.size = active_width.size + cur_p.width
else
- active_width.size = active_width.size + getfield(current,"depth") + getfield(current,"height")
+ active_width.size = active_width.size + cur_p.depth + cur_p.height
end
elseif id == glue_code then
--- if par.auto_breaking then
- if auto_breaking then
- local prev_p = getprev(current)
+ if par.auto_breaking then
+ local prev_p = cur_p.prev
if prev_p and prev_p ~= temp_head then
- local id = getid(prev_p)
+ local id = prev_p.id
if id == glyph_code or
- (id < math_code and (id ~= whatsit_code or getsubtype(prev_p) ~= dir_code)) or -- was: precedes_break(prev_p)
- (id == kern_code and getsubtype(prev_p) ~= userkern_code) then
- p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
+ (id < math_code and (id ~= whatsit_code or prev_p.subtype ~= dir_code)) or -- was: precedes_break(prev_p)
+ (id == kern_code and prev_p.subtype ~= userkern_code) then
+ try_break(0, unhyphenated_code, par, first_p, cur_p, checked_expansion)
end
end
end
- local spec = check_shrinkage(par,getfield(current,"spec"))
- local order = stretch_orders[getfield(spec,"stretch_order")]
- setfield(current,"spec",spec)
- active_width.size = active_width.size + getfield(spec,"width")
- active_width[order] = active_width[order] + getfield(spec,"stretch")
- active_width.shrink = active_width.shrink + getfield(spec,"shrink")
+ local spec = check_shrinkage(par,cur_p.spec)
+ local order = stretch_orders[spec.stretch_order]
+ cur_p.spec = spec
+ active_width.size = active_width.size + spec.width
+ active_width[order] = active_width[order] + spec.stretch
+ active_width.shrink = active_width.shrink + spec.shrink
elseif id == disc_code then
- local subtype = getsubtype(current)
- if subtype ~= second_disc_code then
+ local subtype = cur_p.subtype
+ if subtype ~= second_disc_code then -- are there still second_disc_code nodes in luatex?
local line_break_dir = par.line_break_dir
- if second_pass or subtype <= automatic_disc_code then
+ if par.second_pass then -- todo: make second pass local
local actual_pen = subtype == automatic_disc_code and par.ex_hyphen_penalty or par.hyphen_penalty
- local pre = getfield(current,"pre")
+ local pre = cur_p.pre
if not pre then -- trivial pre-break
disc_width.size = 0
if checked_expansion then
disc_width.adjust_stretch = 0
disc_width.adjust_shrink = 0
end
- p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, current, checked_expansion)
+ try_break(actual_pen, hyphenated_code, par, first_p, cur_p, checked_expansion)
else
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre)
disc_width.size = size
@@ -2269,13 +2251,13 @@ function constructors.methods.basic(head,d)
-- disc_width.adjust_stretch = 0
-- disc_width.adjust_shrink = 0
end
- p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, current, checked_expansion)
+ try_break(actual_pen, hyphenated_code, par, first_p, cur_p, checked_expansion)
if subtype == first_disc_code then
- local cur_p_next = getnext(current)
- if getid(cur_p_next) ~= disc_code or getsubtype(cur_p_next) ~= second_disc_code then
+ local cur_p_next = cur_p.next
+ if cur_p_next.id ~= disc_code or cur_p_next.subtype ~= second_disc_code then
report_parbuilders("unsupported disc at location %a",1)
else
- local pre = getfield(cur_p_next,"pre")
+ local pre = cur_p_next.pre
if pre then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre)
disc_width.size = disc_width.size + size
@@ -2283,16 +2265,16 @@ function constructors.methods.basic(head,d)
disc_width.adjust_stretch = disc_width.adjust_stretch + adjust_stretch
disc_width.adjust_shrink = disc_width.adjust_shrink + adjust_shrink
end
- p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, cur_p_next, checked_expansion)
+ try_break(actual_pen, hyphenated_code, par, first_p, cur_p_next, checked_expansion)
--
-- I will look into this some day ... comment in linebreak.w says that this fails,
-- maybe this is what Taco means with his comment in the luatex manual.
--
-- do_one_seven_eight(sub_disc_width_from_active_width);
-- do_one_seven_eight(reset_disc_width);
- -- s = vlink_no_break(vlink(current));
+ -- s = vlink_no_break(vlink(cur_p));
-- add_to_widths(s, line_break_dir, pdf_adjust_spacing,disc_width);
- -- ext_try_break(...,first_p,vlink(current));
+ -- ext_try_break(...,first_p,vlink(cur_p));
--
else
report_parbuilders("unsupported disc at location %a",2)
@@ -2307,7 +2289,7 @@ function constructors.methods.basic(head,d)
end
end
end
- local replace = getfield(current,"replace")
+ local replace = cur_p.replace
if replace then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
active_width.size = active_width.size + size
@@ -2318,20 +2300,14 @@ function constructors.methods.basic(head,d)
end
end
elseif id == kern_code then
- if getsubtype(current) == userkern_code then
- local v = getnext(current)
--- if par.auto_breaking and getid(v) == glue_code then
- if auto_breaking and getid(v) == glue_code then
- p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
- end
- local active_width = par.active_width
- active_width.size = active_width.size + getfield(current,"kern")
+ if cur_p.subtype == userkern_code then
+ kern_break(par,cur_p,first_p, checked_expansion)
else
- local kern = getfield(current,"kern")
- if kern ~= 0 then
- active_width.size = active_width.size + kern
- if checked_expansion and expand_kerns and (getsubtype(current) == kerning_code or getattr(current,a_fontkern)) then
- local stretch, shrink = kern_stretch_shrink(current,kern)
+ local d = cur_p.kern
+ if d ~= 0 then
+ active_width.size = active_width.size + d
+ if checked_expansion and expand_kerns and (cur_p.subtype == kerning_code or cur_p[a_fontkern]) then
+ local stretch, shrink = kern_stretch_shrink(cur_p,d)
if expand_kerns == "stretch" then
active_width.adjust_stretch = active_width.adjust_stretch + stretch
elseif expand_kerns == "shrink" then
@@ -2344,47 +2320,40 @@ function constructors.methods.basic(head,d)
end
end
elseif id == math_code then
--- par.auto_breaking = getsubtype(current) == endmath_code
- auto_breaking = getsubtype(current) == endmath_code
- local v = getnext(current)
--- if par.auto_breaking and getid(v) == glue_code then
- if auto_breaking and getid(v) == glue_code then
- p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
- end
- local active_width = par.active_width
- active_width.size = active_width.size + getfield(current,"surround")
+ par.auto_breaking = cur_p.subtype == endmath_code
+ kern_break(par,cur_p, first_p, checked_expansion)
elseif id == rule_code then
- active_width.size = active_width.size + getfield(current,"width")
+ active_width.size = active_width.size + cur_p.width
elseif id == penalty_code then
- p_active, n_active = try_break(getfield(current,"penalty"), unhyphenated_code, par, first_p, current, checked_expansion)
+ try_break(cur_p.penalty, unhyphenated_code, par, first_p, cur_p, checked_expansion)
elseif id == whatsit_code then
- local subtype = getsubtype(current)
+ local subtype = cur_p.subtype
if subtype == localpar_code then
- par.internal_pen_inter = getfield(current,"pen_inter")
- par.internal_pen_broken = getfield(current,"pen_broken")
- par.internal_left_box = getfield(current,"box_left")
- par.internal_left_box_width = getfield(current,"box_left_width")
- par.internal_right_box = getfield(current,"box_right")
- par.internal_right_box_width = getfield(current,"box_right_width")
+ par.internal_pen_inter = cur_p.pen_inter
+ par.internal_pen_broken = cur_p.pen_broken
+ par.internal_left_box = cur_p.box_left
+ par.internal_left_box_width = cur_p.box_left_width
+ par.internal_right_box = cur_p.box_right
+ par.internal_right_box_width = cur_p.box_right_width
elseif subtype == dir_code then
par.line_break_dir = checked_line_dir(dirstack) or par.line_break_dir
else
local get_width = get_whatsit_width[subtype]
if get_width then
- active_width.size = active_width.size + get_width(current,par.line_break_dir)
+ active_width.size = active_width.size + get_width(cur_p)
end
end
- elseif trace_unsupported then
- if id == mark_code or id == ins_code or id == adjust_code then
- -- skip
- else
- report_parbuilders("node of type %a found in paragraph",type(id))
- end
+ elseif id == mark_code or id == ins_code or id == adjust_code then
+ -- skip
+ else
+ report_parbuilders("node of type %a found in paragraph",type(id))
end
- current = getnext(current)
+ cur_p = cur_p.next
end
- if not current then
- local p_active, n_active = try_break(eject_penalty, hyphenated_code, par, first_p, current, checked_expansion)
+ if not cur_p then
+ try_break(eject_penalty, hyphenated_code, par, first_p, cur_p, checked_expansion)
+ local p_active = par.active
+ local n_active = p_active.next
if n_active ~= p_active then
local r = n_active
par.fewest_demerits = awful_badness
@@ -2398,7 +2367,7 @@ function constructors.methods.basic(head,d)
par.best_line = par.best_bet.line_number
local asked_looseness = par.looseness
if asked_looseness == 0 then
- return tonode(wrap_up(par))
+ return wrap_up(par)
end
local r = n_active
local actual_looseness = 0
@@ -2418,30 +2387,30 @@ function constructors.methods.basic(head,d)
end
end
r = r.next
- until r == p_active
+ until r == p_active -- weird, loop list?
par.best_line = par.best_bet.line_number
if actual_looseness == asked_looseness or par.final_pass then
- return tonode(wrap_up(par))
+ return wrap_up(par)
end
end
end
reset_meta(par) -- clean up the memory by removing the break nodes
- if not second_pass then
+ if not par.second_pass then
if tracing_paragraphs then
diagnostics.current_pass(par,"secondpass")
end
- par.threshold = par.tolerance
+ par.threshold = par.tolerance
par.second_pass = true
- par.final_pass = par.emergency_stretch <= 0
+ par.final_pass = par.emergency_stretch <= 0
else
if tracing_paragraphs then
diagnostics.current_pass(par,"emergencypass")
end
par.background.stretch = par.background.stretch + par.emergency_stretch
- par.final_pass = true
+ par.final_pass = true
end
end
- return tonode(wrap_up(par))
+ return wrap_up(par)
end
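-- Pass escalation in the loop above (summary sketch; the first-pass threshold
-- comes from the par initialization, which is not shown here and presumably
-- mirrors \pretolerance):
--
--   pass 1 : discretionaries are skipped (par.second_pass is false)
--   pass 2 : par.threshold = par.tolerance, par.second_pass = true,
--            par.final_pass = (par.emergency_stretch <= 0)
--   pass 3 : par.background.stretch grows by par.emergency_stretch,
--            par.final_pass = true
--
-- wrap_up(par) is returned as soon as a best bet with the asked looseness
-- exists, or unconditionally on the final pass.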
-- standard tex logging .. will be adapted ..
@@ -2466,58 +2435,48 @@ function diagnostics.current_pass(par,what)
write_nl("log",format("@%s",what))
end
-local verbose = false -- true
-
-local function short_display(target,a,font_in_short_display)
+local function short_display(a,font_in_short_display)
while a do
- local id = getid(a)
+ local id = a.id
if id == glyph_code then
- local font = getfont(a)
+ local font = a.font
if font ~= font_in_short_display then
- write(target,tex.fontidentifier(font) .. ' ')
+ write("log",tex.fontidentifier(font) .. ' ')
font_in_short_display = font
end
- if getsubtype(a) == ligature_code then
- font_in_short_display = short_display(target,getfield(a,"components"),font_in_short_display)
+ if a.subtype == ligature_code then
+ font_in_short_display = short_display(a.components,font_in_short_display)
else
- write(target,utfchar(getchar(a)))
+ write("log",utfchar(a.char))
end
+-- elseif id == rule_code then
+-- write("log","|")
+-- elseif id == glue_code then
+-- if a.spec.writable then
+-- write("log"," ")
+-- end
+-- elseif id == math_code then
+-- write("log","$")
elseif id == disc_code then
- font_in_short_display = short_display(target,getfield(a,"pre"),font_in_short_display)
- font_in_short_display = short_display(target,getfield(a,"post"),font_in_short_display)
- elseif verbose then
- write(target,format("[%s]",nodecodes[id]))
- elseif id == rule_code then
- write(target,"|")
- elseif id == glue_code then
- if getfield(getfield(a,"spec"),"writable") then
- write(target," ")
- end
- elseif id == kern_code and (getsubtype(a) == userkern_code or getattr(a,a_fontkern)) then
- if verbose then
- write(target,"[|]")
- else
- write(target,"")
- end
- elseif id == math_code then
- write(target,"$")
- else
- write(target,"[]")
+ font_in_short_display = short_display(a.pre,font_in_short_display)
+ font_in_short_display = short_display(a.post,font_in_short_display)
+ else -- no explicit checking
+ write("log",format("[%s]",nodecodes[id]))
end
- a = getnext(a)
+ a = a.next
end
return font_in_short_display
end
diagnostics.short_display = short_display
-function diagnostics.break_node(par, q, fit_class, break_type, current) -- %d ?
+function diagnostics.break_node(par, q, fit_class, break_type, cur_p) -- %d ?
local passive = par.passive
local typ_ind = break_type == hyphenated_code and '-' or ""
if par.do_last_line_fit then
local s = number.toscaled(q.active_short)
local g = number.toscaled(q.active_glue)
- if current then
+ if cur_p then
write_nl("log",format("@@%d: line %d.%d%s t=%s s=%s g=%s",
passive.serial or 0,q.line_number-1,fit_class,typ_ind,q.total_demerits,s,g))
else
@@ -2535,26 +2494,26 @@ function diagnostics.break_node(par, q, fit_class, break_type, current) -- %d ?
end
end
-function diagnostics.feasible_break(par, current, r, b, pi, d, artificial_demerits)
+function diagnostics.feasible_break(par, cur_p, r, b, pi, d, artificial_demerits)
local printed_node = par.printed_node
- if printed_node ~= current then
+ if printed_node ~= cur_p then
write_nl("log","")
- if not current then
- par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
+ if not cur_p then
+ par.font_in_short_display = short_display(printed_node.next,par.font_in_short_display)
else
- local save_link = getnext(current)
- setfield(cur_p,"next",nil)
+ local save_link = cur_p.next
+ cur_p.next = nil
write_nl("log","")
- par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
- setfield(cur_p,"next",save_link)
+ par.font_in_short_display = short_display(printed_node.next,par.font_in_short_display)
+ cur_p.next = save_link
end
- par.printed_node = current
+ par.printed_node = cur_p
end
write_nl("log","@")
- if not current then
+ if not cur_p then
write_esc("par")
else
- local id = getid(current)
+ local id = cur_p.id
if id == glue_code then
-- print nothing
elseif id == penalty_code then
@@ -2603,54 +2562,49 @@ end)
-- with the glyph.
local function glyph_width_height_depth(curdir,pdir,p)
- local wd = getfield(p,"width")
- local ht = getfield(p,"height")
- local dp = getfield(p,"depth")
if is_rotated[curdir] then
if is_parallel[curdir][pdir] then
- local half = (ht + dp) / 2
- return wd, half, half
+ local half = (p.height + p.depth) / 2
+ return p.width, half, half
else
- local half = wd / 2
- return ht + dp, half, half
+ local half = p.width / 2
+ return p.height + p.depth, half, half
end
elseif is_rotated[pdir] then
if is_parallel[curdir][pdir] then
- local half = (ht + dp) / 2
- return wd, half, half
+ local half = (p.height + p.depth) / 2
+ return p.width, half, half
else
- return ht + dp, wd, 0 -- weird
+ return p.height + p.depth, p.width, 0 -- weird
end
else
if glyphdir_is_equal[curdir][pdir] then
- return wd, ht, dp
+ return p.width, p.height, p.depth
elseif is_opposite[curdir][pdir] then
- return wd, dp, ht
+ return p.width, p.depth, p.height
else -- can this happen?
- return ht + dp, wd, 0
+ return p.height + p.depth, p.width, 0 -- weird
end
end
end
local function pack_width_height_depth(curdir,pdir,p)
- local wd = getfield(p,"width")
- local ht = getfield(p,"height")
- local dp = getfield(p,"depth")
if is_rotated[curdir] then
if is_parallel[curdir][pdir] then
- local half = (ht + dp) / 2
- return wd, half, half
+ local half = (p.height + p.depth) / 2
+ return p.width, half, half
else -- can this happen?
- local half = wd / 2
- return ht + dp, half, half
+ local half = p.width / 2
+ return p.height + p.depth, half, half
end
else
if pardir_is_equal[curdir][pdir] then
- return wd, ht, dp
+ return p.width, p.height, p.depth
elseif is_opposite[curdir][pdir] then
- return wd, dp, ht
+ return p.width, p.depth, p.height
else -- weird dimensions, can this happen?
- return ht + dp, wd, 0
+ -- return p.width, p.depth, p.height
+ return p.height + p.depth, p.width, 0
end
end
end
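-- Illustrative sketch (made-up values, not part of the patch): for a node p
-- with p.width = 10, p.height = 7, p.depth = 3 the two helpers above return
--
--   equal directions        -->  10, 7, 3   (unchanged)
--   opposite directions     -->  10, 3, 7   (height and depth swap)
--   rotated, parallel pair  -->  10, 5, 5   (ht+dp split evenly over ht and dp)
--
-- so the first value is always the amount by which the natural size advances
-- in the current packing direction, whatever the node's own direction is.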
@@ -2668,17 +2622,17 @@ end
--
-- local hlist = new_node("hlist")
--
--- setfield(hlist,"list",head)
--- setfield(hlist,"dir",direction or tex.textdir)
--- setfield(hlist,"width",width)
--- setfield(hlist,"height",height)
--- setfield(hlist,"depth",depth)
+-- hlist.list = head
+-- hlist.dir = direction or tex.textdir
+-- hlist.width = width
+-- hlist.height = height
+-- hlist.depth = depth
--
-- if delta == 0 then
--
--- setfield(hlist,"glue_sign",0)
--- setfield(hlist,"glue_order",0)
--- setfield(hlist,"glue_set",0)
+-- hlist.glue_sign = 0
+-- hlist.glue_order = 0
+-- hlist.glue_set = 0
--
-- else
--
@@ -2694,15 +2648,16 @@ end
-- else
-- local stretch = analysis.stretch
-- if stretch ~= 0 then
--- setfield(hlist,"glue_sign",1) -- stretch
--- setfield(hlist,"glue_order",order)
--- setfield(hlist,"glue_set",delta/stretch)
+-- hlist.glue_sign = 1 -- stretch
+-- hlist.glue_order = order
+-- hlist.glue_set = delta/stretch
-- else
--- setfield(hlist,"glue_sign",0) -- nothing
--- setfield(hlist,"glue_order",order)
--- setfield(hlist,"glue_set",0)
+-- hlist.glue_sign = 0 -- nothing
+-- hlist.glue_order = order
+-- hlist.glue_set = 0
-- end
-- end
+-- print("stretch",hlist.glue_sign,hlist.glue_order,hlist.glue_set)
--
-- else
--
@@ -2711,15 +2666,16 @@ end
-- else
-- local shrink = analysis.shrink
-- if shrink ~= 0 then
--- setfield(hlist,"glue_sign",2) -- shrink
--- setfield(hlist,"glue_order",order)
--- setfield(hlist,"glue_set",-delta/stretch)
+-- hlist.glue_sign = 2 -- shrink
+-- hlist.glue_order = order
+-- hlist.glue_set = - delta/shrink
-- else
--- setfield(hlist,"glue_sign",0) -- nothing
--- setfield(hlist,"glue_order",order)
--- setfield(hlist,"glue_set",0)
+-- hlist.glue_sign = 0 -- nothing
+-- hlist.glue_order = order
+-- hlist.glue_set = 0
-- end
-- end
+-- print("shrink",hlist.glue_sign,hlist.glue_order,hlist.glue_set)
--
-- end
--
@@ -2733,7 +2689,7 @@ end
-- end
-- local current = head
-- while current do
--- local id = getid(current)
+-- local id = current.id
-- if id == glyph_code then
-- local stretch, shrink = char_stretch_shrink(current) -- get only one
-- if stretch then
@@ -2743,12 +2699,12 @@ end
-- current.expansion_factor = font_expand_ratio * stretch
-- end
-- elseif id == kern_code then
--- local kern = getfield(current,"kern")
--- if kern ~= 0 and getsubtype(current) == kerning_code then
--- setfield(current,"kern",font_expand_ratio * kern)
+-- local kern = current.kern
+-- if kern ~= 0 and current.subtype == kerning_code then
+-- current.kern = font_expand_ratio * current.kern
-- end
-- end
--- current = getnext(current)
+-- current = current.next
-- end
-- elseif font_expand_ratio < 0 then
-- if font_expand_ratio < -1000 then
@@ -2756,7 +2712,7 @@ end
-- end
-- local current = head
-- while current do
--- local id = getid(current)
+-- local id = current.id
-- if id == glyph_code then
-- local stretch, shrink = char_stretch_shrink(current) -- get only one
-- if shrink then
@@ -2766,31 +2722,26 @@ end
-- current.expansion_factor = font_expand_ratio * shrink
-- end
-- elseif id == kern_code then
--- local kern = getfield(current,"kern")
--- if kern ~= 0 and getsubtype(current) == kerning_code then
--- setfield(current,"kern",font_expand_ratio * kern)
+-- local kern = current.kern
+-- if kern ~= 0 and current.subtype == kerning_code then
+-- current.kern = font_expand_ratio * current.kern
-- end
-- end
--- current = getnext(current)
+-- current = current.next
-- end
-- end
-- return hlist, 0
-- end
-local function hpack(head,width,method,direction,firstline,line) -- fast version when head = nil
+local function hpack(head,width,method,direction) -- fast version when head = nil
-- we can pass the adjust_width and adjust_height so that we don't need to recalculate them but
- -- with the glue mess it's less trivial as we lack detail .. challenge
+ -- with the glue mess it's less trivial as we lack detail
local hlist = new_node("hlist")
- setfield(hlist,"dir",direction)
-
if head == nil then
- setfield(hlist,"width",width)
return hlist, 0
- else
- setfield(hlist,"list",head)
end
local cal_expand_ratio = method == "cal_expand_ratio" or method == "subst_ex_font"
@@ -2806,6 +2757,8 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
local font_shrink = 0
local font_expand_ratio = 0
local last_badness = 0
+ local disc_stack = { }
+ local disc_level = 0
local expansion_stack = cal_expand_ratio and { } -- todo: optionally pass this
local expansion_index = 0
local total_stretch = { [0] = 0, 0, 0, 0, 0 }
@@ -2815,8 +2768,11 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
local adjust_head = texlists.adjust_head
local pre_adjust_head = texlists.pre_adjust_head
- local adjust_tail = adjust_head and slide_nodelist(adjust_head) -- todo: find_tail
- local pre_adjust_tail = pre_adjust_head and slide_nodelist(pre_adjust_head) -- todo: find_tail
+ local adjust_tail = adjust_head and slide_nodes(adjust_head)
+ local pre_adjust_tail = pre_adjust_head and slide_nodes(pre_adjust_head)
+
+ hlist.list = head
+ hlist.dir = hpack_dir
new_dir_stack(hpack_dir)
@@ -2831,205 +2787,225 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
local fontexps, lastfont
- local function process(current) -- called nested in disc replace
+ local current = head
- while current do
- local id = getid(current)
- if id == glyph_code then
- if cal_expand_ratio then
- local currentfont = getfont(current)
- if currentfont ~= lastfont then
- fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
- lastfont = currentfont
- end
- if fontexps then
- local expansion = fontexps[getchar(current)]
- if expansion then
- font_stretch = font_stretch + expansion.glyphstretch
- font_shrink = font_shrink + expansion.glyphshrink
- expansion_index = expansion_index + 1
- expansion_stack[expansion_index] = current
- end
- end
- end
- -- use inline
- local wd, ht, dp = glyph_width_height_depth(hpack_dir,"TLT",current) -- was TRT ?
- natural = natural + wd
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
- end
- elseif id == kern_code then
- local kern = getfield(current,"kern")
- if kern == 0 then
- -- no kern
- elseif getsubtype(current) == kerning_code then -- check getfield(p,"kern")
- if cal_expand_ratio then
- local stretch, shrink = kern_stretch_shrink(current,kern)
- font_stretch = font_stretch + stretch
- font_shrink = font_shrink + shrink
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ if cal_expand_ratio then
+ local currentfont = current.font
+ if currentfont ~= lastfont then
+ fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
+ lastfont = currentfont
+ end
+ if fontexps then
+ local expansion = fontexps[current.char]
+ if expansion then
+ font_stretch = font_stretch + expansion.glyphstretch
+ font_shrink = font_shrink + expansion.glyphshrink
expansion_index = expansion_index + 1
expansion_stack[expansion_index] = current
end
- natural = natural + kern
- else
- natural = natural + kern
end
- elseif id == disc_code then
- local subtype = getsubtype(current)
- if subtype ~= second_disc_code then
- -- todo : local stretch, shrink = char_stretch_shrink(s)
- local replace = getfield(current,"replace")
- if replace then
- process(replace)
- end
- end
- elseif id == glue_code then
- local spec = getfield(current,"spec")
- natural = natural + getfield(spec,"width")
- local op = getfield(spec,"stretch_order")
- local om = getfield(spec,"shrink_order")
- total_stretch[op] = total_stretch[op] + getfield(spec,"stretch")
- total_shrink [om] = total_shrink [om] + getfield(spec,"shrink")
- if getsubtype(current) >= leaders_code then
- local leader = getleader(current)
- local ht = getfield(leader,"height")
- local dp = getfield(leader,"depth")
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
+ end
+ -- use inline if no expansion
+ local wd, ht, dp = glyph_width_height_depth(hpack_dir,"TLT",current) -- was TRT ?
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ current = current.next
+ elseif id == kern_code then
+ local kern = current.kern
+ if kern == 0 then
+ -- no kern
+ else
+ if cal_expand_ratio and expand_kerns and (current.subtype == kerning_code or current[a_fontkern]) then -- check p.kern
+ local stretch, shrink = kern_stretch_shrink(current,kern)
+ if expand_kerns == "stretch" then
+ font_stretch = font_stretch + stretch
+ elseif expand_kerns == "shrink" then
+ font_shrink = font_shrink + shrink
+ else
+ font_stretch = font_stretch + stretch
+ font_shrink = font_shrink + shrink
end
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = current
end
- elseif id == hlist_code or id == vlist_code then
- local sh = getfield(current,"shift")
- local wd, ht, dp = pack_width_height_depth(hpack_dir,getfield(current,"dir") or hpack_dir,current) -- added: or pack_dir
- local hs, ds = ht - sh, dp + sh
- natural = natural + wd
- if hs > height then
- height = hs
- end
- if ds > depth then
- depth = ds
+ natural = natural + kern
+ end
+ current = current.next
+ elseif id == disc_code then
+ if current.subtype ~= second_disc_code then
+ -- we follow the end of line disc chain
+ local replace = current.replace
+ if replace then
+ disc_level = disc_level + 1
+ disc_stack[disc_level] = current.next
+ current = replace
+ else
+ current = current.next
end
- elseif id == rule_code then
- local wd = getfield(current,"width")
- local ht = getfield(current,"height")
- local dp = getfield(current,"depth")
- natural = natural + wd
+ else
+ current = current.next
+ end
+ elseif id == glue_code then
+ local spec = current.spec
+ natural = natural + spec.width
+ local op = spec.stretch_order
+ local om = spec.shrink_order
+ total_stretch[op] = total_stretch[op] + spec.stretch
+ total_shrink [om] = total_shrink [om] + spec.shrink
+ if current.subtype >= leaders_code then
+ local leader = current.leader
+ local ht = leader.height
+ local dp = leader.depth
if ht > height then
height = ht
end
if dp > depth then
depth = dp
end
- elseif id == math_code then
- natural = natural + getfield(current,"surround")
- elseif id == unset_code then
- local wd = getfield(current,"width")
- local ht = getfield(current,"height")
- local dp = getfield(current,"depth")
- local sh = getfield(current,"shift")
- local hs = ht - sh
- local ds = dp + sh
- natural = natural + wd
- if hs > height then
- height = hs
+ end
+ current = current.next
+ elseif id == hlist_code or id == vlist_code then
+ local sh = current.shift
+ local wd, ht, dp = pack_width_height_depth(hpack_dir,current.dir or hpack_dir,current) -- added: or pack_dir
+ local hs, ds = ht - sh, dp + sh
+ natural = natural + wd
+ if hs > height then
+ height = hs
+ end
+ if ds > depth then
+ depth = ds
+ end
+ current = current.next
+ elseif id == rule_code then
+ local wd = current.width
+ local ht = current.height
+ local dp = current.depth
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ current = current.next
+ elseif id == math_code then
+ natural = natural + current.surround
+ current = current.next
+ elseif id == unset_code then
+ local wd = current.width
+ local ht = current.height
+ local dp = current.depth
+ local sh = current.shift
+ local hs = ht - sh
+ local ds = dp + sh
+ natural = natural + wd
+ if hs > height then
+ height = hs
+ end
+ if ds > depth then
+ depth = ds
+ end
+ current = current.next
+ elseif id == ins_code or id == mark_code then
+ local prev = current.prev
+ local next = current.next
+ if adjust_tail then -- todo
+ if next then
+ next.prev = prev
end
- if ds > depth then
- depth = ds
+ if prev then
+ prev.next = next
end
- elseif id == ins_code or id == mark_code then
- local prev = getprev(current)
- local next = getnext(current)
- if adjust_tail then -- todo
- if next then
- setfield(next,"prev",prev)
- end
- if prev then
- setfield(prev,"next",next)
+ current.prev = adjust_tail
+ current.next = nil
+ adjust_tail.next = current
+ adjust_tail = current
+ else
+ adjust_head = current
+ adjust_tail = current
+ current.prev = nil
+ current.next = nil
+ end
+ current = next
+ elseif id == adjust_code then
+ local list = current.list
+ if adjust_tail then
+ adjust_tail.next = list
+ adjust_tail = slide_nodes(list)
+ else
+ adjust_head = list
+ adjust_tail = slide_nodes(list)
+ end
+ current = current.next
+ elseif id == whatsit_code then
+ local subtype = current.subtype
+ if subtype == dir_code then
+ hpack_dir = checked_line_dir(stack,current) or hpack_dir
+ else
+ local get_dimensions = get_whatsit_dimensions[subtype]
+ if get_dimensions then
+ local wd, ht, dp = get_dimensions(current)
+ natural = natural + wd
+ if ht > height then
+ height = ht
end
- setfield(current,"prev",adjust_tail)
- setfield(current,"next",nil)
- adjust_setfield(tail,"next",current)
- adjust_tail = current
- else
- adjust_head = current
- adjust_tail = current
- setfield(current,"prev",nil)
- setfield(current,"next",nil)
- end
- elseif id == adjust_code then
- local list = getlist(current)
- if adjust_tail then
- adjust_setfield(tail,"next",list)
- else
- adjust_head = list
- end
- adjust_tail = slide_nodelist(list) -- find_tail(list)
- elseif id == whatsit_code then
- local subtype = getsubtype(current)
- if subtype == dir_code then
- hpack_dir = checked_line_dir(stack,current) or hpack_dir
- else
- local get_dimensions = get_whatsit_dimensions[subtype]
- if get_dimensions then
- local wd, ht, dp = get_dimensions(current,hpack_dir)
- natural = natural + wd
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
- end
+ if dp > depth then
+ depth = dp
end
end
- elseif id == marginkern_code then
- local width = getfield(current,"width")
- if cal_expand_ratio then
- -- is this ok?
- local glyph = getfield(current,"glyph")
- local char_pw = getsubtype(current) == leftmargin_code and left_pw or right_pw
- font_stretch = font_stretch - width - char_pw(glyph)
- font_shrink = font_shrink - width - char_pw(glyph)
- expansion_index = expansion_index + 1
- expansion_stack[expansion_index] = glyph
- end
- natural = natural + width
end
- current = getnext(current)
+ current = current.next
+ elseif id == marginkern_code then
+ if cal_expand_ratio then
+ local glyph = current.glyph
+ local char_pw = current.subtype == leftmargin_code and left_pw or right_pw
+ font_stretch = font_stretch - current.width - char_pw(glyph)
+ font_shrink = font_shrink - current.width - char_pw(glyph)
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = glyph
+ end
+ natural = natural + current.width
+ current = current.next
+ else
+ current = current.next
+ end
+ if not current and disc_level > 0 then
+ current = disc_stack[disc_level]
+ disc_level = disc_level - 1
end
-
end
-
- process(head)
-
if adjust_tail then
adjust_tail.next = nil -- todo
end
if pre_adjust_tail then
pre_adjust_tail.next = nil -- todo
end
- if method == "additional" then
+ if method == "additional" then
width = width + natural
end
- setfield(hlist,"width",width)
- setfield(hlist,"height",height)
- setfield(hlist,"depth",depth)
+ hlist.width = width
+ hlist.height = height
+ hlist.depth = depth
local delta = width - natural
if delta == 0 then
- setfield(hlist,"glue_sign",0)
- setfield(hlist,"glue_order",0)
- setfield(hlist,"glue_set",0)
+ hlist.glue_sign = 0
+ hlist.glue_order = 0
+ hlist.glue_set = 0
elseif delta > 0 then
-- natural width smaller than requested width
local order = (total_stretch[4] ~= 0 and 4 or total_stretch[3] ~= 0 and 3) or
(total_stretch[2] ~= 0 and 2 or total_stretch[1] ~= 0 and 1) or 0
+-- local correction = 0
if cal_expand_ratio and order == 0 and font_stretch > 0 then -- check sign of font_stretch
font_expand_ratio = delta/font_stretch
@@ -3041,38 +3017,41 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
for i=1,expansion_index do
local g = expansion_stack[i]
local e
- if getid(g) == glyph_code then
- local currentfont = getfont(g)
+ if g.id == glyph_code then
+ local currentfont = g.font
if currentfont ~= lastfont then
fontexps = expansions[currentfont]
lastfont = currentfont
end
- local data = fontexps[getchar(g)]
+ local data = fontexps[g.char]
if trace_expansion then
setnodecolor(g,"hz:positive")
end
e = font_expand_ratio * data.glyphstretch / 1000
+-- correction = correction + (e / 1000) * g.width
else
- local kern = getfield(g,"kern")
+ local kern = g.kern
local stretch, shrink = kern_stretch_shrink(g,kern)
e = font_expand_ratio * stretch / 1000
+-- correction = correction + (e / 1000) * kern
end
- setfield(g,"expansion_factor",e)
+ g.expansion_factor = e
end
end
+-- delta = delta - correction
local tso = total_stretch[order]
if tso ~= 0 then
- setfield(hlist,"glue_sign",1)
- setfield(hlist,"glue_order",order)
- setfield(hlist,"glue_set",delta/tso)
+ hlist.glue_sign = 1
+ hlist.glue_order = order
+ hlist.glue_set = delta/tso
else
- setfield(hlist,"glue_sign",0)
- setfield(hlist,"glue_order",order)
- setfield(hlist,"glue_set",0)
+ hlist.glue_sign = 0
+ hlist.glue_order = order
+ hlist.glue_set = 0
end
if font_expand_ratio ~= 0 then
-- todo
- elseif order == 0 then -- and getlist(hlist) then
+ elseif order == 0 then -- and hlist.list then
last_badness = calculate_badness(delta,total_stretch[0])
if last_badness > tex.hbadness then
if last_badness > 100 then
@@ -3086,6 +3065,7 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
-- natural width larger than requested width
local order = total_shrink[4] ~= 0 and 4 or total_shrink[3] ~= 0 and 3
or total_shrink[2] ~= 0 and 2 or total_shrink[1] ~= 0 and 1 or 0
+-- local correction = 0
if cal_expand_ratio and order == 0 and font_shrink > 0 then -- check sign of font_shrink
font_expand_ratio = delta/font_shrink
@@ -3097,47 +3077,54 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
for i=1,expansion_index do
local g = expansion_stack[i]
local e
- if getid(g) == glyph_code then
- local currentfont = getfont(g)
+ if g.id == glyph_code then
+ local currentfont = g.font
if currentfont ~= lastfont then
fontexps = expansions[currentfont]
lastfont = currentfont
end
- local data = fontexps[getchar(g)]
+ local data = fontexps[g.char]
if trace_expansion then
setnodecolor(g,"hz:negative")
end
e = font_expand_ratio * data.glyphshrink / 1000
+ -- local d = (e / 1000) * 1000
+ -- local eps = g.width - (1 + d / 1000000) * g.width
+ -- correction = correction + eps
+ -- e = d
+-- correction = correction + (e / 1000) * g.width
else
- local kern = getfield(g,"kern")
+ local kern = g.kern
local stretch, shrink = kern_stretch_shrink(g,kern)
e = font_expand_ratio * shrink / 1000
+-- correction = correction + (e / 1000) * kern
end
- setfield(g,"expansion_factor",e)
+ g.expansion_factor = e
end
end
+-- delta = delta - correction
local tso = total_shrink[order]
if tso ~= 0 then
- setfield(hlist,"glue_sign",2)
- setfield(hlist,"glue_order",order)
- setfield(hlist,"glue_set",-delta/tso)
+ hlist.glue_sign = 2
+ hlist.glue_order = order
+ hlist.glue_set = -delta/tso
else
- setfield(hlist,"glue_sign",0)
- setfield(hlist,"glue_order",order)
- setfield(hlist,"glue_set",0)
+ hlist.glue_sign = 0
+ hlist.glue_order = order
+ hlist.glue_set = 0
end
if font_expand_ratio ~= 0 then
-- todo
- elseif tso < -delta and order == 0 then -- and getlist(hlist) then
+ elseif tso < -delta and order == 0 then -- and hlist.list then
last_badness = 1000000
- setfield(hlist,"glue_set",1)
+ hlist.glue_set = 1
local fuzz = - delta - total_shrink[0]
local hfuzz = tex.hfuzz
if fuzz > hfuzz or tex.hbadness < 100 then
local overfullrule = tex.overfullrule
if fuzz > hfuzz and overfullrule > 0 then
-- weird, is always called and no rules shows up
- setfield(slide_nodelist(list),"next",new_rule(overfullrule,nil,nil,hlist.dir)) -- todo: find_tail
+ slide_nodes(list).next = new_rule(overfullrule,nil,nil,hlist.dir)
end
diagnostics.overfull_hbox(hlist,line,-delta)
end
@@ -3148,7 +3135,7 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
return hlist, last_badness
end
-xpack_nodes = hpack -- comment this for old fashioned expansion (we need to fix float mess)
+xpack_nodes = hpack -- comment this for old fashioned expansion
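-- Minimal numeric sketch of the glue setting above (illustrative values): say
-- the requested width is 100, the natural width 94 and the line has 12 units
-- of finite (order 0) stretch and no font expansion. Then
--
--   delta     = 100 - 94     --> 6
--   order     = 0            -- highest order with non-zero total stretch
--   glue_sign = 1            -- stretching
--   glue_set  = delta / 12   --> 0.5
--
-- so every glue gets half of its own stretch added; badness is only computed
-- in this finite (order 0) case. When delta is negative the shrink totals are
-- used instead, with glue_sign = 2 and glue_set = -delta / total_shrink[order].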
local function common_message(hlist,line,str)
write_nl("")
@@ -3186,3 +3173,20 @@ end
function diagnostics.loose_hbox(hlist,line,b)
common_message(hlist,line,format("Loose \\hbox (badness %i)",b))
end
+
+-- e = font_expand_ratio * data.glyphstretch / 1000
+-- local stretch = data.stretch
+-- if e >= stretch then
+-- e = stretch
+-- else
+-- local step = 5
+-- e = math.round(e/step) * step
+-- end
+
+-- local shrink = - data.shrink
+-- if e <= shrink then
+-- e = shrink
+-- else
+-- local step = 5
+-- e = math.round(e/step) * step
+-- end
diff --git a/tex/context/base/node-met.lua b/tex/context/base/node-met.lua
index d52349b4a..c85a53c8e 100644
--- a/tex/context/base/node-met.lua
+++ b/tex/context/base/node-met.lua
@@ -332,28 +332,6 @@ function nodes.writable_spec(n) -- not pool
return spec
end
-function nodes.copy_spec(old,free) -- also frees
- if not old then
- return n_new_node("glue_spec")
- else
- local new = n_copy_node(old)
- if free and old.writable then
- free_node(old)
- end
- return new
- end
-end
-
-function nodes.free_spec(old)
- if not old then
- -- skip
- elseif old.writable then
- free_node(old)
- else
- -- skip
- end
-end
-
if gonuts then
function nodes.reference(n)
@@ -690,34 +668,3 @@ end
nodes.keys = keys -- [id][subtype]
nodes.fields = nodefields -- (n)
-
--- one issue solved in flush_node:
---
--- case glue_spec_node:
--- if (glue_ref_count(p)!=null) {
--- decr(glue_ref_count(p));
--- return ;
--- /*
--- } else if (! valid_node(p)) {
--- return ;
--- */
--- /*
--- } else {
--- free_node(p, get_node_size(type(p), subtype(p)));
--- return ;
--- */
--- }
--- break ;
---
--- or:
---
--- case glue_spec_node:
--- if (glue_ref_count(p)!=null) {
--- decr(glue_ref_count(p));
--- return ;
--- } else if (valid_node(p)) {
--- free_node(p, get_node_size(type(p), subtype(p)));
--- return ;
--- } else {
--- break ;
--- }
diff --git a/tex/context/base/node-mig.lua b/tex/context/base/node-mig.lua
index 41f95be45..9fc35a048 100644
--- a/tex/context/base/node-mig.lua
+++ b/tex/context/base/node-mig.lua
@@ -6,32 +6,15 @@ if not modules then modules = { } end modules ['node-mig'] = {
license = "see context related readme files"
}
--- todo: insert_after
-
local format = string.format
-local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
+local attributes, nodes, node = attributes, nodes, node
-local report_nodes = logs.reporter("nodes","migrations")
+local remove_nodes = nodes.remove
-local attributes = attributes
-local nodes = nodes
+local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local remove_node = nuts.remove
-
-local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local insert_code = nodecodes.ins
@@ -39,6 +22,10 @@ local mark_code = nodecodes.mark
local a_migrated = attributes.private("migrated")
+local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
+
+local report_nodes = logs.reporter("nodes","migrations")
+
local migrate_inserts, migrate_marks, inserts_too
local t_inserts, t_marks, t_sweeps = 0, 0, 0
@@ -46,42 +33,32 @@ local t_inserts, t_marks, t_sweeps = 0, 0, 0
local function locate(head,first,last,ni,nm)
local current = head
while current do
- local id = getid(current)
+ local id = current.id
if id == vlist_code or id == hlist_code then
- local list = getlist(current)
- if list then
- list, first, last, ni, nm = locate(list,first,last,ni,nm)
- setfield(current,"list",list)
- end
- current = getnext(current)
+ current.list, first, last, ni, nm = locate(current.list,first,last,ni,nm)
+ current = current.next
elseif migrate_inserts and id == insert_code then
local insert
- head, current, insert = remove_node(head,current)
- setfield(insert,"next",nil)
+ head, current, insert = remove_nodes(head,current)
+ insert.next = nil
if first then
- setfield(insert,"prev",last)
- setfield(last,"next",insert)
+ insert.prev, last.next = last, insert
else
- setfield(insert,"prev",nil)
- first = insert
+ insert.prev, first = nil, insert
end
- last = insert
- ni = ni + 1
+ last, ni = insert, ni + 1
elseif migrate_marks and id == mark_code then
local mark
- head, current, mark = remove_node(head,current)
- setfield(mark,"next",nil)
+ head, current, mark = remove_nodes(head,current)
+ mark.next = nil
if first then
- setfield(mark,"prev",last)
- setfield(last,"next",mark)
+ mark.prev, last.next = last, mark
else
- setfield(mark,"prev",nil)
- first = mark
+ mark.prev, first = nil, mark
end
- last = mark
- nm = nm + 1
+ last, nm = mark, nm + 1
else
- current = getnext(current)
+ current = current.next
end
end
return head, first, last, ni, nm
@@ -93,43 +70,39 @@ function nodes.handlers.migrate(head,where)
if trace_migrations then
report_nodes("migration sweep %a",where)
end
- local current = tonut(head)
+ local current = head
while current do
- local id = getid(current)
+ local id = current.id
-- inserts_too is a temp hack, we should only do them when it concerns
-- newly placed (flushed) inserts
- if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not getattr(current,a_migrated) then
- setattr(current,a_migrated,1)
+ if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not current[a_migrated] then
+ current[a_migrated] = 1
t_sweeps = t_sweeps + 1
- local h = getlist(current)
+ local h = current.list
local first, last, ni, nm
while h do
- local id = getid(h)
+ local id = h.id
if id == vlist_code or id == hlist_code then
h, first, last, ni, nm = locate(h,first,last,0,0)
end
- h = getnext(h)
+ h = h.next
end
if first then
- t_inserts = t_inserts + ni
- t_marks = t_marks + nm
+ t_inserts, t_marks = t_inserts + ni, t_marks + nm
if trace_migrations and (ni > 0 or nm > 0) then
report_nodes("sweep %a, container %a, %s inserts and %s marks migrated outwards during %a",
t_sweeps,nodecodes[id],ni,nm,where)
end
- -- inserts after head, use insert_after
- local n = getnext(current)
+ -- inserts after head
+ local n = current.next
if n then
- setfield(last,"next",n)
- setfield(n,"prev",last)
+ last.next, n.prev = n, last
end
- setfield(current,"next",first)
- setfield(first,"prev",current)
- done = true
- current = last
+ current.next, first.prev = first, current
+ done, current = true, last
end
end
- current = getnext(next)
+ current = current.next
end
return head, done
end
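-- The splice performed above, as a standalone sketch (same pointer logic, no
-- new behaviour): once 'locate' has collected the inserts and marks into a
-- chain first..last, that chain is linked in right after the enclosing box:
--
--   local n = current.next
--   if n then
--       last.next, n.prev = n, last
--   end
--   current.next, first.prev = first, current
--   current = last            -- continue scanning after the migrated chain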
diff --git a/tex/context/base/node-nut.lua b/tex/context/base/node-nut.lua
deleted file mode 100644
index 4732b09eb..000000000
--- a/tex/context/base/node-nut.lua
+++ /dev/null
@@ -1,650 +0,0 @@
-if not modules then modules = { } end modules ['node-met'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Here starts some more experimental code that Luigi and I use in a next stage of
--- exploring and testing potential speedups in the engines. This code is not meant
--- for users and can change (or be removed) any moment. During the experiments I'll
--- do my best to keep the code as fast as possible by using two codebases. See
--- about-fast.pdf for some more info about impacts. Although key based access has
--- more charm, function based is somewhat faster and has more potential for future
--- speedups.
-
--- This next iteration is flagged direct because we avoid user data which has a price
--- in allocation and metatable tagging. Although in this stage we pass numbers around
--- future versions might use light user data, so never depend on what direct functions
--- return. Using the direct approach had some speed advantages but you lose the key
--- based access. The speed gain is only measurable in cases with lots of access. For
--- instance when typesetting arabic with advanced fonts, we're talking of many millions
--- of function calls and there we can get a 30\% or more speedup. On average complex
--- \CONTEXT\ runs the gain can be 10\% to 15\%. Because mixing the two models
--- (here we call them nodes and nuts) is not possible, you need to cast either way, which
--- has a penalty. Also, error messages in nuts mode are less clear and \LUATEX\ will
--- often simply abort when you make mistakes or mix the models. So, development (at least
--- in \CONTEXT) can be done in node mode and not in nuts mode. Only robust code will
--- be turned nuts afterwards and quite likely not all code. The official \LUATEX\ api
--- to nodes is userdata!
---
--- Listening to 'lunatic soul' at the same time helped wrapping my mind around the mixed
--- usage of both models. Just for the record: the potential of the direct approach only
--- became clear after experimenting for weeks and partly adapting code. It is one of those
--- (sub)projects where you afterwards wonder if it was worth the trouble, but users that
--- rely on lots of complex functionality and font support will probably notice the speedup.
---
--- luatex luajittex
--- ------------- ----- -------------------- ---------------------------------
--- name pages old new pct old new pct
--- ------------- ----- -------------------- ---------------------------------
--- fonts-mkiv 166 9.3 7.7/7.4 17.2 7.4 (37.5) 5.9/5.7 (55.6) 20.3
--- about 60 3.3 2.7/2.6 20.4 2.5 (39.5) 2.1 (57.0) 23.4
--- arabic-001 61 25.3 15.8 18.2 15.3 (46.7) 6.8 (54.7) 16.0
--- torture-001 300 21.4 11.4 24.2 13.9 (35.0) 6.3 (44.7) 22.2
---
--- so:
---
--- - we run around 20% faster on documents of average complexity and gain more when
--- dealing with scripts like arabic and such
--- - luajittex benefits a bit more so a luajittex job can (in principle) now be much
--- faster
--- - if we reason backwards, and take luajittex as norm we get 1:2:3 on some jobs for
--- luajittex direct:luatex direct:luatex normal i.e. we can be 3 times faster
--- - keep in mind that these are tex/lua runs so the real gain at the lua end is much
--- larger
---
--- Because we can fake direct mode a little bit by using the fast getfield and setfield
--- at the cost of wrapped getid and the like, we are still running quite ok. As we could gain
--- some 5% with fast mode, we can sacrifice some on wrappers when we use a few fast core
--- functions. This means that simulated direct mode runs font-mkiv in 9.1 seconds (we could
--- get down to 8.7 seconds in fast mode) and that we can migrate slowly to direct mode.
---
--- The following measurements are from 2013-07-05 after adapting some 47 files to nuts. Keep
--- in mind that the old binary can fake a fast getfield and setfield but that the other
--- getters are wrapped functions. The more we have, the slower it gets.
---
--- fonts about arabic
--- old mingw, indexed plus some functions : 8.9 3.2 20.3
--- old mingw, fake functions : 9.9 3.5 27.4
--- new mingw, node functions : 9.0 3.1 20.8
--- new mingw, indexed plus some functions : 8.6 3.1 19.6
--- new mingw, direct functions : 7.5 2.6 14.4
---
--- \starttext \dorecurse{1000}{test\page} \stoptext :
---
--- luatex 560 pps
--- luajittex 600 pps
---
--- \setupbodyfont[pagella]
---
--- \edef\zapf{\cldcontext{context(io.loaddata(resolvers.findfile("zapf.tex")))}}
---
--- \starttext \dorecurse{1000}{\zapf\par} \stoptext
---
--- luatex 3.9 sec / 54 pps
--- luajittex 2.3 sec / 93 pps
-
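-- A minimal sketch of the casting pattern described above (assuming only the
-- nuts getters declared below): code that works on nuts casts once at the
-- boundary and never mixes the two models inside the loop.
--
--   local nuts       = nodes.nuts
--   local tonut      = nuts.tonut
--   local getid      = nuts.getid
--   local getnext    = nuts.getnext
--   local glyph_code = nodes.nodecodes.glyph
--
--   local function count_glyphs(head)       -- head is a userdata node
--       local current = tonut(head)         -- cast once on entry
--       local n = 0
--       while current do
--           if getid(current) == glyph_code then
--               n = n + 1
--           end
--           current = getnext(current)
--       end
--       return n                            -- a plain number, nothing to cast back
--   end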
-local nodes = nodes
-local gonuts = nodes.gonuts
-local direct = node.direct
-
-if type(direct) ~= "table" then
- return
-elseif gonuts then
- statistics.register("running in nuts mode", function() return "yes" end)
-else
- statistics.register("running in nuts mode", function() return "no" end)
- return
-end
-
-local texget = tex.get
-
-local nodecodes = nodes.nodecodes
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-
-local nuts = nodes.nuts or { }
-nodes.nuts = nuts
-
-nodes.is_node = direct.is_node or function() return true end
-nodes.is_direct = direct.is_direct or function() return false end
-nodes.is_nut = nodes.is_direct
-
--- casters
-
-local tonode = direct.tonode or function(n) return n end
-local tonut = direct.todirect or function(n) return n end
-
-nuts.tonode = tonode
-nuts.tonut = tonut
-
-nodes.tonode = tonode
-nodes.tonut = tonut
-
--- getters
-
-nuts.getfield = direct.getfield
-nuts.getnext = direct.getnext
-nuts.getprev = direct.getprev
-nuts.getid = direct.getid
-nuts.getattr = direct.getfield
-nuts.getchar = direct.getchar
-nuts.getfont = direct.getfont
-nuts.getsubtype = direct.getsubtype
-nuts.getlist = direct.getlist -- only hlist and vlist !
-nuts.getleader = direct.getleader
-
--- local dgf = direct.getfield function nuts.getlist(n) return dgf(n,"list") end
-
--- setters
-
-nuts.setfield = direct.setfield
-nuts.setattr = direct.setfield
-
-nuts.getbox = direct.getbox
-nuts.setbox = direct.setbox
-nuts.getskip = direct.getskip or function(s) return tonut(texget(s)) end
-
--- helpers
-
-nuts.tostring = direct.tostring
-nuts.copy = direct.copy
-nuts.copy_list = direct.copy_list
-nuts.delete = direct.delete
-nuts.dimensions = direct.dimensions
-nuts.end_of_math = direct.end_of_math
-nuts.flush_list = direct.flush_list
-nuts.flush_node = direct.flush_node
-nuts.free = direct.free
-nuts.insert_after = direct.insert_after
-nuts.insert_before = direct.insert_before
-nuts.hpack = direct.hpack
-nuts.new = direct.new
-nuts.tail = direct.tail
-nuts.traverse = direct.traverse
-nuts.traverse_id = direct.traverse_id
-nuts.slide = direct.slide
-nuts.writable_spec = direct.writable_spec
-nuts.vpack = direct.vpack
-nuts.is_node = direct.is_node
-nuts.is_direct = direct.is_direct
-nuts.is_nut = direct.is_direct
-nuts.first_glyph = direct.first_glyph
-nuts.first_character = direct.first_character
-nuts.has_glyph = direct.has_glyph or direct.first_glyph
-
-nuts.current_attr = direct.current_attr
-nuts.do_ligature_n = direct.do_ligature_n
-nuts.has_field = direct.has_field
-nuts.last_node = direct.last_node
-nuts.usedlist = direct.usedlist
-nuts.protrusion_skippable = direct.protrusion_skippable
-nuts.write = direct.write
-
-nuts.has_attribute = direct.has_attribute
-nuts.set_attribute = direct.set_attribute
-nuts.unset_attribute = direct.unset_attribute
-
-nuts.protect_glyphs = direct.protect_glyphs
-nuts.unprotect_glyphs = direct.unprotect_glyphs
-
--- placeholders
-
-if not direct.kerning then
-
- local n_kerning = node.kerning
-
- function nuts.kerning(head)
- return tonode(n_kerning(tonut(head)))
- end
-
-end
-
-if not direct.ligaturing then
-
- local n_ligaturing = node.ligaturing
-
- function nuts.ligaturing(head)
- return tonode(n_ligaturing(tonut(head)))
- end
-
-end
-
-if not direct.mlist_to_hlist then
-
- local n_mlist_to_hlist = node.mlist_to_hlist
-
- function nuts.mlist_to_hlist(head)
- return tonode(n_mlist_to_hlist(tonut(head)))
- end
-
-end
-
---
-
-local d_remove_node = direct.remove
-local d_free_node = direct.free
-local d_getfield = direct.getfield
-local d_setfield = direct.setfield
-local d_getnext = direct.getnext
-local d_getprev = direct.getprev
-local d_getid = direct.getid
-local d_getlist = direct.getlist
-local d_find_tail = direct.tail
-local d_insert_after = direct.insert_after
-local d_insert_before = direct.insert_before
-local d_slide = direct.slide
-local d_copy_node = direct.copy
-local d_traverse = direct.traverse
-
-local function remove(head,current,free_too)
- local t = current
- head, current = d_remove_node(head,current)
- if not t then
- -- forget about it
- elseif free_too then
- d_free_node(t)
- t = nil
- else
- d_setfield(t,"next",nil) -- not that much needed (slows down unless we check the source on this)
- d_setfield(t,"prev",nil) -- not that much needed (slows down unless we check the source on this)
- end
- return head, current, t
-end
-
--- bad: we can have prev's being glue_spec
-
--- local function remove(head,current,free_too) -- d_remove_node does a slide which can fail
--- local prev = d_getprev(current) -- weird
--- local next = d_getnext(current)
--- if next then
--- -- print("!!!!!!!! prev is gluespec",
--- -- nodes.nodecodes[d_getid(current)],
--- -- nodes.nodecodes[d_getid(next)],
--- -- nodes.nodecodes[d_getid(prev)])
--- d_setfield(prev,"next",next)
--- d_setfield(next,"prev",prev)
--- else
--- d_setfield(prev,"next",nil)
--- end
--- if free_too then
--- d_free_node(current)
--- current = nil
--- else
--- d_setfield(current,"next",nil) -- use this fact !
--- d_setfield(current,"prev",nil) -- use this fact !
--- end
--- if head == current then
--- return next, next, current
--- else
--- return head, next, current
--- end
--- end
-
-nuts.remove = remove
-
-function nuts.delete(head,current)
- return remove(head,current,true)
-end
-
-function nuts.replace(head,current,new) -- no head returned if false
- if not new then
- head, current, new = false, head, current
- end
- local prev = d_getprev(current)
- local next = d_getnext(current)
- if next then
- d_setfield(new,"next",next)
- d_setfield(next,"prev",new)
- end
- if prev then
- d_setfield(new,"prev",prev)
- d_setfield(prev,"next",new)
- end
- if head then
- if head == current then
- head = new
- end
- d_free_node(current)
- return head, new
- else
- d_free_node(current)
- return new
- end
-end
-
-local function count(stack,flat)
- local n = 0
- while stack do
- local id = d_getid(stack)
- if not flat and id == hlist_code or id == vlist_code then
- local list = d_getlist(stack)
- if list then
- n = n + 1 + count(list) -- self counts too
- else
- n = n + 1
- end
- else
- n = n + 1
- end
- stack = d_getnext(stack)
- end
- return n
-end
-
-nuts.count = count
-
-function nuts.append(head,current,...)
- for i=1,select("#",...) do
- head, current = d_insert_after(head,current,(select(i,...)))
- end
- return head, current
-end
-
-function nuts.prepend(head,current,...)
- for i=1,select("#",...) do
- head, current = d_insert_before(head,current,(select(i,...)))
- end
- return head, current
-end
-
-function nuts.linked(...)
- local head, last
- for i=1,select("#",...) do
- local next = select(i,...)
- if next then
- if head then
- d_setfield(last,"next",next)
- d_setfield(next,"prev",last)
- else
- head = next
- end
- last = d_find_tail(next) -- we could skip the last one
- end
- end
- return head
-end
-
-function nuts.concat(list) -- consider tail instead of slide
- local head, tail
- for i=1,#list do
- local li = list[i]
- if li then
- if head then
- d_setfield(tail,"next",li)
- d_setfield(li,"prev",tail)
- else
- head = li
- end
- tail = d_slide(li)
- end
- end
- return head, tail
-end
-
-function nuts.writable_spec(n) -- not pool
- local spec = d_getfield(n,"spec")
- if not spec then
- spec = d_copy_node(glue_spec)
- d_setfield(n,"spec",spec)
- elseif not d_getfield(spec,"writable") then
- spec = d_copy_node(spec)
- d_setfield(n,"spec",spec)
- end
- return spec
-end
-
-function nuts.reference(n)
- return n or "<none>"
-end
-
--- quick and dirty tracing of nuts
-
--- for k, v in next, nuts do
--- if string.find(k,"box") then
--- nuts[k] = function(...) print(k,...) return v(...) end
--- end
--- end
-
-function nodes.vianuts (f) return function(n,...) return tonode(f(tonut (n),...)) end end
-function nodes.vianodes(f) return function(n,...) return tonut (f(tonode(n),...)) end end
-
-nuts.vianuts = nodes.vianuts
-nuts.vianodes = nodes.vianodes
-
--- for k, v in next, nuts do
--- if type(v) == "function" then
--- if not string.find(k,"^[sg]et") and not string.find(k,"^to") then
--- local f = v
--- nuts[k] = function(...) print("d",k,...) return f(...) end
--- end
--- end
--- end
-
--- for k, v in next, nodes do
--- if type(v) == "function" then
--- if not string.find(k,"^[sg]et") and not string.find(k,"^to") then
--- local f = v
--- nodes[k] = function(...) print("n",k,...) return f(...) end
--- end
--- end
--- end
-
--- function nodes.insert_before(h,c,n)
--- if c then
--- if c == h then
--- n_setfield(n,"next",h)
--- n_setfield(n,"prev",nil)
--- n_setfield(h,"prev",n)
--- else
--- local cp = n_getprev(c)
--- n_setfield(n,"next",c)
--- n_setfield(n,"prev",cp)
--- if cp then
--- n_setfield(cp,"next",n)
--- end
--- n_setfield(c,"prev",n)
--- return h, n
--- end
--- end
--- return n, n
--- end
-
--- function nodes.insert_after(h,c,n)
--- if c then
--- local cn = n_getnext(c)
--- if cn then
--- n_setfield(n,"next",cn)
--- n_setfield(cn,"prev",n)
--- else
--- n_setfield(n,"next",nil)
--- end
--- n_setfield(c,"next",n)
--- n_setfield(n,"prev",c)
--- return h, n
--- end
--- return n, n
--- end
-
-function nodes.insert_list_after(h,c,n)
- local t = n_tail(n)
- if c then
- local cn = n_getnext(c)
- if cn then
- n_setfield(t,"next",cn)
- n_setfield(cn,"prev",t)
- else
- n_setfield(t,"next",nil)
- end
- n_setfield(c,"next",n)
- n_setfield(n,"prev",c)
- return h, n
- end
- return n, t
-end
-
--- function nuts.insert_before(h,c,n)
--- if c then
--- if c == h then
--- d_setfield(n,"next",h)
--- d_setfield(n,"prev",nil)
--- d_setfield(h,"prev",n)
--- else
--- local cp = d_getprev(c)
--- d_setfield(n,"next",c)
--- d_setfield(n,"prev",cp)
--- if cp then
--- d_setfield(cp,"next",n)
--- end
--- d_setfield(c,"prev",n)
--- return h, n
--- end
--- end
--- return n, n
--- end
-
--- function nuts.insert_after(h,c,n)
--- if c then
--- local cn = d_getnext(c)
--- if cn then
--- d_setfield(n,"next",cn)
--- d_setfield(cn,"prev",n)
--- else
--- d_setfield(n,"next",nil)
--- end
--- d_setfield(c,"next",n)
--- d_setfield(n,"prev",c)
--- return h, n
--- end
--- return n, n
--- end
-
-function nuts.insert_list_after(h,c,n)
- local t = d_tail(n)
- if c then
- local cn = d_getnext(c)
- if cn then
- d_setfield(t,"next",cn)
- d_setfield(cn,"prev",t)
- else
- d_setfield(t,"next",nil)
- end
- d_setfield(c,"next",n)
- d_setfield(n,"prev",c)
- return h, n
- end
- return n, t
-end
-
--- test code only
-
--- collectranges and mix
-
-local report = logs.reporter("sliding")
-
-local function message(detail,head,current,previous)
- report("error: %s, current: %s:%s, previous: %s:%s, list: %s, text: %s",
- detail,
- nodecodes[d_getid(current)],
- current,
- nodecodes[d_getid(previous)],
- previous,
- nodes.idstostring(head),
- nodes.listtoutf(head)
- )
- utilities.debugger.showtraceback(report)
-end
-
-local function warn()
- report()
- report("warning: the slide tracer is enabled")
- report()
- warn = false
-end
-
-local function tracedslide(head)
- if head then
- if warn then
- warn()
- end
- local next = d_getnext(head)
- if next then
- local prev = head
- for n in d_traverse(next) do
- local p = d_getprev(n)
- if not p then
- message("unset",head,n,prev)
- -- break
- elseif p ~= prev then
- message("wrong",head,n,prev)
- -- break
- end
- prev = n
- end
- end
- return d_slide(head)
- end
-end
-
-local function nestedtracedslide(head,level) -- no sliding !
- if head then
- if warn then
- warn()
- end
- local id = d_getid(head)
- local next = d_getnext(head)
- if next then
- report("%whead:%s",level or 0,nodecodes[id])
- local prev = head
- for n in d_traverse(next) do
- local p = d_getprev(n)
- if not p then
- message("unset",head,n,prev)
- -- break
- elseif p ~= prev then
- message("wrong",head,n,prev)
- -- break
- end
- prev = n
- local id = d_getid(n)
- if id == hlist_code or id == vlist_code then
- nestedtracedslide(d_getlist(n),(level or 0) + 1)
- end
- end
- elseif id == hlist_code or id == vlist_code then
- report("%wlist:%s",level or 0,nodecodes[id])
- nestedtracedslide(d_getlist(head),(level or 0) + 1)
- end
- -- return d_slide(head)
- end
-end
-
-local function untracedslide(head)
- if head then
- if warn then
- warn()
- end
- local next = d_getnext(head)
- if next then
- local prev = head
- for n in d_traverse(next) do
- local p = d_getprev(n)
- if not p then
- return "unset", d_getid(n)
- elseif p ~= prev then
- return "wrong", d_getid(n)
- end
- prev = n
- end
- end
- return d_slide(head)
- end
-end
-
-nuts.tracedslide = tracedslide
-nuts.untracedslide = untracedslide
-nuts.nestedtracedslide = nestedtracedslide
-
--- nuts.slide = tracedslide
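
The block deleted above is the node/nut adapter layer: nuts.vianuts lifts a function written against the direct ("nut") representation so it can be called with ordinary nodes, and nuts.vianodes does the reverse. As a rough standalone illustration of that adapter pattern only, here is a sketch in plain Lua, with tables standing in for nodes and integer handles standing in for nuts; tonut and tonode are simplified stand-ins, not the real node.direct.todirect/tonode calls.

-- registry mapping integer "nut" handles to node-like tables (illustrative only)
local registry = {}
local lastid   = 0

local function tonut(n)           -- stand-in for node.direct.todirect
  lastid = lastid + 1
  registry[lastid] = n
  return lastid
end

local function tonode(d)          -- stand-in for node.direct.tonode
  return registry[d]
end

-- the adapters: run a nut-level function at the node level, and vice versa
local function vianuts(f)
  return function(n, ...) return tonode(f(tonut(n), ...)) end
end

local function vianodes(f)
  return function(d, ...) return tonut(f(tonode(d), ...)) end
end

-- usage: a nut-level helper, called with a node thanks to the wrapper
local function bump(d, delta)
  registry[d].width = registry[d].width + delta
  return d
end

local node_level_bump = vianuts(bump)
local n = { width = 10 }
node_level_bump(n, 5)
print(n.width)                    --> 15

The point of the wrappers is that callers never need to know which representation a helper was written for; the conversion happens once at the boundary.
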
diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua
index 2cc00601c..aa6692d7b 100644
--- a/tex/context/base/node-pro.lua
+++ b/tex/context/base/node-pro.lua
@@ -13,15 +13,15 @@ local trace_callbacks = false trackers.register("nodes.callbacks", function(v)
local report_nodes = logs.reporter("nodes","processors")
-local nodes = nodes
+local nodes, node = nodes, node
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local first_glyph = nodes.first_glyph
-local has_glyph = nodes.has_glyph
+local free_node = node.free
+local first_glyph = node.first_glyph or node.first_character
+local has_attribute = node.has_attribute
nodes.processors = nodes.processors or { }
local processors = nodes.processors
@@ -31,53 +31,43 @@ local processors = nodes.processors
local actions = tasks.actions("processors")
-do
+local n = 0
- local tonut = nuts.tonut
- local getid = nuts.getid
- local getchar = nuts.getchar
- local getnext = nuts.getnext
-
- local n = 0
-
- local function reconstruct(head) -- we probably have a better one
- local t, n, h = { }, 0, tonut(head)
- while h do
- n = n + 1
- local id = getid(h)
- if id == glyph_code then -- todo: disc etc
- t[n] = utfchar(getchar(h))
- else
- t[n] = "[]"
- end
- h = getnext(h)
- end
- return concat(t)
- end
-
- local function tracer(what,state,head,groupcode,before,after,show)
- if not groupcode then
- groupcode = "unknown"
- elseif groupcode == "" then
- groupcode = "mvl"
- end
+local function reconstruct(head) -- we probably have a better one
+ local t, n, h = { }, 0, head
+ while h do
n = n + 1
- if show then
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
+ local id = h.id
+ if id == glyph_code then -- todo: disc etc
+ t[n] = utfchar(h.char)
else
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
+ t[n] = "[]"
end
+ h = h.next
end
+ return concat(t)
+end
- processors.tracer = tracer
-
+local function tracer(what,state,head,groupcode,before,after,show)
+ if not groupcode then
+ groupcode = "unknown"
+ elseif groupcode == "" then
+ groupcode = "mvl"
+ end
+ n = n + 1
+ if show then
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
+ else
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
+ end
end
+processors.tracer = tracer
+
processors.enabled = true -- this will become a proper state (like trackers)
function processors.pre_linebreak_filter(head,groupcode) -- ,size,packtype,direction
- -- local first, found = first_glyph(head) -- they really need to be glyphs
- local found = has_glyph(head)
+ local first, found = first_glyph(head) -- they really need to be glyphs
if found then
if trace_callbacks then
local before = nodes.count(head,true)
@@ -104,8 +94,10 @@ local enabled = true
function processors.hpack_filter(head,groupcode,size,packtype,direction)
if enabled then
- -- local first, found = first_glyph(head) -- they really need to be glyphs
- local found = has_glyph(head)
+ -- if not head.next and head.id ~= glyph_code then -- happens often but not faster
+ -- return true
+ -- end
+ local first, found = first_glyph(head) -- they really need to be glyphs
if found then
if trace_callbacks then
local before = nodes.count(head,true)
@@ -129,36 +121,15 @@ function processors.hpack_filter(head,groupcode,size,packtype,direction)
return true
end
-do
-
- local setfield = nodes.setfield
- local hpack = nodes.hpack
-
- function nodes.fasthpack(...) -- todo: pass explicit arguments
- enabled = false
- local hp, b = hpack(...)
- setfield(hp,"prev",nil)
- setfield(hp,"next",nil)
- enabled = true
- return hp, b
- end
-
-end
-
-do
-
- local setfield = nuts.setfield
- local hpack = nuts.hpack
-
- function nuts.fasthpack(...) -- todo: pass explicit arguments
- enabled = false
- local hp, b = hpack(...)
- setfield(hp,"prev",nil)
- setfield(hp,"next",nil)
- enabled = true
- return hp, b
- end
+local hpack = node.hpack
+function nodes.fasthpack(...) -- todo: pass explicit arguments
+ enabled = false
+ local hp, b = hpack(...)
+ hp.prev = nil
+ hp.next = nil
+ enabled = true
+ return hp, b
end
callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kind of horizontal manipulations (before par break)")
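
In the node-pro.lua hunk above, hpack_filter only runs its processors while the module-level enabled flag is set, and nodes.fasthpack packs a list with that flag switched off so material built inside a processor is not fed through the same chain again. A minimal standalone sketch of that guard, with a plain function standing in for node.hpack and an uppercasing step standing in for the real processors (both are illustrative, not the module's API):

local enabled = true              -- toggled by fasthpack, checked by the filter

local function process(list)      -- stand-in for the real processor chain
  for i = 1, #list do
    list[i] = list[i]:upper()
  end
end

local function hpack(list)        -- stand-in for node.hpack; the filter fires here
  if enabled then
    process(list)
  end
  return { list = list, width = #list }    -- fake hbox
end

local function fasthpack(list)    -- pack without triggering the filter again
  enabled = false
  local box = hpack(list)
  enabled = true
  return box
end

local a = hpack     { "a", "b" }  -- processed: { "A", "B" }
local b = fasthpack { "c", "d" }  -- left untouched: { "c", "d" }
print(table.concat(a.list), table.concat(b.list))   --> AB   cd
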
diff --git a/tex/context/base/node-ref.lua b/tex/context/base/node-ref.lua
index 7cfbde849..aa864fb1c 100644
--- a/tex/context/base/node-ref.lua
+++ b/tex/context/base/node-ref.lua
@@ -21,6 +21,7 @@ local attributes, nodes, node = attributes, nodes, node
local allocate = utilities.storage.allocate, utilities.storage.mark
local mark = utilities.storage.allocate, utilities.storage.mark
+
local nodeinjections = backends.nodeinjections
local codeinjections = backends.codeinjections
@@ -32,6 +33,9 @@ local colors = attributes.colors
local references = structures.references
local tasks = nodes.tasks
+local hpack_list = node.hpack
+local list_dimensions = node.dimensions
+
local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end)
local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
@@ -40,27 +44,6 @@ local report_reference = logs.reporter("backend","references")
local report_destination = logs.reporter("backend","destinations")
local report_area = logs.reporter("backend","areas")
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getsubtype = nuts.getsubtype
-
-local hpack_list = nuts.hpack
-local list_dimensions = nuts.dimensions
-local traverse = nuts.traverse
-local find_node_tail = nuts.tail
-
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
local whatcodes = nodes.whatcodes
@@ -80,18 +63,21 @@ local dir_code = whatcodes.dir
local line_code = listcodes.line
-local new_rule = nodepool.rule
+local nodepool = nodes.pool
+
local new_kern = nodepool.kern
+local traverse = node.traverse
+local find_node_tail = node.tail or node.slide
local tosequence = nodes.tosequence
-- local function dimensions(parent,start,stop)
--- stop = stop and getnext(stop)
+-- stop = stop and stop.next
-- if parent then
-- if stop then
--- return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),start,stop)
+-- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop)
-- else
--- return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign",getfield(parent,"glue_order"),start)
+-- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start)
-- end
-- else
-- if stop then
@@ -106,9 +92,9 @@ local tosequence = nodes.tosequence
local function dimensions(parent,start,stop)
if parent then
- return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),start,stop and getnext(stop))
+ return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop and stop.next)
else
- return list_dimensions(start,stop and getnext(stop))
+ return list_dimensions(start,stop and stop.next)
end
end
@@ -125,25 +111,25 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
if trace_backend then
report_area("head: %04i %s %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
end
- setfield(result,"next",first)
- setfield(first,"prev",result)
+ result.next = first
+ first.prev = result
return result, last
else
if trace_backend then
report_area("middle: %04i %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
end
- local prev = getprev(first)
+ local prev = first.prev
if prev then
- setfield(result,"next",first)
- setfield(result,"prev",prev)
- setfield(prev,"next",result)
- setfield(first,"prev",result)
+ result.next = first
+ result.prev = prev
+ prev.next = result
+ first.prev = result
else
- setfield(result,"next",first)
- setfield(first,"prev",result)
+ result.next = first
+ first.prev = result
end
- if first == getnext(head) then
- setfield(head,"next",result) -- hm, weird
+ if first == head.next then
+ head.next = result -- hm, weird
end
return head, last
end
@@ -153,9 +139,9 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
end
local function inject_list(id,current,reference,make,stack,pardir,txtdir)
- local width, height, depth, correction = getfield(current,"width"), getfield(current,"height"), getfield(current,"depth"), 0
+ local width, height, depth, correction = current.width, current.height, current.depth, 0
local moveright = false
- local first = getlist(current)
+ local first = current.list
if id == hlist_code then -- box_code line_code
-- can be either an explicit hbox or a line and there is no way
-- to recognize this; anyway only if ht/dp (then inline)
@@ -163,17 +149,17 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
if first then
if sr and sr[2] then
local last = find_node_tail(first)
- if getid(last) == glue_code and getsubtype(last) == rightskip_code then
- local prev = getprev(last)
- moveright = getid(first) == glue_code and getsubtype(first) == leftskip_code
- if prev and getid(prev) == glue_code and getsubtype(prev) == parfillskip_code then
- width = dimensions(current,first,getprev(prev)) -- maybe not current as we already take care of it
+ if last.id == glue_code and last.subtype == rightskip_code then
+ local prev = last.prev
+ moveright = first.id == glue_code and first.subtype == leftskip_code
+ if prev and prev.id == glue_code and prev.subtype == parfillskip_code then
+ width = dimensions(current,first,prev.prev) -- maybe not current as we already take care of it
else
- if moveright and getfield(first,"writable") then
- width = width - getfield(getfield(first,"spec"),"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
+ if moveright and first.writable then
+ width = width - first.spec.stretch*current.glue_set * current.glue_sign
end
- if getfield(last,"writable") then
- width = width - getfield(getfield(last,"spec"),"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
+ if last.writable then
+ width = width - last.spec.stretch*current.glue_set * current.glue_sign
end
end
end
@@ -198,21 +184,19 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
report_area("box: %04i %s %s: w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
end
if not first then
- setfield(current,"list",result)
+ current.list = result
elseif moveright then -- brr no prevs done
-- result after first
- local n = getnext(first)
- setfield(result,"next",n)
- setfield(first,"next",result)
- setfield(result,"prev",first)
- if n then
- setfield(n,"prev",result)
- end
+ local n = first.next
+ result.next = n
+ first.next = result
+ result.prev = first
+ if n then n.prev = result end
else
-- first after result
- setfield(result,"next",first)
- setfield(first,"prev",result)
- setfield(current,"list",result)
+ result.next = first
+ first.prev = result
+ current.list = result
end
end
end
@@ -225,9 +209,9 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
pardir = pardir or "==="
txtdir = txtdir or "==="
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code or id == vlist_code then
- local r = getattr(current,attribute)
+ local r = current[attribute]
-- somehow reference is true so the following fails (second one not done) in
-- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
-- so let's wait till this fails again
@@ -238,33 +222,32 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
if r then
done[r] = (done[r] or 0) + 1
end
- local list = getlist(current)
+ local list = current.list
if list then
- local h, ok
- h, ok , pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
- setfield(current,"list",h)
+ local _
+ current.list, _, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
end
if r then
done[r] = done[r] - 1
end
elseif id == whatsit_code then
- local subtype = getsubtype(current)
+ local subtype = current.subtype
if subtype == localpar_code then
- pardir = getfield(current,"dir")
+ pardir = current.dir
elseif subtype == dir_code then
- txtdir = getfield(current,"dir")
+ txtdir = current.dir
end
- elseif id == glue_code and getsubtype(current) == leftskip_code then -- any glue at the left?
+ elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
--
else
- local r = getattr(current,attribute)
+ local r = current[attribute]
if not r then
-- just go on, can be kerns
elseif not reference then
reference, first, last, firstdir = r, current, current, txtdir
elseif r == reference then
last = current
- elseif (done[reference] or 0) == 0 then -- or id == glue_code and getsubtype(current) == right_skip_code
+ elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code
if not skip or r > skip then -- maybe no > test
head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
reference, first, last, firstdir = nil, nil, nil, nil
@@ -273,7 +256,7 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
reference, first, last, firstdir = r, current, current, txtdir
end
end
- current = getnext(current)
+ current = current.next
end
if reference and (done[reference] or 0) == 0 then
head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
@@ -288,32 +271,32 @@ local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir)
txtdir = txtdir or "==="
local current = head
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code or id == vlist_code then
- local r = getattr(current,attribute)
+ local r = current[attribute]
if r and not done[r] then
done[r] = true
inject_list(id,current,r,make,stack,pardir,txtdir)
end
- local list = getlist(current)
+ local list = current.list
if list then
- setfield(current,"list",inject_area(list,attribute,make,stack,done,current,pardir,txtdir))
+ current.list = inject_area(list,attribute,make,stack,done,current,pardir,txtdir)
end
elseif id == whatsit_code then
- local subtype = getsubtype(current)
+ local subtype = current.subtype
if subtype == localpar_code then
- pardir = getfield(current,"dir")
+ pardir = current.dir
elseif subtype == dir_code then
- txtdir = getfield(current,"dir")
+ txtdir = current.dir
end
else
- local r = getattr(current,attribute)
+ local r = current[attribute]
if r and not done[r] then
done[r] = true
head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
end
end
- current = getnext(current)
+ current = current.next
end
end
return head, true
@@ -321,6 +304,12 @@ end
-- tracing
+local nodepool = nodes.pool
+
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+
+local set_attribute = node.set_attribute
local register_color = colors.register
local a_color = attributes.private('color')
@@ -357,15 +346,15 @@ local function colorize(width,height,depth,n,reference,what)
height = 65536/2
depth = height
end
- local rule = new_rule(width,height,depth) -- todo: use tracer rule
- setattr(rule,a_colormodel,1) -- gray color model
- setattr(rule,a_color,u_color)
- setattr(rule,a_transparency,u_transparency)
+ local rule = new_rule(width,height,depth)
+ rule[a_colormodel] = 1 -- gray color model
+ rule[a_color] = u_color
+ rule[a_transparency] = u_transparency
if width < 0 then
local kern = new_kern(width)
- setfield(rule,"width",-width)
- setfield(kern,"next",rule)
- setfield(rule,"prev",kern)
+ rule.width = -width
+ kern.next = rule
+ rule.prev = kern
return kern
else
return rule
@@ -374,6 +363,9 @@ end
-- references:
+local nodepool = nodes.pool
+local new_kern = nodepool.kern
+
local texsetattribute = tex.setattribute
local texsetcount = tex.setcount
@@ -418,25 +410,22 @@ local function makereference(width,height,depth,reference)
end
local annot = nodeinjections.reference(width,height,depth,set)
if annot then
-annot = tonut(annot)
nofreferences = nofreferences + 1
local result, current
if trace_references then
local step = 65536
 result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference")) -- step subtracted so that we can see separate links
- setfield(result,"width",0)
+ result.width = 0
current = result
end
if current then
- setfield(current,"next",annot)
+ current.next = annot
else
result = annot
end
references.registerpage(n)
result = hpack_list(result,0)
- setfield(result,"width",0)
- setfield(result,"height",0)
- setfield(result,"depth",0)
+ result.width, result.height, result.depth = 0, 0, 0
if cleanupreferences then stack[reference] = nil end
return result, resolved
elseif trace_references then
@@ -447,19 +436,9 @@ annot = tonut(annot)
end
end
--- function nodes.references.handler(head)
--- if topofstack > 0 then
--- return inject_areas(head,attribute,makereference,stack,done)
--- else
--- return head, false
--- end
--- end
-
function nodes.references.handler(head)
if topofstack > 0 then
- head = tonut(head)
- local head, done = inject_areas(head,attribute,makereference,stack,done)
- return tonode(head), done
+ return inject_areas(head,attribute,makereference,stack,done)
else
return head, false
end
@@ -505,12 +484,12 @@ local function makedestination(width,height,depth,reference)
end
for n=1,#name do
local rule = hpack_list(colorize(width,height,depth,3,reference,"destination"))
- setfield(rule,"width",0)
+ rule.width = 0
if not result then
result, current = rule, rule
else
- setfield(current,"next",rule)
- setfield(rule,"prev",current)
+ current.next = rule
+ rule.prev = current
current = rule
end
width, height = width - step, height - step
@@ -520,12 +499,12 @@ local function makedestination(width,height,depth,reference)
for n=1,#name do
local annot = nodeinjections.destination(width,height,depth,name[n],view)
if annot then
-annot = tonut(annot) -- obsolete soon
+ -- probably duplicate
if not result then
result = annot
else
- setfield(current,"next",annot)
- setfield(annot,"prev",current)
+ current.next = annot
+ annot.prev = current
end
current = find_node_tail(annot)
end
@@ -533,9 +512,7 @@ annot = tonut(annot) -- obsolete soon
if result then
-- some internal error
result = hpack_list(result,0)
- setfield(result,"width",0)
- setfield(result,"height",0)
- setfield(result,"depth",0)
+ result.width, result.height, result.depth = 0, 0, 0
end
if cleanupdestinations then stack[reference] = nil end
return result, resolved
@@ -544,25 +521,14 @@ annot = tonut(annot) -- obsolete soon
end
end
--- function nodes.destinations.handler(head)
--- if topofstack > 0 then
--- return inject_area(head,attribute,makedestination,stack,done) -- singular
--- else
--- return head, false
--- end
--- end
-
function nodes.destinations.handler(head)
if topofstack > 0 then
- head = tonut(head)
- local head, done = inject_areas(head,attribute,makedestination,stack,done)
- return tonode(head), done
+ return inject_area(head,attribute,makedestination,stack,done) -- singular
else
return head, false
end
end
-
-- will move
function references.mark(reference,h,d,view)
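
The node-ref.lua changes above center on inject_areas: it walks a node list, remembers the first and last node of each run that carries the same reference attribute, and hands the completed run to a maker (makereference or makedestination) that wraps it in an annotation. The following standalone sketch shows just that run-grouping step, with linked tables instead of nodes and a ref field instead of a real attribute; collectranges and the node shape are illustrative, not the module's API.

-- build a small linked list of node-like tables, some tagged with a reference
local function link(items)
  local head, prev
  for i = 1, #items do
    local n = { char = items[i].char, ref = items[i].ref }
    if prev then prev.next = n else head = n end
    prev = n
  end
  return head
end

-- group consecutive nodes with the same non-nil ref and flush each run via make
local function collectranges(head, make)
  local reference, first, last
  local current = head
  while current do
    local r = current.ref
    if not r then
      -- untagged node (kern, glue, ...): skip; only tagged nodes extend the run
    elseif r == reference then
      last = current                        -- same reference: extend the run
    else
      if reference then
        make(reference, first, last)        -- different reference: flush the old run
      end
      reference, first, last = r, current, current
    end
    current = current.next
  end
  if reference then
    make(reference, first, last)            -- flush the final run
  end
end

local head = link {
  { char = "f", ref = 1 }, { char = "o", ref = 1 }, { char = "o" },
  { char = "b", ref = 2 }, { char = "a", ref = 2 }, { char = "r", ref = 2 },
}

collectranges(head, function(r, first, last)
  print(("reference %d: %s .. %s"):format(r, first.char, last.char))
end)
--> reference 1: f .. o
--> reference 2: b .. r

The real function additionally recurses into hlists and vlists, tracks paragraph and text direction, and uses the done table to avoid injecting the same reference twice; those details are left out here.
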
diff --git a/tex/context/base/node-res.lua b/tex/context/base/node-res.lua
index 968283745..ca9d67f91 100644
--- a/tex/context/base/node-res.lua
+++ b/tex/context/base/node-res.lua
@@ -18,8 +18,13 @@ local report_nodes = logs.reporter("nodes","housekeeping")
local nodes, node = nodes, node
+local copy_node = node.copy
+local free_node = node.free
+local free_list = node.flush_list
+local new_node = node.new
+
nodes.pool = nodes.pool or { }
-local nodepool = nodes.pool
+local pool = nodes.pool
local whatsitcodes = nodes.whatsitcodes
local skipcodes = nodes.skipcodes
@@ -30,453 +35,400 @@ local glyph_code = nodecodes.glyph
local allocate = utilities.storage.allocate
+local texgetbox = tex.getbox
local texgetcount = tex.getcount
local reserved, nofreserved = { }, 0
--- user nodes
+local function register_node(n)
+ nofreserved = nofreserved + 1
+ reserved[nofreserved] = n
+ return n
+end
-local userids = allocate()
-local lastid = 0
+pool.register = register_node
-setmetatable(userids, {
- __index = function(t,k)
- if type(k) == "string" then
- lastid = lastid + 1
- rawset(userids,lastid,k)
- rawset(userids,k,lastid)
- return lastid
- else
- rawset(userids,k,k)
- return k
- end
- end,
- __call = function(t,k)
- return t[k]
+function pool.cleanup(nofboxes) -- todo
+ if nodes.tracers.steppers then -- to be resolved
+ nodes.tracers.steppers.reset() -- todo: make a registration subsystem
end
-} )
-
--- nuts overload
-
-local nuts = nodes.nuts
-local nutpool = { }
-nuts.pool = nutpool
-
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getbox = nuts.getbox
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getid = nuts.getid
-
-local copy_nut = nuts.copy
-local new_nut = nuts.new
-local free_nut = nuts.free
-
--- at some point we could have a dual set (the overhead of tonut is not much larger than
--- metatable associations at the lua/c end esp if we also take assignments into account
-
--- table.setmetatableindex(nodepool,function(t,k,v)
--- -- report_nodes("defining nodepool[%s] instance",k)
--- local f = nutpool[k]
--- local v = function(...)
--- return tonode(f(...))
--- end
--- t[k] = v
--- return v
--- end)
---
--- -- we delay one step because that permits us a forward reference
--- -- e.g. in pdfsetmatrix
-
-table.setmetatableindex(nodepool,function(t,k,v)
- -- report_nodes("defining nodepool[%s] instance",k)
- local v = function(...)
- local f = nutpool[k]
- local v = function(...)
- return tonode(f(...))
+ local nl, nr = 0, nofreserved
+ for i=1,nofreserved do
+ local ri = reserved[i]
+ -- if not (ri.id == glue_spec and not ri.is_writable) then
+ free_node(reserved[i])
+ -- end
+ end
+ if nofboxes then
+ for i=0,nofboxes do
+ local l = texgetbox(i)
+ if l then
+ free_node(l) -- also list ?
+ nl = nl + 1
+ end
end
- t[k] = v
- return v(...)
end
- t[k] = v
- return v
-end)
-
-local function register_nut(n)
- nofreserved = nofreserved + 1
- reserved[nofreserved] = n
- return n
+ reserved = { }
+ nofreserved = 0
+ return nr, nl, nofboxes -- can be nil
end
-local function register_node(n)
- nofreserved = nofreserved + 1
- if type(n) == "number" then -- isnut(n)
- reserved[nofreserved] = n
- else
- reserved[nofreserved] = tonut(n)
+function pool.usage()
+ local t = { }
+ for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
+ t[tag] = n
end
- return n
+ return t
end
-nodepool.userids = userids
-nodepool.register = register_node
-
-nutpool.userids = userids
-nutpool.register = register_node -- could be register_nut
-
--- so far
-
-local disc = register_nut(new_nut("disc"))
-local kern = register_nut(new_nut("kern",kerncodes.userkern))
-local fontkern = register_nut(new_nut("kern",kerncodes.fontkern))
-local penalty = register_nut(new_nut("penalty"))
-local glue = register_nut(new_nut("glue")) -- glue.spec = nil
-local glue_spec = register_nut(new_nut("glue_spec"))
-local glyph = register_nut(new_nut("glyph",0))
-local textdir = register_nut(new_nut("whatsit",whatsitcodes.dir))
-local latelua = register_nut(new_nut("whatsit",whatsitcodes.latelua))
-local special = register_nut(new_nut("whatsit",whatsitcodes.special))
-local user_n = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_n,"type",100) -- 44
-local user_l = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_l,"type",110) -- 44
-local user_s = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_s,"type",115) -- 44
-local user_t = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_t,"type",116) -- 44
-local left_margin_kern = register_nut(new_nut("margin_kern",0))
-local right_margin_kern = register_nut(new_nut("margin_kern",1))
-local lineskip = register_nut(new_nut("glue",skipcodes.lineskip))
-local baselineskip = register_nut(new_nut("glue",skipcodes.baselineskip))
-local leftskip = register_nut(new_nut("glue",skipcodes.leftskip))
-local rightskip = register_nut(new_nut("glue",skipcodes.rightskip))
-local temp = register_nut(new_nut("temp",0))
-local noad = register_nut(new_nut("noad"))
+local disc = register_node(new_node("disc"))
+local kern = register_node(new_node("kern",kerncodes.userkern))
+local fontkern = register_node(new_node("kern",kerncodes.fontkern))
+local penalty = register_node(new_node("penalty"))
+local glue = register_node(new_node("glue")) -- glue.spec = nil
+local glue_spec = register_node(new_node("glue_spec"))
+local glyph = register_node(new_node("glyph",0))
+local textdir = register_node(new_node("whatsit",whatsitcodes.dir))
+local latelua = register_node(new_node("whatsit",whatsitcodes.latelua))
+local special = register_node(new_node("whatsit",whatsitcodes.special))
+local user_n = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_n.type = 100 -- 44
+local user_l = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_l.type = 110 -- 44
+local user_s = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_s.type = 115 -- 44
+local user_t = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_t.type = 116 -- 44
+local left_margin_kern = register_node(new_node("margin_kern",0))
+local right_margin_kern = register_node(new_node("margin_kern",1))
+local lineskip = register_node(new_node("glue",skipcodes.lineskip))
+local baselineskip = register_node(new_node("glue",skipcodes.baselineskip))
+local leftskip = register_node(new_node("glue",skipcodes.leftskip))
+local rightskip = register_node(new_node("glue",skipcodes.rightskip))
+local temp = register_node(new_node("temp",0))
+local noad = register_node(new_node("noad"))
-- the dir field needs to be set otherwise crash:
-local rule = register_nut(new_nut("rule")) setfield(rule, "dir","TLT")
-local hlist = register_nut(new_nut("hlist")) setfield(hlist,"dir","TLT")
-local vlist = register_nut(new_nut("vlist")) setfield(vlist,"dir","TLT")
-
-function nutpool.zeroglue(n)
- local s = getfield(n,"spec")
- return
- getfield(s,"width") == 0 and
- getfield(s,"stretch") == 0 and
- getfield(s,"shrink") == 0 and
- getfield(s,"stretch_order") == 0 and
- getfield(s,"shrink_order") == 0
-end
-
-function nutpool.glyph(fnt,chr)
- local n = copy_nut(glyph)
- if fnt then setfield(n,"font",fnt) end
- if chr then setfield(n,"char",chr) end
+local rule = register_node(new_node("rule")) rule .dir = "TLT"
+local hlist = register_node(new_node("hlist")) hlist.dir = "TLT"
+local vlist = register_node(new_node("vlist")) vlist.dir = "TLT"
+
+function pool.zeroglue(n)
+ local s = n.spec
+ return not writable or (
+ s.width == 0
+ and s.stretch == 0
+ and s.shrink == 0
+ and s.stretch_order == 0
+ and s.shrink_order == 0
+ )
+end
+
+function pool.glyph(fnt,chr)
+ local n = copy_node(glyph)
+ if fnt then n.font = fnt end
+ if chr then n.char = chr end
return n
end
-function nutpool.penalty(p)
- local n = copy_nut(penalty)
- setfield(n,"penalty",p)
+function pool.penalty(p)
+ local n = copy_node(penalty)
+ n.penalty = p
return n
end
-function nutpool.kern(k)
- local n = copy_nut(kern)
- setfield(n,"kern",k)
+function pool.kern(k)
+ local n = copy_node(kern)
+ n.kern = k
return n
end
-function nutpool.fontkern(k)
- local n = copy_nut(fontkern)
- setfield(n,"kern",k)
+function pool.fontkern(k)
+ local n = copy_node(fontkern)
+ n.kern = k
return n
end
-function nutpool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
- local s = copy_nut(glue_spec)
- if width then setfield(s,"width",width) end
- if stretch then setfield(s,"stretch",stretch) end
- if shrink then setfield(s,"shrink",shrink) end
- if stretch_order then setfield(s,"stretch_order",stretch_order) end
- if shrink_order then setfield(s,"shrink_order",shrink_order) end
+function pool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
+ local s = copy_node(glue_spec)
+ if width then s.width = width end
+ if stretch then s.stretch = stretch end
+ if shrink then s.shrink = shrink end
+ if stretch_order then s.stretch_order = stretch_order end
+ if shrink_order then s.shrink_order = shrink_order end
return s
end
local function someskip(skip,width,stretch,shrink,stretch_order,shrink_order)
- local n = copy_nut(skip)
+ local n = copy_node(skip)
if not width then
-- no spec
elseif width == false or tonumber(width) then
- local s = copy_nut(glue_spec)
- if width then setfield(s,"width",width) end
- if stretch then setfield(s,"stretch",stretch) end
- if shrink then setfield(s,"shrink",shrink) end
- if stretch_order then setfield(s,"stretch_order",stretch_order) end
- if shrink_order then setfield(s,"shrink_order",shrink_order) end
- setfield(n,"spec",s)
+ local s = copy_node(glue_spec)
+ if width then s.width = width end
+ if stretch then s.stretch = stretch end
+ if shrink then s.shrink = shrink end
+ if stretch_order then s.stretch_order = stretch_order end
+ if shrink_order then s.shrink_order = shrink_order end
+ n.spec = s
else
-- shared
- setfield(n,"spec",copy_nut(width))
+ n.spec = copy_node(width)
end
return n
end
-function nutpool.stretch(a,b)
- local n = copy_nut(glue)
- local s = copy_nut(glue_spec)
+function pool.stretch(a,b)
+ local n = copy_node(glue)
+ local s = copy_node(glue_spec)
if b then
- setfield(s,"stretch",a)
- setfield(s,"stretch_order",b)
+ s.stretch = a
+ s.stretch_order = b
else
- setfield(s,"stretch",1)
- setfield(s,"stretch_order",a or 1)
+ s.stretch = 1
+ s.stretch_order = a or 1
end
- setfield(n,"spec",s)
+ n.spec = s
return n
end
-function nutpool.shrink(a,b)
- local n = copy_nut(glue)
- local s = copy_nut(glue_spec)
+function pool.shrink(a,b)
+ local n = copy_node(glue)
+ local s = copy_node(glue_spec)
if b then
- setfield(s,"shrink",a)
- setfield(s,"shrink_order",b)
+ s.shrink = a
+ s.shrink_order = b
else
- setfield(s,"shrink",1)
- setfield(s,"shrink_order",a or 1)
+ s.shrink = 1
+ s.shrink_order = a or 1
end
- setfield(n,"spec",s)
+ n.spec = s
return n
end
-function nutpool.glue(width,stretch,shrink,stretch_order,shrink_order)
+
+function pool.glue(width,stretch,shrink,stretch_order,shrink_order)
return someskip(glue,width,stretch,shrink,stretch_order,shrink_order)
end
-function nutpool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
+function pool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(leftskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function nutpool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
+function pool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(rightskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function nutpool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
+function pool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(lineskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function nutpool.baselineskip(width,stretch,shrink)
+function pool.baselineskip(width,stretch,shrink)
return someskip(baselineskip,width,stretch,shrink)
end
-function nutpool.disc()
- return copy_nut(disc)
+function pool.disc()
+ return copy_node(disc)
end
-function nutpool.textdir(dir)
- local t = copy_nut(textdir)
- setfield(t,"dir",dir)
+function pool.textdir(dir)
+ local t = copy_node(textdir)
+ t.dir = dir
return t
end
-function nutpool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
- local n = copy_nut(rule)
- if width then setfield(n,"width",width) end
- if height then setfield(n,"height",height) end
- if depth then setfield(n,"depth",depth) end
- if dir then setfield(n,"dir",dir) end
+function pool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
+ local n = copy_node(rule)
+ if width then n.width = width end
+ if height then n.height = height end
+ if depth then n.depth = depth end
+ if dir then n.dir = dir end
return n
end
--- if node.has_field(latelua,'string') then
- function nutpool.latelua(code)
- local n = copy_nut(latelua)
- setfield(n,"string",code)
+if node.has_field(latelua,'string') then
+ function pool.latelua(code)
+ local n = copy_node(latelua)
+ n.string = code
+ return n
+ end
+else
+ function pool.latelua(code)
+ local n = copy_node(latelua)
+ n.data = code
return n
end
--- else
--- function nutpool.latelua(code)
--- local n = copy_nut(latelua)
--- setfield(n,"data",code)
--- return n
--- end
--- end
-
-function nutpool.leftmarginkern(glyph,width)
- local n = copy_nut(left_margin_kern)
+end
+
+function pool.leftmarginkern(glyph,width)
+ local n = copy_node(left_margin_kern)
if not glyph then
report_nodes("invalid pointer to left margin glyph node")
- elseif getid(glyph) ~= glyph_code then
+ elseif glyph.id ~= glyph_code then
report_nodes("invalid node type %a for %s margin glyph node",nodecodes[glyph],"left")
else
- setfield(n,"glyph",glyph)
+ n.glyph = glyph
end
if width then
- setfield(n,"width",width)
+ n.width = width
end
return n
end
-function nutpool.rightmarginkern(glyph,width)
- local n = copy_nut(right_margin_kern)
+function pool.rightmarginkern(glyph,width)
+ local n = copy_node(right_margin_kern)
if not glyph then
report_nodes("invalid pointer to right margin glyph node")
- elseif getid(glyph) ~= glyph_code then
+ elseif glyph.id ~= glyph_code then
report_nodes("invalid node type %a for %s margin glyph node",nodecodes[p],"right")
else
- setfield(n,"glyph",glyph)
+ n.glyph = glyph
end
if width then
- setfield(n,"width",width)
+ n.width = width
end
return n
end
-function nutpool.temp()
- return copy_nut(temp)
+function pool.temp()
+ return copy_node(temp)
end
-function nutpool.noad()
- return copy_nut(noad)
+function pool.noad()
+ return copy_node(noad)
end
-function nutpool.hlist(list,width,height,depth)
- local n = copy_nut(hlist)
+function pool.hlist(list,width,height,depth)
+ local n = copy_node(hlist)
if list then
- setfield(n,"list",list)
+ n.list = list
end
if width then
- setfield(n,"width",width)
+ n.width = width
end
if height then
- setfield(n,"height",height)
+ n.height = height
end
if depth then
- setfield(n,"depth",depth)
+ n.depth = depth
end
return n
end
-function nutpool.vlist(list,width,height,depth)
- local n = copy_nut(vlist)
+function pool.vlist(list,width,height,depth)
+ local n = copy_node(vlist)
if list then
- setfield(n,"list",list)
+ n.list = list
end
if width then
- setfield(n,"width",width)
+ n.width = width
end
if height then
- setfield(n,"height",height)
+ n.height = height
end
if depth then
- setfield(n,"depth",depth)
+ n.depth = depth
end
return n
end
+--[[
+<p>At some point we ran into a problem that the glue specification
+of the zeropoint dimension was overwritten when adapting a glue spec
+node. This is a side effect of glue specs being shared. After a
+couple of hours tracing and debugging Taco and I came to the
+conclusion that it made no sense to complicate the spec allocator
+and settled on a writable flag. This all is a side effect of the
+fact that some glues use reserved memory slots (with the zeropoint
+glue being a noticeable one). So, next we wrap this into a function
+and hide it for the user. And yes, LuaTeX now gives a warning as
+well.</p>
+]]--
+
+function nodes.writable_spec(n) -- not pool
+ local spec = n.spec
+ if not spec then
+ spec = copy_node(glue_spec)
+ n.spec = spec
+ elseif not spec.writable then
+ spec = copy_node(spec)
+ n.spec = spec
+ end
+ return spec
+end
+
-- local num = userids["my id"]
-- local str = userids[num]
-function nutpool.usernumber(id,num)
- local n = copy_nut(user_n)
+local userids = allocate() pool.userids = userids
+local lastid = 0
+
+setmetatable(userids, {
+ __index = function(t,k)
+ if type(k) == "string" then
+ lastid = lastid + 1
+ rawset(userids,lastid,k)
+ rawset(userids,k,lastid)
+ return lastid
+ else
+ rawset(userids,k,k)
+ return k
+ end
+ end,
+ __call = function(t,k)
+ return t[k]
+ end
+} )
+
+function pool.usernumber(id,num)
+ local n = copy_node(user_n)
if num then
- setfield(n,"user_id",id)
- setfield(n,"value",num)
+ n.user_id, n.value = id, num
elseif id then
- setfield(n,"value",id)
+ n.value = id
end
return n
end
-function nutpool.userlist(id,list)
- local n = copy_nut(user_l)
+function pool.userlist(id,list)
+ local n = copy_node(user_l)
if list then
- setfield(n,"user_id",id)
- setfield(n,"value",list)
+ n.user_id, n.value = id, list
else
- setfield(n,"value",id)
+ n.value = id
end
return n
end
-function nutpool.userstring(id,str)
- local n = copy_nut(user_s)
+function pool.userstring(id,str)
+ local n = copy_node(user_s)
if str then
- setfield(n,"user_id",id)
- setfield(n,"value",str)
+ n.user_id, n.value = id, str
else
- setfield(n,"value",id)
+ n.value = id
end
return n
end
-function nutpool.usertokens(id,tokens)
- local n = copy_nut(user_t)
+function pool.usertokens(id,tokens)
+ local n = copy_node(user_t)
if tokens then
- setfield(n,"user_id",id)
- setfield(n,"value",tokens)
+ n.user_id, n.value = id, tokens
else
- setfield(n,"value",id)
+ n.value = id
end
return n
end
-function nutpool.special(str)
- local n = copy_nut(special)
- setfield(n,"data",str)
+function pool.special(str)
+ local n = copy_node(special)
+ n.data = str
return n
end
--- housekeeping
-
-local function cleanup(nofboxes) -- todo
- if nodes.tracers.steppers then -- to be resolved
- nodes.tracers.steppers.reset() -- todo: make a registration subsystem
- end
- local nl, nr = 0, nofreserved
- for i=1,nofreserved do
- local ri = reserved[i]
- -- if not (getid(ri) == glue_spec and not getfield(ri,"is_writable")) then
- free_nut(reserved[i])
- -- end
- end
- if nofboxes then
- for i=0,nofboxes do
- local l = getbox(i)
- if l then
- free_nut(l) -- also list ?
- nl = nl + 1
- end
- end
- end
- reserved = { }
- nofreserved = 0
- return nr, nl, nofboxes -- can be nil
-end
-
-
-local function usage()
- local t = { }
- for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
- t[tag] = n
- end
- return t
-end
-
-nutpool .cleanup = cleanup
-nodepool.cleanup = cleanup
-
-nutpool .usage = usage
-nodepool.usage = usage
-
--- end
-
statistics.register("cleaned up reserved nodes", function()
- return format("%s nodes, %s lists of %s", cleanup(texgetcount("c_syst_last_allocated_box")))
+ return format("%s nodes, %s lists of %s", pool.cleanup(texgetcount("c_syst_last_allocated_box")))
end) -- \topofboxstack
statistics.register("node memory usage", function() -- comes after cleanup !
return status.node_mem_usage
end)
-lua.registerfinalizer(cleanup, "cleanup reserved nodes")
+lua.registerfinalizer(pool.cleanup, "cleanup reserved nodes")
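
node-res.lua above keeps one prototype node per type, registers it so pool.cleanup can free everything at the end of the run, and answers each pool request with a copy of the prototype whose fields are then filled in; nodes.writable_spec additionally copies a shared glue spec before it is mutated, which is what the inlined documentation block explains. A standalone sketch of those two ideas, with tables in place of LuaTeX nodes (new_node, copy_node and free_node below are table-based stand-ins, not the node library):

local reserved, nofreserved = {}, 0

local function new_node(id, subtype)        -- stand-in for node.new
  return { id = id, subtype = subtype }
end

local function copy_node(n)                 -- stand-in for node.copy
  local c = {}
  for k, v in pairs(n) do c[k] = v end
  return c
end

local function free_node(n) end             -- stand-in for node.free

local function register(n)                  -- remember prototypes for later cleanup
  nofreserved = nofreserved + 1
  reserved[nofreserved] = n
  return n
end

local pool = {}

-- one prototype per node type, allocated once and copied on demand
local kern      = register(new_node("kern", "userkern"))
local glue_spec = register(new_node("glue_spec"))

function pool.kern(k)
  local n = copy_node(kern)
  n.kern = k
  return n
end

-- never mutate a shared spec: hand back a private, writable copy instead
function pool.writable_spec(n)
  local spec = n.spec
  if not spec then
    spec = copy_node(glue_spec)
    n.spec = spec
  elseif not spec.writable then
    spec = copy_node(spec)
    spec.writable = true                    -- fresh copies are writable by definition
    n.spec = spec
  end
  return spec
end

function pool.cleanup()
  for i = 1, nofreserved do
    free_node(reserved[i])
  end
  reserved, nofreserved = {}, 0
end

local g    = { id = "glue", spec = { width = 0, writable = false } }  -- shared spec
local spec = pool.writable_spec(g)
spec.width = 65536                          -- safe: this copy belongs to g alone
print(pool.kern(1000).kern, g.spec.width)   --> 1000   65536
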
diff --git a/tex/context/base/node-rul.lua b/tex/context/base/node-rul.lua
index 6f3bc9df9..96d6bdf41 100644
--- a/tex/context/base/node-rul.lua
+++ b/tex/context/base/node-rul.lua
@@ -13,28 +13,12 @@ if not modules then modules = { } end modules ['node-rul'] = {
local attributes, nodes, node = attributes, nodes, node
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getlist = nuts.getlist
-
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local rule_code = nodecodes.rule
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local rule_code = nodecodes.rule
function nodes.striprange(first,last) -- todo: dir
if first and last then -- just to be sure
@@ -42,11 +26,11 @@ function nodes.striprange(first,last) -- todo: dir
return first, last
end
while first and first ~= last do
- local id = getid(first)
+ local id = first.id
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
- first = getnext(first)
+ first = first.next
end
end
if not first then
@@ -55,13 +39,13 @@ function nodes.striprange(first,last) -- todo: dir
return first, last
end
while last and last ~= first do
- local id = getid(last)
+ local id = last.id
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
- local prev = getprev(last) -- luatex < 0.70 has italic correction kern not prev'd
+ local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd
if prev then
- last = prev
+ last = last.prev
else
break
end
@@ -89,12 +73,12 @@ local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local list_dimensions = nuts.dimensions
-local hpack_nodes = nuts.hpack
-
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
local striprange = nodes.striprange
+local list_dimensions = node.dimensions
+
+local hpack_nodes = node.hpack
local fontdata = fonts.hashes.identifiers
local variables = interfaces.variables
@@ -127,7 +111,7 @@ local dir_code = whatcodes.dir
local kerning_code = kerncodes.kern
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_rule = nodepool.rule
local new_kern = nodepool.kern
@@ -157,9 +141,9 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
local f, l, a, d, i, class
local continue, done, strip, level = false, false, true, -1
while n do
- local id = getid(n)
+ local id = n.id
if id == glyph_code or id == rule_code then
- local aa = getattr(n,attribute)
+ local aa = n[attribute]
if aa then
if aa == a then
if not f then -- ?
@@ -188,13 +172,13 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
end
f, l, a = nil, nil, nil
end
--- elseif f and (id == disc_code or (id == kern_code and getsubtype(n) == kerning_code)) then
+-- elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then
-- l = n
elseif id == disc_code then
if f then
l = n
end
- elseif id == kern_code and getsubtype(n) == kerning_code then
+ elseif id == kern_code and n.subtype == kerning_code then
if f then
l = n
end
@@ -203,11 +187,11 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
head, done = flush(head,f,l,d,level,parent,strip), true
f, l, a = nil, nil, nil
end
- local list = getlist(n)
+ local list = n.list
if list then
- setfield(n,"list",(processwords(attribute,data,flush,list,n))) -- watch ()
+ n.list = processwords(attribute,data,flush,list,n)
end
- elseif checkdir and id == whatsit_code and getsubtype(n) == dir_code then -- only changes in dir, we assume proper boundaries
+ elseif checkdir and id == whatsit_code and n.subtype == dir_code then -- only changes in dir, we assume proper boundaries
if f and a then
l = n
end
@@ -219,8 +203,8 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
-- l = n
elseif id == glue_code then
-- catch \underbar{a} \underbar{a} (subtype test is needed)
- local subtype = getsubtype(n)
- if getattr(n,attribute) and (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
+ local subtype = n.subtype
+ if n[attribute] and (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
l = n
else
head, done = flush(head,f,l,d,level,parent,strip), true
@@ -232,7 +216,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
f, l, a = nil, nil, nil
end
end
- n = getnext(n)
+ n = n.next
end
if f then
head, done = flush(head,f,l,d,level,parent,strip), true
@@ -243,16 +227,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
end
end
--- nodes.processwords = processwords
-
-nodes.processwords = function(attribute,data,flush,head,parent) -- we have hlistdir and local dir
- head = tonut(head)
- if parent then
- parent = tonut(parent)
- end
- local head, done = processwords(attribute,data,flush,head,parent)
- return tonode(head), done
-end
+nodes.processwords = processwords
--
@@ -271,7 +246,7 @@ end
local a_viewerlayer = attributes.private("viewerlayer")
local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose
- if getid(f) ~= glyph_code then
+ if f.id ~= glyph_code then
 -- safeguard ... we need to deal with rules and so (math)
return head
end
@@ -289,16 +264,16 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
if not f then
return head
end
- local w = list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),f,getnext(l))
+ local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next)
local method, offset, continue, dy, order, max = d.method, d.offset, d.continue, d.dy, d.order, d.max
local rulethickness, unit = d.rulethickness, d.unit
local ma, ca, ta = d.ma, d.ca, d.ta
- local colorspace = ma > 0 and ma or getattr(f,a_colorspace) or 1
- local color = ca > 0 and ca or getattr(f,a_color)
- local transparency = ta > 0 and ta or getattr(f,a_transparency)
+ local colorspace = ma > 0 and ma or f[a_colorspace] or 1
+ local color = ca > 0 and ca or f[a_color]
+ local transparency = ta > 0 and ta or f[a_transparency]
local foreground = order == v_foreground
- local e = dimenfactor(unit,getfont(f)) -- what if no glyph node
+ local e = dimenfactor(unit,f.font) -- what if no glyph node
local rt = tonumber(rulethickness)
if rt then
@@ -306,7 +281,7 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
else
local n, u = splitdimen(rulethickness)
if n and u then -- we need to intercept ex and em and % and ...
- rulethickness = n * dimenfactor(u,fontdata[getfont(f)]) / 2
+ rulethickness = n * dimenfactor(u,fontdata[f.font]) / 2
else
rulethickness = 1/5
end
@@ -325,18 +300,18 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
local ht = (offset+(i-1)*dy)*e + rulethickness - m
local dp = -(offset+(i-1)*dy)*e + rulethickness + m
local r = new_rule(w,ht,dp)
- local v = getattr(f,a_viewerlayer)
+ local v = f[a_viewerlayer]
-- quick hack
if v then
- setattr(r,a_viewerlayer,v)
+ r[a_viewerlayer] = v
end
--
if color then
- setattr(r,a_colorspace,colorspace)
- setattr(r,a_color,color)
+ r[a_colorspace] = colorspace
+ r[a_color] = color
end
if transparency then
- setattr(r,a_transparency,transparency)
+ r[a_transparency] = transparency
end
local k = new_kern(-w)
if foreground then
@@ -390,27 +365,21 @@ local function flush_shifted(head,first,last,data,level,parent,strip) -- not tha
if true then
first, last = striprange(first,last)
end
- local prev = getprev(first)
- local next = getnext(last)
- setfield(first,"prev",nil)
- setfield(last,"next",nil)
- local width, height, depth = list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),first,next)
+ local prev, next = first.prev, last.next
+ first.prev, last.next = nil, nil
+ local width, height, depth = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,first,next)
local list = hpack_nodes(first,width,"exactly")
if first == head then
head = list
end
if prev then
- setfield(prev,"next",list)
- setfield(list,"prev",prev)
+ prev.next, list.prev = list, prev
end
if next then
- setfield(next,"prev",list)
- setfield(list,"next",next)
+ next.prev, list.next = list, next
end
- local raise = data.dy * dimenfactor(data.unit,fontdata[getfont(first)])
- setfield(list,"shift",raise)
- setfield(list,"height",height)
- setfield(list,"depth",depth)
+ local raise = data.dy * dimenfactor(data.unit,fontdata[first.font])
+ list.shift, list.height, list.depth = raise, height, depth
if trace_shifted then
report_shifted("width %p, nodes %a, text %a",width,n_tostring(first,last),n_tosequence(first,last,true))
end
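
processwords in node-rul.lua above is the engine behind underlining and shifting: it scans a list, keeps a first/last pair for each run of glyphs that share an attribute value, lets discretionaries, kerns and attributed glue extend the run, and calls the flush function (flush_ruled, flush_shifted) when the run ends. A reduced standalone sketch of that scanner, with linked tables instead of nodes and a flush callback that only reports what it would underline; the node fields and codes below are illustrative.

local glyph_code, glue_code = "glyph", "glue"

-- flush maximal runs of glyphs carrying the same attribute value
local function processwords(attribute, flush, head)
  local first, last, value
  local n = head
  while n do
    local a = n.id == glyph_code and n[attribute] or nil
    if a then
      if a == value then
        last = n                            -- same value: extend the run
      else
        if first then
          flush(first, last, value)         -- value changed: close the old run
        end
        first, last, value = n, n, a
      end
    elseif first and n.id == glue_code and n[attribute] then
      last = n                              -- attributed space stays inside the run
    elseif first then
      flush(first, last, value)             -- anything else ends the run
      first, last, value = nil, nil, nil
    end
    n = n.next
  end
  if first then
    flush(first, last, value)               -- flush whatever is still open
  end
  return head
end

-- tiny list: "ab cd" with only the first word carrying the underline attribute
local items = {
  { id = "glyph", char = "a", underline = 1 },
  { id = "glyph", char = "b", underline = 1 },
  { id = "glue" },
  { id = "glyph", char = "c" },
  { id = "glyph", char = "d" },
}
for i = 1, #items - 1 do items[i].next = items[i + 1] end

processwords("underline", function(first, last, value)
  print(("underline %s: %s .. %s"):format(value, first.char, last.char))
end, items[1])
--> underline 1: a .. b
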
diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua
index 081107277..9617f7476 100644
--- a/tex/context/base/node-tra.lua
+++ b/tex/context/base/node-tra.lua
@@ -34,30 +34,9 @@ nodes.handlers = handlers
local injections = nodes.injections or { }
nodes.injections = injections
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getchar = nuts.getchar
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-
-local setattr = nuts.setattr
-
-local flush_list = nuts.flush_list
-local count_nodes = nuts.count
-local used_nodes = nuts.usedlist
-
-local traverse_by_id = nuts.traverse_id
-local traverse_nodes = nuts.traverse
-local d_tostring = nuts.tostring
-
-local nutpool = nuts.pool
-local new_rule = nutpool.rule
+local traverse_nodes = node.traverse
+local traverse_by_id = node.traverse_id
+local count_nodes = nodes.count
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -77,6 +56,9 @@ local gluespec_code = nodecodes.gluespec
local localpar_code = whatcodes.localpar
local dir_code = whatcodes.dir
+local nodepool = nodes.pool
+local new_rule = nodepool.rule
+
local dimenfactors = number.dimenfactors
local formatters = string.formatters
@@ -86,16 +68,15 @@ function nodes.showlist(head, message)
if message then
report_nodes(message)
end
- for n in traverse_nodes(tonut(head)) do
- report_nodes(d_tostring(n))
+ for n in traverse_nodes(head) do
+ report_nodes(tostring(n))
end
end
function nodes.handlers.checkglyphs(head,message)
- local h = tonut(head)
local t = { }
- for g in traverse_by_id(glyph_code,h) do
- t[#t+1] = formatters["%U:%s"](getchar(g),getsubtype(g))
+ for g in traverse_by_id(glyph_code,head) do
+ t[#t+1] = formatters["%U:%s"](g.char,g.subtype)
end
if #t > 0 then
if message and message ~= "" then
@@ -109,12 +90,12 @@ end
function nodes.handlers.checkforleaks(sparse)
local l = { }
- local q = used_nodes()
- for p in traverse_nodes(q) do
- local s = table.serialize(nodes.astable(p,sparse),nodecodes[getid(p)])
+ local q = node.usedlist()
+    for p in traverse_nodes(q) do
+ local s = table.serialize(nodes.astable(p,sparse),nodecodes[p.id])
l[s] = (l[s] or 0) + 1
end
- flush_list(q)
+ node.flush_list(q)
for k, v in next, l do
report_nodes("%s * %s",v,k)
end
@@ -124,40 +105,39 @@ local f_sequence = formatters["U+%04X:%s"]
local function tosequence(start,stop,compact)
if start then
- start = tonut(start)
- stop = stop and tonut(stop)
local t = { }
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- local c = getchar(start)
+ local c = start.char
if compact then
- local components = getfield(start,"components")
- if components then
- t[#t+1] = tosequence(components,nil,compact)
+ if start.components then
+ t[#t+1] = tosequence(start.components,nil,compact)
else
t[#t+1] = utfchar(c)
end
else
t[#t+1] = f_sequence(c,utfchar(c))
end
+ elseif id == whatsit_code and start.subtype == localpar_code or start.subtype == dir_code then
+ t[#t+1] = "[" .. start.dir .. "]"
elseif id == rule_code then
if compact then
t[#t+1] = "|"
else
t[#t+1] = nodecodes[id]
end
- elseif id == whatsit_code and getsubtype(start) == localpar_code or getsubtype(start) == dir_code then
- t[#t+1] = "[" .. getfield(start,"dir") .. "]"
- elseif compact then
- t[#t+1] = "[]"
else
- t[#t+1] = nodecodes[id]
+ if compact then
+ t[#t+1] = "[]"
+ else
+ t[#t+1] = nodecodes[id]
+ end
end
if start == stop then
break
else
- start = getnext(start)
+ start = start.next
end
end
if compact then
@@ -173,23 +153,21 @@ end
nodes.tosequence = tosequence
function nodes.report(t,done)
- report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(tonut(t)))
+ report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(t))
end
function nodes.packlist(head)
local t = { }
- for n in traverse_nodes(tonut(head)) do
- t[#t+1] = d_tostring(n)
+    for n in traverse_nodes(head) do
+ t[#t+1] = tostring(n)
end
return t
end
function nodes.idstostring(head,tail)
- head = tonut(head)
- tail = tail and tonut(tail)
local t, last_id, last_n = { }, nil, 0
for n in traverse_nodes(head,tail) do -- hm, does not stop at tail
- local id = getid(n)
+ local id = n.id
if not last_id then
last_id, last_n = id, 1
elseif last_id == id then
@@ -217,8 +195,6 @@ function nodes.idstostring(head,tail)
end
-- function nodes.xidstostring(head,tail) -- only for special tracing of backlinks
--- head = tonut(head)
--- tail = tonut(tail)
-- local n = head
-- while n.next do
-- n = n.next
@@ -241,7 +217,7 @@ end
-- if n == head then
-- break
-- end
--- n = getprev(n)
+-- n = n.prev
-- end
-- if not last_id then
-- t[#t+1] = "no nodes"
@@ -254,56 +230,51 @@ end
-- end
local function showsimplelist(h,depth,n)
- h = h and tonut(h)
while h do
report_nodes("% w%s",n,d_tostring(h))
if not depth or n < depth then
- local id = getid(h)
+ local id = h.id
if id == hlist_code or id == vlist_code then
- showsimplelist(getlist(h),depth,n+1)
+ showsimplelist(h.list,depth,n+1)
end
end
- h = getnext(h)
+ h = h.next
end
end
--- \startluacode
--- callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
--- \stopluacode
--- \vbox{b\footnote{n}a}
--- \startluacode
--- callback.register('buildpage_filter',nil)
--- \stopluacode
+--~ \startluacode
+--~ callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
+--~ \stopluacode
+--~ \vbox{b\footnote{n}a}
+--~ \startluacode
+--~ callback.register('buildpage_filter',nil)
+--~ \stopluacode
nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end
local function listtoutf(h,joiner,textonly,last)
+ local joiner = (joiner == true and utfchar(0x200C)) or joiner -- zwnj
local w = { }
while h do
- local id = getid(h)
+ local id = h.id
if id == glyph_code then -- always true
- local c = getchar(h)
+ local c = h.char
w[#w+1] = c >= 0 and utfchar(c) or formatters["<%i>"](c)
if joiner then
w[#w+1] = joiner
end
elseif id == disc_code then
- local pre = getfield(h,"pre")
- local pos = getfield(h,"post")
- local rep = getfield(h,"replace")
+ local pre = h.pre
+ local pos = h.post
+ local rep = h.replace
w[#w+1] = formatters["[%s|%s|%s]"] (
pre and listtoutf(pre,joiner,textonly) or "",
pos and listtoutf(pos,joiner,textonly) or "",
rep and listtoutf(rep,joiner,textonly) or ""
)
elseif textonly then
- if id == glue_code then
- local spec = getfield(h,"spec")
- if spec and getfield(spec,"width") > 0 then
- w[#w+1] = " "
- end
- elseif id == hlist_code or id == vlist_code then
- w[#w+1] = "[]"
+ if id == glue_code and h.spec and h.spec.width > 0 then
+ w[#w+1] = " "
end
else
w[#w+1] = "[-]"
@@ -311,28 +282,24 @@ local function listtoutf(h,joiner,textonly,last)
if h == last then
break
else
- h = getnext(h)
+ h = h.next
end
end
return concat(w)
end
-function nodes.listtoutf(h,joiner,textonly,last)
- local joiner = joiner == true and utfchar(0x200C) or joiner -- zwnj
- return listtoutf(tonut(h),joiner,textonly,last and tonut(last))
-end
+nodes.listtoutf = listtoutf
local what = { [0] = "unknown", "line", "box", "indent", "row", "cell" }
local function showboxes(n,symbol,depth)
- depth = depth or 0
- symbol = symbol or "."
- for n in traverse_nodes(tonut(n)) do
- local id = getid(n)
+ depth, symbol = depth or 0, symbol or "."
+ for n in traverse_nodes(n) do
+ local id = n.id
if id == hlist_code or id == vlist_code then
- local s = getsubtype(n)
+ local s = n.subtype
report_nodes(rep(symbol,depth) .. what[s] or s)
- showboxes(getlist(n),symbol,depth+1)
+ showboxes(n.list,symbol,depth+1)
end
end
end
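
The listtoutf helper exported in the hunk above is useful on its own when debugging; a short usage sketch (box register 255 and the print call are placeholders, not part of the patch):

    -- Illustrative: dump the text of a box; joiner == true inserts a zwnj
    -- between glyphs, textonly == true keeps spaces but drops other material.
    local box = tex.box[255]        -- any filled box register will do
    if box then
        print(nodes.listtoutf(box.list,true,true))
    end
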
@@ -355,8 +322,15 @@ local stripper = lpeg.patterns.stripzeros
local dimenfactors = number.dimenfactors
-local function nodetodimen(d,unit,fmt,strip)
- d = tonut(d) -- tricky: direct nuts are an issue
+local function numbertodimen(d,unit,fmt,strip)
+ if not d then
+ local str = formatters[fmt](0,unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ local t = type(d)
+ if t == 'string' then
+ return d
+ end
if unit == true then
unit = "pt"
fmt = "%0.5f%s"
@@ -368,23 +342,27 @@ local function nodetodimen(d,unit,fmt,strip)
fmt = "%0.5f%s"
end
end
- local id = getid(d)
+ if t == "number" then
+ local str = formatters[fmt](d*dimenfactors[unit],unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ local id = d.id
if id == kern_code then
- local str = formatters[fmt](getfield(d,"width")*dimenfactors[unit],unit)
+ local str = formatters[fmt](d.width*dimenfactors[unit],unit)
return strip and lpegmatch(stripper,str) or str
end
if id == glue_code then
- d = getfield(d,"spec")
+ d = d.spec
end
- if not d or not getid(d) == gluespec_code then
+    if not d or d.id ~= gluespec_code then
local str = formatters[fmt](0,unit)
return strip and lpegmatch(stripper,str) or str
end
- local width = getfield(d,"width")
- local plus = getfield(d,"stretch_order")
- local minus = getfield(d,"shrink_order")
- local stretch = getfield(d,"stretch")
- local shrink = getfield(d,"shrink")
+ local width = d.width
+ local plus = d.stretch_order
+ local minus = d.shrink_order
+ local stretch = d.stretch
+ local shrink = d.shrink
if plus ~= 0 then
plus = " plus " .. stretch/65536 .. fillcodes[plus]
elseif stretch ~= 0 then
@@ -401,39 +379,11 @@ local function nodetodimen(d,unit,fmt,strip)
else
minus = ""
end
- local str = formatters[fmt](getfield(d,"width")*dimenfactors[unit],unit)
+ local str = formatters[fmt](d.width*dimenfactors[unit],unit)
return (strip and lpegmatch(stripper,str) or str) .. plus .. minus
end
-local function numbertodimen(d,unit,fmt,strip)
- if not d then
- local str = formatters[fmt](0,unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local t = type(d)
- if t == 'string' then
- return d
- elseif t == "number" then
- if unit == true then
- unit = "pt"
- fmt = "%0.5f%s"
- else
- unit = unit or 'pt'
- if not fmt then
- fmt = "%s%s"
- elseif fmt == true then
- fmt = "%0.5f%s"
- end
- end
- local str = formatters[fmt](d*dimenfactors[unit],unit)
- return strip and lpegmatch(stripper,str) or str
- else
- return nodetodimen(d,unit,fmt,strip) -- real node
- end
-end
-
number.todimen = numbertodimen
-nodes .todimen = nodetodimen
function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end
function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end
@@ -448,19 +398,6 @@ function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end
function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end
function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end
-function nodes.topoints (n,fmt) return nodetodimen(n,"pt",fmt) end
-function nodes.toinches (n,fmt) return nodetodimen(n,"in",fmt) end
-function nodes.tocentimeters (n,fmt) return nodetodimen(n,"cm",fmt) end
-function nodes.tomillimeters (n,fmt) return nodetodimen(n,"mm",fmt) end
-function nodes.toscaledpoints(n,fmt) return nodetodimen(n,"sp",fmt) end
-function nodes.toscaledpoints(n) return n .. "sp" end
-function nodes.tobasepoints (n,fmt) return nodetodimen(n,"bp",fmt) end
-function nodes.topicas (n,fmt) return nodetodimen(n "pc",fmt) end
-function nodes.todidots (n,fmt) return nodetodimen(n,"dd",fmt) end
-function nodes.tociceros (n,fmt) return nodetodimen(n,"cc",fmt) end
-function nodes.tonewdidots (n,fmt) return nodetodimen(n,"nd",fmt) end
-function nodes.tonewciceros (n,fmt) return nodetodimen(n,"nc",fmt) end
-
-- stop redefinition
local points = function(n)
@@ -506,13 +443,8 @@ number.basepoints = basepoints
number.pts = pts
number.nopts = nopts
-nodes.points = function(n) return numbertodimen(n,"pt",true,true) end
-nodes.basepoints = function(n) return numbertodimen(n,"bp",true,true) end
-nodes.pts = function(n) return numbertodimen(n,"pt",true) end
-nodes.nopts = function(n) return format("%.5f",n*ptfactor) end
-
-local colors = { }
-tracers.colors = colors
+local colors = { }
+tracers.colors = colors
local unsetvalue = attributes.unsetvalue
@@ -522,34 +454,36 @@ local m_color = attributes.list[a_color] or { }
function colors.set(n,c,s)
local mc = m_color[c]
- local nn = tonut(n)
- if mc then
- local mm = s or texgetattribute(a_colormodel)
- setattr(nn,a_colormodel,mm <= 0 and mm or 1)
- setattr(nn,a_color,mc)
+ if not mc then
+ n[a_color] = unsetvalue
else
- setattr(nn,a_color,unsetvalue)
+ if not n[a_colormodel] then
+ n[a_colormodel] = s or 1
+ end
+ n[a_color] = mc
end
return n
end
function colors.setlist(n,c,s)
- local nn = tonut(n)
- local mc = m_color[c] or unsetvalue
- local mm = s or texgetattribute(a_colormodel)
- if mm <= 0 then
- mm = 1
- end
- while nn do
- setattr(nn,a_colormodel,mm)
- setattr(nn,a_color,mc)
- nn = getnext(nn)
+ local f = n
+ while n do
+ local mc = m_color[c]
+ if not mc then
+ n[a_color] = unsetvalue
+ else
+ if not n[a_colormodel] then
+ n[a_colormodel] = s or 1
+ end
+ n[a_color] = mc
+ end
+ n = n.next
end
- return n
+ return f
end
function colors.reset(n)
- setattr(tonut(n),a_color,unsetvalue)
+ n[a_color] = unsetvalue
return n
end
@@ -562,22 +496,31 @@ local a_transparency = attributes.private('transparency')
local m_transparency = attributes.list[a_transparency] or { }
function transparencies.set(n,t)
- setattr(tonut(n),a_transparency,m_transparency[t] or unsetvalue)
+ local mt = m_transparency[t]
+ if not mt then
+ n[a_transparency] = unsetvalue
+ else
+ n[a_transparency] = mt
+ end
return n
end
function transparencies.setlist(n,c,s)
- local nn = tonut(n)
- local mt = m_transparency[c] or unsetvalue
- while nn do
- setattr(nn,a_transparency,mt)
- nn = getnext(nn)
+ local f = n
+ while n do
+ local mt = m_transparency[c]
+ if not mt then
+ n[a_transparency] = unsetvalue
+ else
+ n[a_transparency] = mt
+ end
+ n = n.next
end
- return n
+ return f
end
function transparencies.reset(n)
- setattr(n,a_transparency,unsetvalue)
+ n[a_transparency] = unsetvalue
return n
end
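
The setters above write attribute values straight into the node, as the rest of this patch does. A compact sketch of the same pattern applied to a whole list (the locals mirror the ones used in this file; the helper itself is illustrative and not part of the commit):

    -- Illustrative: color every node in a list, falling back to unsetvalue
    -- when the requested color name has no registered attribute value.
    local unsetvalue = attributes.unsetvalue
    local a_color    = attributes.private('color')
    local m_color    = attributes.list[a_color] or { }

    local function colorlist(head,name)
        local value = m_color[name] or unsetvalue
        for n in node.traverse(head) do
            n[a_color] = value
        end
        return head
    end
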
@@ -594,76 +537,52 @@ end
-- although tracers are used seldom
local function setproperties(n,c,s)
- local nn = tonut(n)
local mm = texgetattribute(a_colormodel)
- setattr(nn,a_colormodel,mm > 0 and mm or 1)
- setattr(nn,a_color,m_color[c])
- setattr(nn,a_transparency,m_transparency[c])
+ n[a_colormodel] = mm > 0 and mm or 1
+ n[a_color] = m_color[c]
+ n[a_transparency] = m_transparency[c]
return n
end
tracers.setproperties = setproperties
-function tracers.setlist(n,c,s)
- local nn = tonut(n)
+function tracers.setlistv(n,c,s)
+ local f = n
local mc = m_color[c]
local mt = m_transparency[c]
local mm = texgetattribute(a_colormodel)
if mm <= 0 then
mm = 1
end
- while nn do
- setattr(nn,a_colormodel,mm)
- setattr(nn,a_color,mc)
- setattr(nn,a_transparency,mt)
- nn = getnext(nn)
+ while n do
+ n[a_colormodel] = mm
+ n[a_color] = mc
+ n[a_transparency] = mt
+ n = n.next
end
- return n
+ return f
end
function tracers.resetproperties(n)
- local nn = tonut(n)
- setattr(nn,a_color,unsetvalue)
- setattr(nn,a_transparency,unsetvalue)
+ n[a_color] = unsetvalue
+ n[a_transparency] = unsetvalue
return n
end
--- this one returns a nut
+function tracers.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
+ return setproperties(new_rule(w,h,d),c,s)
+end
+
+-- only nodes
local nodestracerpool = { }
-local nutstracerpool = { }
tracers.pool = {
nodes = nodestracerpool,
- nuts = nutstracerpool,
}
-table.setmetatableindex(nodestracerpool,function(t,k,v)
- local f = nutstracerpool[k]
- local v = function(...)
- return tonode(f(...))
- end
- t[k] = v
- return v
-end)
-
-function nutstracerpool.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
+function nodestracerpool.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
return setproperties(new_rule(w,h,d),c,s)
end
tracers.rule = nodestracerpool.rule -- for a while
-
--- local function show(head,n,message)
--- print("START",message or "")
--- local i = 0
--- for current in traverse(head) do
--- local prev = getprev(current)
--- local next = getnext(current)
--- i = i + 1
--- print(i, prev and nodecodes[getid(prev)],nodecodes[getid(current)],next and nodecodes[getid(next)])
--- if i == n then
--- break
--- end
--- end
--- print("STOP", message or "")
--- end
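
With the node variants gone, number.todimen and its wrappers are the remaining entry points for printable dimensions; a brief usage sketch (the values are arbitrary and the exact rounding depends on the active formatters):

    -- Illustrative: 65536 scaled points make one TeX point.
    local sp = 12 * 65536
    print(number.todimen(sp))            -- default "%s%s" format, "pt" appended
    print(number.topoints(sp,true))      -- fixed five-decimal format, e.g. 12.00000pt
    print(number.tobasepoints(sp,true))  -- the same length expressed in big points
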
diff --git a/tex/context/base/node-tst.lua b/tex/context/base/node-tst.lua
index 7f5102d5f..bfe0051bd 100644
--- a/tex/context/base/node-tst.lua
+++ b/tex/context/base/node-tst.lua
@@ -24,26 +24,17 @@ local rightskip_code = skipcodes.rightskip
local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip
local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
-local nuts = nodes.nuts
+local find_node_tail = node.tail or node.slide
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getchar = nuts.getchar
-local getsubtype = nuts.getsubtype
-
-local find_node_tail = nuts.tail
-
-function nuts.leftmarginwidth(n) -- todo: three values
+function nodes.leftmarginwidth(n) -- todo: three values
while n do
- local id = getid(n)
+ local id = n.id
if id == glue_code then
- return getsubtype(n) == leftskip_code and getfield(getfield(n,"spec"),"width") or 0
+ return n.subtype == leftskip_code and n.spec.width or 0
elseif id == whatsit_code then
- n = getnext(n)
+ n = n.next
elseif id == hlist_code then
- return getfield(n,"width")
+ return n.width
else
break
end
@@ -51,15 +42,15 @@ function nuts.leftmarginwidth(n) -- todo: three values
return 0
end
-function nuts.rightmarginwidth(n)
+function nodes.rightmarginwidth(n)
if n then
n = find_node_tail(n)
while n do
- local id = getid(n)
+ local id = n.id
if id == glue_code then
- return getsubtype(n) == rightskip_code and getfield(getfield(n,"spec"),"width") or 0
+ return n.subtype == rightskip_code and n.spec.width or 0
elseif id == whatsit_code then
- n = getprev(n)
+ n = n.prev
else
break
end
@@ -68,15 +59,15 @@ function nuts.rightmarginwidth(n)
return false
end
-function nuts.somespace(n,all)
+function nodes.somespace(n,all)
if n then
- local id = getid(n)
+ local id = n.id
if id == glue_code then
- return (all or (getfield(getfield(n,"spec"),"width") ~= 0)) and glue_code
+ return (all or (n.spec.width ~= 0)) and glue_code
elseif id == kern_code then
- return (all or (getfield(n,"kern") ~= 0)) and kern
+ return (all or (n.kern ~= 0)) and kern
elseif id == glyph_code then
- local category = chardata[getchar(n)].category
+ local category = chardata[n.char].category
-- maybe more category checks are needed
return (category == "zs") and glyph_code
end
@@ -84,12 +75,12 @@ function nuts.somespace(n,all)
return false
end
-function nuts.somepenalty(n,value)
+function nodes.somepenalty(n,value)
if n then
- local id = getid(n)
+ local id = n.id
if id == penalty_code then
if value then
- return getfield(n,"penalty") == value
+ return n.penalty == value
else
return true
end
@@ -98,38 +89,32 @@ function nuts.somepenalty(n,value)
return false
end
-function nuts.is_display_math(head)
- local n = getprev(head)
+function nodes.is_display_math(head)
+ local n = head.prev
while n do
- local id = getid(n)
+ local id = n.id
if id == penalty_code then
elseif id == glue_code then
- if getsubtype(n) == abovedisplayshortskip_code then
+ if n.subtype == abovedisplayshortskip_code then
return true
end
else
break
end
- n = getprev(n)
+ n = n.prev
end
- n = getnext(head)
+ n = head.next
while n do
- local id = getid(n)
+ local id = n.id
if id == penalty_code then
elseif id == glue_code then
- if getsubtype(n) == belowdisplayshortskip_code then
+ if n.subtype == belowdisplayshortskip_code then
return true
end
else
break
end
- n = getnext(n)
+ n = n.next
end
return false
end
-
-nodes.leftmarginwidth = nodes.vianuts(nuts.leftmarginwidth)
-nodes.rightmarginwidth = nodes.vianuts(nuts.rightmarginwidth)
-nodes.somespace = nodes.vianuts(nuts.somespace)
-nodes.somepenalty = nodes.vianuts(nuts.somepenalty)
-nodes.is_display_math = nodes.vianuts(nuts.is_display_math)
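
The predicates in this file each answer a question about a single node; a sketch of how such checks tend to be combined (the helper below is illustrative, not part of node-tst.lua):

    -- Illustrative: is there a break opportunity just before node n?
    local glue_code = node.id("glue")

    local function breakable_before(n)
        local prev = n and n.prev
        if not prev then
            return false
        end
        if nodes.somepenalty(prev,-10000) then        -- a forced break penalty
            return true
        end
        return nodes.somespace(prev) == glue_code     -- a nonzero interword space
    end
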
diff --git a/tex/context/base/node-typ.lua b/tex/context/base/node-typ.lua
index 4c33e3199..4a2ef8d49 100644
--- a/tex/context/base/node-typ.lua
+++ b/tex/context/base/node-typ.lua
@@ -8,38 +8,26 @@ if not modules then modules = { } end modules ['node-typ'] = {
-- code has been moved to blob-ini.lua
-local typesetters = nodes.typesetters or { }
-nodes.typesetters = typesetters
+local typesetters = nodes.typesetters or { }
+nodes.typesetters = typesetters
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
+local hpack_node_list = nodes.hpack
+local vpack_node_list = nodes.vpack
+local fast_hpack_list = nodes.fasthpack
-local setfield = nuts.setfield
-local getfont = nuts.getfont
-
-local hpack_node_list = nuts.hpack
-local vpack_node_list = nuts.vpack
-local fast_hpack_list = nuts.fasthpack
-local copy_node = nuts.copy
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_glyph = nodepool.glyph
local new_glue = nodepool.glue
local utfvalues = utf.values
-local currentfont = font.current
-local fontparameters = fonts.hashes.parameters
+local currentfont = font.current
+local fontparameters = fonts.hashes.parameters
-local function tonodes(str,fontid,spacing,templateglyph) -- quick and dirty
+local function tonodes(str,fontid,spacing) -- quick and dirty
local head, prev = nil, nil
if not fontid then
- if templateglyph then
- fontid = getfont(templateglyph)
- else
- fontid = currentfont()
- end
+ fontid = currentfont()
end
local fp = fontparameters[fontid]
local s, p, m
@@ -56,10 +44,6 @@ local function tonodes(str,fontid,spacing,templateglyph) -- quick and dirty
next = new_glue(s,p,m)
spacedone = true
end
- elseif templateglyph then
- next = copy_glyph(templateglyph)
- setfield(next,"char",c)
- spacedone = false
else
next = new_glyph(fontid or 1,c)
spacedone = false
@@ -69,8 +53,8 @@ local function tonodes(str,fontid,spacing,templateglyph) -- quick and dirty
elseif not head then
head = next
else
- setfield(prev,"next",next)
- setfield(next,"prev",prev)
+ prev.next = next
+ next.prev = prev
end
prev = next
end
@@ -93,30 +77,17 @@ end
local tovpackfast = tovpack
-local tnuts = { }
-nuts.typesetters = tnuts
-
-tnuts.tonodes = tonodes
-tnuts.tohpack = tohpack
-tnuts.tohpackfast = tohpackfast
-tnuts.tovpack = tovpack
-tnuts.tovpackfast = tovpackfast
-
-tnuts.hpack = tohpack -- obsolete
-tnuts.fast_hpack = tohpackfast -- obsolete
-tnuts.vpack = tovpack -- obsolete
-
-typesetters.tonodes = function(...) local h, b = tonodes (...) return tonode(h), b end
-typesetters.tohpack = function(...) local h, b = tohpack (...) return tonode(h), b end
-typesetters.tohpackfast = function(...) local h, b = tohpackfast(...) return tonode(h), b end
-typesetters.tovpack = function(...) local h, b = tovpack (...) return tonode(h), b end
-typesetters.tovpackfast = function(...) local h, b = tovpackfast(...) return tonode(h), b end
+typesetters.tonodes = tonodes
+typesetters.tohpack = tohpack
+typesetters.tohpackfast = tohpackfast
+typesetters.tovpack = tovpack
+typesetters.tovpackfast = tovpackfast
-typesetters.hpack = typesetters.tohpack -- obsolete
-typesetters.fast_hpack = typesetters.tofasthpack -- obsolete
-typesetters.vpack = typesetters.tovpack -- obsolete
+typesetters.hpack = tohpack
+typesetters.fast_hpack = tohpackfast
+typesetters.vpack = tovpack
-- node.write(nodes.typestters.hpack("Hello World!"))
-- node.write(nodes.typestters.hpack("Hello World!",1,100*1024*10))
-string.tonodes = function(...) return tonode(tonodes(...)) end -- quite convenient
+string.tonodes = tonodes -- quite convenient
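
The commented node.write examples near the end of this file translate to something like the following; font id 1 and the spacing value are the example numbers used in those comments, and the whole block is illustrative only:

    -- Illustrative: typeset a string straight from Lua.
    local box = nodes.typesetters.tohpack("Hello World!",1,100*1024*10)
    node.write(box)
    -- or unpacked, in the current font:
    node.write(string.tonodes("Hello World!"))
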
diff --git a/tex/context/base/pack-rul.lua b/tex/context/base/pack-rul.lua
index c8ed0722b..329ea63b8 100644
--- a/tex/context/base/pack-rul.lua
+++ b/tex/context/base/pack-rul.lua
@@ -21,25 +21,15 @@ local line_code = nodes.listcodes.line
local texsetdimen = tex.setdimen
local texsetcount = tex.setcount
-
-local nuts = nodes.nuts
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getlist = nuts.getlist
-local getsubtype = nuts.getsubtype
-local getbox = nuts.getbox
-
-local hpack = nuts.hpack
-local free = nuts.free
-local copy = nuts.copy_list
-local traverse_id = nuts.traverse_id
-local node_dimensions = nuts.dimensions
+local texgetbox = tex.getbox
+local hpack = nodes.hpack
+local free = nodes.free
+local copy = nodes.copy_list
+local traverse_id = nodes.traverse_id
+local node_dimensions = nodes.dimensions
function commands.doreshapeframedbox(n)
- local box = getbox(n)
+ local box = texgetbox(n)
local noflines = 0
local firstheight = nil
local lastdepth = nil
@@ -48,27 +38,27 @@ function commands.doreshapeframedbox(n)
local maxwidth = 0
local totalwidth = 0
local averagewidth = 0
- local boxwidth = getfield(box,"width")
+ local boxwidth = box.width
if boxwidth ~= 0 then -- and h.subtype == vlist_code
- local list = getlist(box)
+ local list = box.list
if list then
local function check(n,repack)
if not firstheight then
- firstheight = getfield(n,"height")
+ firstheight = n.height
end
- lastdepth = getfield(n,"depth")
+ lastdepth = n.depth
noflines = noflines + 1
- local l = getlist(n)
+ local l = n.list
if l then
if repack then
- local subtype = getsubtype(n)
+ local subtype = n.subtype
if subtype == box_code or subtype == line_code then
- lastlinelength = node_dimensions(l,getfield(n,"dir")) -- used to be: hpack(copy(l)).width
+ lastlinelength = node_dimensions(l,n.dir) -- used to be: hpack(copy(l)).width
else
- lastlinelength = getfield(n,"width")
+ lastlinelength = n.width
end
else
- lastlinelength = getfield(n,"width")
+ lastlinelength = n.width
end
if lastlinelength > maxwidth then
maxwidth = lastlinelength
@@ -94,27 +84,28 @@ function commands.doreshapeframedbox(n)
elseif maxwidth ~= 0 then
if hdone then
for h in traverse_id(hlist_code,list) do
- local l = getlist(h)
+ local l = h.list
if l then
- local subtype = getsubtype(h)
+ local subtype = h.subtype
if subtype == box_code or subtype == line_code then
- l = hpack(l,maxwidth,'exactly',getfield(h,"dir")) -- multiple return values
- setfield(h,"list",l)
- setfield(h,"shift",0) -- needed for display math, so no width check possible
+ h.list = hpack(l,maxwidth,'exactly',h.dir)
+ h.shift = 0 -- needed for display math
end
- setfield(h,"width",maxwidth)
+ h.width = maxwidth
end
end
+ box.width = maxwidth -- moved
+ averagewidth = noflines > 0 and totalwidth/noflines or 0
end
-- if vdone then
-- for v in traverse_id(vlist_code,list) do
- -- local width = getfield(n,"width")
+ -- local width = n.width
-- if width > maxwidth then
- -- setfield(v,"width",maxwidth)
+ -- v.width = maxwidth
-- end
-- end
-- end
- setfield(box,"width",maxwidth)
+ box.width = maxwidth
averagewidth = noflines > 0 and totalwidth/noflines or 0
end
end
@@ -128,18 +119,18 @@ function commands.doreshapeframedbox(n)
end
function commands.doanalyzeframedbox(n)
- local box = getbox(n)
+ local box = texgetbox(n)
local noflines = 0
local firstheight = nil
local lastdepth = nil
- if getfield(box,"width") ~= 0 then
- local list = getlist(box)
+ if box.width ~= 0 then
+ local list = box.list
if list then
local function check(n)
if not firstheight then
- firstheight = getfield(n,"height")
+ firstheight = n.height
end
- lastdepth = getfield(n,"depth")
+ lastdepth = n.depth
noflines = noflines + 1
end
for h in traverse_id(hlist_code,list) do
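
The reshaping code above repeatedly measures the natural width of each line in a box; in isolation that scan looks roughly like the sketch below (the box register and the fallback to h.width are illustrative choices, not part of the commit):

    -- Illustrative: widest natural line width inside box register n.
    local hlist_code = node.id("hlist")

    local function maxlinewidth(n)
        local box, maxwidth = tex.getbox(n), 0
        if box and box.list then
            for h in node.traverse_id(hlist_code,box.list) do
                local l = h.list
                local w = l and node.dimensions(l) or h.width  -- width is the first return value
                if w > maxwidth then
                    maxwidth = w
                end
            end
        end
        return maxwidth
    end
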
diff --git a/tex/context/base/pack-rul.mkiv b/tex/context/base/pack-rul.mkiv
index 8fcf8f548..377d39499 100644
--- a/tex/context/base/pack-rul.mkiv
+++ b/tex/context/base/pack-rul.mkiv
@@ -2564,25 +2564,10 @@
\inheritedframedtextframed\bgroup
\let\\=\endgraf
\framedtextparameter\c!inner % oud spul
- \edef\p_framed_text_depthcorrection{\framedtextparameter\c!depthcorrection}%
- \ifx\p_framed_text_depthcorrection\v!on
- \pack_framed_text_start_depth_correction
- \else
- \bgroup
- \fi
-\vskip-\strutdp % brrr why is this needed ... needs to be sorted out, see testcase 1
+ \doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_start_depth_correction
\doinhibitblank
\setupindenting[\framedtextparameter\c!indenting]%
- \useframedtextstyleandcolor\c!style\c!color
- \ignorespaces}
-
-% testcase 1:
-%
-% \showstruts
-% \startframedtext[align={normal,tolerant},offset=0pt] \input tufte \stopframedtext
-% \startframedtext[align={normal,tolerant},offset=0pt,depthcorrection=off] \input tufte \stopframedtext
-% \startframedtext[align={normal,tolerant},offset=0pt,depthcorrection=off] \inframed{x} \stopframedtext
-% \framed[align={normal,tolerant},offset=0pt]{\input tufte }
+ \useframedtextstyleandcolor\c!style\c!color}
%D The \type {none} option is handy for nested usage, as in the presentation
%D styles, where we don't want interference.
@@ -2592,11 +2577,7 @@
\unexpanded\def\pack_framed_text_stop % no \baselinecorrection, see faq docs
{\endgraf
\removelastskip
- \ifx\p_framed_text_depthcorrection\v!on
- \pack_framed_text_stop_depth_correction
- \else
- \egroup
- \fi
+ \doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_stop_depth_correction
\stopboxedcontent
\ifconditional\c_framed_text_location_none
\egroup
diff --git a/tex/context/base/page-brk.mkiv b/tex/context/base/page-brk.mkiv
index 11dc04bfd..cc9a9b4d2 100644
--- a/tex/context/base/page-brk.mkiv
+++ b/tex/context/base/page-brk.mkiv
@@ -316,204 +316,75 @@
%D Test page breaks.
-% \newdimen \d_page_tests_test
-% \newconstant\c_page_tests_mode
+\newdimen \d_page_tests_test
+\newconstant\c_page_tests_mode
-\newconstant\testpagemethod % old
-\newconstant\testpagetrigger % old
+\newconstant\testpagemethod % todo: \testnewpage[method=,lines=,voffset=]
+\newconstant\testpagetrigger
-% \unexpanded\def\testpage {\c_page_tests_mode\plusone \dodoubleempty\page_tests_test} %
-% \unexpanded\def\testpageonly{\c_page_tests_mode\plustwo \dodoubleempty\page_tests_test} % no penalties added to the mvl
-% \unexpanded\def\testpagesync{\c_page_tests_mode\plusthree\dodoubleempty\page_tests_test} % force sync
-%
-% \def\page_tests_test[#1][#2]% don't change, only add more methods
-% {\relax % needed before \if
-% \ifconditional\c_page_breaks_enabled
-% % new from here
-% \ifcase\testpagetrigger
-% \endgraf
-% \or\ifvmode
-% \dosomebreak\allowbreak
-% \else % indeed?
-% \vadjust{\allowbreak}%
-% \endgraf
-% \fi\fi
-% % till here
-% \ifdim\pagegoal<\maxdimen \relax
-% \ifdim\pagetotal<\pagegoal \relax
-% \d_page_tests_test\dimexpr
-% #1\lineheight
-% +\pagetotal
-% \ifdim\lastskip<\parskip+\parskip\fi
-% \ifsecondargument+#2\fi
-% \relax
-% \ifcase\testpagemethod
-% \ifdim\d_page_tests_test>.99\pagegoal
-% \penalty-\plustenthousand
-% \fi
-% \or
-% \ifdim\dimexpr\d_page_tests_test-\pagegoal\relax>-\lineheight
-% \penalty-\plustenthousand
-% \fi
-% \or
-% \getnoflines\pagegoal
-% \ifdim\dimexpr\d_page_tests_test-\noflines\lineheight\relax>-\lineheight
-% \penalty-\plustenthousand
-% \fi
-% \or % same as 0 but more accurate
-% \ifdim\dimexpr\d_page_tests_test-10\scaledpoint\relax>\pagegoal
-% \penalty-\plustenthousand
-% \fi
-% \fi
-% \else\ifnum\c_page_tests_mode=\plusthree
-% \page_tests_flush_so_far
-% \fi\fi
-% \else\ifnum\c_page_tests_mode=\plusone
-% \goodbreak
-% \fi\fi
-% \else
-% \endgraf
-% \fi}
-%
-% \def\page_tests_flush_so_far
-% {\endgraf
-% \ifdim\pagetotal>\pagegoal
-% \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
-% \goodbreak
-% \else
-% \page
-% \fi
-% \fi}
-
-\installcorenamespace {pagechecker}
-\installcorenamespace {pagecheckermethod}
-
-\installcommandhandler \??pagechecker {pagechecker} \??pagechecker
-
-\setuppagechecker
- [\c!method=1,
- \c!before=,
- \c!after=,
- \c!inbetween=,
- \c!lines=\plusthree,
- \c!offset=\zeropoint]
-
-\def\page_check_amount
- {\dimexpr
- \pagecheckerparameter\c!lines\lineheight
- +\pagetotal
- \ifdim\lastskip<\parskip+\parskip\fi
- +\pagecheckerparameter\c!offset
- \relax}
-
-\unexpanded\def\checkpage
- {\dodoubleempty\page_check}
-
-\def\page_check[#1][#2]%
+\unexpanded\def\testpage {\c_page_tests_mode\plusone \dodoubleempty\page_tests_test} %
+\unexpanded\def\testpageonly{\c_page_tests_mode\plustwo \dodoubleempty\page_tests_test} % no penalties added to the mvl
+\unexpanded\def\testpagesync{\c_page_tests_mode\plusthree\dodoubleempty\page_tests_test} % force sync
+
+\def\page_tests_test[#1][#2]% don't change, only add more methods
{\relax % needed before \if
- \endgraf
\ifconditional\c_page_breaks_enabled
- \begingroup
- \edef\currentpagechecker{#1}%
- \ifsecondargument\setupcurrentpagechecker[#2]\fi
- \csname\??pagecheckermethod\pagecheckerparameter\c!method\endcsname
- \endgroup
- \fi}
-
-\setvalue{\??pagecheckermethod 0}%
- {\ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \ifdim\page_check_amount>.99\pagegoal
- \pagecheckerparameter\c!before
- \penalty-\plustenthousand
- \pagecheckerparameter\c!after
- \else
- \pagecheckerparameter\c!inbetween
- \fi
- \else
- \pagecheckerparameter\c!inbetween
- \fi
- \else
- \pagecheckerparameter\c!inbetween
- \fi}
-
-\setvalue{\??pagecheckermethod 1}%
- {\ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \ifdim\dimexpr\page_check_amount-\pagegoal\relax>-\lineheight
- \pagecheckerparameter\c!before
- \penalty-\plustenthousand
- \pagecheckerparameter\c!after
- \else
- \pagecheckerparameter\c!inbetween
- \fi
- \else
- \pagecheckerparameter\c!inbetween
- \fi
- \else
- \goodbreak
- \pagecheckerparameter\c!inbetween
- \fi}
-
-\setvalue{\??pagecheckermethod 2}%
- {\ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \getnoflines\pagegoal
- \ifdim\dimexpr\page_check_amount-\noflines\lineheight\relax>-\lineheight
- \pagecheckparameter\c!before
- \penalty-\plustenthousand
- \pagecheckerparameter\c!after
- \else
- \pagecheckerparameter\c!inbetween
- \fi
- \else
- \pagecheckerparameter\c!inbetween
- \fi
+ % new from here
+ \ifcase\testpagetrigger
+ \endgraf
+ \or\ifvmode
+ \dosomebreak\allowbreak
+ \else % indeed?
+ \vadjust{\allowbreak}%
+ \endgraf
+ \fi\fi
+ % till here
+ \ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \d_page_tests_test\dimexpr
+ #1\lineheight
+ +\pagetotal
+ \ifdim\lastskip<\parskip+\parskip\fi
+ \ifsecondargument+#2\fi
+ \relax
+ \ifcase\testpagemethod
+ \ifdim\d_page_tests_test>.99\pagegoal
+ \penalty-\plustenthousand
+ \fi
+ \or
+ \ifdim\dimexpr\d_page_tests_test-\pagegoal\relax>-\lineheight
+ \penalty-\plustenthousand
+ \fi
+ \or
+ \getnoflines\pagegoal
+ \ifdim\dimexpr\d_page_tests_test-\noflines\lineheight\relax>-\lineheight
+ \penalty-\plustenthousand
+ \fi
+ \or % same as 0 but more accurate
+ \ifdim\dimexpr\d_page_tests_test-10\scaledpoint\relax>\pagegoal
+ \penalty-\plustenthousand
+ \fi
+ \fi
+ \else\ifnum\c_page_tests_mode=\plusthree
+ \page_tests_flush_so_far
+ \fi\fi
+ \else\ifnum\c_page_tests_mode=\plusone
+ \goodbreak
+ \fi\fi
\else
- \pagecheckerparameter\c!inbetween
+ \endgraf
\fi}
-\setvalue{\??pagecheckermethod 3}%
- {\ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \ifdim\dimexpr\page_check_amount-10\scaledpoint\relax>\pagegoal
- \pagecheckerparameter\c!before
- \penalty-\plustenthousand
- \pagecheckerparameter\c!after
- \else
- \pagecheckerparameter\c!inbetween
- \fi
+\def\page_tests_flush_so_far
+ {\endgraf
+ \ifdim\pagetotal>\pagegoal
+ \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
+ \goodbreak
\else
- \ifdim\pagetotal>\pagegoal
- \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
- \goodbreak
- \pagecheckerparameter\c!inbetween
- \else
- \pagecheckerparameter\c!before
- \page
- \pagecheckerparameter\c!after
- \fi
- \else
- \pagecheckerparameter\c!inbetween
- \fi
+ \page
\fi
- \else
- \pagecheckerparameter\c!inbetween
\fi}
-\definepagechecker[\s!unknown:0] [\c!method=0,\c!before=,\c!after=,\c!inbetween=]
-\definepagechecker[\s!unknown:1][\s!unknown:0][\c!method=1]
-\definepagechecker[\s!unknown:2][\s!unknown:0][\c!method=2]
-\definepagechecker[\s!unknown:3][\s!unknown:0][\c!method=3]
-
-\def\page_tests_test_a[#1][#2]{\normalexpanded{\checkpage[\s!unknown:1][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
-\def\page_tests_test_b[#1][#2]{\normalexpanded{\checkpage[\s!unknown:2][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
-\def\page_tests_test_c[#1][#2]{\normalexpanded{\checkpage[\s!unknown:3][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
-
-\unexpanded\def\testpage {\dodoubleempty\page_tests_test_a} %
-\unexpanded\def\testpageonly{\dodoubleempty\page_tests_test_b} % no penalties added to the mvl
-\unexpanded\def\testpagesync{\dodoubleempty\page_tests_test_c} % force sync
-
%D Test column breaks.
\unexpanded\def\testcolumn
diff --git a/tex/context/base/page-lay.mkiv b/tex/context/base/page-lay.mkiv
index 19f237242..81eb0423c 100644
--- a/tex/context/base/page-lay.mkiv
+++ b/tex/context/base/page-lay.mkiv
@@ -1026,12 +1026,12 @@
\unexpanded\def\startlayout[#1]%
{\page
- \globalpushmacro\currentlayout
+ \pushmacro\currentlayout
\doiflayoutdefinedelse{#1}{\setuplayout[#1]}\donothing} % {\setuplayout[\currentlayout]}}
\unexpanded\def\stoplayout
{\page
- \globalpopmacro\currentlayout
+ \popmacro\currentlayout
\setuplayout[\currentlayout]}
% NOG EENS NAGAAN WANNEER NU GLOBAL EN WANNEER NIET
@@ -1275,7 +1275,7 @@
{\globalpopmacro\currentlayout
\globalpopmacro\page_paper_restore
\page_paper_restore
- \setuplayout[\currentlayout]\relax} % explicit !
+ \setuplayout\relax}
%D \macros
%D {showprint, showframe, showlayout, showsetups}
diff --git a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua
index 66b7e4684..7e8e9ad8a 100644
--- a/tex/context/base/page-lin.lua
+++ b/tex/context/base/page-lin.lua
@@ -8,35 +8,31 @@ if not modules then modules = { } end modules ['page-lin'] = {
-- experimental -> will become builders
--- if there is demand for it, we can support multiple numbering streams
--- and use more than one attribute
+local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
-local next, tonumber = next, tonumber
+local report_lines = logs.reporter("lines")
-local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
+local attributes, nodes, node, context = attributes, nodes, node, context
-local report_lines = logs.reporter("lines")
+nodes.lines = nodes.lines or { }
+local lines = nodes.lines
-local attributes = attributes
-local nodes = nodes
-local context = context
+lines.data = lines.data or { } -- start step tag
+local data = lines.data
+local last = #data
-nodes.lines = nodes.lines or { }
-local lines = nodes.lines
+local texgetbox = tex.getbox
-lines.data = lines.data or { } -- start step tag
-local data = lines.data
-local last = #data
+lines.scratchbox = lines.scratchbox or 0
-lines.scratchbox = lines.scratchbox or 0
+local leftmarginwidth = nodes.leftmarginwidth
-storage.register("lines/data", data, "nodes.lines.data")
+storage.register("lines/data", lines.data, "nodes.lines.data")
-local variables = interfaces.variables
+-- if there is demand for it, we can support multiple numbering streams
+-- and use more than one attribute
-local v_next = variables.next
-local v_page = variables.page
-local v_no = variables.no
+local variables = interfaces.variables
local nodecodes = nodes.nodecodes
@@ -53,25 +49,12 @@ local current_list = { }
local cross_references = { }
local chunksize = 250 -- not used in boxed
-local nuts = nodes.nuts
-
-local getid = nuts.getid
-local getnext = nuts.getnext
-local getattr = nuts.getattr
-local getlist = nuts.getlist
-local getbox = nuts.getbox
-local getfield = nuts.getfield
-
-local setfield = nuts.setfield
-
-local traverse_id = nuts.traverse_id
-local traverse = nuts.traverse
-local copy_node = nuts.copy
-local hpack_node = nuts.hpack
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local is_display_math = nuts.is_display_math
-local leftmarginwidth = nuts.leftmarginwidth
+local traverse_id = node.traverse_id
+local traverse = node.traverse
+local copy_node = node.copy
+local hpack_node = node.hpack
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
-- cross referencing
@@ -84,16 +67,16 @@ end
local function resolve(n,m) -- we can now check the 'line' flag (todo)
while n do
- local id = getid(n)
+ local id = n.id
if id == whatsit_code then -- why whatsit
- local a = getattr(n,a_linereference)
+ local a = n[a_linereference]
if a then
cross_references[a] = m
end
elseif id == hlist_code or id == vlist_code then
- resolve(getlist(n),m)
+ resolve(n.list,m)
end
- n = getnext(n)
+ n = n.next
end
end
@@ -182,20 +165,20 @@ local function check_number(n,a,skip,sameline)
if sameline then
skipflag = 0
if trace_numbers then
- report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
+ report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
end
elseif not skip and s % d.step == 0 then
skipflag, d.start = 1, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
+ report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
end
else
skipflag, d.start = 0, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
+ report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
end
end
- context.makelinenumber(tag,skipflag,s,getfield(n,"shift"),getfield(n,"width"),leftmarginwidth(getlist(n)),getfield(n,"dir"))
+ context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir)
end
end
@@ -206,26 +189,26 @@ end
local function identify(list)
if list then
for n in traverse_id(hlist_code,list) do
- if getattr(n,a_linenumber) then
+ if n[a_linenumber] then
return list
end
end
local n = list
while n do
- local id = getid(n)
+ local id = n.id
if id == hlist_code or id == vlist_code then
- local ok = identify(getlist(n))
+ local ok = identify(n.list)
if ok then
return ok
end
end
- n = getnext(n)
+ n = n.next
end
end
end
function boxed.stage_zero(n)
- return identify(getlist(getbox(n)))
+ return identify(texgetbox(n).list)
end
-- reset ranges per page
@@ -234,39 +217,39 @@ end
function boxed.stage_one(n,nested)
current_list = { }
- local box = getbox(n)
+ local box = texgetbox(n)
if box then
- local list = getlist(box)
+ local list = box.list
if nested then
list = identify(list)
end
local last_a, last_v, skip = nil, -1, false
for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found
- if getfield(n,"height") == 0 and getfield(n,"depth") == 0 then
+ if n.height == 0 and n.depth == 0 then
-- skip funny hlists -- todo: check line subtype
else
- local list = getlist(n)
- local a = getattr(list,a_linenumber)
+ local list = n.list
+ local a = list[a_linenumber]
if a and a > 0 then
if last_a ~= a then
local da = data[a]
local ma = da.method
- if ma == v_next then
+ if ma == variables.next then
skip = true
- elseif ma == v_page then
+ elseif ma == variables.page then
da.start = 1 -- eventually we will have a normal counter
end
last_a = a
if trace_numbers then
- report_lines("starting line number range %s: start %s, continue %s",a,da.start,da.continue or v_no)
+                            report_lines("starting line number range %s: start %s, continue %s",a,da.start,da.continue or "no")
end
end
- if getattr(n,a_displaymath) then
- if is_display_math(n) then
+ if n[a_displaymath] then
+ if nodes.is_display_math(n) then
check_number(n,a,skip)
end
else
- local v = getattr(list,a_verbatimline)
+ local v = list[a_verbatimline]
if not v or v ~= last_v then
last_v = v
check_number(n,a,skip)
@@ -285,7 +268,7 @@ function boxed.stage_two(n,m)
if #current_list > 0 then
m = m or lines.scratchbox
local t, tn = { }, 0
- for l in traverse_id(hlist_code,getlist(getbox(m))) do
+ for l in traverse_id(hlist_code,texgetbox(m).list) do
tn = tn + 1
t[tn] = copy_node(l)
end
@@ -293,8 +276,7 @@ function boxed.stage_two(n,m)
local li = current_list[i]
local n, m, ti = li[1], li[2], t[i]
if ti then
- setfield(ti,"next",getlist(n))
- setfield(n,"list",ti)
+ ti.next, n.list = n.list, ti
resolve(n,m)
else
report_lines("error in linenumbering (1)")
diff --git a/tex/context/base/page-mak.mkvi b/tex/context/base/page-mak.mkvi
index c910f281d..71af520a1 100644
--- a/tex/context/base/page-mak.mkvi
+++ b/tex/context/base/page-mak.mkvi
@@ -91,60 +91,13 @@
\def\page_makeup_start_yes[#name]% [#settings]%
{\doifelsecommandhandler\??makeup{#name}\page_makeup_start_indeed\page_makeup_start_nop[#name]}%
-% case 1:
-%
-% \setuplayout[height=5cm]
-%
-% case 2:
-%
-% \definelayout[crap][height=10cm]
-% \definelayout[standard][crap]
-%
-% case 3:
-%
-% \setuplayout[standard][height=15cm]
-%
-% case 4:
-%
-% \definelayout[whatever][height=2cm]
-% \setuplayout[whatever]
-
\def\page_makeup_start_indeed[#name][#settings]%
- {% the next grouping hack is somewhat messy:
- \begingroup
- % we need to figure out the current layout
- \xdef\m_page_makeup_name{#name}%
- \let\currentmakeup\m_page_makeup_name
- \let\currentlayout\m_page_makeup_name
- \xdef\m_page_makeup_layout_parent{\layoutparameter\s!parent}%
- \setupcurrentmakeup[#settings]%
- \edef\p_page{\makeupparameter\c!page}%
- \ifx\p_page\empty
- \endgroup
- \page % new, so best not have dangling mess here like references (we could capture then and flush embedded)
- \else\ifx\p_page\v!no
- % nothing
- \endgroup
- \else
- \endgroup
- \page[\p_page]%
- \fi\fi
- % some dirty trickery (sorry) for determining if we have
- % - a layout definition at all
- % - inherit from the parent of that definition
- % - inherit from the current layout otherwise
- \ifx\m_page_makeup_name\currentlayout
- % we already use the layout
- \else\ifx\m_page_makeup_layout_parent\??layout
- % we inherit from the current layout
- \normalexpanded{\setuplayout[#name][\s!parent=\??layout\currentlayout]}% is remembered but checked later anyway
- % \else
- % we have an inherited layout
- \fi\fi
+ {\doifelsenothing{\namedmakeupparameter{#name}\c!page}
+ {\page}% new, so best not have dangling mess here like references (we could capture then and flush embedded)
+ {\page[\namedmakeupparameter{#name}\c!page]}%
\startlayout[#name]% includes \page
\bgroup
- %\edef\currentmakeup{#name}%
- \let\currentmakeup\m_page_makeup_name
+ \edef\currentmakeup{#name}%
\setupcurrentmakeup[#settings]%
\setsystemmode\v!makeup
\the\t_page_makeup_every_setup
@@ -184,12 +137,7 @@
\fi \fi
\strc_pagenumbers_page_state_pop % new
\egroup
- \stoplayout % includes \page
- \ifx\m_page_makeup_name\currentlayout
- \else\ifx\m_page_makeup_layout_parent\??layout
- \normalexpanded{\setuplayout[\m_page_makeup_name][\s!parent=\??layout]}% is remembered but checked later anyway
- % \else
- \fi\fi}
+ \stoplayout} % includes \page
\setvalue{\??makeupdoublesided\v!yes}%
{\emptyhbox
@@ -236,7 +184,6 @@
\c!headerstate=\v!stop,
\c!footerstate=\v!stop,
\c!pagestate=\v!stop] % in manual ! ! !
-% \c!pagestate=\v!start]
\definemakeup
[\v!standard]
diff --git a/tex/context/base/page-mix.lua b/tex/context/base/page-mix.lua
index 30a1fdccd..7d13d9e4e 100644
--- a/tex/context/base/page-mix.lua
+++ b/tex/context/base/page-mix.lua
@@ -15,73 +15,46 @@ if not modules then modules = { } end modules ["page-mix"] = {
local concat = table.concat
+local nodecodes = nodes.nodecodes
+local gluecodes = nodes.gluecodes
+local nodepool = nodes.pool
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local insert_code = nodecodes.ins
+local mark_code = nodecodes.mark
+
+local new_hlist = nodepool.hlist
+local new_vlist = nodepool.vlist
+local new_glue = nodepool.glue
+
+local hpack = node.hpack
+local vpack = node.vpack
+local freenode = node.free
+local concatnodes = nodes.concat
+
+local texgetbox = tex.getbox
+local texsetbox = tex.setbox
+local texgetskip = tex.getskip
+
+local points = number.points
+
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local variables = interfaces.variables
+local v_yes = variables.yes
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_columns = variables.columns
+
local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end)
local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end)
local report_state = logs.reporter("mixed columns")
-local nodecodes = nodes.nodecodes
-local gluecodes = nodes.gluecodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local insert_code = nodecodes.ins
-local mark_code = nodecodes.mark
-local rule_code = nodecodes.rule
-
-local topskip_code = gluecodes.topskip
-local lineskip_code = gluecodes.lineskip
-local baselineskip_code = gluecodes.baselineskip
-local userskip_code = gluecodes.userskip
-
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local nodetostring = nuts.tostring
-local listtoutf = nodes.listtoutf
-
-local hpack = nuts.hpack
-local vpack = nuts.vpack
-local freenode = nuts.free
-local concatnodes = nuts.concat
-local slidenodes = nuts.slide -- ok here as we mess with prev links intermediately
-local traversenodes = nuts.traverse
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getsubtype = nuts.getsubtype
-local getbox = nuts.getbox
-local setbox = nuts.setbox
-local getskip = nuts.getskip
-local getattribute = nuts.getattribute
-
-local nodepool = nuts.pool
-
-local new_hlist = nodepool.hlist
-local new_vlist = nodepool.vlist
-local new_glue = nodepool.glue
-
-local points = number.points
-
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local variables = interfaces.variables
-local v_yes = variables.yes
-local v_global = variables["global"]
-local v_local = variables["local"]
-local v_columns = variables.columns
-local v_fixed = variables.fixed
-local v_auto = variables.auto
-local v_none = variables.none
-local v_more = variables.more
-local v_less = variables.less
-
pagebuilders = pagebuilders or { }
pagebuilders.mixedcolumns = pagebuilders.mixedcolumns or { }
local mixedcolumns = pagebuilders.mixedcolumns
@@ -104,13 +77,13 @@ local function collectinserts(result,nxt,nxtid)
local inserts, currentskips, nextskips, inserttotal = { }, 0, 0, 0
while nxt do
if nxtid == insert_code then
- inserttotal = inserttotal + getfield(nxt,"height") + getfield(nxt,"depth")
- local s = getsubtype(nxt)
+ inserttotal = inserttotal + nxt.height + nxt.depth
+ local s = nxt.subtype
local c = inserts[s]
if not c then
c = { }
inserts[s] = c
- local width = getfield(getskip(s),"width")
+ local width = texgetskip(s).width
if not result.inserts[s] then
currentskips = currentskips + width
end
@@ -127,9 +100,9 @@ local function collectinserts(result,nxt,nxtid)
else
break
end
- nxt = getnext(nxt)
+ nxt = nxt.next
if nxt then
- nxtid = getid(nxt)
+ nxtid = nxt.id
else
break
end
@@ -155,30 +128,30 @@ end
local function discardtopglue(current,discarded)
local size = 0
while current do
- local id = getid(current)
+ local id = current.id
if id == glue_code then
- size = size + getfield(getfield(current,"spec"),"width")
+ size = size + current.spec.width
discarded[#discarded+1] = current
- current = getnext(current)
+ current = current.next
elseif id == penalty_code then
- if getfield(current,"penalty") == forcedbreak then
+ if current.penalty == forcedbreak then
discarded[#discarded+1] = current
- current = getnext(current)
- while current and getid(current) == glue_code do
- size = size + getfield(getfield(current,"spec"),"width")
+ current = current.next
+ while current and current.id == glue_code do
+ size = size + current.spec.width
discarded[#discarded+1] = current
- current = getnext(current)
+ current = current.next
end
else
discarded[#discarded+1] = current
- current = getnext(current)
+ current = current.next
end
else
break
end
end
if current then
- setfield(current,"prev",nil) -- prevent look back
+ current.prev = nil
end
return current, size
end
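
Stripped of its bookkeeping, discardtopglue above simply skips the glue and penalties that would be discarded at the top of a fresh column and reports how much space that was (illustrative sketch, no forced-break handling):

    -- Illustrative: skip discardable material at a column top.
    local glue_code    = node.id("glue")
    local penalty_code = node.id("penalty")

    local function skiptop(current)
        local size = 0
        while current do
            local id = current.id
            if id == glue_code then
                size = size + current.spec.width
            elseif id ~= penalty_code then
                break
            end
            current = current.next
        end
        return current, size
    end
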
@@ -189,13 +162,13 @@ local function stripbottomglue(results,discarded)
local r = results[i]
local t = r.tail
while t and t ~= r.head do
- local prev = getprev(t)
+ local prev = t.prev
if not prev then
break
end
- local id = getid(t)
+ local id = t.id
if id == penalty_code then
- if getfield(t,"penalty") == forcedbreak then
+ if t.penalty == forcedbreak then
break
else
discarded[#discarded+1] = t
@@ -204,7 +177,7 @@ local function stripbottomglue(results,discarded)
end
elseif id == glue_code then
discarded[#discarded+1] = t
- local width = getfield(getfield(t,"spec"),"width")
+ local width = t.spec.width
if trace_state then
report_state("columns %s, discarded bottom glue %p",i,width)
end
@@ -228,21 +201,20 @@ local function setsplit(specification) -- a rather large function
report_state("fatal error, no box")
return
end
- local list = getbox(box)
+ local list = texgetbox(box)
if not list then
report_state("fatal error, no list")
return
end
- local head = getlist(list) or specification.originalhead
+ local head = list.head or specification.originalhead
if not head then
report_state("fatal error, no head")
return
end
- slidenodes(head) -- we can have set prev's to nil to prevent backtracking
local discarded = { }
local originalhead = head
- local originalwidth = specification.originalwidth or getfield(list,"width")
- local originalheight = specification.originalheight or getfield(list,"height")
+ local originalwidth = specification.originalwidth or list.width
+ local originalheight = specification.originalheight or list.height
local current = head
local skipped = 0
local height = 0
@@ -305,20 +277,20 @@ local function setsplit(specification) -- a rather large function
local current = start
-- first skip over glue and penalty
while current do
- local id = getid(current)
+ local id = current.id
if id == glue_code or id == penalty_code then
- current = getprev(current)
+ current = current.prev
else
break
end
end
-- then skip over content
while current do
- local id = getid(current)
+ local id = current.id
if id == glue_code or id == penalty_code then
break
else
- current = getprev(current)
+ current = current.prev
end
end
if not current then
@@ -352,7 +324,7 @@ local function setsplit(specification) -- a rather large function
if current == head then
result.tail = head
else
- result.tail = getprev(current)
+ result.tail = current.prev
end
result.height = height
result.depth = depth
@@ -372,9 +344,6 @@ local function setsplit(specification) -- a rather large function
report_state("setting collector to column %s",column)
end
current, skipped = discardtopglue(current,discarded)
- if trace_detail and skipped ~= 0 then
- report_state("check > column 1, discarded %p",skipped)
- end
head = current
return true, skipped
end
@@ -397,7 +366,7 @@ local function setsplit(specification) -- a rather large function
end
end
if trace_detail then
- report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p => %a (height %p, depth %p, skip %p)",
+ report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p, discarded %p => %a (height %p, depth %p, skip %p)",
where,curcol,delta,threshold,advance,total,target,state,skipped,height,depth,skip)
end
return state, skipped
@@ -418,7 +387,7 @@ local function setsplit(specification) -- a rather large function
head = current
local function process_skip(current,nxt)
- local advance = getfield(getfield(current,"spec"),"width")
+ local advance = current.spec.width
if advance ~= 0 then
local state, skipped = checked(advance,"glue")
if trace_state then
@@ -442,7 +411,7 @@ local function setsplit(specification) -- a rather large function
end
local function process_kern(current,nxt)
- local advance = getfield(current,"kern")
+ local advance = current.kern
if advance ~= 0 then
local state, skipped = checked(advance,"kern")
if trace_state then
@@ -465,10 +434,10 @@ local function setsplit(specification) -- a rather large function
local function process_rule(current,nxt)
-- simple variant of h|vlist
- local advance = getfield(current,"height") -- + getfield(current,"depth")
+ local advance = current.height -- + current.depth
local state, skipped = checked(advance+currentskips,"rule")
if trace_state then
- report_state("%-7s > column %s, state %a, rule, advance %p, height %p","rule",column,state,advance,inserttotal,height)
+ report_state("%-7s > column %s, state %a, rule, advance %p, height %p","line",column,state,advance,inserttotal,height)
if skipped ~= 0 then
report_state("%-7s > column %s, discarded %p","rule",column,skipped)
end
@@ -482,7 +451,7 @@ local function setsplit(specification) -- a rather large function
else
height = height + currentskips
end
- depth = getfield(current,"depth")
+ depth = current.depth
skip = 0
end
@@ -493,12 +462,12 @@ local function setsplit(specification) -- a rather large function
-- [chapter] [penalty] [section] [penalty] [first line]
local function process_penalty(current,nxt)
- local penalty = getfield(current,"penalty")
+ local penalty = current.penalty
if penalty == 0 then
lastlocked = nil
lastcurrent = nil
elseif penalty == forcedbreak then
- local needed = getattribute(current,a_checkedbreak)
+ local needed = current[a_checkedbreak]
local proceed = not needed or needed == 0
if not proceed then
local available = target - height
@@ -546,12 +515,12 @@ local function setsplit(specification) -- a rather large function
end
local function process_list(current,nxt)
- local nxtid = nxt and getid(nxt)
+ local nxtid = nxt and nxt.id
line = line + 1
local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0
- local advance = getfield(current,"height") -- + getfield(current,"depth")
+ local advance = current.height -- + current.depth
if trace_state then
- report_state("%-7s > column %s, content: %s","line",column,listtoutf(getlist(current),true,true))
+ report_state("%-7s > column %s, content: %s","line",column,listtoutf(current.list,true,true))
end
if nxt and (nxtid == insert_code or nxtid == mark_code) then
nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid)
@@ -572,7 +541,7 @@ local function setsplit(specification) -- a rather large function
else
height = height + currentskips
end
- depth = getfield(current,"depth")
+ depth = current.depth
skip = 0
if inserts then
-- so we already collect them ... makes backtracking tricky ... alternatively
@@ -586,8 +555,8 @@ local function setsplit(specification) -- a rather large function
while current do
- local id = getid(current)
- local nxt = getnext(current)
+ local id = current.id
+ local nxt = current.next
backtracked = false
@@ -660,7 +629,7 @@ local function setsplit(specification) -- a rather large function
specification.overflow = overflow
specification.discarded = discarded
- setfield(getbox(specification.box),"list",nil)
+ texgetbox(specification.box).list = nil
return specification
end
@@ -672,12 +641,12 @@ function mixedcolumns.finalize(result)
local r = results[i]
local h = r.head
if h then
- setfield(h,"prev",nil)
+ h.prev = nil
local t = r.tail
if t then
- setfield(t,"next",nil)
+ t.next = nil
else
- setfield(h,"next",nil)
+ h.next = nil
r.tail = h
end
for c, list in next, r.inserts do
@@ -686,13 +655,13 @@ function mixedcolumns.finalize(result)
local l = list[i]
local h = new_hlist()
t[i] = h
- setfield(h,"list",getfield(l,"head"))
- setfield(h,"height",getfield(l,"height"))
- setfield(h,"depth",getfield(l,"depth"))
- setfield(l,"head",nil)
+ h.head = l.head
+ h.height = l.height
+ h.depth = l.depth
+ l.head = nil
end
- setfield(t[1],"prev",nil) -- needs checking
- setfield(t[#t],"next",nil) -- needs checking
+ t[1].prev = nil -- needs checking
+ t[#t].next = nil -- needs checking
r.inserts[c] = t
end
end
@@ -764,13 +733,13 @@ function mixedcolumns.getsplit(result,n)
return new_glue(result.originalwidth)
end
- setfield(h,"prev",nil) -- move up
+ h.prev = nil -- move up
local strutht = result.strutht
local strutdp = result.strutdp
local lineheight = strutht + strutdp
local v = new_vlist()
- setfield(v,"list",h)
+ v.head = h
-- local v = vpack(h,"exactly",height)
@@ -792,14 +761,14 @@ function mixedcolumns.getsplit(result,n)
dp = result.depth
end
- setfield(v,"width",wd)
- setfield(v,"height",ht)
- setfield(v,"depth",dp)
+ v.width = wd
+ v.height = ht
+ v.depth = dp
if trace_state then
- local id = getid(h)
+ local id = h.id
if id == hlist_code then
- report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",listtoutf(getlist(h)))
+ report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",nodes.toutf(h.list))
else
report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"head node",nodecodes[id])
end
@@ -808,8 +777,8 @@ function mixedcolumns.getsplit(result,n)
for c, list in next, r.inserts do
local l = concatnodes(list)
local b = vpack(l) -- multiple arguments, todo: fastvpack
- -- setbox("global",c,b)
- setbox(c,b)
+ -- texsetbox("global",c,b)
+ texsetbox(c,b)
r.inserts[c] = nil
end
@@ -853,7 +822,7 @@ end
function commands.mixgetsplit(n)
if result then
- context(tonode(mixedcolumns.getsplit(result,n)))
+ context(mixedcolumns.getsplit(result,n))
end
end
@@ -865,13 +834,13 @@ end
function commands.mixflushrest()
if result then
- context(tonode(mixedcolumns.getrest(result)))
+ context(mixedcolumns.getrest(result))
end
end
function commands.mixflushlist()
if result then
- context(tonode(mixedcolumns.getlist(result)))
+ context(mixedcolumns.getlist(result))
end
end
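
The hunks above all make the same move: the getfield/getnext/getid/setfield accessor calls are replaced by plain field access on userdata nodes, and the box helpers by texgetbox/texsetbox. A minimal sketch of the direct style the patched code uses, assuming a LuaTeX run; total_kerns and head are illustrative names, not part of the patch:

local kern_code = node.id("kern")

local function total_kerns(head)
    -- walk a node list and sum the kern amounts, using direct field access
    local total   = 0
    local current = head
    while current do
        if current.id == kern_code then
            total = total + current.kern   -- was: getfield(current,"kern")
        end
        current = current.next             -- was: getnext(current)
    end
    return total
end
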
diff --git a/tex/context/base/page-mix.mkiv b/tex/context/base/page-mix.mkiv
index d2bb38ca0..5d1c54a71 100644
--- a/tex/context/base/page-mix.mkiv
+++ b/tex/context/base/page-mix.mkiv
@@ -517,8 +517,7 @@
%D footnotes. Eventually we will have multiple strategies available.
\unexpanded\def\page_mix_routine_construct#1%
- {\d_page_mix_max_height\mixedcolumnsparameter\c!maxheight % can have changed due to header=high
- \ctxcommand{mixsetsplit {
+ {\ctxcommand{mixsetsplit {
box = \number\b_page_mix_collected,
nofcolumns = \number\c_page_mix_n_of_columns,
maxheight = \number\d_page_mix_max_height,
diff --git a/tex/context/base/page-mul.mkiv b/tex/context/base/page-mul.mkiv
index 73d84fe14..a874cd116 100644
--- a/tex/context/base/page-mul.mkiv
+++ b/tex/context/base/page-mul.mkiv
@@ -1605,11 +1605,9 @@
\else
\balancecolumnsfalse
\fi
- % % this won't work (blocked by check for overloading; too fuzzy anyway)
- % \installalign\v!yes {\page_columns_align_option_yes }% \stretchcolumnstrue \inheritcolumnsfalse
- % \installalign\v!no {\page_columns_align_option_no }% \stretchcolumnsfalse\inheritcolumnsfalse
- % \installalign\v!text{\page_columns_align_option_text}% \stretchcolumnsfalse\inheritcolumnstrue
- % %
+ \installalign\v!yes {\page_columns_align_option_yes }%
+ \installalign\v!no {\page_columns_align_option_no }%
+ \installalign\v!text{\page_columns_align_option_text}%
\stretchcolumnsfalse
\inheritcolumnstrue
\edef\p_align{\columnsparameter\c!align}%
diff --git a/tex/context/base/page-str.lua b/tex/context/base/page-str.lua
index f2ac27cd9..35ce85609 100644
--- a/tex/context/base/page-str.lua
+++ b/tex/context/base/page-str.lua
@@ -20,7 +20,7 @@ local tasks = nodes.tasks
local new_kern = nodepool.kern
local new_glyph = nodepool.glyph
-local slide_nodelist = node.slide
+local find_tail = node.slide
local write_node = node.write
local free_node = node.free
local copy_nodelist = node.copy_list
@@ -73,7 +73,7 @@ function streams.collect(head,where)
end
local last = dana[#dana]
if last then
- local tail = slide_nodelist(last)
+ local tail = find_tail(last)
tail.next, head.prev = head, tail
elseif last == false then
dana[#dana] = head
@@ -202,7 +202,7 @@ function streams.synchronize(list) -- this is an experiment !
else
-- this is not yet ok as we also need to keep an eye on vertical spacing
-- so we might need to do some splitting or whatever
- local tail = vbox.list and slide_nodelist(vbox.list)
+ local tail = vbox.list and find_tail(vbox.list)
local n, delta = 0, delta_height -- for tracing
while delta > 0 do
-- we need to add some interline penalties
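
The functional change in this file is only the rename of node.slide to find_tail, which documents how it is used: slide walks to the end of a node list, repairing prev pointers along the way, and returns the last node, so it works as a tail finder when appending. A small sketch of that append pattern, assuming a LuaTeX run; append_list, first and extra are illustrative names:

local find_tail = node.slide

local function append_list(first, extra)
    -- link the list starting at extra behind the list starting at first
    if not first then
        return extra
    end
    local tail = find_tail(first)
    tail.next, extra.prev = extra, tail   -- same double assignment as in streams.collect
    return first
end
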
diff --git a/tex/context/base/page-str.mkiv b/tex/context/base/page-str.mkiv
index a8fab9c6c..200a71377 100644
--- a/tex/context/base/page-str.mkiv
+++ b/tex/context/base/page-str.mkiv
@@ -29,6 +29,8 @@
%D
%D Remark: marknotes are gone, at least for a while.
+\writestatus{loading}{ConTeXt Page Macros / Page Streams}
+
\registerctxluafile{page-str}{1.001}
\unprotect
diff --git a/tex/context/base/publ-aut.lua b/tex/context/base/publ-aut.lua
deleted file mode 100644
index ba492a93b..000000000
--- a/tex/context/base/publ-aut.lua
+++ /dev/null
@@ -1,550 +0,0 @@
-if not modules then modules = { } end modules ['publ-aut'] = {
- version = 1.001,
- comment = "this module is part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if not characters then
- dofile(resolvers.findfile("char-def.lua"))
- dofile(resolvers.findfile("char-ini.lua"))
-end
-
-local context = context
-local chardata = characters.data
-
-local tostring = tostring
-local concat = table.concat
-local lpeg = lpeg
-local utfchar = utf.char
-
-local publications = publications or { }
-
-local datasets = publications.datasets or { }
-publications.datasets = datasets
-
-publications.authors = publications.authors or { }
-local authors = publications.authors
-
-local P, C, V, Cs, Ct, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Ct, lpeg.match, lpeg.patterns
-
--- local function makesplitter(separator)
--- return Ct { "start",
--- start = (Cs((V("outer") + (1-separator))^1) + separator^1)^1,
--- start = Cs(V("outer")) + (Cs((V("inner") + (1-separator))^1) + separator^1)^1,
--- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^0) * (P("}")/""),
--- inner = P("{") * ((V("inner") + P(1-P("}")))^0) * P("}"),
--- }
--- end
-
-local space = P(" ")
-local comma = P(",")
-local firstcharacter = lpegpatterns.utf8byte
-
--- local andsplitter = lpeg.tsplitat(space^1 * "and" * space^1)
--- local commasplitter = lpeg.tsplitat(space^0 * comma * space^0)
--- local spacesplitter = lpeg.tsplitat(space^1)
-
-local p_and = space^1 * "and" * space^1
-local p_comma = space^0 * comma * space^0
-local p_space = space^1
-
-local andsplitter = Ct { "start",
- start = (Cs((V("inner") + (1-p_and))^1) + p_and)^1,
- inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
-}
-
-local commasplitter = Ct { "start",
- start = Cs(V("outer")) + (Cs((V("inner") + (1-p_comma))^1) + p_comma)^1,
- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
- inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
-}
-
-local spacesplitter = Ct { "start",
- start = Cs(V("outer")) + (Cs((V("inner") + (1-p_space))^1) + p_space)^1,
- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
- inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
-}
-
-local function is_upper(str)
- local first = lpegmatch(firstcharacter,str)
- local okay = chardata[first]
- return okay and okay.category == "lu"
-end
-
-local cache = { } -- 33% reuse on tugboat.bib
-local nofhits = 0
-local nofused = 0
-
-local function splitauthorstring(str)
- if not str then
- return
- end
- nofused = nofused + 1
- local authors = cache[str]
- if authors then
- -- hit 1
- -- print("hit 1",author,nofhits,nofused,math.round(100*nofhits/nofused))
- return { authors } -- we assume one author
- end
- local authors = lpegmatch(andsplitter,str)
- for i=1,#authors do
- local author = authors[i]
- local detail = cache[author]
- if detail then
- -- hit 2
- -- print("hit 2",author,nofhits,nofused,math.round(100*nofhits/nofused))
- end
- if not detail then
- local firstnames, vons, surnames, initials, juniors
- local split = lpegmatch(commasplitter,author)
--- inspect(split)
- local n = #split
- if n == 1 then
- -- First von Last
- local words = lpegmatch(spacesplitter,author)
- firstnames, vons, surnames = { }, { }, { }
- local i, n = 1, #words
- while i <= n do
- local w = words[i]
- if is_upper(w) then
- firstnames[#firstnames+1], i = w, i + 1
- else
- break
- end
- end
- while i <= n do
- local w = words[i]
- if is_upper(w) then
- break
- else
- vons[#vons+1], i = w, i + 1
- end
- end
- if i <= n then
- while i <= n do
- surnames[#surnames+1], i = words[i], i + 1
- end
- elseif #vons == 0 then
- surnames[1] = firstnames[#firstnames]
- firstnames[#firstnames] = nil
- else
- -- mess
- end
- -- safeguard
- if #surnames == 0 then
- firstnames = { }
- vons = { }
- surnames = { author }
- end
- elseif n == 2 then
- -- von Last, First
- firstnames, vons, surnames = { }, { }, { }
- local words = lpegmatch(spacesplitter,split[1])
- local i, n = 1, #words
- while i <= n do
- local w = words[i]
- if is_upper(w) then
- break
- else
- vons[#vons+1], i = w, i + 1
- end
- end
- while i <= n do
- surnames[#surnames+1], i = words[i], i + 1
- end
- --
- local words = lpegmatch(spacesplitter,split[2])
- local i, n = 1, #words
- while i <= n do
- local w = words[i]
- if is_upper(w) then
- firstnames[#firstnames+1], i = w, i + 1
- else
- break
- end
- end
- while i <= n do
- vons[#vons+1], i = words[i], i + 1
- end
- else
- -- von Last, Jr ,First
- firstnames = lpegmatch(spacesplitter,split[1])
- juniors = lpegmatch(spacesplitter,split[2])
- surnames = lpegmatch(spacesplitter,split[3])
- if n > 3 then
- -- error
- end
- end
- if #surnames == 0 then
- surnames[1] = firstnames[#firstnames]
- firstnames[#firstnames] = nil
- end
- if firstnames then
- initials = { }
- for i=1,#firstnames do
- initials[i] = utfchar(lpegmatch(firstcharacter,firstnames[i]))
- end
- end
- detail = {
- original = author,
- firstnames = firstnames,
- vons = vons,
- surnames = surnames,
- initials = initials,
- juniors = juniors,
- }
- cache[author] = detail
- nofhits = nofhits + 1
- end
- authors[i] = detail
- end
- return authors
-end
-
--- local function splitauthors(dataset,tag,field)
--- local entries = datasets[dataset]
--- local luadata = entries.luadata
--- if not luadata then
--- return { }
--- end
--- local entry = luadata[tag]
--- if not entry then
--- return { }
--- end
--- return splitauthorstring(entry[field])
--- end
-
-local function the_initials(initials,symbol)
- local t, symbol = { }, symbol or "."
- for i=1,#initials do
- t[i] = initials[i] .. symbol
- end
- return t
-end
-
--- authors
-
-local settings = { }
-
--- local defaultsettings = {
--- firstnamesep = " ",
--- vonsep = " ",
--- surnamesep = " ",
--- juniorsep = " ",
--- surnamejuniorsep = ", ",
--- juniorjuniorsep = ", ",
--- surnamefirstnamesep = ", ",
--- surnameinitialsep = ", ",
--- namesep = ", ",
--- lastnamesep = " and ",
--- finalnamesep = " and ",
--- etallimit = 1000,
--- etaldisplay = 1000,
--- etaltext = "",
--- }
-
-local defaultsettings = {
- firstnamesep = [[\btxlistvariantparameter{firstnamesep}]],
- vonsep = [[\btxlistvariantparameter{vonsep}]],
- surnamesep = [[\btxlistvariantparameter{surnamesep}]],
- juniorsep = [[\btxlistvariantparameter{juniorsep}]],
- surnamejuniorsep = [[\btxlistvariantparameter{surnamejuniorsep}]],
- juniorjuniorsep = [[\btxlistvariantparameter{juniorjuniorsep}]],
- surnamefirstnamesep = [[\btxlistvariantparameter{surnamefirstnamesep}]],
- surnameinitialsep = [[\btxlistvariantparameter{surnameinitialsep}]],
- namesep = [[\btxlistvariantparameter{namesep}]],
- lastnamesep = [[\btxlistvariantparameter{lastnamesep}]],
- finalnamesep = [[\btxlistvariantparameter{finalnamesep}]],
- --
- etaltext = [[\btxlistvariantparameter{etaltext}]],
- --
- etallimit = 1000,
- etaldisplay = 1000,
-}
-
-function authors.setsettings(s)
-end
-
-authors.splitstring = splitauthorstring
-
--- [firstnames] [firstnamesep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (Taco, von Hoekwater, jr)
-
-function authors.normal(author,settings)
- local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors
- local result, settings = { }, settings or defaultsettings
- if firstnames and #firstnames > 0 then
- result[#result+1] = concat(firstnames," ")
- result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep
- end
- if vons and #vons > 0 then
- result[#result+1] = concat(vons," ")
- result[#result+1] = settings.vonsep or defaultsettings.vonsep
- end
- if surnames and #surnames > 0 then
- result[#result+1] = concat(surnames," ")
- if juniors and #juniors > 0 then
- result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
- result[#result+1] = concat(juniors," ")
- end
- elseif juniors and #juniors > 0 then
- result[#result+1] = concat(juniors," ")
- end
- return concat(result)
-end
-
--- [initials] [initialsep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (T, von Hoekwater, jr)
-
-function authors.normalshort(author,settings)
- local initials, vons, surnames, juniors = author.initials, author.vons, author.surnames, author.juniors
- local result, settings = { }, settings or defaultsettings
- if initials and #initials > 0 then
- result[#result+1] = concat(initials," ")
- result[#result+1] = settings.initialsep or defaultsettings.initialsep
- end
- if vons and #vons > 0 then
- result[#result+1] = concat(vons," ")
- result[#result+1] = settings.vonsep or defaultsettings.vonsep
- end
- if surnames and #surnames > 0 then
- result[#result+1] = concat(surnames," ")
- if juniors and #juniors > 0 then
- result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
- result[#result+1] = concat(juniors," ")
- end
- elseif juniors and #juniors > 0 then
- result[#result+1] = concat(juniors," ")
- end
- return concat(result)
-end
-
--- vons surnames juniors, firstnames
-
--- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [firstnames] (von Hoekwater jr, Taco)
-
-function authors.inverted(author,settings)
- local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors
- local result, settings = { }, settings or defaultsettings
- if vons and #vons > 0 then
- result[#result+1] = concat(vons," ")
- result[#result+1] = settings.vonsep or defaultsettings.vonsep
- end
- if surnames and #surnames > 0 then
- result[#result+1] = concat(surnames," ")
- if juniors and #juniors > 0 then
- result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
- result[#result+1] = concat(juniors," ")
- end
- elseif juniors and #juniors > 0 then
- result[#result+1] = concat(juniors," ")
- end
- if firstnames and #firstnames > 0 then
- result[#result+1] = settings.surnamefirstnamesep or defaultsettings.surnamefirstnamesep
- result[#result+1] = concat(firstnames," ")
- end
- return concat(result)
-end
-
--- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [initials] (von Hoekwater jr, T)
-
-function authors.invertedshort(author,settings)
- local vons, surnames, initials, juniors = author.vons, author.surnames, author.initials, author.juniors
- local result, settings = { }, settings or defaultsettings
- if vons and #vons > 0 then
- result[#result+1] = concat(vons," ")
- result[#result+1] = settings.vonsep or defaultsettings.vonsep
- end
- if surnames and #surnames > 0 then
- result[#result+1] = concat(surnames," ")
- if juniors and #juniors > 0 then
- result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
- result[#result+1] = concat(juniors," ")
- end
- elseif juniors and #juniors > 0 then
- result[#result+1] = concat(juniors," ")
- end
- if initials and #initials > 0 then
- result[#result+1] = settings.surnameinitialsep or defaultsettings.surnameinitialsep
- result[#result+1] = concat(the_initials(initials)," ")
- end
- return concat(result)
-end
-
-local lastconcatsize = 1
-
-local function concatnames(t,settings)
- local namesep = settings.namesep
- local lastnamesep = settings.lastnamesep
- local finalnamesep = settings.finalnamesep
- local lastconcatsize = #t
- if lastconcatsize > 2 then
- local s = { }
- for i=1,lastconcatsize-2 do
- s[i] = t[i] .. namesep
- end
- s[lastconcatsize-1], s[lastconcatsize] = t[lastconcatsize-1] .. finalnamesep, t[lastconcatsize]
- return concat(s)
- elseif lastconcatsize > 1 then
- return concat(t,lastnamesep)
- elseif lastconcatsize > 0 then
- return t[1]
- else
- return ""
- end
-end
-
-function authors.concat(dataset,tag,field,settings)
- table.setmetatableindex(settings,defaultsettings)
- local combiner = settings.combiner
- if not combiner or type(combiner) == "string" then
- combiner = authors[combiner or "normal"] or authors.normal
- end
- local split = datasets[dataset].details[tag][field]
- local etallimit = settings.etallimit or 1000
- local etaldisplay = settings.etaldisplay or etallimit
- local max = split and #split or 0
- if max == 0 then
- -- error
- end
- if max > etallimit and etaldisplay < max then
- max = etaldisplay
- end
- local combined = { }
- for i=1,max do
- combined[i] = combiner(split[i],settings)
- end
- local result = concatnames(combined,settings)
- if split and #combined < #split then
- return result .. settings.etaltext
- else
- return result
- end
-end
-
-function commands.btxauthor(...)
- context(authors.concat(...))
-end
-
-function authors.short(author,year)
- -- todo
--- local result = { }
--- if author then
--- local authors = splitauthors(author)
--- for a=1,#authors do
--- local aa = authors[a]
--- local initials = aa.initials
--- for i=1,#initials do
--- result[#result+1] = initials[i]
--- end
--- local surnames = aa.surnames
--- for s=1,#surnames do
--- result[#result+1] = utfchar(lpegmatch(firstcharacter,surnames[s]))
--- end
--- end
--- end
--- if year then
--- result[#result+1] = year
--- end
--- return concat(result)
-end
-
--- We can consider creating a hashtable key -> entry but I wonder if it
--- pays off.
-
-local compare = sorters.comparers.basic -- (a,b)
-local strip = sorters.strip
-local splitter = sorters.splitters.utf
-
-function authors.preparedsort(dataset,list,sorttype_a,sorttype_b,sorttype_c)
- local luadata = datasets[dataset].luadata
- local details = datasets[dataset].details
- local valid = { }
- local splitted = { }
- table.setmetatableindex(splitted,function(t,k) -- could be done in the sorter but seldom that many shared
- local v = splitter(k,true) -- in other cases
- t[k] = v
- return v
- end)
- local snippets = { }
- for i=1,#list do
- -- either { tag, tag, ... } or { { tag, index }, { tag, index } }
- local li = list[i]
- local tag = type(li) == "string" and li or li[1]
- local entry = luadata[tag]
- local detail = details[tag]
- local suffix = tostring(i)
- local year = nil
- local assembled = nil
- if entry and detail then
- local key = detail[sorttype_a] or detail[sorttype_b] or detail[sorttype_c]
- if key then
- -- maybe an option is to also sort the authors first
- local n = #key
- local s = 0
- for i=1,n do
- local k = key[i]
- local vons = k.vons
- local surnames = k.surnames
- local initials = k.initials
- if vons and #vons > 0 then
- s = s + 1 ; snippets[s] = concat(vons," ")
- end
- if surnames and #surnames > 0 then
- s = s + 1 ; snippets[s] = concat(surnames," ")
- end
- if initials and #initials > 0 then
- s = s + 1 ; snippets[s] = concat(initials," ")
- end
- end
- assembled = concat(snippets," ",1,s)
- else
- assembled = ""
- end
- year = entry.year or "9998"
- else
- assembled = ""
- year = "9999"
- end
- valid[i] = {
- index = i,
- split = {
- splitted[strip(assembled)],
- splitted[year],
- splitted[suffix],
- },
--- names = assembled,
--- year = year,
--- suffix = suffix,
- }
- end
- return valid
-end
-
-function authors.sorted(dataset,list,sorttype) -- experimental
- local valid = authors.preparedsort(dataset,list,sorttype)
- if #valid == 0 or #valid ~= #list then
- return list
- else
- sorters.sort(valid,compare)
- for i=1,#valid do
- valid[i] = valid[i].index
- end
- return valid
- end
-end
-
--- local dataset = publications.datasets.test
---
--- local function add(str)
--- dataset.details[str] = { author = publications.authors.splitstring(str) }
--- end
---
--- add("Hagen, Hans and Hoekwater, Taco Whoever T. Ex. and Henkel Hut, Hartmut Harald von der")
--- add("Hans Hagen and Taco Whoever T. Ex. Hoekwater and Hartmut Harald von der Henkel Hut")
--- add("de Gennes, P. and Gennes, P. de")
--- add("van't Hoff, J. H. and {van't Hoff}, J. H.")
---
--- local list = table.keys(dataset.details)
--- local sort = publications.authors.sorted("test",list,"author")
--- local test = { } for i=1,#sort do test[i] = dataset.details[list[sort[i]]] end
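
The heart of the deleted splitter is the set of brace-aware lpeg grammars near the top of the file: an author field is split on "and" without breaking protected groups such as {van't Hoff}. A standalone illustration of that first splitting step (plain Lua plus lpeg; a sketch, not the removed implementation):

local lpeg = require("lpeg")   -- already available as a global in a LuaTeX run
local P, Cs, Ct, V = lpeg.P, lpeg.Cs, lpeg.Ct, lpeg.V

local space = P(" ")
local p_and = space^1 * "and" * space^1

-- split on " and " but treat anything inside balanced braces as opaque
local andsplitter = Ct { "start",
    start = (Cs((V("inner") + (1 - p_and))^1) + p_and)^1,
    inner = P("{") * ((V("inner") + P(1 - P("}")))^1) * P("}"),
}

local authors = lpeg.match(andsplitter, "Hagen, Hans and {van't Hoff}, J. H.")
-- authors == { "Hagen, Hans", "{van't Hoff}, J. H." }
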
diff --git a/tex/context/base/publ-dat.lua b/tex/context/base/publ-dat.lua
deleted file mode 100644
index 8fce94822..000000000
--- a/tex/context/base/publ-dat.lua
+++ /dev/null
@@ -1,529 +0,0 @@
-if not modules then modules = { } end modules ['publ-dat'] = {
- version = 1.001,
- comment = "this module is part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: strip the @ in the lpeg instead of on do_definition and do_shortcut
--- todo: store bibroot and bibrootdt
-
---[[ldx--
-<p>This is a prelude to integrated bibliography support. This file just loads
-bibtex files and converts them to xml so that we can access the content
-in a convenient way. Actually handling the data takes place elsewhere.</p>
---ldx]]--
-
-if not characters then
- dofile(resolvers.findfile("char-def.lua"))
- dofile(resolvers.findfile("char-ini.lua"))
- dofile(resolvers.findfile("char-tex.lua"))
-end
-
-local chardata = characters.data
-local lowercase = characters.lower
-
-local lower, gsub, concat = string.lower, string.gsub, table.concat
-local next, type = next, type
-local utfchar = utf.char
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local textoutf = characters and characters.tex.toutf
-local settings_to_hash, settings_to_array = utilities.parsers.settings_to_hash, utilities.parsers.settings_to_array
-local formatters = string.formatters
-local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
-local xmlcollected, xmltext, xmlconvert = xml.collected, xml.text, xml.convert
-local setmetatableindex = table.setmetatableindex
-
--- todo: more allocate
-
-local P, R, S, V, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
-
-local trace = false trackers.register("publications", function(v) trace = v end)
-local report = logs.reporter("publications")
-
-publications = publications or { }
-local publications = publications
-
-local datasets = publications.datasets or { }
-publications.datasets = datasets
-
-publications.statistics = publications.statistics or { }
-local publicationsstats = publications.statistics
-
-publicationsstats.nofbytes = 0
-publicationsstats.nofdefinitions = 0
-publicationsstats.nofshortcuts = 0
-publicationsstats.nofdatasets = 0
-
-local xmlplaceholder = "<?xml version='1.0' standalone='yes'?>\n<bibtex></bibtex>"
-
-local defaultshortcuts = {
- jan = "1",
- feb = "2",
- mar = "3",
- apr = "4",
- may = "5",
- jun = "6",
- jul = "7",
- aug = "8",
- sep = "9",
- oct = "10",
- nov = "11",
- dec = "12",
-}
-
-function publications.new(name)
- publicationsstats.nofdatasets = publicationsstats.nofdatasets + 1
- local dataset = {
- name = name or "dataset " .. publicationsstats.nofdatasets,
- nofentries = 0,
- shortcuts = { },
- luadata = { },
- xmldata = xmlconvert(xmlplaceholder),
- -- details = { },
- nofbytes = 0,
- entries = nil, -- empty == all
- sources = { },
- loaded = { },
- fields = { },
- userdata = { },
- used = { },
- commands = { }, -- for statistical purposes
- status = {
- resources = false,
- userdata = false,
- },
- }
- setmetatableindex(dataset,function(t,k)
- -- will become a plugin
- if k == "details" and publications.enhance then
- dataset.details = { }
- publications.enhance(dataset.name)
- return dataset.details
- end
- end)
- return dataset
-end
-
-function publications.markasupdated(name)
- if type(name) == "string" then
- datasets[name].details = nil
- else
- name.details = nil
- end
-end
-
-setmetatableindex(datasets,function(t,k)
- if type(k) == "table" then
- return k -- so we can use this accessor as checker
- else
- local v = publications.new(k)
- datasets[k] = v
- return v
- end
-end)
-
--- we apply some normalization
-
-local space = S(" \t\n\r\f") -- / " "
-
------ command = P("\\") * Cc("btxcmd{") * (R("az","AZ")^1) * Cc("}")
------ command = P("\\") * (Carg(1) * C(R("az","AZ")^1) / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
-local command = P("\\") * (Carg(1) * C(R("az","AZ")^1) * space^0 / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
-local somemath = P("$") * ((1-P("$"))^1) * P("$") -- let's not assume nested math
-local any = P(1)
-local done = P(-1)
-local one_l = P("{") / ""
-local one_r = P("}") / ""
-local two_l = P("{{") / ""
-local two_r = P("}}") / ""
-local special = P("#") / "\\letterhash"
-
-local filter_0 = S('\\{}')
-local filter_1 = (1-filter_0)^0 * filter_0
-local filter_2 = Cs(
--- {{...}} ... {{...}}
--- two_l * (command + special + any - two_r - done)^0 * two_r * done +
--- one_l * (command + special + any - one_r - done)^0 * one_r * done +
- (somemath + command + special + any )^0
-)
-
--- Currently we expand shortcuts and for large ones (like the acknowledgements
--- in tugboat.bib) this is not that efficient. However, eventually strings get
--- hashed again.
-
-local function do_shortcut(key,value,dataset)
- publicationsstats.nofshortcuts = publicationsstats.nofshortcuts + 1
- dataset.shortcuts[key] = value
-end
-
-local function getindex(dataset,luadata,tag)
- local found = luadata[tag]
- if found then
- return found.index or 0
- else
- local index = dataset.nofentries + 1
- dataset.nofentries = index
- return index
- end
-end
-
-publications.getindex = getindex
-
--- todo: categories : metatable that lowers and also counts
--- todo: fields : metatable that lowers
-
-local function do_definition(category,tag,tab,dataset)
- publicationsstats.nofdefinitions = publicationsstats.nofdefinitions + 1
- local fields = dataset.fields
- local luadata = dataset.luadata
- local found = luadata[tag]
- local index = getindex(dataset,luadata,tag)
- local entries = {
- category = lower(category),
- tag = tag,
- index = index,
- }
- for i=1,#tab,2 do
- local original = tab[i]
- local normalized = fields[original]
- if not normalized then
- normalized = lower(original) -- we assume ascii fields
- fields[original] = normalized
- end
- local value = tab[i+1]
- value = textoutf(value)
- if lpegmatch(filter_1,value) then
- value = lpegmatch(filter_2,value,1,dataset.commands) -- we need to start at 1 for { }
- end
- if normalized == "crossref" then
- local parent = luadata[value]
- if parent then
- setmetatableindex(entries,parent)
- else
- -- warning
- end
- end
- entries[normalized] = value
- end
- luadata[tag] = entries
-end
-
-local function resolve(s,dataset)
- return dataset.shortcuts[s] or defaultshortcuts[s] or s -- can be number
-end
-
-local percent = P("%")
-local start = P("@")
-local comma = P(",")
-local hash = P("#")
-local escape = P("\\")
-local single = P("'")
-local double = P('"')
-local left = P('{')
-local right = P('}')
-local both = left + right
-local lineending = S("\n\r")
-local space = S(" \t\n\r\f") -- / " "
-local spacing = space^0
-local equal = P("=")
------ collapsed = (space^1)/ " "
-local collapsed = (lpegpatterns.whitespace^1)/ " "
-
------ balanced = lpegpatterns.balanced
-local balanced = P {
- [1] = ((escape * (left+right)) + (collapsed + 1 - (left+right)) + V(2))^0,
- [2] = left * V(1) * right
-}
-
-local keyword = C((R("az","AZ","09") + S("@_:-"))^1)
-local key = C((1-space-equal)^1)
-local tag = C((1-space-comma)^1)
-local reference = keyword
-local category = P("@") * C((1-space-left)^1)
-local s_quoted = ((escape*single) + collapsed + (1-single))^0
-local d_quoted = ((escape*double) + collapsed + (1-double))^0
-
-local b_value = (left /"") * balanced * (right /"")
-local s_value = (single/"") * (b_value + s_quoted) * (single/"")
-local d_value = (double/"") * (b_value + d_quoted) * (double/"")
-local r_value = reference * Carg(1) /resolve
-
-local somevalue = s_value + d_value + b_value + r_value
-local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0))
-
-local assignment = spacing * key * spacing * equal * spacing * value * spacing
-local shortcut = P("@") * (P("string") + P("STRING")) * spacing * left * ((assignment * Carg(1))/do_shortcut * comma^0)^0 * spacing * right
-local definition = category * spacing * left * spacing * tag * spacing * comma * Ct((assignment * comma^0)^0) * spacing * right * Carg(1) / do_definition
-local comment = keyword * spacing * left * (1-right)^0 * spacing * right
-local forget = percent^1 * (1-lineending)^0
-
--- todo \%
-
-local bibtotable = (space + forget + shortcut + definition + comment + 1)^0
-
--- loadbibdata -> dataset.luadata
--- loadtexdata -> dataset.luadata
--- loadluadata -> dataset.luadata
-
--- converttoxml -> dataset.xmldata from dataset.luadata
-
-function publications.loadbibdata(dataset,content,source,kind)
- dataset = datasets[dataset]
- statistics.starttiming(publications)
- publicationsstats.nofbytes = publicationsstats.nofbytes + #content
- dataset.nofbytes = dataset.nofbytes + #content
- if source then
- table.insert(dataset.sources, { filename = source, checksum = md5.HEX(content) })
- dataset.loaded[source] = kind or true
- end
- dataset.newtags = #dataset.luadata > 0 and { } or dataset.newtags
- publications.markasupdated(dataset)
- lpegmatch(bibtotable,content or "",1,dataset)
- statistics.stoptiming(publications)
-end
-
--- we could use xmlescape again
-
-local cleaner_0 = S('<>&')
-local cleaner_1 = (1-cleaner_0)^0 * cleaner_0
-local cleaner_2 = Cs ( (
- P("<") / "&lt;" +
- P(">") / "&gt;" +
- P("&") / "&amp;" +
- P(1)
-)^0)
-
-local compact = false -- can be a directive but then we also need to deal with newlines ... not now
-
-function publications.converttoxml(dataset,nice) -- we have fields !
- dataset = datasets[dataset]
- local luadata = dataset and dataset.luadata
- if luadata then
- statistics.starttiming(publications)
- statistics.starttiming(xml)
- --
- local result, r = { }, 0
- --
- r = r + 1 ; result[r] = "<?xml version='1.0' standalone='yes'?>"
- r = r + 1 ; result[r] = "<bibtex>"
- --
- if nice then
- local f_entry_start = formatters[" <entry tag='%s' category='%s' index='%s'>"]
- local f_entry_stop = " </entry>"
- local f_field = formatters[" <field name='%s'>%s</field>"]
- for tag, entry in sortedhash(luadata) do
- r = r + 1 ; result[r] = f_entry_start(tag,entry.category,entry.index)
- for key, value in sortedhash(entry) do
- if key ~= "tag" and key ~= "category" and key ~= "index" then
- if lpegmatch(cleaner_1,value) then
- value = lpegmatch(cleaner_2,value)
- end
- if value ~= "" then
- r = r + 1 ; result[r] = f_field(key,value)
- end
- end
- end
- r = r + 1 ; result[r] = f_entry_stop
- end
- else
- local f_entry_start = formatters["<entry tag='%s' category='%s' index='%s'>"]
- local f_entry_stop = "</entry>"
- local f_field = formatters["<field name='%s'>%s</field>"]
- for tag, entry in next, luadata do
- r = r + 1 ; result[r] = f_entry_start(entry.tag,entry.category,entry.index)
- for key, value in next, entry do
- if key ~= "tag" and key ~= "category" and key ~= "index" then
- if lpegmatch(cleaner_1,value) then
- value = lpegmatch(cleaner_2,value)
- end
- if value ~= "" then
- r = r + 1 ; result[r] = f_field(key,value)
- end
- end
- end
- r = r + 1 ; result[r] = f_entry_stop
- end
- end
- --
- r = r + 1 ; result[r] = "</bibtex>"
- --
- result = concat(result,nice and "\n" or nil)
- --
- dataset.xmldata = xmlconvert(result, {
- resolve_entities = true,
- resolve_predefined_entities = true, -- in case we have escaped entities
- -- unify_predefined_entities = true, -- &#038; -> &amp;
- utfize_entities = true,
- } )
- --
- statistics.stoptiming(xml)
- statistics.stoptiming(publications)
- if lxml then
- lxml.register(formatters["btx:%s"](dataset.name),dataset.xmldata)
- end
- end
-end
-
-local loaders = publications.loaders or { }
-publications.loaders = loaders
-
-function loaders.bib(dataset,filename,kind)
- dataset = datasets[dataset]
- local data = io.loaddata(filename) or ""
- if data == "" then
- report("empty file %a, nothing loaded",filename)
- elseif trace then
- report("loading file %a",filename)
- end
- publications.loadbibdata(dataset,data,filename,kind)
-end
-
-function loaders.lua(dataset,filename) -- if filename is a table we load that one
- dataset = datasets[dataset]
- if type(dataset) == "table" then
- dataset = datasets[dataset]
- end
- local data = type(filename) == "table" and filename or table.load(filename)
- if data then
- local luadata = dataset.luadata
- for tag, entry in next, data do
- if type(entry) == "table" then
- entry.index = getindex(dataset,luadata,tag)
- luadata[tag] = entry -- no cleaning yet
- end
- end
- end
-end
-
-function loaders.xml(dataset,filename)
- dataset = datasets[dataset]
- local luadata = dataset.luadata
- local root = xml.load(filename)
- for xmlentry in xmlcollected(root,"/bibtex/entry") do
- local attributes = xmlentry.at
- local tag = attributes.tag
- local entry = {
- category = attributes.category
- }
- for field in xmlcollected(xmlentry,"/field") do
- -- entry[field.at.name] = xmltext(field)
- entry[field.at.name] = field.dt[1] -- no cleaning yet
- end
- -- local edt = entry.dt
- -- for i=1,#edt do
- -- local e = edt[i]
- -- local a = e.at
- -- if a and a.name then
- -- t[a.name] = e.dt[1] -- no cleaning yet
- -- end
- -- end
- entry.index = getindex(dataset,luadata,tag)
- luadata[tag] = entry
- end
-end
-
-setmetatableindex(loaders,function(t,filetype)
- local v = function(dataset,filename)
- report("no loader for file %a with filetype %a",filename,filetype)
- end
- t[filetype] = v
- return v
-end)
-
-function publications.load(dataset,filename,kind)
- dataset = datasets[dataset]
- statistics.starttiming(publications)
- local files = settings_to_array(filename)
- for i=1,#files do
- local filetype, filename = string.splitup(files[i],"::")
- if not filename then
- filename = filetype
- filetype = file.suffix(filename)
- end
- local fullname = resolvers.findfile(filename,"bib")
- if dataset.loaded[fullname] then -- will become better
- -- skip
- elseif fullname == "" then
- report("no file %a",filename)
- else
- loaders[filetype](dataset,fullname)
- end
- if kind then
- dataset.loaded[fullname] = kind
- end
- end
- statistics.stoptiming(publications)
- return dataset
-end
-
-local checked = function(s,d) d[s] = (d[s] or 0) + 1 end
-local checktex = ( (1-P("\\"))^1 + P("\\") * ((C(R("az","AZ")^1) * Carg(1))/checked))^0
-
-function publications.analyze(dataset)
- dataset = datasets[dataset]
- local data = dataset.luadata
- local categories = { }
- local fields = { }
- local commands = { }
- for k, v in next, data do
- categories[v.category] = (categories[v.category] or 0) + 1
- for k, v in next, v do
- fields[k] = (fields[k] or 0) + 1
- lpegmatch(checktex,v,1,commands)
- end
- end
- dataset.analysis = {
- categories = categories,
- fields = fields,
- commands = commands,
- }
-end
-
--- str = [[
--- @COMMENT { CRAP }
--- @STRING{ hans = "h a n s" }
--- @STRING{ taco = "t a c o" }
--- @SOMETHING{ key1, abc = "t a c o" , def = "h a n s" }
--- @SOMETHING{ key2, abc = hans # taco }
--- @SOMETHING{ key3, abc = "hans" # taco }
--- @SOMETHING{ key4, abc = hans # "taco" }
--- @SOMETHING{ key5, abc = hans # taco # "hans" # "taco"}
--- @SOMETHING{ key6, abc = {oeps {oeps} oeps} }
--- ]]
-
--- local dataset = publications.new()
--- publications.tolua(dataset,str)
--- publications.toxml(dataset)
--- publications.toxml(dataset)
--- print(dataset.xmldata)
--- inspect(dataset.luadata)
--- inspect(dataset.xmldata)
--- inspect(dataset.shortcuts)
--- print(dataset.nofbytes,statistics.elapsedtime(publications))
-
--- local dataset = publications.new()
--- publications.load(dataset,"IEEEabrv.bib")
--- publications.load(dataset,"IEEEfull.bib")
--- publications.load(dataset,"IEEEexample.bib")
--- publications.toxml(dataset)
--- print(dataset.nofbytes,statistics.elapsedtime(publications))
-
--- local dataset = publications.new()
--- publications.load(dataset,"gut.bib")
--- publications.load(dataset,"komoedie.bib")
--- publications.load(dataset,"texbook1.bib")
--- publications.load(dataset,"texbook2.bib")
--- publications.load(dataset,"texbook3.bib")
--- publications.load(dataset,"texgraph.bib")
--- publications.load(dataset,"texjourn.bib")
--- publications.load(dataset,"texnique.bib")
--- publications.load(dataset,"tugboat.bib")
--- publications.toxml(dataset)
--- print(dataset.nofbytes,statistics.elapsedtime(publications))
-
--- print(table.serialize(dataset.luadata))
--- print(table.serialize(dataset.xmldata))
--- print(table.serialize(dataset.shortcuts))
--- print(xml.serialize(dataset.xmldata))
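
The deleted loader exposed a small dataset API: datasets are created on demand through the metatable on publications.datasets, publications.load feeds them from bib (or lua/xml) files, and publications.converttoxml builds the xml tree that is then registered with lxml. A hedged usage sketch under those assumptions; the dataset name, file name and kind are made up, and a ConTeXt MkIV run with the publications namespace loaded is presumed:

local dataset = publications.datasets["references"]       -- created on first access

publications.load("references", "mybib.bib", "database")  -- resolve, parse and register the file
publications.converttoxml("references", true)             -- nice mode: indented xml

-- afterwards dataset.luadata holds the parsed entries and dataset.xmldata the
-- xml tree, registered with lxml under the name "btx:references"
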
diff --git a/tex/context/base/publ-imp-apa.mkiv b/tex/context/base/publ-imp-apa.mkiv
deleted file mode 100644
index 3f7b119af..000000000
--- a/tex/context/base/publ-imp-apa.mkiv
+++ /dev/null
@@ -1,547 +0,0 @@
-%D \module
-%D [ file=publ-imp-apa,
-%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
-%D title=APA bibliography style,
-%D subtitle=Publications,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
-%C by \PRAGMA. See mreadme.pdf for details.
-
-% common
-
-% \loadbtxdefinitionfile[def]
-
-\startsetups btx:apa:common:wherefrom
- \btxdoifelse {address} {
- \getvariable{btx:temp}{left}
- \btxdoifelse {country} {
- \btxdoifelse {\getvariable{btx:temp}{label}} {
- \btxflush{address}\btxcomma\btxflush{country}: \btxflush{\getvariable{btx:temp}{label}}
- } {
- \btxflush{address}\btxcomma\btxflush{country}
- }
- } {
- \btxdoifelse {\getvariable{btx:temp}{label}} {
- \btxflush{address}\btxcomma\btxflush{\getvariable{btx:temp}{label}}
- } {
- \btxflush{address}
- }
- }
- \getvariable{btx:temp}{right}
- } {
- \btxdoifelse {country} {
- \getvariable{btx:temp}{left}
- \btxdoifelse {\getvariable{btx:temp}{label}} {
- \btxflush{country}: \btxflush{\getvariable{btx:temp}{label}}
- } {
- \btxflush{country}
- }
- \getvariable{btx:temp}{right}
- } {
- \btxdoifelse {\getvariable{btx:temp}{label}} {
- \getvariable{btx:temp}{left}
- \btxflush{\getvariable{btx:temp}{label}}
- \getvariable{btx:temp}{right}
- } {
- \getvariable{btx:temp}{otherwise}
- }
- }
- }
-\stopsetups
-
-% \setvariables[btx:temp][label=,left=,right=,otherwise=]
-
-\startsetups btx:apa:common:publisher
- \begingroup
- \setvariables[btx:temp][label=publisher]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:organization
- \begingroup
- \setvariables[btx:temp][label=organization]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:school
- \begingroup
- \setvariables[btx:temp][label=school]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:institution
- \begingroup
- \setvariables[btx:temp][label=institution]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:school:subsentence
- \begingroup
- \setvariables[btx:temp][label=school,left=\btxcomma,right=\btxperiod,otherwise=\btxperiod]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:institution:subsentence
- \begingroup
- \setvariables[btx:temp][label=institution,left=\btxcomma,right=\btxperiod,otherwise=\btxperiod]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:publisher:sentence
- \begingroup
- \setvariables[btx:temp][label=publisher,left=\btxspace,right=\btxperiod]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:organization:sentence
- \begingroup
- \setvariables[btx:temp][label=organization,left=\btxspace,right=\btxperiod]\relax
- \btxsetup{btx:apa:common:wherefrom}
- \endgroup
-\stopsetups
-
-\startsetups btx:apa:common:title-and-series
- \btxdoif {title} {
- \btxflush{title}
- \btxdoif {series} {
- \btxlparent\btxflush{series}\btxrparent
- }
- \btxperiod
- }
-\stopsetups
-
-\startsetups btx:apa:common:title-it-and-series
- \btxdoif {title} {
- \bgroup\it\btxflush{title}\/\egroup
- \btxdoif {series} {
- \btxlparent\btxflush{series}\btxrparent
- }
- \btxperiod
- }
-\stopsetups
-
-\disablemode[btx:apa:edited-book]
-
-\startsetups btx:apa:common:author-and-year
- \btxdoif {author} {
- \btxflushauthor{author}
- }
- \btxdoif {year} {
- \btxlparent\btxflush{year}\btxrparent
- }
- \btxperiod
-\stopsetups
-
-\startsetups btx:apa:common:author-or-key-and-year
- \btxdoifelse {author} {
- \btxflushauthor{author}
- } {
- \btxdoif {key} {
- \btxlbracket\btxsetup{btx:format:key}\btxrbracket
- }
- }
- \btxdoif {year} {
- \btxlparent\btxflush{year}\btxrparent
- }
- \btxperiod
-\stopsetups
-
-\startsetups btx:apa:common:author-editors-crossref-year
- \btxdoif {author} {
- \btxflushauthor{author}
- } {
- \btxdoifelse {editor} {
- \enablemode[btx:apa:edited-book]
- \btxflushauthor{editor}
- \btxcomma\btxsingularplural{editor}{editor}{editors}
- } {
- % weird period
- \btxdoif {crossref} {
- \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket\btxperiod
- }
- }
- }
- \btxdoif {year} {
- \btxlparent\btxflush{year}\btxrparent
- }
- \btxperiod
-\stopsetups
-
-\startsetups btx:apa:common:editor-or-key-and-year
- \btxdoifelse {editor} {
- \enablemode[btx:apa:edited-book]
- \btxflushauthor{editor}
- \btxcomma\btxsingularplural{editor}{editor}{editors}
- } {
- \btxdoif {key} {
- \btxlbracket\btxsetup{btx:format:key}\btxrbracket
- }
- }
- \btxspace
- \btxdoif {year} {
- \btxlparent\btxflush{year}\btxrparent
- }
- \btxperiod
-\stopsetups
-
-\startsetups btx:apa:common:note
- \btxdoif {note} {
- \btxspace\btxflush{note}\btxperiod
- }
-\stopsetups
-
-\startsetups btx:apa:common:comment
- \btxdoif {comment} {
- \btxspace\btxflush{comment}\btxperiod
- }
-\stopsetups
-
-\startsetups btx:apa:common:pages:p
- \btxdoif {pages} {
- \btxspace\btxflush{pages}\btxspace p\btxperiod
- }
-\stopsetups
-
-\startsetups btx:apa:common:pages:pp
- \btxdoif {pages} {
- \btxspace\btxflush{pages}\btxspace pp\btxperiod
- }
-\stopsetups
-
-\startsetups btx:apa:common:pages:pages
- \btxdoif {pages} {
- \btxcomma pages~\btxflush{pages}
- }
-\stopsetups
-
-\startsetups btx:apa:common:edition:sentence
- \btxdoif {edition} {
- \btxspace\btxflush{edition}\btxspace edition\btxperiod
- }
-\stopsetups
-
-% check when the next is used (no period)
-
-% \startsetups btx:apa:common:edition
-% \btxdoif {edition} {
-% \btxspace\btxflush{edition}\btxspace edition
-% }
-% \stopsetups
-
-% we can share more, todo
-
-% specific
-
-\startsetups btx:apa:article
- \btxsetup{btx:apa:common:author-or-key-and-year}
- \btxdoif {title} {
- \btxflush{title}\btxperiod
- }
- \btxdoifelse {journal} {
- \bgroup\it\btxflush{journal}\/\egroup
- } {
- \btxdoif {crossref} {
- In\btxspace\btxflush{crossref}
- }
- }
- \btxdoifelse {volume} {
- \btxcomma\bgroup\it\btxflush{volume}\/\egroup
- \btxdoif {issue} {
- \btxlparent\btxflush{issue}\btxrparent
- }
- \btxdoif {pages} {
- \btxcomma\btxflush{pages}
- }
- \btxperiod
- } {
- \btxsetup{btx:apa:common:pages:pp}
- }
- \btxsetup{btx:apa:common:note}
- \btxsetup{btx:apa:common:comment}
-\stopsetups
-
-\startsetups btx:apa:book
- \btxsetup{btx:apa:common:author-editors-crossref-year}
- \btxdoif {title} {
- \bgroup\it\btxflush{title}\/\egroup
- \doifmodeelse {btx:apa:edited-book} {
- \btxdoifelse {volume} {
- \btxspace Number\nonbreakablespace\btxflush{volume}
- \btxdoifelse {series} {
- \btxspace in\nonbreakablespace\btxflush{series}\btxperiod
- } {
- \btxdoifelse {crossref} {
- \btxspace in\btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
- } {
- \btxperiod
- }
- }
- } {
- \btxdoif {series} {
- \btxspace\btxflush{series}
- }
- \btxperiod
- }
- } {
- \btxdoifelse {crossref} {
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxperiod
- \btxdoif {volume} {
- Volume\nonbreakablespace\btxflush{volume}\btxspace of\nonbreakablespace
- }
- } {
- \btxdoif {volume} {
- \btxcomma volume\nonbreakablespace\btxflush{volume}
- \btxdoif {series} {
- \btxspace of\nonbreakablespace\bgroup\it\btxflush{series}\/\egroup
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}
- }
- \btxsetup{btx:apa:common:pages:pages}
- }
- \btxperiod
- }
- }
- }
- \btxsetup{btx:apa:common:edition:sentence}
- \btxsetup{btx:apa:common:publisher:sentence}
- \btxsetup{btx:apa:common:pages:p}% twice?
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:inbook
- \btxsetup{btx:apa:common:author-editors-crossref-year}
- \btxdoifelse {title} {
- \bgroup\it\btxflush{title}\/\egroup
- } {
- \doifmodeelse {btx:apa:edited-book} {
- \btxdoifelse {volume} {
- \btxspace number\nonbreakablespace\btxflush{volume}
- \btxdoifelse {series} {
- \btxspace in\nonbreakablespace\btxflush{series}\btxperiod
- } {
- \btxdoifelse {crossref} {
- \btxspace in\btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
- } {
- \btxperiod
- }
- }
- } {
- \btxdoif {series} {
- \btxspace\btxflush{series}\btxperiod
- }
- }
- } {
- \btxdoifelse {crossref} {
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxdoif {volume} {
- Volume\nonbreakablespace\btxflush{volume}\btxspace of\nonbreakablespace
- }
- \btxdoif {crossref} {
- \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
- }
- } {
- \btxdoif {volume} {
- \btxcomma volume\nonbreakablespace\btxflush{volume}
- \btxdoif {series} {
- \btxspace of\nonbreakablespace\bgroup\it\btxflush{series}\/\egroup
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxperiod
- }
- }
- }
- }
- \btxspace
- \btxsetup{btx:apa:common:edition:sentence}
- \btxsetup{btx:apa:common:publisher}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:booklet
- \btxsetup{btx:apa:common:author-or-key-and-year}
- \btxsetup{btx:apa:common:title-it-and-series}
- \btxsetup{btx:apa:common:edition:sentence}
- \btxsetup{btx:apa:common:publisher:sentence}
- \btxsetup{btx:apa:common:pages:p}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:manual
- \btxsetup{btx:apa:common:author-or-key-and-year}
- \btxsetup{btx:apa:common:title-it-and-series}
- \btxsetup{btx:apa:common:edition:sentence}
- \btxsetup{btx:apa:common:organization:sentence}
- \btxsetup{btx:apa:common:pages:p}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:incollection
- \btxsetup{btx:apa:common:author-and-year}
- \btxdoif {arttitle} {
- \btxflush{arttitle}\btxperiod
- }
- In\btxspace
- \btxdoifelse {title} {
- \btxflushauthor{editor}\btxcomma
- \bgroup\it\btxflush{title}\/\egroup
- \btxdoif {series} {
- \btxdoif {volume} {
- \btxcomma number\btxspace\btxflush{volume}\btxspace in
- }
- \btxspace\btxflush{series}
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}\btxspace
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxdoif {edition} {
- \btxspace\btxflush{edition}\btxspace edition
- }
- \btxsetup{btx:apa:common:publisher:sentence}
- } {
- \btxdoif {crossref} {
- \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}
- }
- \btxspace
- \btxsetup{btx:apa:common:pages:pages}
- }
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:inproceedings
- \btxsetup{btx:apa:common:author-and-year}
- \btxdoif {arttitle} {
- \btxflush{arttitle}\btxperiod
- }
- In\btxspace
- \btxdoifelse {title} {
- \btxdoif {editor} {
- \btxflush{btx:apa:format:editors}
- \btxcomma\btxsingularplural{editor}{editor}{editors}\btxcomma
- }
- \bgroup\it\btxflush{title}\/\egroup
- \btxdoif {series} {
- \btxdoif {volume} {
- \btxcomma number~\btxflush{volume} in
- }
- \btxspace
- \btxflush{series}
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}\btxspace
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxperiod
- \btxsetup{btx:apa:common:organization:sentence}
- } {
- \btxdoif {crossref} {
- \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}\btxspace
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxperiod
- }
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:proceedings
- \btxsetup{btx:apa:common:editor-or-key-and-year}
- \btxdoif {title} {
- \bgroup\it\btxflush{title}\/\egroup
- \btxdoif {volume} {
- \btxcomma number\btxspace\btxflush{volume}\btxspace in\btxspace
- }
- \btxdoif {chapter} {
- \btxcomma\btxflush{chapter}\btxspace
- }
- \btxsetup{btx:apa:common:pages:pages}
- \btxperiod
- \btxsetup{btx:apa:common:organization:sentence}
- }
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:common:thesis
- \btxsetup{btx:apa:common:author-and-year}
- \btxsetup{btx:apa:common:title-it-and-series}
- \btxdoifelse {type} {
- \btxflush{type}
- } {
- \getvariable{btx:temp}{label}
- }
- \btxsetup{btx:apa:common:school:subsentence}
- \btxsetup{btx:apa:common:pages:p}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:mastersthesis
- \setvariables[btx:temp][label=Master's thesis]
- \btxsetup{btx:apa:common:thesis}
-\stopsetups
-
-\startsetups btx:apa:phdthesis
- \setvariables[btx:temp][label=PhD thesis]
- \btxsetup{btx:apa:common:thesis}
-\stopsetups
-
-\startsetups btx:apa:techreport
- \btxsetup{btx:apa:common:author-and-year}
- \btxsetup{btx:apa:common:title-and-series}
- \btxdoifelse {type} {
- \btxflush{type}
- \btxdoif {volume} {
- \btxspace\btxflush{volume}
- }
- } {
- \btxspace Technical Report
- }
- \btxsetup{btx:apa:common:institution:subsentence}
- \btxsetup{btx:apa:common:pages:p}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:misc
- \btxsetup{btx:apa:common:author-and-year}
- \btxsetup{btx:apa:common:title-and-series}
- \btxsetup{btx:apa:common:publisher:sentence}
- \btxsetup{btx:apa:common:pages:p}
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\startsetups btx:apa:unpublished
- \btxsetup{btx:apa:common:author-and-year}
- \btxsetup{btx:apa:common:title-and-series}
- \btxsetup{btx:apa:common:pages:p}
- \btxdoif {type} {
- \btxlparent\btxflush{type}\btxrparent
- }
- \btxsetup{btx:apa:common:note}
-\stopsetups
-
-\endinput
diff --git a/tex/context/base/publ-imp-cite.mkiv b/tex/context/base/publ-imp-cite.mkiv
deleted file mode 100644
index d64c2132c..000000000
--- a/tex/context/base/publ-imp-cite.mkiv
+++ /dev/null
@@ -1,74 +0,0 @@
-%D \module
-%D [ file=publ-imp-cite,
-%D version=2013.12.24,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=XML,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\startsetups btx:cite:author
- \btxcitevariant{author}
-\stopsetups
-
-\startsetups btx:cite:authoryear
- \btxcitevariant{authoryear}
-\stopsetups
-
-\startsetups btx:cite:authoryears
- \btxcitevariant{authoryears}
-\stopsetups
-
-% \startsetups btx:cite:authornum
-% \btxcitevariant{author}
-% \btxcitevariantparameter\c!inbetween
-% \btxcitevariant{num}
-% \stopsetups
-
-\startsetups btx:cite:authornum
- \btxcitevariant{authornum}
-\stopsetups
-
-\startsetups btx:cite:year
- \btxcitevariant{year}
-\stopsetups
-
-\startsetups btx:cite:short
- \btxcitevariant{short}
-\stopsetups
-
-\startsetups btx:cite:serial
- \btxcitevariant{serial}
-\stopsetups
-
-\startsetups btx:cite:key
- \currentbtxtag % \btxcitevariant{tag}
-\stopsetups
-
-\startsetups btx:cite:doi
- todo: \btxcitevariant{doi}
-\stopsetups
-
-\startsetups btx:cite:url
- todo: \btxcitevariant{url}
-\stopsetups
-
-\startsetups btx:cite:type
- \btxcitevariant{category}
-\stopsetups
-
-\startsetups btx:cite:page
- \btxcitevariant{page}
-\stopsetups
-
-\startsetups btx:cite:none
- % dummy
-\stopsetups
-
-\startsetups btx:cite:num
- \btxcitevariant{num}
-\stopsetups
diff --git a/tex/context/base/publ-imp-commands.mkiv b/tex/context/base/publ-imp-commands.mkiv
deleted file mode 100644
index 14e2dbae1..000000000
--- a/tex/context/base/publ-imp-commands.mkiv
+++ /dev/null
@@ -1,15 +0,0 @@
-\unprotect
-
-% for tugboat
-
-\definebtxcommand\hbox {\hbox}
-\definebtxcommand\vbox {\vbox}
-\definebtxcommand\llap {\llap}
-\definebtxcommand\rlap {\rlap}
-\definebtxcommand\url #1{\hyphenatedurl{#1}}
-\definebtxcommand\acro #1{\dontleavehmode{\smallcaps#1}}
-
-\let\<<
-\let\>>
-
-\protect \endinput
diff --git a/tex/context/base/publ-imp-definitions.mkiv b/tex/context/base/publ-imp-definitions.mkiv
deleted file mode 100644
index 2cf2e3e8e..000000000
--- a/tex/context/base/publ-imp-definitions.mkiv
+++ /dev/null
@@ -1,68 +0,0 @@
-%D \module
-%D [ file=publ-imp-def,
-%D version=2013.12.24,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=Definitions,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D Here we collect some helper setups. We assume that checking of a field
-%D happens in the calling setup, if only because that is the place where
-%D fences are also dealt with.
-
-\unprotect
-
-\startxmlsetups btx:format:crossref
- \cite[\btxfield{crossref}]
-\stopxmlsetups
-
-\startxmlsetups btx:format:key
- \btxfield{short}
-\stopxmlsetups
-
-\startxmlsetups btx:format:doi
- \edef\currentbtxfielddoi{\btxfield{doi}}
- \ifx\currentbtxfielddoi\empty
- {\tttf no-doi}
- \else\ifconditional\btxinteractive
- \goto{\hyphenatedurl{\currentbtxfielddoi}}[url(http://dx.doi.org/\currentbtxfielddoi)]
- \else
- \hyphenatedurl{\currentbtxfielddoi}
- \fi\fi
-\stopxmlsetups
-
-\startxmlsetups btx:format:url
- \edef\currentbtxfieldurl{\btxfield{url}}
- \ifx\currentbtxfieldurl\empty
- {\tttf no-url}
- \else\ifconditional\btxinteractive
- \goto{\hyphenatedurl{\currentbtxfieldurl}}[url(\currentbtxfieldurl)]
- \else
- \hyphenatedurl{\currentbtxfieldurl}
- \fi\fi
-\stopxmlsetups
-
-\startxmlsetups btx:format:month
- \edef\currentbtxfieldmonth{\btxfield{month}}
- \ifx\currentbtxfieldmonth\empty
- {\tttf no-month}
- \else
- \edef\p_monthconversion{\btxlistvariantparameter\c!monthconversion}
- \ifx\p_monthconversion\empty % month month:mnem
- \currentbtxfieldmonth
- \else
- \doifnumberelse \currentbtxfieldmonth {
- \convertnumber\p_monthconversion\currentbtxfieldmonth
- } {
- \currentbtxfieldmonth
- }
- \fi
- \fi
-\stopxmlsetups
-
-\protect
diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua
deleted file mode 100644
index 6bf6714da..000000000
--- a/tex/context/base/publ-ini.lua
+++ /dev/null
@@ -1,1425 +0,0 @@
-if not modules then modules = { } end modules ['publ-ini'] = {
- version = 1.001,
- comment = "this module is part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- for the moment here
-
-local lpegmatch = lpeg.match
-local P, C, Ct, Cs = lpeg.P, lpeg.C, lpeg.Ct, lpeg.Cs
-
-local lpegmatch = lpeg.match
-local pattern = Cs((1 - P(1) * P(-1))^0 * (P(".")/"" + P(1)))
-
-local manipulators = {
- stripperiod = function(str) return lpegmatch(pattern,str) end,
- uppercase = characters.upper,
- lowercase = characters.lower,
-}
-
-local manipulation = C((1-P("->"))^1) * P("->") * C(P(1)^0)
-
-local pattern = manipulation / function(operation,str)
- local manipulator = manipulators[operation]
- return manipulator and manipulator(str) or str
-end
-
-local function manipulated(str)
- return lpegmatch(pattern,str) or str
-end
-
-utilities.parsers.manipulation = manipulation
-utilities.parsers.manipulators = manipulators
-utilities.parsers.manipulated = manipulated
-
-function commands.manipulated(str)
- context(manipulated(str))
-end
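-- [editor's sketch, not part of the deleted file] The "action->value" prefix syntax
-- above can be exercised standalone; this assumes the lpeg library is available and
-- swaps ConTeXt's characters.upper/lower for plain string.upper/lower:

local lpeg = require("lpeg")
local P, C, Cs = lpeg.P, lpeg.C, lpeg.Cs

local stripper = Cs((1 - P(1) * P(-1))^0 * (P(".")/"" + P(1))) -- drop a trailing period

local demomanipulators = {
    stripperiod = function(str) return lpeg.match(stripper,str) end,
    uppercase   = string.upper,
    lowercase   = string.lower,
}

local demopattern = C((1-P("->"))^1) * P("->") * C(P(1)^0) / function(operation,str)
    local manipulator = demomanipulators[operation]
    return manipulator and manipulator(str) or str
end

local function demomanipulated(str)
    return lpegmatch and lpegmatch(demopattern,str) or lpeg.match(demopattern,str) or str
end

print(demomanipulated("uppercase->context")) --> CONTEXT
print(demomanipulated("stripperiod->etc."))  --> etc
print(demomanipulated("year"))               --> year (no action prefix, unchanged)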
-
--- use: for rest in gmatch(reference,"[^, ]+") do
-
-local next, rawget, type = next, rawget, type
-local match, gmatch, format, gsub = string.match, string.gmatch, string.format, string.gsub
-local concat, sort = table.concat, table.sort
-local utfsub = utf.sub
-local formatters = string.formatters
-local allocate = utilities.storage.allocate
-local settings_to_array, settings_to_set = utilities.parsers.settings_to_array, utilities.parsers.settings_to_set
-local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
-local lpegmatch = lpeg.match
-local P, C, Ct = lpeg.P, lpeg.C, lpeg.Ct
-
-local report = logs.reporter("publications")
-local trace = false trackers.register("publications", function(v) trace = v end)
-
-local datasets = publications.datasets
-
-local variables = interfaces.variables
-
-local v_local = variables["local"]
-local v_global = variables["global"]
-
-local v_force = variables.force
-local v_standard = variables.standard
-local v_start = variables.start
-local v_none = variables.none
-local v_left = variables.left
-local v_right = variables.right
-local v_middle = variables.middle
-local v_inbetween = variables.inbetween
-
-local v_short = variables.short
-local v_cite = variables.cite
-local v_default = variables.default
-local v_reference = variables.reference
-local v_dataset = variables.dataset
-local v_author = variables.author or "author"
-local v_editor = variables.editor or "editor"
-
-local numbertochar = converters.characters
-
-local logsnewline = logs.newline
-local logspushtarget = logs.pushtarget
-local logspoptarget = logs.poptarget
-local csname_id = token.csname_id
-
-local basicsorter = sorters.basicsorter -- (a,b)
-local sortcomparer = sorters.comparers.basic -- (a,b)
-local sortstripper = sorters.strip
-local sortsplitter = sorters.splitters.utf
-
-local context = context
-
-local ctx_btxlistparameter = context.btxlistparameter
-local ctx_btxcitevariantparameter = context.btxcitevariantparameter
-local ctx_btxlistvariantparameter = context.btxlistvariantparameter
-local ctx_btxdomarkcitation = context.btxdomarkcitation
-local ctx_setvalue = context.setvalue
-local ctx_firstoftwoarguments = context.firstoftwoarguments
-local ctx_secondoftwoarguments = context.secondoftwoarguments
-local ctx_firstofoneargument = context.firstofoneargument
-local ctx_gobbleoneargument = context.gobbleoneargument
-local ctx_btxdirectlink = context.btxdirectlink
-local ctx_btxhandlelistentry = context.btxhandlelistentry
-local ctx_btxchecklistentry = context.btxchecklistentry
-local ctx_dodirectfullreference = context.dodirectfullreference
-local ctx_directsetup = context.directsetup
-
-statistics.register("publications load time", function()
- local publicationsstats = publications.statistics
- local nofbytes = publicationsstats.nofbytes
- if nofbytes > 0 then
- return string.format("%s seconds, %s bytes, %s definitions, %s shortcuts",
- statistics.elapsedtime(publications),nofbytes,publicationsstats.nofdefinitions,publicationsstats.nofshortcuts)
- else
- return nil
- end
-end)
-
-luatex.registerstopactions(function()
- logspushtarget("logfile")
- logsnewline()
- report("start used btx commands")
- logsnewline()
- local undefined = csname_id("undefined*crap")
- for name, dataset in sortedhash(datasets) do
- for command, n in sortedhash(dataset.commands) do
- local c = csname_id(command)
- if c and c ~= undefined then
- report("%-20s %-20s % 5i %s",name,command,n,"known")
- else
- local u = csname_id(utf.upper(command))
- if u and u ~= undefined then
- report("%-20s %-20s % 5i %s",name,command,n,"KNOWN")
- else
- report("%-20s %-20s % 5i %s",name,command,n,"unknown")
- end
- end
- end
- end
- logsnewline()
- report("stop used btxcommands")
- logsnewline()
- logspoptarget()
-end)
-
--- multipass: we need to sort because hashing is randomized per run and not per
--- version (not the most convenient change in lua)
-
-local collected = allocate()
-local tobesaved = allocate()
-
--- we use a dedicated (and efficient, as it knows what it deals with) serializer,
--- also because we need to ignore the 'details' field
-
-local function serialize(t)
- local f_key_table = formatters[" [%q] = {"]
- local f_key_string = formatters[" %s = %q,"]
- local r = { "return {" }
- local m = 1
- for tag, entry in sortedhash(t) do
- m = m + 1
- r[m] = f_key_table(tag)
- local s = sortedkeys(entry)
- for i=1,#s do
- local k = s[i]
- -- if k ~= "details" then
- m = m + 1
- r[m] = f_key_string(k,entry[k])
- -- end
- end
- m = m + 1
- r[m] = " },"
- end
- r[m] = "}"
- return concat(r,"\n")
-end
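-- [editor's note, not part of the deleted file] A hedged illustration of the intent:
-- for userdata such as { knuth84 = { category = "book", note = "check edition" } }
-- the serializer emits sorted, stable output along the lines of
--
--   return {
--    ["knuth84"] = {
--     category = "book",
--     note = "check edition",
--    },
--   }
--
-- so the saved file (and the md5 checksum computed in the finalizer below) only
-- changes when the data itself changes, not when lua's hash order changes per run.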
-
-local function finalizer()
- local prefix = tex.jobname -- or environment.jobname
- local setnames = sortedkeys(datasets)
- for i=1,#setnames do
- local name = setnames[i]
- local dataset = datasets[name]
- local userdata = dataset.userdata
- local checksum = nil
- local username = file.addsuffix(file.robustname(formatters["%s-btx-%s"](prefix,name)),"lua")
- if userdata and next(userdata) then
- if job.passes.first then
- local newdata = serialize(userdata)
- checksum = md5.HEX(newdata)
- io.savedata(username,newdata)
- end
- else
- os.remove(username)
- username = nil
- end
- local loaded = dataset.loaded
- local sources = dataset.sources
- local used = { }
- for i=1,#sources do
- local source = sources[i]
- if loaded[source.filename] ~= "previous" then -- or loaded[source.filename] == "current"
- used[#used+1] = source
- end
- end
- tobesaved[name] = {
- usersource = {
- filename = username,
- checksum = checksum,
- },
- datasources = used,
- }
- end
-end
-
-local function initializer()
- statistics.starttiming(publications)
-collected = publications.collected or collected -- for the moment as we load runtime
- for name, state in next, collected do
- local dataset = datasets[name]
- local datasources = state.datasources
- local usersource = state.usersource
- if datasources then
- for i=1,#datasources do
- local filename = datasources[i].filename
- publications.load(dataset,filename,"previous")
- end
- end
- if usersource then
- dataset.userdata = table.load(usersource.filename) or { }
- end
- end
- statistics.stoptiming(publications)
- function initializer() end -- will go, for now, runtime loaded
-end
-
-job.register('publications.collected',tobesaved,initializer,finalizer)
-
-if not publications.authors then
- initializer() -- for now, runtime loaded
-end
-
--- basic access
-
-local function getfield(dataset,tag,name)
- local d = datasets[dataset].luadata[tag]
- return d and d[name]
-end
-
-local function getdetail(dataset,tag,name)
- local d = datasets[dataset].details[tag]
- return d and d[name]
-end
-
-function commands.btxsingularorplural(dataset,tag,name) -- todo: make field dependent
- local d = datasets[dataset].details[tag]
- if d then
- d = d[name]
- end
- if d then
- d = #d <= 1
- end
- commands.doifelse(d)
-end
-
--- basic loading
-
-function commands.usebtxdataset(name,filename)
- publications.load(datasets[name],filename,"current")
-end
-
-function commands.convertbtxdatasettoxml(name,nice)
- publications.converttoxml(datasets[name],nice)
-end
-
--- enhancing
-
-local splitauthorstring = publications.authors.splitstring
-
-local pagessplitter = lpeg.splitat(P("-")^1)
-
--- maybe not redo when already done
-
-function publications.enhance(dataset) -- for the moment split runs (maybe publications.enhancers)
- statistics.starttiming(publications)
- if type(dataset) == "string" then
- dataset = datasets[dataset]
- end
- local luadata = dataset.luadata
- local details = dataset.details
- -- author, editor
- for tag, entry in next, luadata do
- local author = entry.author
- local editor = entry.editor
- details[tag] = {
- author = author and splitauthorstring(author),
- editor = editor and splitauthorstring(editor),
- }
- end
- -- short
- local shorts = { }
- for tag, entry in next, luadata do
- local author = details[tag].author
- if author then
- -- number depends on sort order
- local t = { }
- if #author == 0 then
- -- what
- else
- local n = #author == 1 and 3 or 1
- for i=1,#author do
- local surnames = author[i].surnames
- if not surnames or #surnames == 0 then
- -- error
- else
- t[#t+1] = utfsub(surnames[1],1,n)
- end
- end
- end
- local year = tonumber(entry.year) or 0
- local short = formatters["%t%02i"](t,math.mod(year,100))
- local s = shorts[short]
- if not s then
- shorts[short] = tag
- elseif type(s) == "string" then
- shorts[short] = { s, tag }
- else
- s[#s+1] = tag
- end
- else
- --
- end
- end
- for short, tags in next, shorts do
- if type(tags) == "table" then
- sort(tags)
- for i=1,#tags do
--- details[tags[i]].short = short .. numbertochar(i)
-local detail = details[tags[i]]
-detail.short = short
-detail.suffix = numbertochar(i)
- end
- else
- details[tags].short = short
- end
- end
- -- pages
- for tag, entry in next, luadata do
- local pages = entry.pages
- if pages then
- local first, last = lpegmatch(pagessplitter,pages)
- details[tag].pages = first and last and { first, last } or pages
- end
- end
- -- keywords
- for tag, entry in next, luadata do
- local keyword = entry.keyword
- if keyword then
- details[tag].keyword = settings_to_set(keyword)
- end
- end
- statistics.stoptiming(publications)
-end
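-- [editor's sketch, not part of the deleted file] The derived "short" key above takes
-- one character per surname (three for a single author) plus the year modulo 100;
-- colliding shorts additionally get an "a", "b", ... suffix. A standalone sketch,
-- using string.sub instead of the utf-aware utfsub:

local function demoshort(surnames,year)
    local n = #surnames == 1 and 3 or 1
    local t = { }
    for i=1,#surnames do
        t[i] = string.sub(surnames[i],1,n)
    end
    return string.format("%s%02d",table.concat(t),(tonumber(year) or 0) % 100)
end

print(demoshort({ "Hagen" },             "2013")) --> Hag13
print(demoshort({ "Hagen", "Hoekwater" },"2010")) --> HH10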
-
-function commands.addbtxentry(name,settings,content)
- local dataset = datasets[name]
- if dataset then
- publications.addtexentry(dataset,settings,content)
- end
-end
-
-function commands.setbtxdataset(name)
- local dataset = rawget(datasets,name)
- if dataset then
- context(name)
- else
- report("unknown dataset %a",name)
- end
-end
-
-function commands.setbtxentry(name,tag)
- local dataset = rawget(datasets,name)
- if dataset then
- if dataset.luadata[tag] then
- context(tag)
- else
- report("unknown tag %a in dataset %a",tag,name)
- end
- else
- report("unknown dataset %a",name)
- end
-end
-
--- rendering of fields (maybe multiple manipulators)
-
-local manipulation = utilities.parsers.manipulation
-local manipulators = utilities.parsers.manipulators
-
--- local function checked(field)
--- local m, f = lpegmatch(manipulation,field)
--- if m then
--- return manipulators[m], f or field
--- else
--- return nil, field
--- end
--- end
-
-local manipulation = Ct((C((1-P("->"))^1) * P("->"))^1) * C(P(1)^0)
-
-local function checked(field)
- local m, f = lpegmatch(manipulation,field)
- if m then
- return m, f or field
- else
- return nil, field
- end
-end
-
-local function manipulated(actions,str)
- for i=1,#actions do
- local action = manipulators[actions[i]]
- if action then
- str = action(str) or str
- end
- end
- return str
-end
-
-function commands.btxflush(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local fields = dataset.luadata[tag]
- if fields then
- local manipulator, field = checked(field)
- local value = fields[field]
- if type(value) == "string" then
- -- context(manipulator and manipulator(value) or value)
- context(manipulator and manipulated(manipulator,value) or value)
- return
- end
- local details = dataset.details[tag]
- if details then
- local value = details[field]
- if type(value) == "string" then
- -- context(manipulator and manipulator(value) or value)
- context(manipulator and manipulated(manipulator,value) or value)
- return
- end
- end
- report("unknown field %a of tag %a in dataset %a",field,tag,name)
- else
- report("unknown tag %a in dataset %a",tag,name)
- end
- else
- report("unknown dataset %a",name)
- end
-end
-
-function commands.btxdetail(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local details = dataset.details[tag]
- if details then
- local manipulator, field = checked(field)
- local value = details[field]
- if type(value) == "string" then
- -- context(manipulator and manipulator(value) or value)
- context(manipulator and manipulated(manipulator,value) or value)
- else
- report("unknown detail %a of tag %a in dataset %a",field,tag,name)
- end
- else
- report("unknown tag %a in dataset %a",tag,name)
- end
- else
- report("unknown dataset %a",name)
- end
-end
-
-function commands.btxfield(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local fields = dataset.luadata[tag]
- if fields then
- local manipulator, field = checked(field)
- local value = fields[field]
- if type(value) == "string" then
- -- context(manipulator and manipulator(value) or value)
- context(manipulator and manipulated(manipulator,value) or value)
- else
- report("unknown field %a of tag %a in dataset %a",field,tag,name)
- end
- else
- report("unknown tag %a in dataset %a",tag,name)
- end
- else
- report("unknown dataset %a",name)
- end
-end
-
--- testing: to be sped up with a test case
-
-function commands.btxdoifelse(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local data = dataset.luadata[tag]
- local value = data and data[field]
- if value and value ~= "" then
- ctx_firstoftwoarguments()
- return
- end
- end
- ctx_secondoftwoarguments()
-end
-
-function commands.btxdoif(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local data = dataset.luadata[tag]
- local value = data and data[field]
- if value and value ~= "" then
- ctx_firstofoneargument()
- return
- end
- end
- ctx_gobbleoneargument()
-end
-
-function commands.btxdoifnot(name,tag,field)
- local dataset = rawget(datasets,name)
- if dataset then
- local data = dataset.luadata[tag]
- local value = data and data[field]
- if value and value ~= "" then
- ctx_gobbleoneargument()
- return
- end
- end
- ctx_firstofoneargument()
-end
-
--- -- alternative approach: keep data at the tex end
-
-function publications.listconcat(t)
- local n = #t
- if n > 0 then
- context(t[1])
- if n > 1 then
- if n > 2 then
- for i=2,n-1 do
- ctx_btxlistparameter("sep")
- context(t[i])
- end
- ctx_btxlistparameter("finalsep")
- else
- ctx_btxlistparameter("lastsep")
- end
- context(t[n])
- end
- end
-end
-
-function publications.citeconcat(t)
- local n = #t
- if n > 0 then
- context(t[1])
- if n > 1 then
- if n > 2 then
- for i=2,n-1 do
- ctx_btxcitevariantparameter("sep")
- context(t[i])
- end
- ctx_btxcitevariantparameter("finalsep")
- else
- ctx_btxcitevariantparameter("lastsep")
- end
- context(t[n])
- end
- end
-end
-
-function publications.singularorplural(singular,plural)
- if lastconcatsize and lastconcatsize > 1 then
- context(plural)
- else
- context(singular)
- end
-end
-
--- function commands.makebibauthorlist(settings) -- ?
--- if not settings then
--- return
--- end
--- local dataset = datasets[settings.dataset]
--- if not dataset or dataset == "" then
--- return
--- end
--- local tag = settings.tag
--- if not tag or tag == "" then
--- return
--- end
--- local asked = settings_to_array(tag)
--- if #asked == 0 then
--- return
--- end
--- local compress = settings.compress
--- local interaction = settings.interactionn == v_start
--- local limit = tonumber(settings.limit)
--- local found = { }
--- local hash = { }
--- local total = 0
--- local luadata = dataset.luadata
--- for i=1,#asked do
--- local tag = asked[i]
--- local data = luadata[tag]
--- if data then
--- local author = data.a or "Xxxxxxxxxx"
--- local year = data.y or "0000"
--- if not compress or not hash[author] then
--- local t = {
--- author = author,
--- name = name, -- first
--- year = { [year] = name },
--- }
--- total = total + 1
--- found[total] = t
--- hash[author] = t
--- else
--- hash[author].year[year] = name
--- end
--- end
--- end
--- for i=1,total do
--- local data = found[i]
--- local author = data.author
--- local year = table.keys(data.year)
--- table.sort(year)
--- if interaction then
--- for i=1,#year do
--- year[i] = formatters["\\bibmaybeinteractive{%s}{%s}"](data.year[year[i]],year[i])
--- end
--- end
--- ctx_setvalue("currentbibyear",concat(year,","))
--- if author == "" then
--- ctx_setvalue("currentbibauthor","")
--- else -- needs checking
--- local authors = settings_to_array(author) -- {{}{}},{{}{}}
--- local nofauthors = #authors
--- if nofauthors == 1 then
--- if interaction then
--- author = formatters["\\bibmaybeinteractive{%s}{%s}"](data.name,author)
--- end
--- ctx_setvalue("currentbibauthor",author)
--- else
--- limit = limit or nofauthors
--- if interaction then
--- for i=1,#authors do
--- authors[i] = formatters["\\bibmaybeinteractive{%s}{%s}"](data.name,authors[i])
--- end
--- end
--- if limit == 1 then
--- ctx_setvalue("currentbibauthor",authors[1] .. "\\bibalternative{otherstext}")
--- elseif limit == 2 and nofauthors == 2 then
--- ctx_setvalue("currentbibauthor",concat(authors,"\\bibalternative{andtext}"))
--- else
--- for i=1,limit-1 do
--- authors[i] = authors[i] .. "\\bibalternative{namesep}"
--- end
--- if limit < nofauthors then
--- authors[limit+1] = "\\bibalternative{otherstext}"
--- ctx_setvalue("currentbibauthor",concat(authors,"",1,limit+1))
--- else
--- authors[limit-1] = authors[limit-1] .. "\\bibalternative{andtext}"
--- ctx_setvalue("currentbibauthor",concat(authors))
--- end
--- end
--- end
--- end
--- -- the following use: currentbibauthor and currentbibyear
--- if i == 1 then
--- context.ixfirstcommand()
--- elseif i == total then
--- context.ixlastcommand()
--- else
--- context.ixsecondcommand()
--- end
--- end
--- end
-
-local patterns = { "publ-imp-%s.mkiv", "publ-imp-%s.tex" }
-
-local function failure(name)
- report("unknown library %a",name)
-end
-
-local function action(name,foundname)
- context.input(foundname)
-end
-
-function commands.loadbtxdefinitionfile(name) -- a more specific name
- commands.uselibrary {
- name = gsub(name,"^publ%-",""),
- patterns = patterns,
- action = action,
- failure = failure,
- onlyonce = false,
- }
-end
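-- [editor's note, not part of the deleted file] For illustration: with the patterns
-- above, \loadbtxdefinitionfile[apa] (or [publ-apa]) looks for publ-imp-apa.mkiv or
-- publ-imp-apa.tex and reports "unknown library" when neither can be found.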
-
--- lists:
-
-publications.lists = publications.lists or { }
-local lists = publications.lists
-
-local context = context
-local structures = structures
-
-local references = structures.references
-local sections = structures.sections
-
--- per rendering
-
-local renderings = { } --- per dataset
-
-table.setmetatableindex(renderings,function(t,k)
- local v = {
- list = { },
- done = { },
- alldone = { },
- used = { },
- registered = { },
- ordered = { },
- shorts = { },
- method = v_none,
- currentindex = 0,
- }
- t[k] = v
- return v
-end)
-
--- why shorts vs tags: only for sorting
-
-function lists.register(dataset,tag,short) -- needs checking now that we split
- local r = renderings[dataset]
- if not short or short == "" then
- short = tag
- end
- if trace then
- report("registering publication entry %a with shortcut %a",tag,short)
- end
- local top = #r.registered + 1
- -- do we really need these
- r.registered[top] = tag
- r.ordered [tag] = top
- r.shorts [tag] = short
-end
-
-function lists.nofregistered(dataset)
- return #renderings[dataset].registered
-end
-
-function lists.setmethod(dataset,method)
- local r = renderings[dataset]
- r.method = method or v_none
- r.list = { }
- r.done = { }
-end
-
-local function validkeyword(dataset,tag,keyword)
- local ds = datasets[dataset]
- if not ds then
- report("unknown dataset %a",dataset)
- return
- end
- local dt = ds.details[tag]
- if not dt then
- report("no details for tag %a",tag)
- return
- end
- local kw = dt.keyword
- if kw then
--- inspect(keyword)
--- inspect(kw)
- for k in next, keyword do
- if kw[k] then
- return true
- end
- end
- end
-end
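-- [editor's note, not part of the deleted file] For illustration: a rendering with
-- keyword="lua,fonts" keeps an entry whose bib keyword field mentions either "lua"
-- or "fonts"; both sides are turned into sets, so this is membership testing, not
-- substring matching.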
-
-function lists.collectentries(specification)
- local dataset = specification.btxdataset
- if not dataset then
- return
- end
- local rendering = renderings[dataset]
--- specification.names = "btx"
- local method = rendering.method
- if method == v_none then
- return
- end
--- method=v_local --------------------
- local result = structures.lists.filter(specification)
- --
- local keyword = specification.keyword
- if keyword and keyword ~= "" then
- keyword = settings_to_set(keyword)
- else
- keyword = nil
- end
- lists.result = result
- local section = sections.currentid()
- local list = rendering.list
- local done = rendering.done
- local alldone = rendering.alldone
- if method == v_local then
- for listindex=1,#result do
- local r = result[listindex]
- local u = r.userdata
- if u and u.btxset == dataset then
- local tag = u.btxref
- if tag and done[tag] ~= section then
- if not keyword or validkeyword(dataset,tag,keyword) then
- done[tag] = section
- alldone[tag] = true
- list[#list+1] = { tag, listindex }
- end
- end
- end
- end
- elseif method == v_global then
- for listindex=1,#result do
- local r = result[listindex]
- local u = r.userdata
- if u and u.btxset == dataset then
- local tag = u.btxref
- if tag and not alldone[tag] and done[tag] ~= section then
- if not keyword or validkeyword(dataset,tag,keyword) then
- done[tag] = section
- alldone[tag] = true
- list[#list+1] = { tag, listindex }
- end
- end
- end
- end
- elseif method == v_force then
- -- only for checking, can have duplicates, todo: collapse page numbers, although
- -- we then also need deferred writes
- for listindex=1,#result do
- local r = result[listindex]
- local u = r.userdata
- if u and u.btxset == dataset then
- local tag = u.btxref
- if tag then
- if not keyword or validkeyword(dataset,tag,keyword) then
- list[#list+1] = { tag, listindex }
- end
- end
- end
- end
- elseif method == v_dataset then
- local luadata = datasets[dataset].luadata
- for tag, data in table.sortedhash(luadata) do
- if not keyword or validkeyword(dataset,tag,keyword) then
- list[#list+1] = { tag }
- end
- end
- end
-end
-
-lists.sorters = {
- [v_short] = function(dataset,rendering,list)
- local shorts = rendering.shorts
- local function compare(a,b)
- local aa, bb = a and a[1], b and b[1]
- if aa and bb then
- aa, bb = shorts[aa], shorts[bb]
- return aa and bb and aa < bb
- end
- return false
- end
- sort(list,compare)
- end,
- [v_reference] = function(dataset,rendering,list)
- local function compare(a,b)
- local aa, bb = a and a[1], b and b[1]
- if aa and bb then
- return aa and bb and aa < bb
- end
- return false
- end
- sort(list,compare)
- end,
- [v_dataset] = function(dataset,rendering,list)
- local function compare(a,b)
- local aa, bb = a and a[1], b and b[1]
- if aa and bb then
- aa, bb = list[aa].index or 0, list[bb].index or 0
- return aa and bb and aa < bb
- end
- return false
- end
- sort(list,compare)
- end,
- -- [v_default] = function(dataset,rendering,list) -- not really needed
- -- local ordered = rendering.ordered
- -- local function compare(a,b)
- -- local aa, bb = a and a[1], b and b[1]
- -- if aa and bb then
- -- aa, bb = ordered[aa], ordered[bb]
- -- return aa and bb and aa < bb
- -- end
- -- return false
- -- end
- -- sort(list,compare)
- -- end,
- [v_author] = function(dataset,rendering,list)
- local valid = publications.authors.preparedsort(dataset,list,v_author,v_editor)
- if #valid == 0 or #valid ~= #list then
- -- nothing to sort
- else
- -- if needed we can wrap compare and use the list directly but this is cleaner
- sorters.sort(valid,sortcomparer)
- for i=1,#valid do
- local v = valid[i]
- valid[i] = list[v.index]
- end
- return valid
- end
- end,
-}
-
-function lists.flushentries(dataset,sortvariant)
- local rendering = renderings[dataset]
- local list = rendering.list
- local sort = lists.sorters[sortvariant] or lists.sorters[v_default]
- if type(sort) == "function" then
- list = sort(dataset,rendering,list) or list
- end
- for i=1,#list do
- ctx_setvalue("currentbtxindex",i)
- ctx_btxhandlelistentry(list[i][1]) -- we can pass i here too ... more efficient to avoid the setvalue
- end
-end
-
-function lists.fetchentries(dataset)
- local list = renderings[dataset].list
- for i=1,#list do
- ctx_setvalue("currentbtxindex",i)
- ctx_btxchecklistentry(list[i][1])
- end
-end
-
-function lists.filterall(dataset)
- local r = renderings[dataset]
- local list = r.list
- local registered = r.registered
- for i=1,#registered do
- list[i] = { registered[i], i }
- end
-end
-
-function lists.registerplaced(dataset,tag)
- renderings[dataset].used[tag] = true
-end
-
-function lists.doifalreadyplaced(dataset,tag)
- commands.doifelse(renderings[dataset].used[tag])
-end
-
--- we ask for <n>:tag but when we can't find it we go back
--- to look for previous definitions, and when not found again
--- we look forward
-
-local function compare(a,b)
- local aa, bb = a and a[3], b and b[3]
- return aa and bb and aa < bb
-end
-
--- maybe hash subsets
--- how efficient is this? old leftovers?
-
--- rendering ?
-
-local f_reference = formatters["r:%s:%s:%s"] -- dataset, instance (block), tag
-local f_destination = formatters["d:%s:%s:%s"] -- dataset, instance (block), tag
-
-function lists.resolve(dataset,reference) -- maybe already feed it split
- -- needs checking (the prefix in relation to components)
- local subsets = nil
- local block = tex.count.btxblock
- local collected = references.collected
- local prefix = nil -- todo: dataset ?
- if prefix and prefix ~= "" then
- subsets = { collected[prefix] or collected[""] }
- else
- local components = references.productdata.components
- local subset = collected[""]
- if subset then
- subsets = { subset }
- else
- subsets = { }
- end
- for i=1,#components do
- local subset = collected[components[i]]
- if subset then
- subsets[#subsets+1] = subset
- end
- end
- end
--- inspect(subsets)
- if #subsets > 0 then
- local result, nofresult, done = { }, 0, { }
- for i=1,#subsets do
- local subset = subsets[i]
- for rest in gmatch(reference,"[^, ]+") do
- local blk, tag, found = block, nil, nil
- if block then
- tag = f_destination(dataset,blk,rest)
- found = subset[tag]
- if not found then
- for i=block-1,1,-1 do
- tag = f_destination(dataset,i,rest) -- i, not blk: probe the previous blocks
--- tag = i .. ":" .. rest
- found = subset[tag]
- if found then
- blk = i
- break
- end
- end
- end
- end
- if not found then
- blk = "*"
- tag = f_destination(dataset,blk,rest)
- found = subset[tag]
- end
- if found then
- local current = tonumber(found.entries and found.entries.text) -- tonumber needed
- if current and not done[current] then
- nofresult = nofresult + 1
- result[nofresult] = { blk, rest, current }
- done[current] = true
- end
- end
- end
- end
- local first, last, firsti, lasti, firstr, lastr
- local collected, nofcollected = { }, 0
- for i=1,nofresult do
- local r = result[i]
- local current = r[3]
- if not first then
- first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r
- elseif current == last + 1 then
- last, lasti, lastr = current, i, r
- else
- if last > first + 1 then
- nofcollected = nofcollected + 1
- collected[nofcollected] = { firstr, lastr }
- else
- nofcollected = nofcollected + 1
- collected[nofcollected] = firstr
- if last > first then
- nofcollected = nofcollected + 1
- collected[nofcollected] = lastr
- end
- end
- first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r
- end
- end
- if first and last then
- if last > first + 1 then
- nofcollected = nofcollected + 1
- collected[nofcollected] = { firstr, lastr }
- else
- nofcollected = nofcollected + 1
- collected[nofcollected] = firstr
- if last > first then
- nofcollected = nofcollected + 1
- collected[nofcollected] = lastr
- end
- end
- end
- if nofcollected > 0 then
--- inspect(reference)
--- inspect(result)
--- inspect(collected)
- for i=1,nofcollected do
- local c = collected[i]
- if i == nofcollected then
- ctx_btxlistvariantparameter("lastpubsep")
- elseif i > 1 then
- ctx_btxlistvariantparameter("pubsep")
- end
- if #c == 3 then -- a single entry: { block, tag, number }
- ctx_btxdirectlink(f_reference(dataset,c[1],c[2]),c[3])
- else -- a { first, last } pair marking a range of consecutive entries
- local f, l = c[1], c[2]
- ctx_btxdirectlink(f_reference(dataset,f[1],f[2]),f[3])
- context.endash() -- to do
- ctx_btxdirectlink(f_reference(dataset,l[1],l[2]),l[3])
- end
- end
- else
- context("[btx error 1]")
- end
- else
- context("[btx error 2]")
- end
-end
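-- [editor's sketch, not part of the deleted file] The collapsing step above in
-- isolation: consecutive list numbers become first/last pairs, so { 1, 2, 3, 7 }
-- ends up as "1-3, 7" (an endash plus the cite separators in the real rendering).

local function democollapse(numbers)
    local out, first, last = { }, nil, nil
    local function flush()
        if not first then
            -- nothing pending
        elseif last > first + 1 then
            out[#out+1] = first .. "-" .. last -- a real range
        elseif last > first then
            out[#out+1] = first                -- two adjacent numbers stay separate
            out[#out+1] = last
        else
            out[#out+1] = first                -- a single number
        end
    end
    for i=1,#numbers do
        local current = numbers[i]
        if not first then
            first, last = current, current
        elseif current == last + 1 then
            last = current
        else
            flush()
            first, last = current, current
        end
    end
    flush()
    return table.concat(out,", ")
end

print(democollapse({ 1, 2, 3, 7 })) --> 1-3, 7
print(democollapse({ 4, 5 }))       --> 4, 5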
-
-local done = { }
-
-function commands.btxreference(dataset,block,tag,data)
- local ref = f_reference(dataset,block,tag)
- if not done[ref] then
- done[ref] = true
--- context("<%s>",data)
- ctx_dodirectfullreference(ref,data)
- end
-end
-
-local done = { }
-
-function commands.btxdestination(dataset,block,tag,data)
- local ref = f_destination(dataset,block,tag)
- if not done[ref] then
- done[ref] = true
--- context("<<%s>>",data)
- ctx_dodirectfullreference(ref,data)
- end
-end
-
-commands.btxsetlistmethod = lists.setmethod
-commands.btxresolvelistreference = lists.resolve
-commands.btxregisterlistentry = lists.registerplaced
-commands.btxaddtolist = lists.addentry
-commands.btxcollectlistentries = lists.collectentries
-commands.btxfetchlistentries = lists.fetchentries
-commands.btxflushlistentries = lists.flushentries
-commands.btxdoifelselistentryplaced = lists.doifalreadyplaced
-
-local citevariants = { }
-publications.citevariants = citevariants
-
--- helper
-
-local function sortedtags(dataset,list,sorttype)
- local luadata = datasets[dataset].luadata
- local valid = { }
- for i=1,#list do
- local tag = list[i]
- local entry = luadata[tag]
- if entry then
- local key = entry[sorttype]
- if key then
- valid[#valid+1] = {
- tag = tag,
- split = sortsplitter(sortstripper(key))
- }
- else
- end
- end
- end
- if #valid == 0 or #valid ~= #list then
- return list
- else
- sorters.sort(valid,basicsorter)
- for i=1,#valid do
- valid[i] = valid[i].tag
- end
- return valid
- end
-end
-
--- todo: standard : current
-
-local prefixsplitter = lpeg.splitat("::")
-
-function commands.btxhandlecite(dataset,tag,mark,variant,sorttype,setup) -- variant for tracing
- local prefix, rest = lpegmatch(prefixsplitter,tag)
- if rest then
- dataset = prefix
- else
- rest = tag
- end
- ctx_setvalue("currentbtxdataset",dataset)
- local tags = settings_to_array(rest)
- if #tags > 0 then
- if sorttype and sorttype ~= "" then
- tags = sortedtags(dataset,tags,sorttype)
- end
- ctx_btxcitevariantparameter(v_left)
- for i=1,#tags do
- local tag = tags[i]
- ctx_setvalue("currentbtxtag",tag)
- if i > 1 then
- ctx_btxcitevariantparameter(v_middle)
- end
- if mark ~= false then
- ctx_btxdomarkcitation(dataset,tag)
- end
- ctx_directsetup(setup) -- cite can become alternative
- end
- ctx_btxcitevariantparameter(v_right)
- else
- -- error
- end
-end
-
-function commands.btxhandlenocite(dataset,tag,mark)
- if mark ~= false then
- local prefix, rest = lpegmatch(prefixsplitter,tag)
- if rest then
- dataset = prefix
- else
- rest = tag
- end
- ctx_setvalue("currentbtxdataset",dataset)
- local tags = settings_to_array(rest)
- for i=1,#tags do
- ctx_btxdomarkcitation(dataset,tags[i])
- end
- end
-end
-
-function commands.btxcitevariant(dataset,block,tags,variant)
- local action = citevariants[variant] or citevariants.default
- if action then
- action(dataset,tags,variant)
- end
-end
-
-function citevariants.default(dataset,tags,variant)
- local content = getfield(dataset,tags,variant)
- if content then
- context(content)
- end
-end
-
--- todo : sort
--- todo : choose between publications or commands namespace
--- todo : use details.author
--- todo : sort details.author
-
-local function collectauthoryears(dataset,tags)
- local luadata = datasets[dataset].luadata
- local list = settings_to_array(tags)
- local found = { }
- local result = { }
- local order = { }
- for i=1,#list do
- local tag = list[i]
- local entry = luadata[tag]
- if entry then
- local year = entry.year
- local author = entry.author
- if author and year then
- local a = found[author]
- if not a then
- a = { }
- found[author] = a
- order[#order+1] = author
- end
- local y = a[year]
- if not y then
- y = { }
- a[year] = y
- end
- y[#y+1] = tag
- end
- end
- end
- -- found = { author = { year_1 = { e1, e2, e3 } } }
- for i=1,#order do
- local author = order[i]
- local years = found[author]
- local yrs = { }
- for year, entries in next, years do
- if subyears then
- -- -- add letters to all entries of an author and if so shouldn't
- -- -- we tag all years of an author as soon as we do this?
- -- if #entries > 1 then
- -- for i=1,#years do
- -- local entry = years[i]
- -- -- years[i] = year .. string.char(i + string.byte("0") - 1)
- -- end
- -- end
- else
- yrs[#yrs+1] = year
- end
- end
- result[i] = { author = author, years = yrs }
- end
- return result, order
-end
-
--- (name, name and name) .. how names? how sorted?
--- todo: we loop at the tex end .. why not here
--- \cite[{hh,afo},kvm]
-
--- maybe we will move this to the tex end anyway
-
-function citevariants.author(dataset,tags)
- local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
- publications.citeconcat(order)
-end
-
-local function authorandyear(dataset,tags,formatter)
- local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
- for i=1,#result do
- local r = result[i]
- order[i] = formatter(r.author,r.years) -- reuse order
- end
- publications.citeconcat(order)
-end
-
-function citevariants.authoryear(dataset,tags)
- authorandyear(dataset,tags,formatters["%s (%, t)"])
-end
-
-function citevariants.authoryears(dataset,tags)
- authorandyear(dataset,tags,formatters["%s, %, t"])
-end
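-- [editor's note, not part of the deleted file] A hedged example of the grouping:
-- citing three tags with author/year pairs Hagen/2010, Hagen/2012 and Knuth/1984
-- produces { author = "Hagen", years = { "2010", "2012" } } followed by
-- { author = "Knuth", years = { "1984" } }, which the authoryear formatter above
-- renders roughly as "Hagen (2010, 2012)" and "Knuth (1984)", joined by the cite
-- variant's sep/lastsep/finalsep parameters.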
-
-function citevariants.authornum(dataset,tags)
- local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
- publications.citeconcat(order)
- ctx_btxcitevariantparameter(v_inbetween)
- lists.resolve(dataset,tags) -- left/right ?
-end
-
--- function citevariants.short(dataset,tags)
--- local short = getdetail(dataset,tags,"short")
--- if short then
--- context(short)
--- end
--- end
-
-function citevariants.short(dataset,tags)
- local short = getdetail(dataset,tags,"short")
- local suffix = getdetail(dataset,tags,"suffix")
- if suffix then
- context(short .. suffix)
- elseif short then
- context(short)
- end
-end
-
-function citevariants.page(dataset,tags)
- local pages = getdetail(dataset,tags,"pages")
- if not pages then
- -- nothing
- elseif type(pages) == "table" then
- context(pages[1])
- ctx_btxcitevariantparameter(v_inbetween)
- context(pages[2])
- else
- context(pages)
- end
-end
-
-function citevariants.num(dataset,tags)
--- ctx_btxdirectlink(f_destination(dataset,block,tags),listindex) -- not okay yet
- lists.resolve(dataset,tags)
-end
-
-function citevariants.serial(dataset,tags) -- the traditional fieldname is "serial" and not "index"
- local index = getfield(dataset,tags,"index")
- if index then
- context(index)
- end
-end
-
--- List variants
-
-local listvariants = { }
-publications.listvariants = listvariants
-
--- function commands.btxhandlelist(dataset,block,tag,variant,setup)
--- if sorttype and sorttype ~= "" then
--- tags = sortedtags(dataset,tags,sorttype)
--- end
--- ctx_setvalue("currentbtxtag",tag)
--- ctx_btxlistvariantparameter(v_left)
--- ctx_directsetup(setup)
--- ctx_btxlistvariantparameter(v_right)
--- end
-
-function commands.btxlistvariant(dataset,block,tags,variant,listindex)
- local action = listvariants[variant] or listvariants.default
- if action then
- action(dataset,block,tags,variant,tonumber(listindex) or 0)
- end
-end
-
-function listvariants.default(dataset,block,tags,variant)
- context("?")
-end
-
-function listvariants.num(dataset,block,tags,variant,listindex)
- ctx_btxdirectlink(f_destination(dataset,block,tags),listindex) -- not okay yet
-end
-
--- function listvariants.short(dataset,block,tags,variant,listindex)
--- local short = getdetail(dataset,tags,variant,variant)
--- if short then
--- context(short)
--- end
--- end
-
-function listvariants.short(dataset,block,tags,variant,listindex)
- local short = getdetail(dataset,tags,"short","short")
- local suffix = getdetail(dataset,tags,"suffix","suffix")
- if suffix then
- context(short .. suffix)
- elseif short then
- context(short)
- end
-end
diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv
deleted file mode 100644
index 42226695c..000000000
--- a/tex/context/base/publ-ini.mkiv
+++ /dev/null
@@ -1,963 +0,0 @@
-%D \module
-%D [ file=publ-ini,
-%D version=2013.05.12,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=Initialization,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-% todo: we cannot use 'default' as this wipes metadata names (maybe no longer do that)
-% todo: \v!cite => \s!cite
-% todo: interface with (ml)bibtex (export -> call -> import)
-% todo: check if 'all' etc are ok ... either use list or use other criterium
-
-% \definecolor[btx:field] [darkred]
-% \definecolor[btx:crossref][darkblue]
-% \definecolor[btx:key] [darkgreen]
-% \definecolor[btx:todo] [darkyellow]
-
-%D We operate on several axes:
-%D
-%D \startitemize[packed]
-%D \startitem we can have several databases (or combinations) \stopitem
-%D \startitem we can add entries to them if needed (coded in tex) \stopitem
-%D \startitem we can have several lists each using one of the databases \stopitem
-%D \startitem we can render each list or citation independently \stopitem
-%D \stopitemize
-%D
-%D We assume that the rendering of a list entry is consistent in a document,
-%D although one can redefine properties if needed. Adding more granularity would
-%D complicate the user interface beyond comprehension.
-
-\writestatus{loading}{ConTeXt Publication Support / Initialization}
-
-\registerctxluafile{publ-dat}{1.001}
-\registerctxluafile{publ-aut}{1.001}
-\registerctxluafile{publ-usr}{1.001}
-\registerctxluafile{publ-ini}{1.001}
-\registerctxluafile{publ-oth}{1.001} % this could become an option
-
-\unprotect
-
-\def\s!btx{btx}
-\def\v!btxlist{btxlist}
-
-% a dedicated construction mechanism
-
-\installcorenamespace {btxlist}
-
-\installcommandhandler \??btxlist {btxlist} \??btxlist
-
-\unexpanded\setvalue{\??constructioninitializer\v!btxlist}%
- {\let\currentbtxlist \currentconstruction
- \let\constructionparameter \btxlistparameter
- \let\detokenizedconstructionparameter\detokenizedbtxlistparameter
- \let\letconstructionparameter \letbtxlistparameter
- \let\useconstructionstyleandcolor \usebtxliststyleandcolor
- \let\setupcurrentconstruction \setupcurrentbtxlist}
-
-\expandafter\let\csname\??constructionmainhandler \v!btxlist\expandafter\endcsname\csname\??constructionmainhandler \v!construction\endcsname
-\expandafter\let\csname\??constructioncommandhandler\v!btxlist\expandafter\endcsname\csname\??constructioncommandhandler\v!construction\endcsname
-\expandafter\let\csname\??constructiontexthandler \v!btxlist\expandafter\endcsname\csname\??constructiontexthandler \v!construction\endcsname
-
-\unexpanded\setvalue{\??constructioncommandhandler\v!btxlist}%
- {\csname\??constructionstarthandler\v!construction\endcsname
- \csname\??constructionstophandler \v!construction\endcsname
- \endgroup}
-
-\unexpanded\setvalue{\??constructionstarthandler\v!btxlist}%
- {\csname\??constructionstarthandler\v!construction\endcsname}
-
-\unexpanded\setvalue{\??constructionstophandler\v!btxlist}%
- {\csname\??constructionstophandler\v!construction\endcsname
- \endgroup}
-
-\unexpanded\def\startbtxlistentry#1%
- {\begingroup
- \strc_constructions_initialize{#1}%
- \csname\??constructionstarthandler\currentconstructionhandler\endcsname}
-
-\unexpanded\def\stopbtxlistentry
- {\csname\??constructionstophandler\currentconstructionhandler\endcsname}
-
-\unexpanded\setvalue{\??constructiontexthandler\v!btxlist}%
- {\begingroup
- \useconstructionstyleandcolor\c!headstyle\c!headcolor % move to \currentconstructiontext
- \the\everyconstruction
- \constructionparameter\c!headcommand
- {\strut
- \constructionparameter\c!text
- \btx_reference_inject}%
- \endgroup}
-
-\unexpanded\def\strc_constructions_initialize#1% class instance
- {\edef\currentconstruction{#1}%
- \let\currentconstructionlistentry\!!zerocount
- \expandafter\let\expandafter\currentconstructionmain \csname\??constructionmain \currentconstruction\endcsname
- \expandafter\let\expandafter\currentconstructionlevel \csname\??constructionlevel\currentconstruction\endcsname
- \expandafter\let\expandafter\currentconstructionhandler\csname\??constructionclass\currentconstruction\endcsname
- \csname\??constructioninitializer\currentconstructionhandler\endcsname}
-
-\appendtoks
- % \ifx\currentbtxlistparent\empty
- % \defineconstruction[\currentbtxlist][\currentbtxlistparent][\s!handler=\v!btxlist,\c!level=1]%
- % \else
- % \defineconstruction[\currentbtxlist][\s!handler=\v!btxlist,\c!level=1]%
- % \fi
- \ifx\currentbtxlistparent\empty
- \letvalue{\??constructionmain\currentbtxlist}\currentbtxlist
- \else
- \letvalue{\??constructionmain\currentbtxlist}\currentbtxlistparent
- \fi
- \setevalue{\??constructionlevel\currentbtxlist}{\number\btxlistparameter\c!level}%
- \setevalue{\??constructionclass\currentbtxlist}{\btxlistparameter\s!handler}%
-\to \everydefinebtxlist
-
-\setupbtxlist
- [\s!handler=\v!btxlist,
- \c!level=1]
-
-\setupbtxlist
- [\c!alternative=\v!left,
- \c!headstyle=,
- \c!titlestyle=,
- %\c!style=,
- %\c!color=,
- %\c!headcolor=,
- %\c!titlecolor=,
- \c!width=4\emwidth,
- \c!distance=\emwidth,
- %\c!titledistance=.5\emwidth,
- %\c!hang=,
- %\c!sample=,
- %\c!align=,
- %\c!headalign=,
- \c!margin=\v!no,
- \c!before=\blank,
- \c!inbetween=\blank,
- \c!after=\blank,
- %\c!indentnext=\v!yes,
- %\c!indenting=\v!never,
- %\c!titleleft=(,
- %\c!titleright=),
- %\c!closesymbol=,
- %\c!closecommand=\wordright,
- \c!display=\v!yes,
- \c!command=,
- %\c!titlecommand=,
- %\c!expansion=\v!no,
- %\c!xmlsetup=,
- %\s!catcodes=,
- %\c!title=\v!yes,
- %\c!text=,
- ]
-
-% here starts the bib stuff
-
-\installcorenamespace {btxdataset}
-\installcorenamespace {btxlistvariant}
-\installcorenamespace {btxcitevariant}
-\installcorenamespace {btxrendering}
-\installcorenamespace {btxcommand}
-\installcorenamespace {btxnumbering}
-
-\installcommandhandler \??btxdataset {btxdataset} \??btxdataset
-\installcommandhandler \??btxlistvariant {btxlistvariant} \??btxlistvariant
-\installcommandhandler \??btxcitevariant {btxcitevariant} \??btxcitevariant
-\installcommandhandler \??btxrendering {btxrendering} \??btxrendering
-
-\unexpanded\def\usebtxdataset
- {\dodoubleargument\publ_use_dataset}
-
-\def\publ_use_dataset[#1][#2]%
- {\ifsecondargument
- \ctxcommand{usebtxdataset("#1","#2")}%
- \else
- \ctxcommand{usebtxdataset("\v!standard","#1")}%
- \fi}
-
-\definebtxdataset
- [\v!standard]
-
-% \usebtxdataset
-% [standard]
-% [mybibs.bib]
-
-\unexpanded\def\startpublication
- {\dodoubleempty\publ_set_publication}
-
-\let\stoppublication\relax
-
-\def\publ_set_publication[#1][#2]%
- {\begingroup
- \catcode\commentasciicode\othercatcode
- \ifsecondargument
- \expandafter\publ_set_publication_indeed
- \else\iffirstargument
- \doubleexpandafter\publ_set_publication_checked
- \else
- \doubleexpandafter\publ_set_publication_default
- \fi\fi{#1}{#2}}
-
-\def\publ_set_publication_default#1#2%
- {\publ_set_publication_indeed\v!standard{#1}}
-
-\def\publ_set_publication_checked#1#2%
- {\doifassignmentelse{#1}
- {\publ_set_publication_indeed\v!standard{#1}}
- {\publ_set_publication_indeed{#1}{}}}
-
-\def\publ_set_publication_indeed#1#2#3\stoppublication
- {\ctxcommand{addbtxentry("#1",\!!bs#2\!!es,\!!bs\detokenize{#3}\!!es)}%
- \endgroup
- \ignorespaces}
-
-% commands
-
-\unexpanded\def\btxcommand#1%
- {\ifcsname\??btxcommand#1\endcsname
- \expandafter\publ_command_yes
- \else
- \expandafter\publ_command_nop
- \fi{#1}}
-
-\let\btxcmd\btxcommand
-
-\def\publ_command_yes#1%
- {\csname\??btxcommand#1\endcsname}
-
-\def\publ_command_nop#1%
- {\ifcsname#1\endcsname
- \writestatus\m!publications{unknown command: #1, using built-in context variant #1}%
- %\setuxvalue{\??btxcommand#1}{\expandafter\noexpand\csname#1\endcsname}%
- \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname#1\endcsname
- \else\ifcsname\utfupper{#1}\endcsname
- \writestatus\m!publications{unknown command: #1, using built-in context variant \utfupper{#1}}%
- %\setuxvalue{\??btxcommand#1}{\expandafter\noexpand\csname\utfupper{#1}\endcsname}%
- \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname\utfupper{#1}\endcsname
- \else
- \writestatus\m!publications{unknown command: #1}%
- \setugvalue{\??btxcommand#1}{\underbar{\tttf#1}}%
- \fi\fi
- \publ_command_yes{#1}}
-
-\unexpanded\def\definebtxcommand#1% {body} #1..#n{body}
- {\setuvalue{\??btxcommand\strippedcsname#1}}%
-
-% access
-
-\let\currentbtxtag \empty
-\let\currentbtxdataset\v!standard
-
-\unexpanded\def\setbtxdataset[#1]%
- {\edef\currentbtxdataset{\ctxcommand{setbtxdataset("#1")}}}
-
-\unexpanded\def\setbtxentry[#1]%
- {\edef\currentbtxtag{\ctxcommand{setbtxentry("\currentbtxdataset","#1")}}}
-
-% \let\btxsetdataset\setbtxdataset
-% \let\btxsetentry \setbtxentry
-
-\def\btxfield #1{\ctxcommand{btxfield("\currentbtxdataset","\currentbtxtag","#1")}}
-\def\btxdetail #1{\ctxcommand{btxdetail("\currentbtxdataset","\currentbtxtag","#1")}}
-\def\btxflush #1{\ctxcommand{btxflush("\currentbtxdataset","\currentbtxtag","#1")}}
-%def\btxrendering#1{\ctxcommand{btxrendering("\currentbtxdataset","\currentbtxtag","#1","\btxrenderingparameter\c!interaction")}}
-\def\btxdoifelse #1{\ctxcommand{btxdoifelse("\currentbtxdataset","\currentbtxtag","#1")}}
-\def\btxdoif #1{\ctxcommand{btxdoif("\currentbtxdataset","\currentbtxtag","#1")}}
-\def\btxdoifnot #1{\ctxcommand{btxdoifnot("\currentbtxdataset","\currentbtxtag","#1")}}
-
-\let\btxsetup \directsetup
-
-%D How complex will we go? Can we assume that e.g. an apa style will not be mixed
-%D with another one? I think this assumption is okay. For manuals we might want to
-%D mix but we can work around it.
-
-%D Rendering.
-
-\unexpanded\def\btxspace {\removeunwantedspaces\space}
-\unexpanded\def\btxperiod {\removeunwantedspaces.\space}
-\unexpanded\def\btxcomma {\removeunwantedspaces,\space}
-\unexpanded\def\btxlparent {\removeunwantedspaces\space(}
-\unexpanded\def\btxrparent {\removeunwantedspaces)\space}
-\unexpanded\def\btxlbracket{\removeunwantedspaces\space[}
-\unexpanded\def\btxrbracket{\removeunwantedspaces]\space}
-
-%D Rendering lists and citations.
-
-\newconditional\c_btx_trace
-
-\installtextracker
- {btxrendering}
- {\settrue \c_btx_trace}
- {\setfalse\c_btx_trace}
-
-\unexpanded\def\startbtxrendering
- {\begingroup
- \dosingleempty\btx_start_rendering}
-
-\def\btx_start_rendering[#1]%
- {\edef\currentbtxrendering{#1}}
-
-\unexpanded\def\stopbtxrendering
- {\endgroup}
-
-\unexpanded\def\btxtodo#1%
- {[#1]}
-
-%D Specific rendering definitions (like apa):
-
-\unexpanded\def\loadbtxdefinitionfile[#1]%
- {\ctxcommand{loadbtxdefinitionfile("#1")}}
-
-%D Lists:
-
-\newdimen\d_publ_number_width
-%newdimen\d_publ_number_distance
-
-\ifdefined\btxblock \else \newcount\btxblock \fi \btxblock\plusone
-\ifdefined\btxcounter \else \newcount\btxcounter \fi
-
-\newtoks \everysetupbtxlistplacement % name will change
-\newtoks \everysetupbtxciteplacement % name will change
-
-% \def\publ_list_processor % bibref -> btx (old method, keep as reference)
-% {\ctxcommand{btxaddtolist("\currentbtxrendering",\currentlistindex,"btxref")}}
-
-\definelist % only used for selecting
- [btx]
-
-\setuplist
- [btx]%
- [\c!state=\v!start]%
-
-\appendtoks
- \ifx\currentbtxrenderingparent\empty
- \definebtxlist
- [\currentbtxrendering]%
- \else
- \definebtxlist
- [\currentbtxrendering]%
- [\currentbtxrenderingparent]%
- \fi
-\to \everydefinebtxrendering
-
-\unexpanded\def\btx_entry_inject
- {\begingroup
- \edef\currentbtxcategory{\btxfield{category}}%
- \ignorespaces
- \directsetup{\s!btx:\currentbtxalternative:\currentbtxcategory}%
- \removeunwantedspaces
- \endgroup}
-
-\unexpanded\def\completebtxrendering{\dodoubleempty\publ_place_list_complete}
-\unexpanded\def\placebtxrendering {\dodoubleempty\publ_place_list_standard}
-
-\let\completelistofpublications\completebtxrendering
-\let\placelistofpublications \placebtxrendering
-
-\def\publ_place_list_check_criterium
- {\edef\currentbtxcriterium{\btxrenderingparameter\c!criterium}% \v!cite will become \s!cite
- \ifx\currentbtxcriterium\empty
- \let\currentbtxcriterium\v!previous
- \else\ifx\currentbtxcriterium\v!cite
- \let\currentbtxcriterium\v!here
- \fi\fi}
-
-\def\publ_place_list_complete[#1][#2]% title might become obsolete, just headtext
- {\begingroup
- \edef\currentbtxrendering{#1}%
- \setupcurrentbtxrendering[#2]%
- \let\currentlist\s!btx
- \let\currentbtxlist\currentbtxrendering
- \publ_place_list_check_criterium
- \edef\currentbtxrenderingtitle{\btxrenderingparameter\c!title}%
- \ifx\currentbtxrenderingtitle\empty
- \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\headtext{\currentbtxrendering}}]}%
- \else
- \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\currentbtxrenderingtitle}]}%
- \fi
- \publ_place_list_indeed
- \stopnamedsection
- \endgroup}
-
-\def\publ_place_list_standard[#1][#2]%
- {\begingroup
- \edef\currentbtxrendering{#1}%
- \setupcurrentbtxrendering[#2]%
- \let\currentlist\s!btx
- \let\currentbtxlist\currentbtxrendering
- \publ_place_list_check_criterium
- \publ_place_list_indeed
- \endgroup}
-
-\newconditional\c_publ_place_all
-\newconditional\c_publ_place_register % to be interfaced
-\newconditional\c_publ_place_check % to be interfaced
-
-\appendtoks
- \ifx\currentbtxcriterium\v!all % move this check to lua ... easier to test there anyway
- \settrue\c_publ_place_all
- \else
- \setfalse\c_publ_place_all
- \fi
-\to \everysetupbtxlistplacement
-
-\def\publ_place_list_indeed
- {\startbtxrendering[\currentbtxrendering]%
- \directsetup{\btxrenderingparameter\c!setups}%
- % \determinelistcharacteristics[\currentbtxrendering]%
- \edef\currentbtxalternative{\btxrenderingparameter\c!alternative}%
- \edef\currentbtxdataset{\btxrenderingparameter\c!dataset}%
- \let\currentlist\s!btx
- \let\currentbtxlist\currentbtxrendering
- \the\everysetupbtxlistplacement
- \forgetall
- \ctxcommand{btxsetlistmethod("\currentbtxdataset","\btxrenderingparameter\c!method")}%
- \startpacked[\v!blank]%
- % here we just collect items
- \ctxcommand{btxcollectlistentries {
- names = "btx",
- criterium = "\currentbtxcriterium",
- number = "\btxrenderingparameter\c!number",
- btxdataset = "\currentbtxdataset",
- keyword = "\btxrenderingparameter\c!keyword",
- }}%
- % next we analyze the width
- \ifx\btx_reference_inject_indeed\relax \else
- \edef\p_width{\btxrenderingparameter\c!width}%
- \ifx\p_width\v!auto
- \scratchcounter\btxcounter
- \setbox\scratchbox\vbox{\settrialtypesetting\ctxcommand{btxfetchlistentries("\currentbtxdataset")}}%
- \d_publ_number_width\wd\scratchbox
- \global\btxcounter\scratchcounter
- \letbtxlistparameter\c!width\d_publ_number_width
- \fi
- \fi
- % this actually typesets them
- \ctxcommand{btxflushlistentries("\currentbtxdataset","\btxrenderingparameter\c!sorttype")}%
- \stoppacked
- \stopbtxrendering
- \global\advance\btxblock\plusone}
-
-\def\currentbtxblock{\number\btxblock}
-
-\def\publ_place_list_entry_checked
- {\ctxcommand{btxdoifelselistentryplaced("\currentbtxdataset","\currentbtxtag")}\donothing\publ_place_list_entry}
-
-\def\publ_place_list_entry_register
- {\ctxcommand{btxregisterlistentry("\currentbtxdataset","\currentbtxtag")}}
-
-\unexpanded\def\btxhandlelistentry#1% called at the lua end
- {\begingroup
- \edef\currentbtxtag{#1}%
- \ifconditional\c_publ_place_all
- \publ_place_list_entry
- \else\ifconditional\c_publ_place_check
- \publ_place_list_entry_checked
- \else
- \publ_place_list_entry
- \fi\fi
- \endgroup}
-
-\unexpanded\def\publ_place_list_entry
- {\global\advance\btxcounter\plusone
- \ifconditional\c_publ_place_register
- \publ_place_list_entry_register
- \fi
- \let\currentlist\s!btx
- \startbtxlistentry\currentbtxrendering
- \btx_entry_inject
- \stopbtxlistentry}
-
-\unexpanded\def\btxchecklistentry#1% called at the lua end
- {\begingroup
- \edef\currentbtxtag{#1}%
- \ifx\currentbtxcriterium\v!all % move this check to lua ... easier to test there anyway
- \publ_check_list_entry
- \else
- \ctxcommand{btxdoifelselistentryplaced("\currentbtxdataset","\currentbtxtag")}\donothing\publ_check_list_entry
- \fi
- \endgroup}
-
-\unexpanded\def\publ_check_list_entry
- {\global\advance\btxcounter\plusone
- % todo, switch to font
- \hbox{\btx_reference_checked}%
- \par}
-
-\unexpanded\def\btx_reference_inject % we can use a faster \reference
- {\dontleavehmode\begingroup % no box
- \iftrialtypesetting\else
- \ctxcommand{btxdestination("\currentbtxdataset","\currentbtxblock","\currentbtxtag","\number\btxcounter")}%
- \fi
- \btx_reference_inject_indeed
- \endgroup}
-
-\unexpanded\def\btx_reference_checked
- {\dontleavehmode\hbox\bgroup
- \btx_reference_inject_indeed
- \egroup}
-
-\setuvalue{\??btxnumbering\v!short }{\btxlistvariant{short}} % these will be setups
-\setuvalue{\??btxnumbering\v!bib }{\btxlistvariant{num}} % these will be setups
-\setuvalue{\??btxnumbering\s!unknown}{\btxlistvariant{num}} % these will be setups
-\setuvalue{\??btxnumbering\v!yes }{\btxlistvariant{num}} % these will be setups
-
-\appendtoks
- \edef\p_btx_numbering{\btxrenderingparameter\c!numbering}%
- \letlistparameter\c!numbercommand\firstofoneargument % for the moment, no doubling needed
- \ifx\p_btx_numbering\v!no
- \letlistparameter\c!textcommand\outdented % needed? we can use titlealign
- \letlistparameter\c!symbol \v!none
- \letlistparameter\c!aligntitle \v!yes
- \let\btx_reference_inject_indeed\relax
- \else
- \ifcsname\??btxnumbering\p_btx_numbering\endcsname \else
- \let\p_btx_numbering\s!unknown
- \fi
- \letlistparameter\c!headnumber\v!always
- \expandafter\let\expandafter\btx_reference_inject_indeed\csname\??btxnumbering\p_btx_numbering\endcsname
- \fi
-\to \everysetupbtxlistplacement
-
-% \appendtoks
-% \edef\currentbtxcriterium{\btxrenderingparameter\c!criterium}%
-% \to \everysetupbtxlistplacement
-
-\unexpanded\def\btxflushauthor
- {\doifnextoptionalelse\btx_flush_author_yes\btx_flush_author_nop}
-
-\def\btx_flush_author_yes[#1]{\btx_flush_author{#1}}
-\def\btx_flush_author_nop {\btx_flush_author{\btxlistvariantparameter\c!author}}
-
-\unexpanded\def\btx_flush_author#1#2%
- {\edef\currentbtxfield{#2}%
- \let\currentbtxlistvariant\currentbtxfield
- \ctxcommand{btxauthor("\currentbtxdataset","\currentbtxtag","\currentbtxfield",{
- combiner = "#1",
- etallimit = \number\btxlistvariantparameter\c!etallimit,
- etaldisplay = \number\btxlistvariantparameter\c!etaldisplay,
- })}}
-
-\unexpanded\def\btxflushauthornormal {\btx_flush_author{normal}} % #1
-\unexpanded\def\btxflushauthornormalshort {\btx_flush_author{normalshort}} % #1
-\unexpanded\def\btxflushauthorinverted {\btx_flush_author{inverted}} % #1
-\unexpanded\def\btxflushauthorinvertedshort{\btx_flush_author{invertedshort}} % #1
-
-% \btxflushauthor{author}
-% \btxflushauthor{artauthor}
-% \btxflushauthor{editor}
-%
-% \btxflushauthor[normal]{author}
-% \btxflushauthor[normalshort]{author}
-% \btxflushauthor[inverted]{author}
-% \btxflushauthor[invertedshort]{author}
-
-% Interaction
-
-\newconditional\btxinteractive
-
-\unexpanded\def\btxdoifelseinteraction
- {\iflocation
- \edef\p_interaction{\btxcitevariantparameter\c!interaction}%
- \ifx\p_interaction\v!stop
- \doubleexpandafter\secondoftwoarguments
- \else
- \doubleexpandafter\firstoftwoarguments
- \fi
- \else
- \expandafter\secondoftwoarguments
- \fi}
-
-\appendtoks
- \iflocation
- \edef\p_interaction{\btxlistvariantparameter\c!interaction}%
- \ifx\p_interaction\v!stop
- \let\doifelsebtxinteractionelse\secondoftwoarguments
- \setfalse\btxinteractive
- \else
- \let\doifelsebtxinteractionelse\firstoftwoarguments
- \settrue\btxinteractive
- \fi
- \else
- \let\doifelsebtxinteractionelse\secondoftwoarguments
- \setfalse\btxinteractive
- \fi
-\to \everysetupbtxlistplacement
-
-% bib -> btx
-
-\unexpanded\def\btxgotolink#1[#2]{\doifreferencefoundelse{\bibrefprefix#2}{\goto{#1}[\bibrefprefix#2]}{#1}}
-\unexpanded\def\btxatlink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\at [\bibrefprefix#1]}{#1}}
-\unexpanded\def\btxinlink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\expanded{\goto{\currentreferencetext}}[\bibrefprefix#1]}{#1}}
-
-\unexpanded\def\btxdirectlink#1#2{\goto{#2 {\tttf[#1]}}[#1]}
-\unexpanded\def\btxdirectlink#1#2{\goto{#2}[#1]}
-
-\let\gotobiblink\btxgotolink
-\let\atbiblink \btxatlink
-\let\inbiblink \btxinlink
-
-\unexpanded\def\btxnumberedreference[#1]% \bibtexnumref (replaced by \cite[num])
- {\dontleavehmode
- \begingroup
- \btxcitevariantparameter\v!left
- \penalty\plustenthousand % todo
- \ctxcommand{btxresolvelistreference("\currentbtxdataset","#1")}% todo: split dataset from #1, so another call
- \btxcitevariantparameter\v!right
- \endgroup}
-
-% \def\btxnumberedplaceholder[#1]% \nobibtexnumref
-% {[#1]}
-
-\appendtoks
- % for old times' sake, for a while at least
- \let\maybeyear\gobbleoneargument
- \let\noopsort \gobbleoneargument
-\to \everysetupbtxlistplacement
-
-\appendtoks
- % for old times' sake, for a while at least
- \let\maybeyear\gobbleoneargument
- \let\noopsort \gobbleoneargument
-\to \everysetupbtxciteplacement
-
-\appendtoks
- \doifnot{\btxrenderingparameter\c!continue}\v!yes
- {\global\btxcounter\zerocount}%
-\to \everysetupbtxlistplacement
-
-%D When a publication is cited, we need to signal that somehow. This is done with the
-%D following (non-user) command. We could tag without injecting a node, but this way
-%D we also store the location, which makes it possible to ask for local lists.
-
-\newconditional\c_publ_cite_write
-
-% for reference, but split now done at the lua end
-%
-% \def\publ_cite_write#1% not used
-% {\splitstring#1\at::\to\askedbtxrendering\and\askedbtxtag
-% \ifx\askedbtxtag\empty
-% \let\currentbtxtag \askedbtxrendering
-% \else
-% \let\currentbtxtag \askedbtxtag
-% \let\currentbtxrendering\askedbtxrendering
-% \fi
-% \iftrialtypesetting \else
-% \processcommacommand[\currentbtxtag]{\publ_cite_indeed\currentbtxrendering}%
-% \fi}
-
-\def\publ_cite_indeed#1#2%
- {\expanded{\writedatatolist[btx][btxset=#1,btxref=#2]}}
-
-\def\btxdomarkcitation#1#2% called from lua end
- {\iftrialtypesetting \else
- \writedatatolist[btx][btxset=#1,btxref=#2]% \c!location=\v!here
- \fi}
-
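A minimal sketch of what this signalling amounts to, assuming a hypothetical dataset named default and a hypothetical tag knuth1984: for every cited tag the Lua end calls \btxdomarkcitation, which (outside trial typesetting) appends one entry to the btx list.

% For example (hypothetical dataset and tag names):
%
%   \cite[knuth1984]
%
% effectively performs
%
%   \writedatatolist[btx][btxset=default,btxref=knuth1984]
%
% so both the tag and the location of the citation are recorded.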
-%D \macros{cite,nocite,citation,nocitation,usecitation}
-%D
-%D The inline \type {\cite} command creates an (often) short reference to a publication
-%D and for historical reasons uses a strict test for brackets. This means, at least in
-%D the default case, that spaces are ignored in the argument scanner. The \type
-%D {\citation} command is more liberal but also gobbles following spaces. Both
-%D commands insert a reference as well as a visual clue.
-%D
-%D The \type {no} commands all do the same (they are synonyms): they make sure that
-%D a reference is injected but show nothing. However, they do create a node, so it is
-%D best to attach them to some text in order to avoid spacing interference. A slightly
-%D less efficient alternative is \type {\cite[none][tag]}.
-
-% [tags]
-% [settings|variant][tags]
-% [base::tags]
-% [settings|variant][base::tags]
-
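To make the scanning patterns above concrete, here is a small usage sketch in the file's own commented style; the tag knuth1984 and the dataset other are hypothetical placeholders.

% For example (hypothetical tag and dataset names):
%
% \cite[knuth1984]                         % [tags]
% \cite[authoryear][knuth1984]             % [variant][tags]
% \cite[alternative=authoryear][knuth1984] % [settings][tags]
% \cite[other::knuth1984]                  % [base::tags]
% \cite[authoryear][other::knuth1984]      % [variant][base::tags]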
-\unexpanded\def\btxcite
- {\dontleavehmode
- \begingroup
- \strictdoifnextoptionalelse\publ_cite_tags_options\publ_cite_tags_indeed}
-
-\unexpanded\def\publ_cite_tags_indeed#1%
- {\letinteractionparameter\c!style\empty
- \edef\currentbtxcitevariant{\btxcitevariantparameter\c!alternative}%
- \edef\currentbtxcitetag{#1}%
- \publ_cite_variant
- \endgroup}
-
-\let\publ_citation_tags_indeed\publ_cite_tags_indeed
-
-\unexpanded\def\publ_cite_tags_options[#1]%
- {\strictdoifnextoptionalelse{\publ_cite_tags_options_indeed{#1}}{\publ_cite_tags_indeed{#1}}}
-
-\unexpanded\def\publ_cite_tags_options_indeed#1[#2]%
- {\edef\currentbtxcitetag{#2}%
- \doifassignmentelse{#1}
- {\publ_cite_tags_settings_indeed{#1}}
- {\publ_cite_tags_variants_indeed{#1}}}
-
-\def\publ_cite_tags_settings_indeed#1%
- {\letinteractionparameter\c!style\empty
- %\letinteractionparameter\c!color\empty
- \getdummyparameters[\c!alternative=,\c!extras=,#1]%
- \edef\p_alternative{\dummyparameter\c!alternative}%
- \ifx\p_alternative\empty \else
- \let\currentbtxcitevariant\p_alternative
- \fi
- \setupcurrentbtxcitevariantparameters[#1]%
- \edef\p_extras{\dummyparameter\c!extras}%
- \ifx\p_extras\empty \else
- \edef\p_right{\btxcitevariantparameter\c!right}%
- \ifx\p_right\empty \else
- \setexpandedbtxcitevariantparameter\p_right{\p_extras\p_right}%
- \fi
- \fi
- \publ_cite_variant
- \endgroup}
-
-\def\publ_cite_tags_variants_indeed#1%
- {\letinteractionparameter\c!style\empty
- \edef\currentbtxcitevariant{#1}%
- \publ_cite_variant
- \endgroup}
-
-\newconditional\btxcitecompress
-
-\def\publ_cite_variant
- {\edef\p_compress{\btxcitevariantparameter\c!compress}%
- % \ifx\p_compress\v!no
- % \setfalse\btxcitecompress
- % \else
- % \settrue\btxcitecompress
- % \fi
- \begingroup
- \settrue\c_publ_cite_write
- \publ_cite_handle_variant_indeed[\currentbtxcitetag]}
-
-\unexpanded\def\publ_cite_handle_variant#1%
- {\begingroup
- \the\everysetupbtxciteplacement
- \edef\currentbtxcitevariant{#1}%
- \dosingleargument\publ_cite_handle_variant_indeed}
-
-\def\publ_cite_handle_variant_indeed[#1]%
- {\usebtxcitevariantstyleandcolor\c!style\c!color
- \letbtxcitevariantparameter\c!alternative\currentbtxcitevariant
- \ctxcommand{btxhandlecite(%
- "\currentbtxdataset",%
- "#1",%
- \iftrialtypesetting false\else true\fi,%
- "\currentbtxcitevariant",%
- "\btxcitevariantparameter\c!sorttype",%
- "\btxcitevariantparameter\c!setups"%
- )}%
- \endgroup}
-
-\unexpanded\def\btxcitation
- {\dontleavehmode
- \begingroup
- \dodoubleempty\publ_citation}
-
-\def\publ_citation[#1][#2]% could be made more efficient but not now
- {\ifsecondargument
- \publ_cite_tags_options_indeed{#1}[#2]%
- \else
- \publ_cite_tags_indeed{#1}%
- \fi}
-
-\unexpanded\def\btxnocite
- {\dosingleempty\publ_cite_no}
-
-\unexpanded\def\publ_cite_no[#1]%
- {\iftrialtypesetting \else
- \ctxcommand{btxhandlenocite("\currentbtxdataset","#1",true)}%
- \fi}
-
-%D Compatibility:
-
-\let\cite \btxcite
-\let\citation \btxcitation
-\let\nocite \btxnocite
-\let\nocitation \btxnocite
-\let\usepublication\btxnocite
-
-%D Cite helpers:
-
-\unexpanded\def\btxcitevariant#1%
- {\ctxcommand{btxcitevariant("\currentbtxdataset","\currentbtxblock","\currentbtxtag","#1")}}
-
-%D List helpers:
-
-\def\currentbtxindex{0}
-
-\unexpanded\def\btxlistvariant#1% was \currentbtxindex
- {\begingroup
- \edef\currentbtxlistvariant{#1}%
- \btxlistvariantparameter\c!left
- \ctxcommand{btxlistvariant("\currentbtxdataset","\currentbtxblock","\currentbtxtag","#1","\number\btxcounter")}% some can go
- \btxlistvariantparameter\c!right
- \endgroup}
-
-%D Whatever helpers:
-
-\unexpanded\def\btxsingularplural#1%
- {\ctxcommand{btxsingularorplural("\currentbtxdataset","\currentbtxtag","#1")}}
-
-\let\btxsingularorplural\btxsingularplural
-
-%D Loading variants:
-
-\appendtoks
- \loadbtxdefinitionfile[\btxrenderingparameter\c!alternative]
-\to \everysetupbtxrendering
-
-%D Defaults:
-
-\setupbtxrendering
- [\c!dataset=\v!standard,
- \c!method=\v!global,
- \c!setups=btx:rendering:\btxrenderingparameter\c!alternative,
- \c!alternative=apa,
- \c!sorttype=,
- \c!criterium=,
- \c!refcommand=authoryears, % todo
- \c!numbering=\v!yes,
-% \c!autohang=\v!no,
- \c!width=\v!auto,
- \c!distance=1.5\emwidth]
-
-\definebtxrendering
- [\v!standard]
-
-\setupbtxcitevariant
- [\c!interaction=\v!start,
- \c!setups=btx:cite:\btxcitevariantparameter\c!alternative,
- \c!alternative=num,
- \c!andtext={ and },
- \c!otherstext={ et al.},
- \c!pubsep={, },
- \c!lastpubsep={ and },
- \c!compress=\v!no,
- \c!inbetween={ },
- \c!left=,
- \c!right=]
-
-\definebtxcitevariant
- [author]
- [%c!sorttype=,
- \c!left={(},
- \c!middle={, },
- \c!right={)}]
-
-\definebtxcitevariant
- [authoryear]
- [\c!compress=\v!yes,
- \c!inbetween={, },
- \c!left={(},
- \c!middle={, },
- \c!right={)}]
-
-\definebtxcitevariant
- [authoryears]
- [authoryear]
-
-\definebtxcitevariant
- [authornum]
- [author]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [year]
- [\c!left={(},
- \c!right={)}]
-
-\definebtxcitevariant
- [key]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [serial]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [page]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [short]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [type]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [doi]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [url]
- [\c!left={[},
- \c!right={]}]
-
-\definebtxcitevariant
- [page]
- [\c!left=,
- \c!right=,
- \c!inbetween=\endash]
-
-\definebtxcitevariant
- [num]
- [\c!compress=\v!yes,
- \c!inbetween={--},
- \c!left={[},
- \c!right={]}]
-
-\setupbtxlistvariant
- [\c!namesep={, },
- \c!lastnamesep={ and },
- \c!finalnamesep={ and },
- \c!firstnamesep={ },
- \c!juniorsep={ },
- \c!vonsep={ },
- \c!surnamesep={, },
- \c!surnameinitialsep={, },
- \c!surnamefirstnamesep={, },
- \c!etallimit=5,
- \c!etaldisplay=5,
- \c!etaltext={ et al.},
- \c!monthconversion=\v!number,
- \c!authorconversion=\v!normal]
-
-\definebtxlistvariant
- [author]
- [author=invertedshort] % we could also do this in the apa style itself
-
-\definebtxlistvariant
- [editor]
- [author]
-
-\definebtxlistvariant
- [artauthor]
- [author]
-
-% Do we want these in the format? Loading them delayed is somewhat messy.
-
-\loadbtxdefinitionfile[apa]
-\loadbtxdefinitionfile[cite]
-\loadbtxdefinitionfile[commands]
-\loadbtxdefinitionfile[definitions]
-
-\protect
diff --git a/tex/context/base/publ-old.mkiv b/tex/context/base/publ-old.mkiv
deleted file mode 100644
index f616428e6..000000000
--- a/tex/context/base/publ-old.mkiv
+++ /dev/null
@@ -1,22 +0,0 @@
-%D \module
-%D [ file=publ-old,
-%D version=2013.12.24,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=Old Fashioned \BIBTEX,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\unprotect
-
-% we could use runtime commands instead
-
-\unexpanded\def\setupbibtex {\usemodule[oldbibtex]\setupbibtex}
-\unexpanded\def\setuppublications {\usemodule[oldbibtex]\setuppublications}
-\unexpanded\def\setuppublicationlist{\usemodule[oldbibtex]\setuppublicationlist}
-
-\protect
diff --git a/tex/context/base/publ-oth.lua b/tex/context/base/publ-oth.lua
deleted file mode 100644
index 14da19f9c..000000000
--- a/tex/context/base/publ-oth.lua
+++ /dev/null
@@ -1,146 +0,0 @@
-if not modules then modules = { } end modules ['publ-oth'] = {
- version = 1.001,
- comment = "this module part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local P, S, C, Ct, Cf, Cg, Cmt, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.Cmt, lpeg.Carg
-local lpegmatch = lpeg.match
-
-local p_endofline = lpeg.patterns.newline
-
-local loaders = publications.loaders
-local getindex = publications.getindex
-
-local function addfield(t,k,v,fields)
- k = fields[k]
- if k then
- local tk = t[k]
- if tk then
- t[k] = tk .. " and " .. v
- else
- t[k] = v
- end
- end
- return t
-end
-
-local function checkfield(_,_,t,categories,all)
- local tag = t.tag
- if tag then
- local category = t.category
- t.tag = nil
- t.category = categories[category] or category
- all[tag] = t
- end
- return true
-end
-
--- endnotes --
-
-local fields = {
- ["@"] = "tag",
- ["0"] = "category",
- ["A"] = "author",
- ["E"] = "editor",
- ["T"] = "title",
- ["D"] = "year",
- ["I"] = "publisher",
-}
-
-local categories = {
- ["Journal Article"] = "article",
-}
-
-local entry = P("%") * Cg(C(1) * (S(" \t")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
-local record = Cf(Ct("") * (entry^1), addfield)
-local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
-
-function publications.endnotes_to_btx(data)
- local all = { }
- lpegmatch(records,data,1,fields,categories,all)
- return all
-end
-
-function loaders.endnote(dataset,filename)
- -- we could combine the next into checkfield but let's not make the code too messy
- loaders.lua(dataset,publications.endnotes_to_btx(io.loaddata(filename) or ""))
-end
-
--- refman --
-
-local entry = Cg(C((1-lpeg.S(" \t")-p_endofline)^1) * (S(" \t-")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
-local record = Cf(Ct("") * (entry^1), addfield)
-local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
-
-local fields = {
- ["SN"] = "tag",
- ["TY"] = "category",
- ["A1"] = "author",
- ["E1"] = "editor",
- ["T1"] = "title",
- ["Y1"] = "year",
- ["PB"] = "publisher",
-}
-
-local categories = {
- ["JOUR"] = "article",
-}
-
-function publications.refman_to_btx(data)
- local all = { }
- lpegmatch(records,data,1,fields,categories,all)
- return all
-end
-
-function loaders.refman(dataset,filename)
- -- we could combine the next into checkfield but let's not make the code too messy
- loaders.lua(dataset,publications.refman_to_btx(io.loaddata(filename) or ""))
-end
-
--- test --
-
--- local endnote = [[
--- %0 Journal Article
--- %T Scientific Visualization, Overviews, Methodologies, and Techniques
--- %A Nielson, Gregory M
--- %A Hagen, Hans
--- %A Müller, Heinrich
--- %@ 0818677776
--- %D 1994
--- %I IEEE Computer Society
---
--- %0 Journal Article
--- %T Scientific Visualization, Overviews, Methodologies, and Techniques
--- %A Nielson, Gregory M
--- %A Hagen, Hans
--- %A Müller, Heinrich
--- %@ 0818677775
--- %D 1994
--- %I IEEE Computer Society
--- ]]
---
--- local refman = [[
--- TY - JOUR
--- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
--- A1 - Nielson, Gregory M
--- A1 - Hagen, Hans
--- A1 - Müller, Heinrich
--- SN - 0818677776
--- Y1 - 1994
--- PB - IEEE Computer Society
---
--- TY - JOUR
--- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
--- A1 - Nielson, Gregory M
--- A1 - Hagen, Hans
--- A1 - Müller, Heinrich
--- SN - 0818677775
--- Y1 - 1994
--- PB - IEEE Computer Society
--- ]]
---
--- inspect(publications.endnotes_to_btx(endnote))
--- inspect(publications.refman_to_btx(refman))
diff --git a/tex/context/base/publ-tra.lua b/tex/context/base/publ-tra.lua
deleted file mode 100644
index 98c81d800..000000000
--- a/tex/context/base/publ-tra.lua
+++ /dev/null
@@ -1,296 +0,0 @@
-if not modules then modules = { } end modules ['publ-tra'] = {
- version = 1.001,
- comment = "this module part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local sortedhash = table.sortedhash
-
-local tracers = { }
-publications.tracers = tracers
-local datasets = publications.datasets
-
-local context = context
-local NC, NR = context.NC, context.NR
-local bold = context.bold
-local darkgreen, darkred, darkblue = context.darkgreen, context.darkred, context.darkblue
-
-local fields = table.sorted {
- "abstract",
- "address",
- "annotate",
- "author",
- "booktitle",
- "chapter",
- "comment",
- "country",
- "doi",
- "edition",
- "editor",
- "eprint",
- "howpublished",
- "institution",
- "isbn",
- "issn",
- "journal",
- "key",
- "keyword",
- "keywords",
- "language",
- "lastchecked",
- "month",
- "names",
- "note",
- "notes",
- "number",
- "organization",
- "pages",
- "publisher",
- "school",
- "series",
- "size",
- "title",
- "type",
- "url",
- "volume",
- "year",
- "nationality",
- "assignee",
- "bibnumber",
- "day",
- "dayfiled",
- "monthfiled",
- "yearfiled",
- "revision",
-}
-
-local citevariants = table.sorted {
- "author",
- "authoryear",
- "authoryears",
- "authornum",
- "year",
- "short",
- "serial",
- "key",
- "doi",
- "url",
- "type",
- "page",
- "none",
- "num",
-}
-
-local listvariants = table.sorted {
- "author",
- "editor",
- "artauthor",
-}
-
--- local categories = table.sorted {
--- "article",
--- "book",
--- "booklet",
--- "conference",
--- "inbook",
--- "incollection",
--- "inproceedings",
--- "manual",
--- "mastersthesis",
--- "misc",
--- "phdthesis",
--- "proceedings",
--- "techreport",
--- "unpublished",
--- }
-
-local categories = {
- article = {
- required = { "author", "title", "journal", "year" },
- optional = { "volume", "number", "pages", "month", "note" },
- },
- book = {
- required = { { "author", "editor" }, "title", "publisher", "year" },
- optional = { { "volume", "number" }, "series", "address", "edition", "month","note" },
- },
- booklet = {
- required = { "title" },
- optional = { "author", "howpublished", "address", "month", "year", "note" },
- },
- inbook = {
- required = { { "author", "editor" }, "title", { "chapter", "pages" }, "publisher","year" },
- optional = { { "volume", "number" }, "series", "type", "address", "edition", "month", "note" },
- },
- incollection = {
- required = { "author", "title", "booktitle", "publisher", "year" },
- optional = { "editor", { "volume", "number" }, "series", "type", "chapter", "pages", "address", "edition", "month", "note" },
- },
- inproceedings = {
- required = { "author", "title", "booktitle", "year" },
- optional = { "editor", { "volume", "number" }, "series", "pages", "address", "month","organization", "publisher", "note" },
- },
- manual = {
- required = { "title" },
- optional = { "author", "organization", "address", "edition", "month", "year", "note" },
- },
- mastersthesis = {
- required = { "author", "title", "school", "year" },
- optional = { "type", "address", "month", "note" },
- },
- misc = {
- required = { "author", "title", "howpublished", "month", "year", "note" },
- optional = { "author", "title", "howpublished", "month", "year", "note" },
- },
- phdthesis = {
- required = { "author", "title", "school", "year" },
- optional = { "type", "address", "month", "note" },
- },
- proceedings = {
- required = { "title", "year" },
- optional = { "editor", { "volume", "number" }, "series", "address", "month", "organization", "publisher", "note" },
- },
- techreport = {
- required = { "author", "title", "institution", "year" },
- optional = { "type", "number", "address", "month", "note" },
- },
- unpublished = {
- required = { "author", "title", "note" },
- optional = { "month", "year" },
- },
-}
-
-
-publications.tracers.fields = fields
-publications.tracers.categories = categories
-publications.tracers.citevariants = citevariants
-publications.tracers.listvariants = listvariants
--- -- --
-
-function tracers.showdatasetfields(dataset)
- local luadata = datasets[dataset].luadata
- if next(luadata) then
- context.starttabulate { "|lT|lT|pT|" }
- NC() bold("tag")
- NC() bold("category")
- NC() bold("fields")
- NC() NR() context.FL() -- HL()
- for k, v in sortedhash(luadata) do
- NC() context(k)
- NC() context(v.category)
- NC()
- for k, v in sortedhash(v) do
- if k ~= "details" and k ~= "tag" and k ~= "category" then
- context("%s ",k)
- end
- end
- NC() NR()
- end
- context.stoptabulate()
- end
-end
-
-function tracers.showdatasetcompleteness(dataset)
-
- dataset = datasets[dataset]
-
- local preamble = { "|lBTw(10em)|p|" }
-
- local function required(key,value,indirect)
- NC() darkgreen(key)
- NC() if indirect then
- darkblue(value)
- elseif value then
- context(value)
- else
- darkred("\\tttf [missing]")
- end
- NC() NR()
- end
-
- local function optional(key,value,indirect)
- NC() context(key)
- NC() if indirect then
- darkblue(value)
- elseif value then
- context(value)
- end
- NC() NR()
- end
-
- local function identified(tag,crossref)
- NC() context("tag")
- NC() if crossref then
- context("\\tttf %s\\hfill\\darkblue => %s",tag,crossref)
- else
- context("\\tttf %s",tag)
- end
- NC() NR()
- end
-
- local luadata = dataset.luadata -- dataset was already resolved to its table above
-
- if next(luadata) then
- for tag, entry in table.sortedhash(luadata) do
- local category = entry.category
- local fields = categories[category]
- if fields then
- context.starttabulate(preamble)
- identified(tag,entry.crossref)
- context.HL()
- local requiredfields = fields.required
- local optionalfields = fields.optional
- for i=1,#requiredfields do
- local r = requiredfields[i]
- if type(r) == "table" then
- local okay = false
- for i=1,#r do
- local ri = r[i]
- if rawget(entry,ri) then
- required(ri,entry[ri])
- okay = true
- elseif entry[ri] then
- required(ri,entry[ri],true)
- okay = true
- end
- end
- if not okay then
- required(table.concat(r,"\\letterbar "))
- end
- elseif rawget(entry,r) then
- required(r,entry[r])
- elseif entry[r] then
- required(r,entry[r],true)
- else
- required(r)
- end
- end
- for i=1,#optionalfields do
- local o = optionalfields[i]
- if type(o) == "table" then
- for i=1,#o do
- local oi = o[i]
- if rawget(entry,oi) then
- optional(oi,entry[oi])
- elseif entry[oi] then
- optional(oi,entry[oi],true)
- end
- end
- elseif rawget(entry,o) then
- optional(o,entry[o])
- elseif entry[o] then
- optional(o,entry[o],true)
- end
- end
- context.stoptabulate()
- else
- -- error
- end
- end
- end
-
-end
-
-commands.showbtxdatasetfields = tracers.showdatasetfields
-commands.showbtxdatasetcompleteness = tracers.showdatasetcompleteness
diff --git a/tex/context/base/publ-tra.mkiv b/tex/context/base/publ-tra.mkiv
deleted file mode 100644
index 49fb6d962..000000000
--- a/tex/context/base/publ-tra.mkiv
+++ /dev/null
@@ -1,35 +0,0 @@
-%D \module
-%D [ file=publ-tra,
-%D version=2013.12.24,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=Tracing,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-% todo: make this a runtime module
-% todo: use the module interface
-
-\writestatus{loading}{ConTeXt Publication Support / Tracing}
-
-\registerctxluafile{publ-tra}{1.001}
-
-\unprotect
-
-\unexpanded\def\showbtxdatasetfields
- {\dosingleempty\publ_dataset_show_fields}
-
-\def\publ_dataset_show_fields[#1]%
- {\ctxcommand{showbtxdatasetfields("\iffirstargument#1\else\currentbtxdataset\fi")}}
-
-\unexpanded\def\showbtxdatasetcompleteness
- {\dosingleempty\publ_dataset_show_completeness}
-
-\def\publ_dataset_show_completeness[#1]%
- {\ctxcommand{showbtxdatasetcompleteness("\iffirstargument#1\else\currentbtxdataset\fi")}}
-
-\protect \endinput
diff --git a/tex/context/base/publ-usr.lua b/tex/context/base/publ-usr.lua
deleted file mode 100644
index 6bb93ebee..000000000
--- a/tex/context/base/publ-usr.lua
+++ /dev/null
@@ -1,91 +0,0 @@
-if not modules then modules = { } end modules ['publ-usr'] = {
- version = 1.001,
- comment = "this module part of publication support",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- local chardata = characters.data
-
--- local str = [[
--- \startpublication[k=Berdnikov:TB21-2-129,t=article,a={{Berdnikov},{}},y=2000,n=2257,s=BHHJ00]
--- \artauthor[]{Alexander}[A.]{}{Berdnikov}
--- \artauthor[]{Hans}[H.]{}{Hagen}
--- \artauthor[]{Taco}[T.]{}{Hoekwater}
--- \artauthor[]{Bogus{\l}aw}[B.]{}{Jackowski}
--- \pubyear{2000}
--- \arttitle{{Even more MetaFun with \MP: A request for permission}}
--- \journal{TUGboat}
--- \issn{0896-3207}
--- \volume{21}
--- \issue{2}
--- \pages{129--130}
--- \month{6}
--- \stoppublication
--- ]]
-
-local remapped = {
- artauthor = "author",
- arttitle = "title",
-}
-
-local P, Cs, R, Cc, Carg = lpeg.P, lpeg.Cs, lpeg.R, lpeg.Cc, lpeg.Carg
-
-local function register(target,key,a,b,c,d,e)
- key = remapped[key] or key
- if b and d and e then
- local s = nil
- if b ~= "" and b then
- s = s and s .. " " .. b or b
- end
- if d ~= "" and d then
- s = s and s .. " " .. d or d
- end
- if e ~= "" and e then
- s = s and s .. " " .. e or e
- end
- if a ~= "" and a then
- s = s and s .. " " .. a or a
- end
- local value = target[key]
- if s then
- if value then
- target[key] = value .. " and " .. s
- else
- target[key] = s
- end
- else
- if not value then
- target[key] = s
- end
- end
- else
- target[key] = b
- end
-end
-
-local leftbrace = P("{")
-local rightbrace = P("}")
-local leftbracket = P("[")
-local rightbracket = P("]")
-
-local key = P("\\") * Cs(R("az","AZ")^1) * lpeg.patterns.space^0
-local mandate = leftbrace * Cs(lpeg.patterns.balanced) * rightbrace + Cc(false)
-local optional = leftbracket * Cs((1-rightbracket)^0) * rightbracket + Cc(false)
-local value = optional^-1 * mandate^-1 * optional^-1 * mandate^-2
-
-local pattern = ((Carg(1) * key * value) / register + P(1))^0
-
-function publications.addtexentry(dataset,settings,content)
- settings = utilities.parsers.settings_to_hash(settings)
- local data = {
- tag = settings.tag or settings.k or "no tag",
- category = settings.category or settings.t or "article",
- }
- lpeg.match(pattern,content,1,data) -- can set tag too
- dataset.userdata[data.tag] = data
- dataset.luadata[data.tag] = data
- publications.markasupdated(dataset)
- return data
-end
diff --git a/tex/context/base/publ-usr.mkiv b/tex/context/base/publ-usr.mkiv
deleted file mode 100644
index cb078f424..000000000
--- a/tex/context/base/publ-usr.mkiv
+++ /dev/null
@@ -1,2 +0,0 @@
-% todo
-
diff --git a/tex/context/base/publ-xml.mkiv b/tex/context/base/publ-xml.mkiv
deleted file mode 100644
index 007f9bb27..000000000
--- a/tex/context/base/publ-xml.mkiv
+++ /dev/null
@@ -1,114 +0,0 @@
-%D \module
-%D [ file=publ-xml,
-%D version=2013.12.24,
-%D title=\CONTEXT\ Publication Support,
-%D subtitle=XML,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Publication Support / XML}
-
-\unprotect
-
-\unexpanded\def\convertbtxdatasettoxml
- {\dosingleempty\publ_convert_to_xml}
-
-\def\publ_convert_to_xml[#1]%
- {\ctxcommand{convertbtxdatasettoxml("\iffirstargument#1\else\v!standard\fi",true)}} % or current when not empty
-
-% \startxmlsetups btx:initialize
-% \xmlregistereddocumentsetups{#1}{}
-% \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
-% \xmlmain{#1}
-% \stopxmlsetups
-
-\startxmlsetups btx:initialize
- \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
- \xmlmain{#1}
-\stopxmlsetups
-
-% \startxmlsetups btx:entry
-% \xmlflush{#1}
-% \stopxmlsetups
-
-\startxmlsetups btx:field
- \xmlflushcontext{#1}
-\stopxmlsetups
-
-\protect \endinput
-
-% \startxmlsetups bibtex:entry:getkeys
-% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='author']/text()}}
-% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='year' ]/text()}}
-% \xmladdsortentry{bibtex}{#1}{\xmlatt{#1}{tag}}
-% \stopxmlsetups
-
-% \startbuffer
-% \startxmlsetups xml:bibtex:sorter
-% \xmlresetsorter{bibtex}
-% % \xmlfilter{#1}{entry/command(bibtex:entry:getkeys)}
-% \xmlfilter{#1}{
-% bibtex
-% /entry[@category='article']
-% /field[@name='author' and find(text(),'Knuth')]
-% /../command(bibtex:entry:getkeys)}
-% \xmlsortentries{bibtex}
-% \xmlflushsorter{bibtex}{bibtex:entry:flush}
-% \stopxmlsetups
-% \stopbuffer
-
-% \bgroup
-% \setups[bibtex-commands]
-% \getbuffer
-% \egroup
-
-% \startxmlsetups bibtex:entry:flush
-% \xmlfilter{#1}{/field[@name='author']/context()} / %
-% \xmlfilter{#1}{/field[@name='year' ]/context()} / %
-% \xmlatt{#1}{tag}\par
-% \stopxmlsetups
-
-% \startpacked
-% \getbuffer
-% \stoppacked
-
-
-% \unexpanded\def\btx_xml_list_handle_entry
-% {\begingroup
-% \ignorespaces
-% \xmlfilter{btx:\currentbtxrendering}{/bibtex/entry[@tag='\currentbtxtag']/command(btx:format)}%
-% \removeunwantedspaces
-% \endgroup}
-
-% \startxmlsetups btx:format
-% \btxlistparameter\c!before\relax % prevents lookahead
-% \edef\currentbibxmlnode {#1}
-% \edef\currentbibxmltag {\xmlatt{#1}{tag}}
-% \edef\currentbtxcategory{\xmlatt{#1}{category}}
-% \ignorespaces
-% \xmlcommand{#1}{.}{btx:\currentbtxformat:\currentbibxmlcategory}
-% \removeunwantedspaces
-% \btxlistparameter\c!after\relax % prevents lookahead
-% \stopxmlsetups
-
-% \startxmlsetups btx:list
-% \xmlfilter{#1}{/bibtex/entry/command(bibtex:format)}
-% \stopxmlsetups
-
-% \startxmlsetups btx:btx
-% \xmlfilter{#1}{/entry/command(btx:format)}
-% \stopxmlsetups
-
-% \unexpanded\def\btx_xml_doifelse#1{\xmldoifelse\currentbibxmlnode{/field[@name='#1']}}
-% \unexpanded\def\btx_xml_doif #1{\xmldoif \currentbibxmlnode{/field[@name='#1']}}
-% \unexpanded\def\btx_xml_doifnot #1{\xmldoifnot \currentbibxmlnode{/field[@name='#1']}}
-% \def\btx_xml_flush #1{\xmlcontext \currentbibxmlnode{/field[@name='#1']}}
-% \def\btx_xml_setup {\xmlsetup \currentbibxmlnode} % {#1}
-% \unexpanded\def\btx_xml_todo #1{[#1]}
-
-% \xmlfilter{#1}{/field[@name='\currentbtxfield']/btxconcat('\currentbtxfield')}
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index e9ea6393b..7d8064b29 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -34,7 +34,6 @@
%logo [FGA] {fga}
%logo [FGBBS] {fgbbs}
\logo [ACROBAT] {Acro\-bat}
-\logo [APA] {apa}
\logo [AFM] {afm}
\logo [API] {api}
\logo [ALEPH] {Aleph} % {\mathematics{\aleph}}
@@ -48,7 +47,6 @@
\logo [ASCIITEX] {ascii\TeX}
\logo [BACHOTEX] {Bacho\TeX}
\logo [BIBTEX] {bib\TeX}
-\logo [MLBIBTEX] {MLbib\TeX}
\logo [BLUESKY] {BlueSky}
\logo [BMP] {bmp}
\logo [BSD] {bsd}
diff --git a/tex/context/base/s-inf-03.mkiv b/tex/context/base/s-inf-03.mkiv
index fc654fef5..822173d00 100644
--- a/tex/context/base/s-inf-03.mkiv
+++ b/tex/context/base/s-inf-03.mkiv
@@ -16,7 +16,7 @@
\definefont
[TitlePageFont]
- [MonoBold at 15pt]
+ [MonoBold at 16pt]
\setupbodyfont
[tt,8pt]
@@ -25,7 +25,7 @@
\definefont
[TitlePageFont]
- [MonoBold at 18pt]
+ [MonoBold at 20pt]
\setupbodyfont
[tt]
@@ -352,7 +352,6 @@ for k, v in table.sortedpairs(_G) do
end
end
-
\stopluacode
\stoptext
diff --git a/tex/context/base/s-languages-hyphenation.lua b/tex/context/base/s-languages-hyphenation.lua
index c16c5bd2d..660392f80 100644
--- a/tex/context/base/s-languages-hyphenation.lua
+++ b/tex/context/base/s-languages-hyphenation.lua
@@ -24,7 +24,7 @@ local newglue = nodepool.glue
local insert_node_after = node.insert_after
local traverse_by_id = node.traverse_id
local hyphenate = lang.hyphenate
-local find_tail = node.tail
+local find_tail = node.slide
local remove_node = nodes.remove
local tracers = nodes.tracers
diff --git a/tex/context/base/s-math-coverage.lua b/tex/context/base/s-math-coverage.lua
index 5f1c7cc5a..a74e24450 100644
--- a/tex/context/base/s-math-coverage.lua
+++ b/tex/context/base/s-math-coverage.lua
@@ -123,7 +123,7 @@ function moduledata.math.coverage.showalphabets()
end
function moduledata.math.coverage.showcharacters()
- context.startmixedcolumns()
+ context.startcolumns()
context.setupalign { "nothyphenated" }
context.starttabulate { "|T|i2|Tpl|" }
for u, d in table.sortedpairs(chardata) do
@@ -150,7 +150,7 @@ function moduledata.math.coverage.showcharacters()
end
end
context.stoptabulate()
- context.stopmixedcolumns()
+ context.stopcolumns()
end
-- This is a somewhat tricky table as we need to bypass the math machinery.
diff --git a/tex/context/base/scrp-cjk.lua b/tex/context/base/scrp-cjk.lua
index 9050da6be..681fc4c43 100644
--- a/tex/context/base/scrp-cjk.lua
+++ b/tex/context/base/scrp-cjk.lua
@@ -14,29 +14,15 @@ if not modules then modules = { } end modules ['scrp-cjk'] = {
-- sense either because otherwise a wanted space at the end of a
-- line would have to be a hard coded one.
-local utfchar = utf.getchar
-
-local nuts = nodes.nuts
-local tonut = nodes.tonut
-local tonode = nodes.tonode
-
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local copy_node = nuts.copy
-local remove_node = nuts.remove
-local traverse_id = nuts.traverse_id
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getattr = nuts.getattr
-local getsubtype = nuts.getsubtype
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-
-local nodepool = nuts.pool
+local utfchar = utf.char
+
+local insert_node_after = nodes.insert_after
+local insert_node_before = nodes.insert_before
+local remove_node = nodes.remove
+local copy_node = nodes.copy
+local traverse_id = nodes.traverse_id
+
+local nodepool = nodes.pool
local new_glue = nodepool.glue
local new_kern = nodepool.kern
local new_penalty = nodepool.penalty
@@ -102,20 +88,20 @@ end
-- at font definition time and/or just assume a correct font
local function trace_detail(current,what)
- local prev = getprev(current)
- local c_id = getid(current)
- local p_id = prev and getid(prev)
+ local prev = current.prev
+ local c_id = current.id
+ local p_id = prev and prev.id
if c_id == glyph_code then
- local c_ch = getchar(current)
+ local c_ch = current.char
if p_id == glyph_code then
- local p_ch = p_id and getchar(prev)
+ local p_ch = p_id and prev.char
report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,c_ch,hash[c_ch])
else
report_details("[%s] [%C %a]",what,c_ch,hash[c_ch])
end
else
if p_id == glyph_code then
- local p_ch = p_id and getchar(prev)
+ local p_ch = p_id and prev.char
report_details("[%C %a] [%s]",p_ch,hash[p_ch],what)
else
report_details("[%s]",what)
@@ -124,8 +110,8 @@ local function trace_detail(current,what)
end
local function trace_detail_between(p,n,what)
- local p_ch = getchar(p)
- local n_ch = getchar(n)
+ local p_ch = p.char
+ local n_ch = n.char
report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,n_ch,hash[n_ch])
end
@@ -441,29 +427,29 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = getnext(first), getid(first)
+ local upcoming, id = first.next, first.id
if id == glyph_code then
- local a = getattr(first,a_scriptstatus)
+ local a = first[a_scriptstatus]
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = getfont(first)
+ local font = first.font
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
+ set_parameters(font,numbertodataset[first[a_scriptinjection]])
end
action(head,first)
end
end
previous = current
else -- glue
- local p, n = getprev(first), upcoming
+ local p, n = first.prev, upcoming
if p and n then
- local pid, nid = getid(p), getid(n)
+ local pid, nid = p.id, n.id
if pid == glyph_code and nid == glyph_code then
- local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
+ local pa, na = p[a_scriptstatus], n[a_scriptstatus]
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -509,24 +495,23 @@ scripts.installmethod {
}
function scripts.decomposehangul(head)
- local head = tonut(head)
local done = false
for current in traverse_id(glyph_code,head) do
- local lead_consonant, medial_vowel, tail_consonant = decomposed(getchar(current))
+ local lead_consonant, medial_vowel, tail_consonant = decomposed(current.char)
if lead_consonant then
- setfield(current,"char",lead_consonant)
+ current.char = lead_consonant
local m = copy_node(current)
- setfield(m,"char",medial_vowel)
+ m.char = medial_vowel
head, current = insert_node_after(head,current,m)
if tail_consonant then
local t = copy_node(current)
- setfield(t,"char",tail_consonant)
+ t.char = tail_consonant
head, current = insert_node_after(head,current,t)
end
done = true
end
end
- return tonode(head), done
+ return head, done
end
-- nodes.tasks.prependaction("processors","normalizers","scripts.decomposehangul")
@@ -697,29 +682,29 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = getnext(first), getid(first)
+ local upcoming, id = first.next, first.id
if id == glyph_code then
- local a = getattr(first,a_scriptstatus)
+ local a = first[a_scriptstatus]
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = getfont(first)
+ local font = first.font
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
+ set_parameters(font,numbertodataset[first[a_scriptinjection]])
end
action(head,first)
end
end
previous = current
else -- glue
- local p, n = getprev(first), upcoming
+ local p, n = first.prev, upcoming
if p and n then
- local pid, nid = getid(p), getid(n)
+ local pid, nid = p.id, n.id
if pid == glyph_code and nid == glyph_code then
- local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
+ local pa, na = p[a_scriptstatus], n[a_scriptstatus]
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -919,32 +904,34 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = getnext(first), getid(first)
+ local upcoming, id = first.next, first.id
if id == glyph_code then
- local a = getattr(first,a_scriptstatus)
+ local a = first[a_scriptstatus]
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = getfont(first)
+ local font = first.font
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
+ set_parameters(font,numbertodataset[first[a_scriptinjection]])
end
action(head,first)
end
end
previous = current
- -- elseif id == math_code then
- -- upcoming = getnext(end_of_math(current))
- -- previous = "start"
+
+-- elseif id == math_code then
+-- upcoming = end_of_math(current).next
+-- previous = "start"
+
else -- glue
- local p, n = getprev(first), upcoming -- we should remember prev
+ local p, n = first.prev, upcoming -- we should remember prev
if p and n then
- local pid, nid = getid(p), getid(n)
+ local pid, nid = p.id, n.id
if pid == glyph_code and nid == glyph_code then
- local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
+ local pa, na = p[a_scriptstatus], n[a_scriptstatus]
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -953,17 +940,17 @@ local function process(head,first,last)
or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean
previous = "start"
else -- if head ~= first then
- if id == glue_code and getsubtype(first) == userskip_code then -- also scriptstatus check?
- -- for the moment no distinction possible between space and userskip
- local w = getfield(getfield(first,"spec"),"width")
- local s = spacedata[getfont(p)]
- if w == s then -- could be option
- if trace_details then
- trace_detail_between(p,n,"space removed")
- end
- remove_node(head,first,true)
- end
- end
+if id == glue_code and first.subtype == userskip_code then -- also scriptstatus check?
+ -- for the moment no distinction possible between space and userskip
+ local w = first.spec.width
+ local s = spacedata[p.font]
+ if w == s then -- could be option
+ if trace_details then
+ trace_detail_between(p,n,"space removed")
+ end
+ remove_node(head,first,true)
+ end
+end
previous = pcjk
-- else
-- previous = pcjk
diff --git a/tex/context/base/scrp-eth.lua b/tex/context/base/scrp-eth.lua
index 8ecbce522..597afa1b5 100644
--- a/tex/context/base/scrp-eth.lua
+++ b/tex/context/base/scrp-eth.lua
@@ -9,17 +9,9 @@ if not modules then modules = { } end modules ['scrp-eth'] = {
-- at some point I will review the script code but for the moment we
-- do it this way; so space settings like with cjk yet
-local nuts = nodes.nuts
+local insert_node_before = node.insert_before
-local getnext = nuts.getnext
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getattr = nuts.getattr
-
-local insert_node_before = nuts.insert_before
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_glue = nodepool.glue
local new_penalty = nodepool.penalty
@@ -45,13 +37,13 @@ local inter_character_stretch_factor = 1
local inter_character_shrink_factor = 1
local function space_glue(current)
- local data = numbertodataset[getattr(current,a_scriptinjection)]
+ local data = numbertodataset[current[a_scriptinjection]]
if data then
inter_character_space_factor = data.inter_character_space_factor or 1
inter_character_stretch_factor = data.inter_character_stretch_factor or 1
inter_character_shrink_factor = data.inter_character_shrink_factor or 1
end
- local font = getfont(current)
+ local font = current.font
if lastfont ~= font then
local pf = parameters[font]
space = pf.space
@@ -112,9 +104,9 @@ local function process(head,first,last)
local injector = false
local current = first
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local scriptstatus = getattr(current,a_scriptstatus)
+ local scriptstatus = current[a_scriptstatus]
local category = numbertocategory[scriptstatus]
if injector then
local action = injector[category]
@@ -129,7 +121,7 @@ local function process(head,first,last)
if current == last then
break
else
- current = getnext(current)
+ current = current.next
end
end
end
diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua
index a6bfe4cf9..56422e622 100644
--- a/tex/context/base/scrp-ini.lua
+++ b/tex/context/base/scrp-ini.lua
@@ -14,7 +14,7 @@ local attributes, nodes, node = attributes, nodes, node
local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
local trace_splitting = false trackers.register("scripts.splitting", function(v) trace_splitting = v end)
-local trace_splitdetail = false trackers.register("scripts.splitting.detail", function(v) trace_splitdetail = v end)
+local trace_splitdetail = false trackers.register("scripts.splitring.detail", function(v) trace_splitdetail = v end)
local report_preprocessing = logs.reporter("scripts","preprocessing")
local report_splitting = logs.reporter("scripts","splitting")
@@ -22,6 +22,9 @@ local report_splitting = logs.reporter("scripts","splitting")
local utfbyte, utfsplit = utf.byte, utf.split
local gmatch = string.gmatch
+local first_glyph = node.first_glyph or node.first_character
+local traverse_id = node.traverse_id
+
local texsetattribute = tex.setattribute
local nodecodes = nodes.nodecodes
@@ -45,23 +48,9 @@ local setmetatableindex = table.setmetatableindex
local enableaction = nodes.tasks.enableaction
local disableaction = nodes.tasks.disableaction
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getchar = nuts.getchar
-local getfont = nuts.getfont
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-
-local insert_node_after = nuts.insert_after
-local first_glyph = nuts.first_glyph
-local traverse_id = nuts.traverse_id
-
-local nodepool = nuts.pool
+local insert_node_after = node.insert_after
+local nodepool = nodes.pool
local new_glue = nodepool.glue
local new_rule = nodepool.rule
local new_penalty = nodepool.penalty
@@ -411,7 +400,7 @@ scripts.numbertocategory = numbertocategory
local function colorize(start,stop)
for n in traverse_id(glyph_code,start) do
- local kind = numbertocategory[getattr(n,a_scriptstatus)]
+ local kind = numbertocategory[n[a_scriptstatus]]
if kind then
local ac = scriptcolors[kind]
if ac then
@@ -443,17 +432,16 @@ end
-- we can have a fonts.hashes.originals
function scripts.injectors.handler(head)
- head = tonut(head)
local start = first_glyph(head) -- we already have glyphs here (subtype 1)
if not start then
- return tonode(head), false
+ return head, false
else
local last_a, normal_process, lastfont, originals = nil, nil, nil, nil
local done, first, last, ok = false, nil, nil, false
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- local a = getattr(start,a_scriptinjection)
+ local a = start[a_scriptinjection]
if a then
if a ~= last_a then
if first then
@@ -475,7 +463,7 @@ function scripts.injectors.handler(head)
normal_process = handler.injector
end
if normal_process then
- local f = getfont(start)
+ local f = start.font
if f ~= lastfont then
originals = fontdata[f].resources
if resources then
@@ -485,13 +473,13 @@ function scripts.injectors.handler(head)
end
lastfont = f
end
- local c = getchar(start)
+ local c = start.char
if originals then
c = originals[c] or c
end
local h = hash[c]
if h then
- setattr(start,a_scriptstatus,categorytonumber[h])
+ start[a_scriptstatus] = categorytonumber[h]
if not first then
first, last = start, start
else
@@ -552,7 +540,7 @@ function scripts.injectors.handler(head)
first, last = nil, nil
end
end
- start = getnext(start)
+ start = start.next
end
if ok then
if trace_analyzing then
@@ -565,7 +553,7 @@ function scripts.injectors.handler(head)
end
done = true
end
- return tonode(head), done
+ return head, done
end
end
@@ -695,11 +683,11 @@ end)
local categories = characters.categories or { }
local function hit(root,head)
- local current = getnext(head)
+ local current = head.next
local lastrun = false
local lastfinal = false
- while current and getid(current) == glyph_code do
- local char = getchar(current)
+ while current and current.id == glyph_code do
+ local char = current.char
local newroot = root[char]
if newroot then
local final = newroot.final
@@ -713,7 +701,7 @@ local function hit(root,head)
else
return lastrun, lastfinal
end
- current = getnext(current)
+ current = current.next
end
if lastrun then
return lastrun, lastfinal
@@ -722,13 +710,12 @@ end
local tree, attr, proc
-function splitters.handler(head) -- todo: also first_glyph test
- head = tonut(head)
+function splitters.handler(head)
local current = head
local done = false
while current do
- if getid(current) == glyph_code then
- local a = getattr(current,a_scriptsplitting)
+ if current.id == glyph_code then
+ local a = current[a_scriptsplitting]
if a then
if a ~= attr then
local handler = numbertohandler[a]
@@ -737,14 +724,14 @@ function splitters.handler(head) -- todo: also first_glyph test
proc = handler.splitter
end
if proc then
- local root = tree[getchar(current)]
+ local root = tree[current.char]
if root then
-- we don't check for attributes in the hitter (yet)
local last, final = hit(root,current)
if last then
- local next = getnext(last)
- if next and getid(next) == glyph_code then
- local nextchar = getchar(next)
+ local next = last.next
+ if next and next.id == glyph_code then
+ local nextchar = next.char
if tree[nextchar] then
if trace_splitdetail then
if type(final) == "string" then
@@ -773,9 +760,9 @@ function splitters.handler(head) -- todo: also first_glyph test
end
end
end
- current = getnext(current)
+ current = current.next
end
- return tonode(head), done
+ return head, done
end
local function marker(head,current,font,color) -- could become: nodes.tracers.marker
@@ -805,8 +792,8 @@ end
local last_a, last_f, last_s, last_q
function splitters.insertafter(handler,head,first,last,detail)
- local a = getattr(first,a_scriptsplitting)
- local f = getfont(first)
+ local a = first[a_scriptsplitting]
+ local f = first.font
if a ~= last_a or f ~= last_f then
last_s = emwidths[f] * numbertodataset[a].inter_word_stretch_factor
last_a = a
@@ -883,15 +870,15 @@ setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end)
-- playing nice
function autofontfeature.handler(head)
- for n in traverse_id(glyph_code,tonut(head)) do
- -- if getattr(n,a_scriptinjection) then
+ for n in traverse_id(glyph_code,head) do
+ -- if n[a_scriptinjection] then
-- -- already tagged by script feature, maybe some day adapt
-- else
- local char = getchar(n)
+ local char = n.char
local script = otfscripts[char]
if script then
- local dynamic = getattr(n,0) or 0
- local font = getfont(n)
+ local dynamic = n[0] or 0
+ local font = n.font
if dynamic > 0 then
local slot = cache_yes[font]
local attr = slot[script]
@@ -917,7 +904,7 @@ function autofontfeature.handler(head)
end
end
if attr ~= 0 then
- setattr(n,0,attr)
+ n[0] = attr
-- maybe set scriptinjection when associated
end
end
diff --git a/tex/context/base/sort-ini.lua b/tex/context/base/sort-ini.lua
index d279f1253..479d1c489 100644
--- a/tex/context/base/sort-ini.lua
+++ b/tex/context/base/sort-ini.lua
@@ -82,7 +82,7 @@ local v_first = variables.first
local v_last = variables.last
local validmethods = table.tohash {
- "ch", -- raw character (for tracing)
+ -- "ch", -- raw character
"mm", -- minus mapping
"zm", -- zero mapping
"pm", -- plus mapping
@@ -120,7 +120,7 @@ local sorters = sorters
local constants = sorters.constants
local data, language, method, digits
-local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence, usedinsequence
+local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence
local thefirstofsplit
local mte = { -- todo: assign to t
@@ -334,9 +334,6 @@ local function setlanguage(l,m,d,u)
end
end
data.sequence = sequence
- usedinsequence = table.tohash(sequence)
- data.usedinsequence = usedinsequence
--- usedinsequence.ch = true -- better just store the string
if trace_tests then
report_sorters("using sort sequence: % t",sequence)
end
@@ -375,9 +372,7 @@ local function basicsort(sort_a,sort_b)
return 0
end
--- todo: compile compare function
-
-local function basic(a,b) -- trace ea and eb
+function comparers.basic(a,b) -- trace ea and eb
local ea, eb = a.split, b.split
local na, nb = #ea, #eb
if na == 0 and nb == 0 then
@@ -437,12 +432,6 @@ local function basic(a,b) -- trace ea and eb
end
end
-comparers.basic = basic
-
-function sorters.basicsorter(a,b)
- return basic(a,b) == -1
-end
-
local function numify(s)
s = digitsoffset + tonumber(s) -- alternatively we can create range
if s > digitsmaximum then
@@ -488,7 +477,7 @@ sorters.firstofsplit = firstofsplit
-- for the moment we use an inefficient bunch of tables but once
-- we know what combinations make sense we can optimize this
-function splitters.utf(str,checked) -- we could append m and u but this is cleaner, s is for tracing
+function splitters.utf(str) -- we could append m and u but this is cleaner, s is for tracing
if #replacements > 0 then
-- todo make an lpeg for this
for k=1,#replacements do
@@ -591,31 +580,18 @@ function splitters.utf(str,checked) -- we could append m and u but this is clean
-- p_mapping = { p_mappings[fs][1] }
-- end
-- end
+ local t = {
+ ch = char,
+ uc = byte,
+ mc = m_case,
+ zc = z_case,
+ pc = p_case,
+ mm = m_mapping,
+ zm = z_mapping,
+ pm = p_mapping,
+ }
- if checked then
- return {
- ch = trace_tests and char or nil, -- not in sequence
- uc = usedinsequence.uc and byte or nil,
- mc = usedinsequence.mc and m_case or nil,
- zc = usedinsequence.zc and z_case or nil,
- pc = usedinsequence.pc and p_case or nil,
- mm = usedinsequence.mm and m_mapping or nil,
- zm = usedinsequence.zm and z_mapping or nil,
- pm = usedinsequence.pm and p_mapping or nil,
- }
- else
- return {
- ch = char,
- uc = byte,
- mc = m_case,
- zc = z_case,
- pc = p_case,
- mm = m_mapping,
- zm = z_mapping,
- pm = p_mapping,
- }
- end
-
+ return t
end
local function packch(entry)
diff --git a/tex/context/base/sort-lan.lua b/tex/context/base/sort-lan.lua
index 6b0cc5007..6d16c0d80 100644
--- a/tex/context/base/sort-lan.lua
+++ b/tex/context/base/sort-lan.lua
@@ -310,7 +310,7 @@ local ch, CH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11)
definitions["cz"] = {
replacements = {
- { "ch", ch }, { "Ch", ch }, { "CH", ch }
+ { "ch", ch }, { "CH", CH }
},
entries = {
["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["č"] = "č",
diff --git a/tex/context/base/spac-ali.lua b/tex/context/base/spac-ali.lua
index 08e33c5b8..25cc6cd66 100644
--- a/tex/context/base/spac-ali.lua
+++ b/tex/context/base/spac-ali.lua
@@ -10,26 +10,13 @@ local div = math.div
local format = string.format
local tasks = nodes.tasks
+local appendaction = tasks.appendaction
+local prependaction = tasks.prependaction
+local disableaction = tasks.disableaction
local enableaction = tasks.enableaction
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getsubtype = nuts.getsubtype
-
-local hpack_nodes = nuts.hpack -- nodes.fasthpack not really faster here
-local linked_nodes = nuts.linked
+local slide_nodes = node.slide
+local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
local unsetvalue = attributes.unsetvalue
@@ -40,6 +27,8 @@ local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local line_code = listcodes.line
+local nodepool = nodes.pool
+
local new_stretch = nodepool.stretch
local a_realign = attributes.private("realign")
@@ -67,10 +56,10 @@ local function handler(head,leftpage,realpageno)
local current = head
local done = false
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code then
- if getsubtype(current) == line_code then
- local a = getattr(current,a_realign)
+ if current.subtype == line_code then
+ local a = current[a_realign]
if not a or a == 0 then
-- skip
else
@@ -86,12 +75,12 @@ local function handler(head,leftpage,realpageno)
action = leftpage and 2 or 1
end
if action == 1 then
- setfield(current,"list",hpack_nodes(linked_nodes(getlist(current),new_stretch(3)),getfield(current,"width"),"exactly"))
+ current.list = hpack_nodes(current.list .. new_stretch(3),current.width,"exactly")
if trace_realign then
report_realign("flushing left, align %a, page %a, realpage %a",align,pageno,realpageno)
end
elseif action == 2 then
- setfield(current,"list",hpack_nodes(linked_nodes(new_stretch(3),getlist(current)),getfield(current,"width"),"exactly"))
+ current.list = hpack_nodes(new_stretch(3) .. current.list,current.width,"exactly")
if trace_realign then
report_realign("flushing right. align %a, page %a, realpage %a",align,pageno,realpageno)
end
@@ -101,14 +90,14 @@ local function handler(head,leftpage,realpageno)
done = true
nofrealigned = nofrealigned + 1
end
- setattr(current,a_realign,unsetvalue)
+ current[a_realign] = unsetvalue
end
end
- handler(getlist(current),leftpage,realpageno)
+ handler(current.list,leftpage,realpageno)
elseif id == vlist_code then
- handler(getlist(current),leftpage,realpageno)
+ handler(current.list,leftpage,realpageno)
end
- current = getnext(current)
+ current = current.next
end
return head, done
end
@@ -116,8 +105,7 @@ end
function alignments.handler(head)
local leftpage = isleftpage(true,false)
local realpageno = texgetcount("realpageno")
- local head, done = handler(tonut(head),leftpage,realpageno)
- return tonode(head), done
+ return handler(head,leftpage,realpageno)
end
local enabled = false
diff --git a/tex/context/base/spac-ali.mkiv b/tex/context/base/spac-ali.mkiv
index cf95064a2..9c7e81379 100644
--- a/tex/context/base/spac-ali.mkiv
+++ b/tex/context/base/spac-ali.mkiv
@@ -585,36 +585,13 @@
\unexpanded\def\spac_align_use_now#1%
{\csname\??alignmentnormalcache#1\endcsname}
-% Maybe we need something different in columns.
+% The keywords:
\unexpanded\def\installalign#1#2% beware: commands must be unexpandable!
{\ifcsname\??aligncommand#1\endcsname \else
\setvalue{\??aligncommand#1}{\t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
\fi}
-% beware, toks stuff and states are set at a differt time, so installalign is
-% only for special options
-%
-% \setvalue{\??aligncommand whatever}%
-% {\c_spac_align_state_horizontal\plushundred
-% \t_spac_align_collected\expandafter{\the\t_spac_align_collected .....}}
-%
-% this one could deal with both
-%
-% \unexpanded\def\installalignoption#1#2%
-% {\ifcsname\??aligncommand#1\endcsname \else
-% \setvalue{\??aligncommand#1}%
-% {\spac_align_set_horizontal_none
-% \c_spac_align_state_horizontal\plushundred % don't set
-% \t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
-% \fi}
-%
-% \installalignoption
-% {whatever}
-% {}
-
-% The keywords:
-
\letvalue{\??aligncommand\empty }\empty
\setvalue{\??aligncommand\v!broad }{\c_spac_align_state_broad \plusone }
\setvalue{\??aligncommand\v!wide }{\c_spac_align_state_broad \plustwo }
diff --git a/tex/context/base/spac-chr.lua b/tex/context/base/spac-chr.lua
index 4122a64b6..db98b42a6 100644
--- a/tex/context/base/spac-chr.lua
+++ b/tex/context/base/spac-chr.lua
@@ -22,29 +22,14 @@ report_characters = logs.reporter("typesetting","characters")
local nodes, node = nodes, node
-local nuts = nodes.nuts
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-local copy_node_list = nuts.copy_list
-local traverse_id = nuts.traverse_id
+local insert_node_after = nodes.insert_after
+local remove_node = nodes.remove
+local copy_node_list = nodes.copy_list
+local traverse_id = nodes.traverse_id
local tasks = nodes.tasks
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_penalty = nodepool.penalty
local new_glue = nodepool.glue
@@ -78,47 +63,48 @@ local c_zero = byte('0')
local c_period = byte('.')
local function inject_quad_space(unicode,head,current,fraction)
- local attr = getfield(current,"attr")
+ local attr = current.attr
if fraction ~= 0 then
- fraction = fraction * fontquads[getfont(current)]
+ fraction = fraction * fontquads[current.font]
end
local glue = new_glue(fraction)
- setfield(glue,"attr",attr)
- setfield(current,"attr",nil)
- setattr(glue,a_character,unicode)
+-- glue.attr = copy_node_list(attr)
+ glue.attr = attr
+ current.attr = nil
+ glue[a_character] = unicode
head, current = insert_node_after(head,current,glue)
return head, current
end
local function inject_char_space(unicode,head,current,parent)
- local attr = getfield(current,"attr")
- local font = getfont(current)
+ local attr = current.attr
+ local font = current.font
local char = fontcharacters[font][parent]
local glue = new_glue(char and char.width or fontparameters[font].space)
- setfield(glue,"attr",attr)
- setfield(current,"attr",nil)
- setattr(glue,a_character,unicode)
+ glue.attr = current.attr
+ current.attr = nil
+ glue[a_character] = unicode
head, current = insert_node_after(head,current,glue)
return head, current
end
local function inject_nobreak_space(unicode,head,current,space,spacestretch,spaceshrink)
- local attr = getfield(current,"attr")
+ local attr = current.attr
local glue = new_glue(space,spacestretch,spaceshrink)
local penalty = new_penalty(10000)
- setfield(glue,"attr",attr)
- setfield(current,"attr",nil)
- setattr(glue,a_character,unicode)
+ glue.attr = attr
+ current.attr = nil
+ glue[a_character] = unicode
head, current = insert_node_after(head,current,penalty)
head, current = insert_node_after(head,current,glue)
return head, current
end
local function nbsp(head,current)
- local para = fontparameters[getfont(current)]
- if getattr(current,a_alignstate) == 1 then -- flushright
+ local para = fontparameters[current.font]
+ if current[a_alignstate] == 1 then -- flushright
head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0)
- setfield(current,"subtype",space_skip_code)
+ current.subtype = space_skip_code
else
head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink)
end
@@ -135,7 +121,7 @@ end
function characters.replacenbspaces(head)
for current in traverse_id(glyph_code,head) do
- if getchar(current) == 0x00A0 then
+ if current.char == 0x00A0 then
local h = nbsp(head,current)
if h then
head = remove_node(h,current,true)
@@ -161,21 +147,21 @@ local methods = {
-- don't have the 'local' value.
[0x00A0] = function(head,current) -- nbsp
- local next = getnext(current)
- if next and getid(next) == glyph_code then
- local char = getchar(next)
+ local next = current.next
+ if next and next.id == glyph_code then
+ local char = next.char
if char == 0x200C or char == 0x200D then -- nzwj zwj
- next = getnext(next)
- if next and nbsphash[getchar(next)] then
+ next = next.next
+ if next and nbsphash[next.char] then
return false
end
elseif nbsphash[char] then
return false
end
end
- local prev = getprev(current)
- if prev and getid(prev) == glyph_code and nbsphash[getchar(prev)] then
- return false
+ local prev = current.prev
+ if prev and prev.id == glyph_code and nbsphash[prev.char] then
+ return false -- kannada
end
return nbsp(head,current)
end,
@@ -229,11 +215,11 @@ local methods = {
end,
[0x202F] = function(head,current) -- narrownobreakspace
- return inject_nobreak_space(0x202F,head,current,fontquads[getfont(current)]/8)
+ return inject_nobreak_space(0x202F,head,current,fontquads[current.font]/8)
end,
[0x205F] = function(head,current) -- math thinspace
- return inject_nobreak_space(0x205F,head,current,fontparameters[getfont(current)].space/8)
+ return inject_nobreak_space(0x205F,head,current,fontparameters[current.font].space/8)
end,
-- [0xFEFF] = function(head,current) -- zerowidthnobreakspace
@@ -242,15 +228,14 @@ local methods = {
}
-function characters.handler(head) -- todo: use traverse_id
- head = tonut(head)
+function characters.handler(head)
local current = head
local done = false
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local next = getnext(current)
- local char = getchar(current)
+ local next = current.next
+ local char = current.char
local method = methods[char]
if method then
if trace_characters then
@@ -264,8 +249,8 @@ function characters.handler(head) -- todo: use traverse_id
end
current = next
else
- current = getnext(current)
+ current = current.next
end
end
- return tonode(head), done
+ return head, done
end
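
The handler above looks up methods[char] for every glyph and lets the method extend the list. A sketch of what such an entry looks like, using only the locals defined in this file (new_glue, insert_node_after, a_character); inject_example_space and its width parameter are made up for illustration:

    -- shape of a methods[...] entry: insert a glue of the given width after
    -- the glyph and hand the attributes over to it
    local function inject_example_space(unicode,head,current,width)
        local glue = new_glue(width)
        glue.attr = current.attr               -- keep attributes on the glue
        current.attr = nil
        glue[a_character] = unicode            -- tag the glue with the char
        head, current = insert_node_after(head,current,glue)
        return head, current
    end

As in the quad and nobreak variants above, the attributes are handed over to the inserted glue so that later passes still see them.
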
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index 7d78d6c12..0035c4119 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -37,6 +37,7 @@ local nodes, node, trackers, attributes, context, commands, tex = nodes, node,
local texlists = tex.lists
local texgetdimen = tex.getdimen
local texnest = tex.nest
+local texgetbox = tex.getbox
local variables = interfaces.variables
@@ -62,41 +63,23 @@ local a_skiporder = attributes.private('skiporder')
local a_snapmethod = attributes.private('snapmethod')
local a_snapvbox = attributes.private('snapvbox')
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-local ntostring = nuts.tostring
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getsubtype = nuts.getsubtype
-local getbox = nuts.getbox
-
-local find_node_tail = nuts.tail
-local free_node = nuts.free
-local free_node_list = nuts.flush_list
-local copy_node = nuts.copy
-local traverse_nodes = nuts.traverse
-local traverse_nodes_id = nuts.traverse_id
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-local count_nodes = nuts.count
-local hpack_node = nuts.hpack
-local vpack_node = nuts.vpack
-local writable_spec = nuts.writable_spec
-local nodereference = nuts.reference
-
-local listtoutf = nodes.listtoutf
+local find_node_tail = node.tail
+local free_node = node.free
+local free_node_list = node.flush_list
+local copy_node = node.copy
+local traverse_nodes = node.traverse
+local traverse_nodes_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove
+local count_nodes = nodes.count
local nodeidstostring = nodes.idstostring
+local hpack_node = node.hpack
+local vpack_node = node.vpack
+local writable_spec = nodes.writable_spec
+local listtoutf = nodes.listtoutf
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_penalty = nodepool.penalty
local new_kern = nodepool.kern
@@ -196,26 +179,28 @@ end
-- local rule_id = nodecodes.rule
-- local vlist_id = nodecodes.vlist
-- function nodes.makevtop(n)
--- if getid(n) == vlist_id then
--- local list = getlist(n)
--- local height = (list and getid(list) <= rule_id and getfield(list,"height")) or 0
--- setfield(n,"depth",getfield(n,"depth") - height + getfield(n,"height")
--- setfield(n,"height",height
+-- if n.id == vlist_id then
+-- local list = n.list
+-- local height = (list and list.id <= rule_id and list.height) or 0
+-- n.depth = n.depth - height + n.height
+-- n.height = height
-- end
-- end
+local reference = nodes.reference
+
local function validvbox(parentid,list)
if parentid == hlist_code then
- local id = getid(list)
+ local id = list.id
if id == whatsit_code then -- check for initial par subtype
- list = getnext(list)
+ list = list.next
if not list then
return nil
end
end
local done = nil
for n in traverse_nodes(list) do
- local id = getid(n)
+ local id = n.id
if id == vlist_code or id == hlist_code then
if done then
return nil
@@ -229,9 +214,9 @@ local function validvbox(parentid,list)
end
end
if done then
- local id = getid(done)
+ local id = done.id
if id == hlist_code then
- return validvbox(id,getlist(done))
+ return validvbox(id,done.list)
end
end
return done -- only one vbox
@@ -241,19 +226,19 @@ end
local function already_done(parentid,list,a_snapmethod) -- todo: done when only boxes and all snapped
-- problem: any snapped vbox ends up in a line
if list and parentid == hlist_code then
- local id = getid(list)
+ local id = list.id
if id == whatsit_code then -- check for initial par subtype
- list = getnext(list)
+ list = list.next
if not list then
return false
end
end
--~ local i = 0
for n in traverse_nodes(list) do
- local id = getid(n)
---~ i = i + 1 print(i,nodecodes[id],getattr(n,a_snapmethod))
+ local id = n.id
+--~ i = i + 1 print(i,nodecodes[id],n[a_snapmethod])
if id == hlist_code or id == vlist_code then
- local a = getattr(n,a_snapmethod)
+ local a = n[a_snapmethod]
if not a then
-- return true -- not snapped at all
elseif a == 0 then
@@ -291,11 +276,11 @@ end
-- check variables.none etc
local function snap_hlist(where,current,method,height,depth) -- method.strut is default
- local list = getlist(current)
+ local list = current.list
local t = trace_vsnapping and { }
if t then
t[#t+1] = formatters["list content: %s"](listtoutf(list))
- t[#t+1] = formatters["parent id: %s"](nodereference(current))
+ t[#t+1] = formatters["parent id: %s"](reference(current))
t[#t+1] = formatters["snap method: %s"](method.name)
t[#t+1] = formatters["specification: %s"](method.specification)
end
@@ -327,8 +312,7 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
t[#t+1] = formatters["auto: snapht %p snapdp %p"](snapht,snapdp)
end
end
- local h = height or getfield(current,"height")
- local d = depth or getfield(current,"depth")
+ local h, d = height or current.height, depth or current.depth
local hr, dr, ch, cd = method.hfraction or 1, method.dfraction or 1, h, d
local tlines, blines = method.tlines or 1, method.blines or 1
local done, plusht, plusdp = false, snapht, snapdp
@@ -355,22 +339,22 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if method.first then
local thebox = current
- local id = getid(thebox)
+ local id = thebox.id
if id == hlist_code then
- thebox = validvbox(id,getlist(thebox))
- id = thebox and getid(thebox)
+ thebox = validvbox(id,thebox.list)
+ id = thebox and thebox.id
end
if thebox and id == vlist_code then
- local list = getlist(thebox)
+ local list = thebox.list
local lh, ld
for n in traverse_nodes_id(hlist_code,list) do
- lh = getfield(n,"height")
- ld = getfield(n,"depth")
+ lh = n.height
+ ld = n.depth
break
end
if lh then
- local ht = getfield(thebox,"height")
- local dp = getfield(thebox,"depth")
+ local ht = thebox.height
+ local dp = thebox.depth
if t then
t[#t+1] = formatters["first line: height %p depth %p"](lh,ld)
t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
@@ -378,9 +362,9 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local delta = h - lh
ch, cd = lh, delta + d
h, d = ch, cd
- local shifted = hpack_node(getlist(current))
- setfield(shifted,"shift",delta)
- setfield(current,"list",shifted)
+ local shifted = hpack_node(current.list)
+ shifted.shift = delta
+ current.list = shifted
done = true
if t then
t[#t+1] = formatters["first: height %p depth %p shift %p"](ch,cd,delta)
@@ -393,21 +377,20 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
end
elseif method.last then
local thebox = current
- local id = getid(thebox)
+ local id = thebox.id
if id == hlist_code then
- thebox = validvbox(id,getlist(thebox))
- id = thebox and getid(thebox)
+ thebox = validvbox(id,thebox.list)
+ id = thebox and thebox.id
end
if thebox and id == vlist_code then
- local list = getlist(thebox)
- local lh, ld
+ local list, lh, ld = thebox.list
for n in traverse_nodes_id(hlist_code,list) do
- lh = getfield(n,"height")
- ld = getfield(n,"depth")
+ lh = n.height
+ ld = n.depth
end
if lh then
- local ht = getfield(thebox,"height")
- local dp = getfield(thebox,"depth")
+ local ht = thebox.height
+ local dp = thebox.depth
if t then
t[#t+1] = formatters["last line: height %p depth %p" ](lh,ld)
t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
@@ -415,9 +398,9 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local delta = d - ld
cd, ch = ld, delta + h
h, d = ch, cd
- local shifted = hpack_node(getlist(current))
- setfield(shifted,"shift",delta)
- setfield(current,"list",shifted)
+ local shifted = hpack_node(current.list)
+ shifted.shift = delta
+ current.list = shifted
done = true
if t then
t[#t+1] = formatters["last: height %p depth %p shift %p"](ch,cd,delta)
@@ -478,25 +461,25 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if offset then
-- we need to set the attr
if t then
- t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,getfield(current,"width"),getfield(current,"height"),getfield(current,"depth"))
+ t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
end
- local shifted = hpack_node(getlist(current))
- setfield(shifted,"shift",offset)
- setfield(current,"list",shifted)
+ local shifted = hpack_node(current.list)
+ shifted.shift = offset
+ current.list = shifted
if t then
- t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,getfield(current,"width"),getfield(current,"height"),getfield(current,"depth"))
+ t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
end
- setattr(shifted,a_snapmethod,0)
- setattr(current,a_snapmethod,0)
+ shifted[a_snapmethod] = 0
+ current[a_snapmethod] = 0
end
if not height then
- setfield(current,"height",ch)
+ current.height = ch
if t then
t[#t+1] = formatters["forced height: %p"](ch)
end
end
if not depth then
- setfield(current,"depth",cd)
+ current.depth = cd
if t then
t[#t+1] = formatters["forced depth: %p"](cd)
end
@@ -510,17 +493,17 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
t[#t+1] = formatters["final depth: %p -> %p"](d,cd)
end
if t then
- report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[getid(current)],t)
+ report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[current.id],t)
end
return h, d, ch, cd, lines
end
local function snap_topskip(current,method)
- local spec = getfield(current,"spec")
- local w = getfield(spec,"width")
+ local spec = current.spec
+ local w = spec.width
local wd = w
- if getfield(spec,"writable") then
- setfield(spec,"width",0)
+ if spec.writable then
+ spec.width = 0
wd = 0
end
return w, wd
@@ -681,18 +664,18 @@ local trace_list, tracing_info, before, after = { }, false, "", ""
local function nodes_to_string(head)
local current, t = head, { }
while current do
- local id = getid(current)
+ local id = current.id
local ty = nodecodes[id]
if id == penalty_code then
- t[#t+1] = formatters["%s:%s"](ty,getfield(current,"penalty"))
+ t[#t+1] = formatters["%s:%s"](ty,current.penalty)
elseif id == glue_code then -- or id == kern_code then -- to be tested
t[#t+1] = formatters["%s:%p"](ty,current)
elseif id == kern_code then
- t[#t+1] = formatters["%s:%p"](ty,getfield(current,"kern"))
+ t[#t+1] = formatters["%s:%p"](ty,current.kern)
else
t[#t+1] = ty
end
- current = getnext(current)
+ current = current.next
end
return concat(t," + ")
end
@@ -716,7 +699,7 @@ local function trace_info(message, where, what)
end
local function trace_node(what)
- local nt = nodecodes[getid(what)]
+ local nt = nodecodes[what.id]
local tl = trace_list[#trace_list]
if tl and tl[1] == "node" then
trace_list[#trace_list] = { "node", formatters["%s + %s"](tl[2],nt) }
@@ -726,8 +709,8 @@ local function trace_node(what)
end
local function trace_done(str,data)
- if getid(data) == penalty_code then
- trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,getfield(data,"penalty")) }
+ if data.id == penalty_code then
+ trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,data.penalty) }
else
trace_list[#trace_list+1] = { "glue", formatters["%s | %p"](str,data) }
end
@@ -765,31 +748,22 @@ local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
local topskip_code = skipcodes.topskip
local splittopskip_code = skipcodes.splittopskip
--- local function free_glue_node(n)
--- free_node(n)
--- local s = getfield(n,"spec")
--- if s then
--- free_node(s)
--- end
--- end
-
local free_glue_node = free_node
-local free_glue_spec = function() end -- free_node
function vspacing.snapbox(n,how)
local sv = snapmethods[how]
if sv then
- local box = getbox(n)
- local list = getlist(box)
+ local box = texgetbox(n)
+ local list = box.list
if list then
- local s = getattr(list,a_snapmethod)
+ local s = list[a_snapmethod]
if s == 0 then
if trace_vsnapping then
-- report_snapper("box list not snapped, already done")
end
else
- local ht = getfield(box,"height")
- local dp = getfield(box,"depth")
+ local ht = box.height
+ local dp = box.depth
if false then -- todo: already_done
-- assume that the box is already snapped
if trace_vsnapping then
@@ -798,14 +772,14 @@ function vspacing.snapbox(n,how)
end
else
local h, d, ch, cd, lines = snap_hlist("box",box,sv,ht,dp)
- setfield(box,"height",ch)
- setfield(box,"depth",cd)
+ box.height = ch
+ box.depth = cd
if trace_vsnapping then
report_snapper("box list snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s",
h,d,ch,cd,sv.name,sv.specification,"direct",lines,listtoutf(list))
end
- setattr(box,a_snapmethod,0) --
- setattr(list,a_snapmethod,0) -- yes or no
+ box[a_snapmethod] = 0 --
+ list[a_snapmethod] = 0 -- yes or no
end
end
end
@@ -827,10 +801,8 @@ local w, h, d = 0, 0, 0
----- w, h, d = 100*65536, 65536, 65536
local function forced_skip(head,current,width,where,trace)
- if head == current then
- if getsubtype(head) == baselineskip_code then
- width = width - getfield(getfield(head,"spec"),"width")
- end
+ if head == current and head.subtype == baselineskip_code then
+ width = width - head.spec.width
end
if width == 0 then
-- do nothing
@@ -862,25 +834,25 @@ local special_penalty_max = 35000
local function specialpenalty(start,penalty)
-- nodes.showsimplelist(texlists.page_head,1)
- local current = find_node_tail(tonut(texlists.page_head)) -- no texlists.page_tail yet
+ local current = find_node_tail(texlists.page_head)
while current do
- local id = getid(current)
+ local id = current.id
if id == glue_code then
- current = getprev(current)
+ current = current.prev
elseif id == penalty_code then
- local p = getfield(current,"penalty")
+ local p = current.penalty
if p == penalty then
if trace_vspacing then
report_vspacing("overloading penalty %a",p)
end
return current
elseif p >= 10000 then
- current = getprev(current)
+ current = current.prev
else
break
end
else
- current = getprev(current)
+ current = current.prev
end
end
end
@@ -903,12 +875,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
head = insert_node_before(head,current,p)
end
if glue_data then
- local spec = getfield(glue_data,"spec")
+ local spec = glue_data.spec
if force_glue then
if trace then trace_done("flushed due to " .. why,glue_data) end
- head = forced_skip(head,current,getfield(spec,"width"),"before",trace)
+ head = forced_skip(head,current,spec.width,"before",trace)
free_glue_node(glue_data)
- elseif getfield(spec,"writable") then
+ elseif spec.writable then
if trace then trace_done("flushed due to " .. why,glue_data) end
head = insert_node_before(head,current,glue_data)
else
@@ -928,12 +900,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
if trace then trace_info("start analyzing",where,what) end
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code or id == vlist_code then
-- needs checking, why so many calls
if snap then
- local list = getlist(current)
- local s = getattr(current,a_snapmethod)
+ local list = current.list
+ local s = current[a_snapmethod]
if not s then
-- if trace_vsnapping then
-- report_snapper("mvl list not snapped")
@@ -947,8 +919,8 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if sv then
-- check if already snapped
if list and already_done(id,list,a_snapmethod) then
- local ht = getfield(current,"height")
- local dp = getfield(current,"depth")
+ local ht = current.height
+ local dp = current.depth
-- assume that the box is already snapped
if trace_vsnapping then
report_snapper("mvl list already snapped at (%p,%p): %s",ht,dp,listtoutf(list))
@@ -963,39 +935,40 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif trace_vsnapping then
report_snapper("mvl %a not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list))
end
- setattr(current,a_snapmethod,0)
+ current[a_snapmethod] = 0
end
else
--
end
-- tex.prevdepth = 0
flush("list")
- current = getnext(current)
+ current = current.next
elseif id == penalty_code then
- -- natural_penalty = getfield(current,"penalty")
+ -- natural_penalty = current.penalty
-- if trace then trace_done("removed penalty",current) end
-- head, current = remove_node(head, current, true)
- current = getnext(current)
+ current = current.next
elseif id == kern_code then
- if snap and trace_vsnapping and getfield(current,"kern") ~= 0 then
- report_snapper("kern of %p kept",getfield(current,"kern"))
+ if snap and trace_vsnapping and current.kern ~= 0 then
+ report_snapper("kern of %p kept",current.kern)
end
flush("kern")
- current = getnext(current)
+ current = current.next
elseif id == glue_code then
- local subtype = getsubtype(current)
+ local subtype = current.subtype
if subtype == userskip_code then
- local sc = getattr(current,a_skipcategory) -- has no default, no unset (yet)
- local so = getattr(current,a_skiporder) or 1 -- has 1 default, no unset (yet)
- local sp = getattr(current,a_skippenalty) -- has no default, no unset (yet)
+ local sc = current[a_skipcategory] -- has no default, no unset (yet)
+ local so = current[a_skiporder] or 1 -- has 1 default, no unset (yet)
+ local sp = current[a_skippenalty] -- has no default, no unset (yet)
if sp and sc == penalty then
- if where == "page" and sp >= special_penalty_min and sp <= special_penalty_max then
- local previousspecial = specialpenalty(current,sp)
- if previousspecial then
- setfield(previousspecial,"penalty",0)
- sp = 0
- end
- end
+
+if where == "page" and sp >= special_penalty_min and sp <= special_penalty_max then
+ local previousspecial = specialpenalty(current,sp)
+ if previousspecial then
+ previousspecial.penalty = 0
+ sp = 0
+ end
+end
if not penalty_data then
penalty_data = sp
elseif penalty_order < so then
@@ -1010,38 +983,37 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_done("flush",glue_data) end
head = insert_node_before(head,current,glue_data)
if trace then trace_natural("natural",current) end
- current = getnext(current)
+ current = current.next
else
-- not look back across head
-- todo: prev can be whatsit (latelua)
- local previous = getprev(current)
- if previous and getid(previous) == glue_code and getsubtype(previous) == userskip_code then
- local ps = getfield(previous,"spec")
- if getfield(ps,"writable") then
- local cs = getfield(current,"spec")
- if getfield(cs,"writable") and getfield(ps,"stretch_order") == 0 and getfield(ps,"shrink_order") == 0 and getfield(cs,"stretch_order") == 0 and getfield(cs,"shrink_order") == 0 then
- local pw, pp, pm = getfield(ps,"width"), getfield(ps,"stretch"), getfield(ps,"shrink")
- local cw, cp, cm = getfield(cs,"width"), getfield(cs,"stretch"), getfield(cs,"shrink")
+ local previous = current.prev
+ if previous and previous.id == glue_code and previous.subtype == userskip_code then
+ local ps = previous.spec
+ if ps.writable then
+ local cs = current.spec
+ if cs.writable and ps.stretch_order == 0 and ps.shrink_order == 0 and cs.stretch_order == 0 and cs.shrink_order == 0 then
+ local pw, pp, pm = ps.width, ps.stretch, ps.shrink
+ local cw, cp, cm = cs.width, cs.stretch, cs.shrink
-- ps = writable_spec(previous) -- no writable needed here
-- ps.width, ps.stretch, ps.shrink = pw + cw, pp + cp, pm + cm
- free_glue_spec(ps)
- setfield(previous,"spec",new_gluespec(pw + cw, pp + cp, pm + cm)) -- else topskip can disappear
+ previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disappear
if trace then trace_natural("removed",current) end
head, current = remove_node(head, current, true)
-- current = previous
if trace then trace_natural("collapsed",previous) end
- -- current = getnext(current)
+ -- current = current.next
else
if trace then trace_natural("filler",current) end
- current = getnext(current)
+ current = current.next
end
else
if trace then trace_natural("natural (no prev spec)",current) end
- current = getnext(current)
+ current = current.next
end
else
if trace then trace_natural("natural (no prev)",current) end
- current = getnext(current)
+ current = current.next
end
end
glue_order, glue_data = 0, nil
@@ -1074,12 +1046,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif glue_order == so then
-- is now exclusive, maybe support goback as combi, else why a set
if sc == largest then
- local cs, gs = getfield(current,"spec"), getfield(glue_data,"spec")
- local cw, gw = getfield(cs,"width"), getfield(gs,"width")
+ local cs, gs = current.spec, glue_data.spec
+ local cw, gw = cs.width, gs.width
if cw > gw then
if trace then trace_skip("largest",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head,current)
+ head, current, glue_data = remove_node(head, current)
else
if trace then trace_skip("remove smallest",sc,so,sp,current) end
head, current = remove_node(head, current, true)
@@ -1087,7 +1059,7 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif sc == goback then
if trace then trace_skip("goback",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head,current)
+ head, current, glue_data = remove_node(head, current)
elseif sc == force then
-- last one counts, some day we can provide an accumulator and largest etc
-- but not now
@@ -1101,11 +1073,11 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
head, current = remove_node(head, current, true)
elseif sc == add then
if trace then trace_skip("add",sc,so,sp,current) end
- -- local old, new = glue_data.spec, getfield(current,"spec")
- local old, new = writable_spec(glue_data), getfield(current,"spec")
- setfield(old,"width",getfield(old,"width") + getfield(new,"width"))
- setfield(old,"stretch",getfield(old,"stretch") + getfield(new,"stretch"))
- setfield(old,"shrink",getfield(old,"shrink") + getfield(new,"shrink"))
+ -- local old, new = glue_data.spec, current.spec
+ local old, new = writable_spec(glue_data), current.spec
+ old.width = old.width + new.width
+ old.stretch = old.stretch + new.stretch
+ old.shrink = old.shrink + new.shrink
-- todo: order
head, current = remove_node(head, current, true)
else
@@ -1121,13 +1093,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
elseif subtype == lineskip_code then
if snap then
- local s = getattr(current,a_snapmethod)
+ local s = current[a_snapmethod]
if s and s ~= 0 then
- setattr(current,a_snapmethod,0)
- local spec = getfield(current,"spec")
- if getfield(spec,"writable") then
+ current[a_snapmethod] = 0
+ if current.spec.writable then
local spec = writable_spec(current)
- setfield(spec,"width",0)
+ spec.width = 0
if trace_vsnapping then
report_snapper("lineskip set to zero")
end
@@ -1140,16 +1111,15 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_skip("lineskip",sc,so,sp,current) end
flush("lineskip")
end
- current = getnext(current)
+ current = current.next
elseif subtype == baselineskip_code then
if snap then
- local s = getattr(current,a_snapmethod)
+ local s = current[a_snapmethod]
if s and s ~= 0 then
- setattr(current,a_snapmethod,0)
- local spec = getfield(current,"spec")
- if getfield(spec,"writable") then
+ current[a_snapmethod] = 0
+ if current.spec.writable then
local spec = writable_spec(current)
- setfield(spec,"width",0)
+ spec.width = 0
if trace_vsnapping then
report_snapper("baselineskip set to zero")
end
@@ -1162,17 +1132,17 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_skip("baselineskip",sc,so,sp,current) end
flush("baselineskip")
end
- current = getnext(current)
+ current = current.next
elseif subtype == parskip_code then
-- parskip always comes later
if ignore_whitespace then
if trace then trace_natural("ignored parskip",current) end
head, current = remove_node(head, current, true)
elseif glue_data then
- local ps = getfield(current,"spec")
- local gs = getfield(glue_data,"spec")
- if getfield(ps,"writable") and getfield(gs,"writable") and getfield(ps,"width") > getfield(gs,"width") then
- setfield(glue_data,"spec",copy_node(ps))
+ local ps = current.spec
+ local gs = glue_data.spec
+ if ps.writable and gs.writable and ps.width > gs.width then
+ glue_data.spec = copy_node(ps)
if trace then trace_natural("taking parskip",current) end
else
if trace then trace_natural("removed parskip",current) end
@@ -1184,9 +1154,9 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
elseif subtype == topskip_code or subtype == splittopskip_code then
if snap then
- local s = getattr(current,a_snapmethod)
+ local s = current[a_snapmethod]
if s and s ~= 0 then
- setattr(current,a_snapmethod,0)
+ current[a_snapmethod] = 0
local sv = snapmethods[s]
local w, cw = snap_topskip(current,sv)
if trace_vsnapping then
@@ -1200,46 +1170,46 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if trace then trace_skip("topskip",sc,so,sp,current) end
flush("topskip")
end
- current = getnext(current)
+ current = current.next
elseif subtype == abovedisplayskip_code then
--
if trace then trace_skip("above display skip (normal)",sc,so,sp,current) end
flush("above display skip (normal)")
- current = getnext(current)
+ current = current.next
--
elseif subtype == belowdisplayskip_code then
--
if trace then trace_skip("below display skip (normal)",sc,so,sp,current) end
flush("below display skip (normal)")
- current = getnext(current)
- --
+ current = current.next
+ --
elseif subtype == abovedisplayshortskip_code then
--
if trace then trace_skip("above display skip (short)",sc,so,sp,current) end
flush("above display skip (short)")
- current = getnext(current)
+ current = current.next
--
elseif subtype == belowdisplayshortskip_code then
--
if trace then trace_skip("below display skip (short)",sc,so,sp,current) end
flush("below display skip (short)")
- current = getnext(current)
+ current = current.next
--
else -- other glue
if snap and trace_vsnapping then
- local spec = getfield(current,"spec")
- if getfield(spec,"writable") and getfield(spec,"width") ~= 0 then
- report_snapper("glue %p of type %a kept",getfield(spec,"width"),skipcodes[subtype])
- -- setfield(spec,"width",0)
+ local spec = current.spec
+ if spec.writable and spec.width ~= 0 then
+ report_snapper("glue %p of type %a kept",current.spec.width,skipcodes[subtype])
+ -- spec.width = 0
end
end
- if trace then trace_skip(formatters["glue of type %a"](subtype),sc,so,sp,current) end
+ if trace then trace_skip(formatters["glue of type %a"](subtype),sc,so,sp,current) end
flush("some glue")
- current = getnext(current)
+ current = current.next
end
else
- flush(formatters["node with id %a"](id))
- current = getnext(current)
+ flush("something else")
+ current = current.next
end
end
if trace then trace_info("stop analyzing",where,what) end
@@ -1260,8 +1230,7 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if not tail then tail = find_node_tail(head) end
if trace then trace_done("result",glue_data) end
if force_glue then
- local spec = getfield(glue_data,"spec")
- head, tail = forced_skip(head,tail,getfield(spec,"width"),"after",trace)
+ head, tail = forced_skip(head,tail,glue_data.spec.width,"after",trace)
free_glue_node(glue_data)
else
head, tail = insert_node_after(head,tail,glue_data)
@@ -1274,7 +1243,7 @@ texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevd
end
show_tracing(head)
if oldhead ~= head then
- trace_info("head has been changed from %a to %a",nodecodes[getid(oldhead)],nodecodes[getid(head)])
+ trace_info("head has been changed from %a to %a",nodecodes[oldhead.id],nodecodes[head.id])
end
end
return head, true
@@ -1302,17 +1271,16 @@ end
function vspacing.pagehandler(newhead,where)
-- local newhead = texlists.contrib_head
if newhead then
- newhead = tonut(newhead)
local newtail = find_node_tail(newhead) -- best pass that tail, known anyway
local flush = false
stackhack = true -- todo: only when grid snapping once enabled
-- todo: fast check if head = tail
for n in traverse_nodes(newhead) do -- we could just look for glue nodes
- local id = getid(n)
+ local id = n.id
if id ~= glue_code then
flush = true
- elseif getsubtype(n) == userskip_code then
- if getattr(n,a_skipcategory) then
+ elseif n.subtype == userskip_code then
+ if n[a_skipcategory] then
stackhack = true
else
flush = true
@@ -1324,36 +1292,35 @@ function vspacing.pagehandler(newhead,where)
if flush then
if stackhead then
if trace_collect_vspacing then report("appending %s nodes to stack (final): %s",newhead) end
- setfield(stacktail,"next",newhead)
- setfield(newhead,"prev",stacktail)
+ stacktail.next = newhead
+ newhead.prev = stacktail
newhead = stackhead
stackhead, stacktail = nil, nil
end
if stackhack then
stackhack = false
if trace_collect_vspacing then report("processing %s nodes: %s",newhead) end
- -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
- newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
else
if trace_collect_vspacing then report("flushing %s nodes: %s",newhead) end
-- texlists.contrib_head = newhead
end
- return tonode(newhead)
else
if stackhead then
if trace_collect_vspacing then report("appending %s nodes to stack (intermediate): %s",newhead) end
- setfield(stacktail,"next",newhead)
- setfield(newhead,"prev",stacktail)
+ stacktail.next = newhead
+ newhead.prev = stacktail
else
if trace_collect_vspacing then report("storing %s nodes in stack (initial): %s",newhead) end
stackhead = newhead
end
stacktail = newtail
-- texlists.contrib_head = nil
- -- newhead = nil
+ newhead = nil
end
end
- return nil
+ return newhead
end
local ignore = table.tohash {
@@ -1363,23 +1330,18 @@ local ignore = table.tohash {
}
function vspacing.vboxhandler(head,where)
- if head and not ignore[where] then
- local h = tonut(head)
- if getnext(h) then
- h = collapser(h,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper
- return tonode(h)
- end
+ if head and not ignore[where] and head.next then
+ head = collapser(head,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper
end
return head
end
function vspacing.collapsevbox(n) -- for boxes but using global a_snapmethod
- local box = getbox(n)
+ local box = texgetbox(n)
if box then
- local list = getlist(box)
+ local list = box.list
if list then
- list = collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod)
- setfield(box,"list",vpack_node(list))
+ box.list = vpack_node(collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod))
end
end
end
@@ -1390,9 +1352,7 @@ end
local outer = texnest[0]
function vspacing.resetprevdepth()
- if texlists.hold_head then
- outer.prevdepth = 0
- end
+ outer.prevdepth = 0
end
-- interface
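
The natural-spacing branch above folds two adjacent user skips into one. Reduced to a helper it looks like the sketch below (mergeskips is a made-up name, new_gluespec is the pool constructor already used in the patched code, and the caller is expected to remove current with remove_node afterwards):

    -- fold two adjacent writable user skips with zero orders into one spec
    local function mergeskips(previous,current)
        local ps, cs = previous.spec, current.spec
        if ps.writable and cs.writable
            and ps.stretch_order == 0 and ps.shrink_order == 0
            and cs.stretch_order == 0 and cs.shrink_order == 0 then
            previous.spec = new_gluespec(
                ps.width   + cs.width,
                ps.stretch + cs.stretch,
                ps.shrink  + cs.shrink
            )
            return true                        -- caller removes 'current'
        end
        return false
    end
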
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index ae09bb5ae..a74501e41 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 8f1f3e5c8..a591afb75 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/status-mkiv.lua b/tex/context/base/status-mkiv.lua
index 339bc24f6..caa7dc16c 100644
--- a/tex/context/base/status-mkiv.lua
+++ b/tex/context/base/status-mkiv.lua
@@ -2014,13 +2014,13 @@ return {
{
category = "mkiv",
filename = "bibl-bib",
- loading = "on demand",
+ loading = "always",
status = "pending",
},
{
category = "mkiv",
filename = "bibl-tra",
- loading = "on demand",
+ loading = "always",
status = "pending",
},
{
@@ -2534,60 +2534,6 @@ return {
loading = "on demand",
status = "okay",
},
- {
- category = "mkiv",
- filename = "publ-ini",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-old",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-tra",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-usr",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-xml",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-imp-apa",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-imp-cite",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-imp-definitions",
- loading = "always",
- status = "pending",
- },
- {
- category = "mkiv",
- filename = "publ-imp-commands",
- loading = "always",
- status = "pending",
- },
},
lua = {
{
@@ -2660,12 +2606,12 @@ return {
{
category = "lua",
filename = "bibl-bib",
- loading = "on demand",
+ status = "todo",
},
{
category = "lua",
filename = "bibl-tra",
- loading = "on demand",
+ status = "todo",
},
{
category = "lua",
@@ -5051,42 +4997,6 @@ return {
filename = "x-mathml",
status = "todo",
},
- {
- category = "lua",
- filename = "publ-ini",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
- {
- category = "lua",
- filename = "publ-aut",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
- {
- category = "lua",
- filename = "publ-dat",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
- {
- category = "lua",
- filename = "publ-oth",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
- {
- category = "lua",
- filename = "publ-tra",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
- {
- category = "lua",
- filename = "publ-usr",
- loading = "publ-ini.mkiv",
- status = "pending",
- },
},
metafun = {
{
diff --git a/tex/context/base/strc-lst.mkvi b/tex/context/base/strc-lst.mkvi
index f78881221..63c3e030a 100644
--- a/tex/context/base/strc-lst.mkvi
+++ b/tex/context/base/strc-lst.mkvi
@@ -889,7 +889,6 @@
\startsetups[\??listrenderings:abc]
\endgraf % are we grouped?
-% \advance % yes or no ... \rightskip is also honored
\leftskip\listparameter\c!margin % after \endgraf !
\listparameter\c!before
\endgraf
diff --git a/tex/context/base/strc-mar.lua b/tex/context/base/strc-mar.lua
index 258787d0a..b3a6e8f35 100644
--- a/tex/context/base/strc-mar.lua
+++ b/tex/context/base/strc-mar.lua
@@ -19,27 +19,14 @@ local commands = commands
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getbox = nuts.getbox
-
-local traversenodes = nuts.traverse
-
+local traversenodes = nodes.traverse
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local texsetattribute = tex.setattribute
+local texgetbox = tex.getbox
local a_marks = attributes.private("structure","marks")
@@ -119,9 +106,9 @@ end
local function sweep(head,first,last)
for n in traversenodes(head) do
- local id = getid(n)
+ local id = n.id
if id == glyph_code then
- local a = getattr(n,a_marks)
+ local a = n[a_marks]
if not a then
-- next
elseif first == 0 then
@@ -131,7 +118,7 @@ local function sweep(head,first,last)
end
elseif id == hlist_code or id == vlist_code then
if boxes_too then
- local a = getattr(n,a_marks)
+ local a = n[a_marks]
if not a then
-- next
elseif first == 0 then
@@ -140,7 +127,7 @@ local function sweep(head,first,last)
last = a
end
end
- local list = getlist(n)
+ local list = n.list
if list then
first, last = sweep(list,first,last)
end
@@ -156,9 +143,9 @@ setmetatableindex(classes, function(t,k) local s = settings_to_array(k) t[k] = s
local lasts = { }
function marks.synchronize(class,n,option)
- local box = getbox(n)
+ local box = texgetbox(n)
if box then
- local first, last = sweep(getlist(box),0,0)
+ local first, last = sweep(box.list,0,0)
if option == v_keep and first == 0 and last == 0 then
if trace_marks_get or trace_marks_set then
report_marks("action %a, class %a, box %a","retain at synchronize",class,n)
diff --git a/tex/context/base/strc-mat.mkiv b/tex/context/base/strc-mat.mkiv
index 18cb005cb..b9263cdb0 100644
--- a/tex/context/base/strc-mat.mkiv
+++ b/tex/context/base/strc-mat.mkiv
@@ -630,13 +630,13 @@
% \prevdepth-\maxdimen % texbook pagina 79-80
% \fi
% \noindent % else funny hlist with funny baselineskip
-% $$% \Ucheckedstartdisplaymath
+% $$% \Ustartdisplaymath
% \setdisplaydimensions
% \startinnermath}
%
% \unexpanded\def\stopdisplaymath
% {\stopinnermath
-% $$% \Ucheckedstopdisplaymath
+% $$% \Ustopdisplaymath
% \par
% \afterdisplayspace
% \par
@@ -663,13 +663,13 @@
\fi
\fi
\noindent % else funny hlist with funny baselineskip
- \Ucheckedstartdisplaymath
+ $$% \Ustartdisplaymath
\setdisplaydimensions
\startinnermath}
\unexpanded\def\stopdisplaymath
{\stopinnermath
- \Ucheckedstopdisplaymath
+ $$% \Ustopdisplaymath
\par
\ifvmode
\ifcase\c_strc_formulas_space_model
diff --git a/tex/context/base/strc-pag.lua b/tex/context/base/strc-pag.lua
index fd0a367aa..02ed5610f 100644
--- a/tex/context/base/strc-pag.lua
+++ b/tex/context/base/strc-pag.lua
@@ -40,19 +40,17 @@ local collected, tobesaved = allocate(), allocate()
pages.collected = collected
pages.tobesaved = tobesaved
-pages.nofpages = 0
local function initializer()
collected = pages.collected
tobesaved = pages.tobesaved
- pages.nofpages = #collected
end
job.register('structures.pages.collected', tobesaved, initializer)
local specification = { } -- to be checked
-function pages.save(prefixdata,numberdata,extradata)
+function pages.save(prefixdata,numberdata)
local realpage = texgetcount("realpageno")
local userpage = texgetcount("userpageno")
if realpage > 0 then
@@ -60,12 +58,10 @@ function pages.save(prefixdata,numberdata,extradata)
report_pages("saving page %s.%s",realpage,userpage)
end
local data = {
- number = userpage,
- viewerprefix = extradata.viewerprefix,
- state = extradata.state,
- block = sections.currentblock(),
- prefixdata = prefixdata and helpers.simplify(prefixdata),
- numberdata = numberdata and helpers.simplify(numberdata),
+ number = userpage,
+ block = sections.currentblock(),
+ prefixdata = prefixdata and helpers.simplify(prefixdata),
+ numberdata = numberdata and helpers.simplify(numberdata),
}
tobesaved[realpage] = data
if not collected[realpage] then
diff --git a/tex/context/base/strc-pag.mkiv b/tex/context/base/strc-pag.mkiv
index c4e9819ba..85cfeb40f 100644
--- a/tex/context/base/strc-pag.mkiv
+++ b/tex/context/base/strc-pag.mkiv
@@ -106,8 +106,6 @@
\let\setuppagenumber\setupuserpagenumber
\let\resetpagenumber\resetuserpagenumber
-% invisible =
-
\def\strc_pagenumbers_page_state_save % \normalexpanded?
{\ctxlua{structures.pages.save({
prefix = "\namedcounterparameter\s!userpage\c!prefix",
@@ -122,9 +120,6 @@
conversionset = "\namedcounterparameter\s!userpage\c!numberconversionset",
starter = \!!bs\namedcounterparameter\s!userpage\c!numberstarter\!!es,
stopper = \!!bs\namedcounterparameter\s!userpage\c!numberstopper\!!es,
- },{
- viewerprefix = \!!bs\namedcounterparameter\s!userpage\c!viewerprefix\!!es,
- state = \!!bs\namedcounterparameter\s!userpage\c!state\!!es,
}
)}}
diff --git a/tex/context/base/supp-box.lua b/tex/context/base/supp-box.lua
index 3c5a3383d..27078f46f 100644
--- a/tex/context/base/supp-box.lua
+++ b/tex/context/base/supp-box.lua
@@ -26,118 +26,101 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local glyph_code = nodecodes.glyph
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
+local new_penalty = nodes.pool.penalty
+local new_hlist = nodes.pool.hlist
+local new_glue = nodes.pool.glue
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getattribute = nuts.getattribute
-local getbox = nuts.getbox
-
-local setfield = nuts.setfield
-local setbox = nuts.setbox
-
-local free_node = nuts.free
-local copy_list = nuts.copy_list
-local copy_node = nuts.copy
-local find_tail = nuts.tail
-
-local listtoutf = nodes.listtoutf
-
-local nodepool = nuts.pool
-local new_penalty = nodepool.penalty
-local new_hlist = nodepool.hlist
-local new_glue = nodepool.glue
+local free_node = nodes.free
+local copy_list = nodes.copy_list
+local copy_node = nodes.copy
+local find_tail = nodes.tail
+local texsetbox = tex.setbox
+local texgetbox = tex.getbox
local texget = tex.get
-local function hyphenatedlist(head)
- local current = head and tonut(head)
- while current do
- local id = getid(current)
- local next = getnext(current)
- local prev = getprev(current)
+local function hyphenatedlist(list)
+ while list do
+ local id, next, prev = list.id, list.next, list.prev
if id == disc_code then
- local hyphen = getfield(current,"pre")
+ local hyphen = list.pre
if hyphen then
local penalty = new_penalty(-500)
- -- insert_after etc
- setfield(hyphen,"next",penalty)
- setfield(penalty,"prev",hyphen)
- setfield(prev,"next",hyphen)
- setfield(next,"prev", penalty)
- setfield(penalty,"next",next)
- setfield(hyphen,"prev",prev)
- setfield(current,"pre",nil)
- free_node(current)
+ hyphen.next, penalty.prev = penalty, hyphen
+ prev.next, next.prev = hyphen, penalty
+ penalty.next, hyphen.prev = next, prev
+ list.pre = nil
+ free_node(list)
end
elseif id == vlist_code or id == hlist_code then
- hyphenatedlist(getlist(current))
+ hyphenatedlist(list.list)
end
- current = next
+ list = next
end
end
commands.hyphenatedlist = hyphenatedlist
function commands.showhyphenatedinlist(list)
- report_hyphenation("show: %s",listtoutf(tonut(list),false,true))
+ report_hyphenation("show: %s",nodes.listtoutf(list,false,true))
end
local function checkedlist(list)
if type(list) == "number" then
- return getlist(getbox(tonut(list)))
+ return texgetbox(list).list
else
- return tonut(list)
+ return list
end
end
-local function applytochars(current,doaction,noaction,nested)
+local function applytochars(list,what,nested)
+ local doaction = context[what or "ruledhbox"]
+ local noaction = context
+ local current = checkedlist(list)
while current do
- local id = getid(current)
+ local id = current.id
if nested and (id == hlist_code or id == vlist_code) then
context.beginhbox()
- applytochars(getlist(current),what,nested)
+ applytochars(current.list,what,nested)
context.endhbox()
elseif id ~= glyph_code then
- noaction(tonode(copy_node(current)))
+ noaction(copy_node(current))
else
- doaction(tonode(copy_node(current)))
+ doaction(copy_node(current))
end
- current = getnext(current)
+ current = current.next
end
end
-local function applytowords(current,doaction,noaction,nested)
+local function applytowords(list,what,nested)
+ local doaction = context[what or "ruledhbox"]
+ local noaction = context
+ local current = checkedlist(list)
local start
while current do
- local id = getid(current)
+ local id = current.id
if id == glue_code then
if start then
- doaction(tonode(copy_list(start,current)))
+ doaction(copy_list(start,current))
start = nil
end
- noaction(tonode(copy_node(current)))
+ noaction(copy_node(current))
elseif nested and (id == hlist_code or id == vlist_code) then
context.beginhbox()
- applytowords(getlist(current),what,nested)
+ applytowords(current.list,what,nested)
context.egroup()
elseif not start then
start = current
end
- current = getnext(current)
+ current = current.next
end
if start then
- doaction(tonode(copy_list(start)))
+ doaction(copy_list(start))
end
end
-commands.applytochars = function(list,what,nested) applytochars(checkedlist(list),context[what or "ruledhbox"],context,nested) end
-commands.applytowords = function(list,what,nested) applytowords(checkedlist(list),context[what or "ruledhbox"],context,nested) end
+commands.applytochars = applytochars
+commands.applytowords = applytowords
local split_char = lpeg.Ct(lpeg.C(1)^0)
local split_word = lpeg.tsplitat(lpeg.patterns.space)
@@ -193,36 +176,36 @@ end
local a_vboxtohboxseparator = attributes.private("vboxtohboxseparator")
function commands.vboxlisttohbox(original,target,inbetween)
- local current = getlist(getbox(original))
+ local current = texgetbox(original).list
local head = nil
local tail = nil
while current do
- local id = getid(current)
- local next = getnext(current)
+ local id = current.id
+ local next = current.next
if id == hlist_code then
- local list = getlist(current)
+ local list = current.list
if head then
if inbetween > 0 then
local n = new_glue(0,0,inbetween)
- setfield(tail,"next",n)
- setfield(n,"prev",tail)
+ tail.next = n
+ n.prev = tail
tail = n
end
- setfield(tail,"next",list)
- setfield(list,"prev",tail)
+ tail.next = list
+ list.prev = tail
else
head = list
end
tail = find_tail(list)
-- remove last separator
- if getid(tail) == hlist_code and getattribute(tail,a_vboxtohboxseparator) == 1 then
+ if tail.id == hlist_code and tail[a_vboxtohboxseparator] == 1 then
local temp = tail
- local prev = getprev(tail)
+ local prev = tail.prev
if next then
- local list = getlist(tail)
- setfield(prev,"next",list)
- setfield(list,"prev",prev)
- setfield(tail,"list",nil)
+ local list = tail.list
+ prev.next = list
+ list.prev = prev
+ tail.list = nil
tail = find_tail(list)
else
tail = prev
@@ -230,21 +213,21 @@ function commands.vboxlisttohbox(original,target,inbetween)
free_node(temp)
end
-- done
- setfield(tail,"next",nil)
- setfield(current,"list",nil)
+ tail.next = nil
+ current.list = nil
end
current = next
end
local result = new_hlist()
- setfield(result,"list",head)
- setbox(target,result)
+ result.list = head
+ texsetbox(target,result)
end
function commands.hboxtovbox(original)
- local b = getbox(original)
+ local b = texgetbox(original)
local factor = texget("baselineskip").width / texget("hsize")
- setfield(b,"depth",0)
- setfield(b,"height",getfield(b,"width") * factor)
+ b.depth = 0
+ b.height = b.width * factor
end
function commands.boxtostring(n)
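
The hyphenatedlist change above is a plain doubly linked list splice: the disc node is replaced by its pre part followed by a penalty. As a standalone sketch (splice is a made-up name; like the patched code it assumes prev and next exist, and new_penalty and free_node are the locals defined in this file):

    -- replace a disc node by its pre part followed by a small penalty
    local function splice(prev,old,next,replacement)
        local penalty = new_penalty(-500)
        replacement.next, penalty.prev = penalty, replacement
        prev.next, replacement.prev    = replacement, prev
        penalty.next, next.prev        = next, penalty
        old.pre = nil                          -- detach before freeing
        free_node(old)
    end
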
diff --git a/tex/context/base/supp-mat.mkiv b/tex/context/base/supp-mat.mkiv
index 925f25cc4..f77ee3454 100644
--- a/tex/context/base/supp-mat.mkiv
+++ b/tex/context/base/supp-mat.mkiv
@@ -53,36 +53,6 @@
\let\normalstartdmath \Ustartdisplaymath
\let\normalstopdmath \Ustopdisplaymath
-% \unexpanded\def\Ustartdisplaymath
-% {\ifinner
-% \ifhmode
-% \normalUstartmath
-% \let\Ustopdisplaymath\normalUstopmath
-% \else
-% \normalUstartdisplaymath
-% \let\Ustopdisplaymath\normalUstopdisplaymath
-% \fi
-% \else
-% \normalUstartdisplaymath
-% \let\Ustopdisplaymath\normalUstopdisplaymath
-% \fi}
-
-\unexpanded\def\Ucheckedstartdisplaymath
- {\ifinner
- \ifhmode
- \normalUstartmath
- \let\Ucheckedstopdisplaymath\normalUstopmath
- \else
- \normalUstartdisplaymath
- \let\Ucheckedstopdisplaymath\normalUstopdisplaymath
- \fi
- \else
- \normalUstartdisplaymath
- \let\Ucheckedstopdisplaymath\normalUstopdisplaymath
- \fi}
-
-\let\Ucheckedstopdisplaymath\relax
-
\def\normalmathaligntab{&} % \let\normalmathaligntab\aligntab does to work well in a let to & (a def works ok)
\let\normalsuper \Usuperscript % obsolete
@@ -90,8 +60,8 @@
\let\startimath \Ustartmath
\let\stopimath \Ustopmath
-\let\startdmath \Ustartdisplaymath % \Ucheckedstartdisplaymath
-\let\stopdmath \Ustopdisplaymath % \Ucheckedstopdisplaymath
+\let\startdmath \Ustartdisplaymath
+\let\stopdmath \Ustopdisplaymath
\unexpanded\def\mathematics#1{\relax \ifmmode#1\else\normalstartimath#1\normalstopimath\fi}
\unexpanded\def\displaymath#1{\noindent \ifmmode#1\else\normalstartdmath#1\normalstopdmath\fi}
diff --git a/tex/context/base/syst-ini.mkiv b/tex/context/base/syst-ini.mkiv
index 38c34556a..ab1c53131 100644
--- a/tex/context/base/syst-ini.mkiv
+++ b/tex/context/base/syst-ini.mkiv
@@ -246,10 +246,9 @@
% Watch out, for the moment we disable the check for already being defined
% later we will revert this but first all chardefs must be replaced.
-\normalprotected\def\newconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1}
-\normalprotected\def\setnewconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1#1} % just a number
-\normalprotected\def\setconstant {} % dummy, no checking, so it warns
-\normalprotected\def\setconstantvalue#1#2{\csname#1\endcsname\numexpr#2\relax}
+\normalprotected\def\newconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1}
+\normalprotected\def\setnewconstant#1{\ifdefined#1\let#1\undefined\fi\newcount#1#1} % just a number
+\normalprotected\def\setconstant {} % dummy, no checking, so it warns
% maybe setconstant with check
@@ -745,9 +744,6 @@
\normalprotected\def\settrue #1{\let#1\conditionaltrue }
\normalprotected\def\setfalse#1{\let#1\conditionalfalse}
-\normalprotected\def\settruevalue #1{\expandafter\let\csname#1\endcsname\conditionaltrue }
-\normalprotected\def\setfalsevalue#1{\expandafter\let\csname#1\endcsname\conditionalfalse}
-
\let\newconditional\setfalse
\let\ifconditional \ifcase
diff --git a/tex/context/base/tabl-ntb.mkiv b/tex/context/base/tabl-ntb.mkiv
index 42c61f16c..a1ae94712 100644
--- a/tex/context/base/tabl-ntb.mkiv
+++ b/tex/context/base/tabl-ntb.mkiv
@@ -1667,7 +1667,7 @@
\setupTABLE
[\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
- % \c!framecolor=\s!black,
+ \c!framecolor=\s!black,
\c!width=\v!fit,
\c!height=\v!fit,
\c!autowidth=\v!yes,
@@ -1719,7 +1719,7 @@
\setupTABLE
[\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
- % \c!framecolor=\s!black,
+ \c!framecolor=\s!black,
\c!width=\v!fit,
\c!height=\v!fit,
\c!autowidth=\v!yes,
diff --git a/tex/context/base/tabl-tbl.mkiv b/tex/context/base/tabl-tbl.mkiv
index 82d1be893..cd5efa7f7 100644
--- a/tex/context/base/tabl-tbl.mkiv
+++ b/tex/context/base/tabl-tbl.mkiv
@@ -429,8 +429,7 @@
\aligntab
\tabl_tabulate_column_vrule_inject
\tabl_tabulate_color_side_left
-% \tabl_tabulate_inject_pre_skip{\the\s_tabl_tabulate_pre}%
- \tabl_tabulate_inject_pre_skip{\the\dimexpr\s_tabl_tabulate_pre}% get rid of plus
+ \tabl_tabulate_inject_pre_skip{\the\s_tabl_tabulate_pre}%
\alignmark\alignmark
\aligntab
\tabl_tabulate_color_side_both
@@ -497,7 +496,7 @@
\egroup
\aligntab
\noexpand\dostoptagged
- \tabl_tabulate_inject_post_skip{\the\dimexpr\s_tabl_tabulate_post}% get rid of plus
+ \tabl_tabulate_inject_post_skip{\the\s_tabl_tabulate_post}%
\alignmark\alignmark
}}%
\t_tabl_tabulate_dummy\expandafter{\the\t_tabl_tabulate_dummy\NC}%
diff --git a/tex/context/base/tabl-xtb.lua b/tex/context/base/tabl-xtb.lua
index 653eb6e08..488ef5b78 100644
--- a/tex/context/base/tabl-xtb.lua
+++ b/tex/context/base/tabl-xtb.lua
@@ -25,21 +25,18 @@ this mechanism will be improved so that it can replace its older cousin.
-- todo: use linked list instead of r/c array
-local tonumber = tonumber
+local commands, context, tex, node = commands, context, tex, node
-local commands = commands
-local context = context
-local tex = tex
-
-local texgetcount = tex.getcount
-local texsetcount = tex.setcount
-local texgetdimen = tex.getdimen
-local texsetdimen = tex.setdimen
-local texget = tex.get
+local texgetcount = tex.getcount
+local texsetcount = tex.setcount
+local texgetbox = tex.getbox
+local texgetdimen = tex.getdimen
+local texsetdimen = tex.setdimen
+local texget = tex.get
-local format = string.format
-local concat = table.concat
-local points = number.points
+local format = string.format
+local concat = table.concat
+local points = number.points
local context = context
local context_beginvbox = context.beginvbox
@@ -52,23 +49,13 @@ local variables = interfaces.variables
local setmetatableindex = table.setmetatableindex
local settings_to_hash = utilities.parsers.settings_to_hash
-local nuts = nodes.nuts -- here nuts gain hardly nothing
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getlist = nuts.getlist
-local getfield = nuts.getfield
-local getbox = nuts.getbox
-
-local setfield = nuts.setfield
+local copy_node_list = node.copy_list
+local hpack_node_list = node.hpack
+local vpack_node_list = node.vpack
+local slide_node_list = node.slide
+local flush_node_list = node.flush_list
-local copy_node_list = nuts.copy_list
-local hpack_node_list = nuts.hpack
-local flush_node_list = nuts.flush_list
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_glue = nodepool.glue
local new_kern = nodepool.kern
@@ -228,20 +215,20 @@ function xtables.set_reflow_width()
while row[c].span do -- can also be previous row ones
c = c + 1
end
- local tb = getbox("b_tabl_x")
+ local tb = texgetbox("b_tabl_x")
local drc = row[c]
--
drc.list = true -- we don't need to keep the content around as we're in trial mode (no: copy_node_list(tb))
--
- local widths, width = data.widths, getfield(tb,"width")
+ local widths, width = data.widths, tb.width
if width > widths[c] then
widths[c] = width
end
- local heights, height = data.heights, getfield(tb,"height")
+ local heights, height = data.heights, tb.height
if height > heights[r] then
heights[r] = height
end
- local depths, depth = data.depths, getfield(tb,"depth")
+ local depths, depth = data.depths, tb.depth
if depth > depths[r] then
depths[r] = depth
end
@@ -332,14 +319,14 @@ function xtables.set_reflow_height()
-- while row[c].span do -- we could adapt drc.nx instead
-- c = c + 1
-- end
- local tb = getbox("b_tabl_x")
+ local tb = texgetbox("b_tabl_x")
local drc = row[c]
if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2
- local heights, height = data.heights, getfield(tb,"height")
+ local heights, height = data.heights, tb.height
if height > heights[r] then
heights[r] = height
end
- local depths, depth = data.depths, getfield(tb,"depth")
+ local depths, depth = data.depths, tb.depth
if depth > depths[r] then
depths[r] = depth
end
@@ -386,7 +373,7 @@ function xtables.set_construct()
-- end
local drc = row[c]
-- this will change as soon as in luatex we can reset a box list without freeing
- drc.list = copy_node_list(getbox("b_tabl_x"))
+ drc.list = copy_node_list(texgetbox("b_tabl_x"))
-- c = c + drc.nx - 1
-- data.currentcolumn = c
end
@@ -659,23 +646,23 @@ function xtables.construct()
end
local list = drc.list
if list then
- setfield(list,"shift",getfield(list,"height") + getfield(list,"depth"))
+ list.shift = list.height + list.depth
-- list = hpack_node_list(list) -- is somehow needed
- -- setfield(list,"width",0)
- -- setfield(list,"height",0)
- -- setfield(list,"depth",0)
+ -- list.width = 0
+ -- list.height = 0
+ -- list.depth = 0
-- faster:
local h = new_hlist()
- setfield(h,"list",list)
+ h.list = list
list = h
--
if start then
- setfield(stop,"next",list)
- setfield(list,"prev",stop)
+ stop.next = list
+ list.prev = stop
else
start = list
end
- stop = list
+ stop = list -- one node anyway, so not needed: slide_node_list(list)
end
local step = widths[c]
if c < nofcolumns then
@@ -683,8 +670,8 @@ function xtables.construct()
end
local kern = new_kern(step)
if stop then
- setfield(stop,"next",kern)
- setfield(kern,"prev",stop)
+ stop.next = kern
+ kern.prev = stop
else -- can be first spanning next row (ny=...)
start = kern
end
@@ -693,8 +680,8 @@ function xtables.construct()
if start then
if rightmargindistance > 0 then
local kern = new_kern(rightmargindistance)
- setfield(stop,"next",kern)
- setfield(kern,"prev",stop)
+ stop.next = kern
+ kern.prev = stop
-- stop = kern
end
return start, heights[r] + depths[r], hasspan
@@ -734,7 +721,7 @@ function xtables.construct()
texsetdimen("global","d_tabl_x_final_width",0)
else
texsetcount("global","c_tabl_x_state",1)
- texsetdimen("global","d_tabl_x_final_width",getfield(body[1][1],"width"))
+ texsetdimen("global","d_tabl_x_final_width",body[1][1].width)
end
end
@@ -747,8 +734,8 @@ local function inject(row,copy,package)
end
if package then
context_beginvbox()
- context(tonode(list))
- context(tonode(new_kern(row[2])))
+ context(list)
+ context(new_kern(row[2]))
context_endvbox()
context_nointerlineskip() -- figure out a better way
if row[4] then
@@ -756,13 +743,13 @@ local function inject(row,copy,package)
elseif row[3] then
context_blank(row[3] .. "sp") -- why blank ?
else
- context(tonode(new_glue(0)))
+ context(new_glue(0))
end
else
- context(tonode(list))
- context(tonode(new_kern(row[2])))
+ context(list)
+ context(new_kern(row[2]))
if row[3] then
- context(tonode(new_glue(row[3])))
+ context(new_glue(row[3]))
end
end
end
@@ -835,7 +822,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],repeatheader)
end
if rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
if not repeatheader then
results[head_mode] = { }
@@ -848,7 +835,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(more[i],true)
end
if rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
end
elseif headsize > 0 and repeatheader then -- following chunk gets head
@@ -858,7 +845,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],true)
end
if rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
end
else -- following chunk gets nothing
@@ -885,7 +872,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
-- all is flushed and footer fits
if footsize > 0 then
if rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
for i=1,#foot do
inject(foot[i])
@@ -899,7 +886,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
-- todo: try to flush a few more lines
if repeatfooter and footsize > 0 then
if rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
for i=1,#foot do
inject(foot[i],true)
@@ -951,13 +938,13 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i])
end
if #head > 0 and rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
for i=1,#body do
inject(body[i])
end
if #foot > 0 and rowdistance > 0 then
- context(tonode(new_glue(rowdistance)))
+ context(new_glue(rowdistance))
end
for i=1,#foot do
inject(foot[i])
@@ -977,24 +964,6 @@ function xtables.cleanup()
flush_node_list(r[1])
end
end
-
- -- local rows = data.rows
- -- for i=1,#rows do
- -- local row = rows[i]
- -- for i=1,#row do
- -- local cell = row[i]
- -- local list = cell.list
- -- if list then
- -- cell.width = getfield(list,"width")
- -- cell.height = getfield(list,"height")
- -- cell.depth = getfield(list,"depth")
- -- cell.list = true
- -- end
- -- end
- -- end
- -- data.result = nil
- -- inspect(data)
-
data = table.remove(stack)
end
diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua
index fa9b0cf10..3447214bd 100644
--- a/tex/context/base/task-ini.lua
+++ b/tex/context/base/task-ini.lua
@@ -35,7 +35,6 @@ appendaction("processors", "characters", "typesetters.cases.handler")
appendaction("processors", "characters", "typesetters.breakpoints.handler") -- disabled
appendaction("processors", "characters", "scripts.injectors.handler") -- disabled
-appendaction("processors", "words", "languages.replacements.handler") -- disabled
appendaction("processors", "words", "builders.kernel.hyphenation") -- always on
appendaction("processors", "words", "languages.words.check") -- disabled -- might move up, no disc check needed then
@@ -58,7 +57,6 @@ appendaction("processors", "lists", "typesetters.digits.handler")
appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling)
------------("processors", "lists", "typesetters.initials.handler") -- disabled
-appendaction("shipouts", "normalizers", "builders.paragraphs.expansion.trace") -- disabled
appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled
appendaction("shipouts", "normalizers", "typesetters.alignments.handler")
appendaction("shipouts", "normalizers", "nodes.references.handler") -- disabled
@@ -117,12 +115,11 @@ appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler")
-- experimental too
-appendaction("mvlbuilders", "normalizers", "typesetters.checkers.handler")
-appendaction("vboxbuilders", "normalizers", "typesetters.checkers.handler")
+appendaction("mvlbuilders", "normalizers","typesetters.checkers.handler")
+appendaction("vboxbuilders","normalizers","typesetters.checkers.handler")
-- speedup: only kick in when used
-disableaction("processors", "languages.replacements.handler")
disableaction("processors", "typesetters.characteralign.handler")
disableaction("processors", "scripts.autofontfeature.handler")
disableaction("processors", "scripts.splitters.handler")
@@ -143,7 +140,6 @@ disableaction("processors", "typesetters.kerns.handler")
disableaction("processors", "typesetters.italics.handler")
disableaction("processors", "nodes.handlers.stripping")
-disableaction("shipouts", "builders.paragraphs.expansion.trace")
disableaction("shipouts", "typesetters.alignments.handler")
disableaction("shipouts", "nodes.rules.handler")
disableaction("shipouts", "nodes.shifts.handler")
diff --git a/tex/context/base/trac-inf.lua b/tex/context/base/trac-inf.lua
index 067cff27c..802f2e667 100644
--- a/tex/context/base/trac-inf.lua
+++ b/tex/context/base/trac-inf.lua
@@ -123,10 +123,7 @@ function statistics.show()
-- this code will move
local register = statistics.register
register("used platform", function()
- local mask = lua.mask or "ascii"
- return format("%s, type: %s, binary subtree: %s, symbol mask: %s (%s)",
- os.platform or "unknown",os.type or "unknown", environment.texos or "unknown",
- mask,mask == "utf" and "τεχ" or "tex")
+ return format("%s, type: %s, binary subtree: %s",os.platform or "unknown",os.type or "unknown", environment.texos or "unknown")
end)
register("luatex banner", function()
return lower(status.banner)
diff --git a/tex/context/base/trac-jus.lua b/tex/context/base/trac-jus.lua
index 00c871159..38220a752 100644
--- a/tex/context/base/trac-jus.lua
+++ b/tex/context/base/trac-jus.lua
@@ -14,30 +14,14 @@ typesetters.checkers = checkers
local a_alignstate = attributes.private("alignstate")
local a_justification = attributes.private("justification")
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local setlist = nuts.setlist
-
-local traverse_id = nuts.traverse_id
-local get_list_dimensions = nuts.dimensions
-local linked_nodes = nuts.linked
-local copy_node = nuts.copy
-
-local tracedrule = nodes.tracers.pool.nuts.rule
-
-local nodepool = nuts.pool
-
-local new_rule = nodepool.rule
-local new_hlist = nodepool.hlist
-local new_glue = nodepool.glue
-local new_kern = nodepool.kern
-
+local tracers = nodes.tracers
+local tracedrule = tracers.rule
+
+local new_rule = nodes.pool.rule
+local new_hlist = nodes.pool.hlist
+local new_glue = nodes.pool.glue
+local new_kern = nodes.pool.kern
+local get_list_dimensions = node.dimensions
local hlist_code = nodes.nodecodes.hlist
local texsetattribute = tex.setattribute
@@ -75,35 +59,34 @@ trackers.register("visualizers.justification", function(v)
end)
function checkers.handler(head)
- for current in traverse_id(hlist_code,tonut(head)) do
- if getattr(current,a_justification) == 1 then
- setattr(current,a_justification,0)
- local width = getfield(current,"width")
+ for current in node.traverse_id(hlist_code,head) do
+ if current[a_justification] == 1 then
+ current[a_justification] = 0
+ local width = current.width
if width > 0 then
- local list = getlist(current)
+ local list = current.list
if list then
local naturalwidth, naturalheight, naturaldepth = get_list_dimensions(list)
local delta = naturalwidth - width
if naturalwidth == 0 or delta == 0 then
-- special box
elseif delta >= max_threshold then
- local rule = tracedrule(delta,naturalheight,naturaldepth,getfield(list,"glue_set") == 1 and "trace:dr" or "trace:db")
- setfield(current,"list",linked_nodes(list,new_hlist(rule)))
+ local rule = tracedrule(delta,naturalheight,naturaldepth,list.glue_set == 1 and "trace:dr" or "trace:db")
+ current.list = list .. new_hlist(rule)
elseif delta <= min_threshold then
- local alignstate = getattr(list,a_alignstate)
+ local alignstate = list[a_alignstate]
if alignstate == 1 then
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dc")
- setfield(current,"list",linked_nodes(new_hlist(rule),list))
+ current.list = new_hlist(rule) .. list
elseif alignstate == 2 then
- local lrule = tracedrule(-delta/2,naturalheight,naturaldepth,"trace:dy")
- local rrule = copy_node(lrule)
- setfield(current,"list",linked_nodes(new_hlist(lrule),list,new_kern(delta/2),new_hlist(rrule)))
+ local rule = tracedrule(-delta/2,naturalheight,naturaldepth,"trace:dy")
+ current.list = new_hlist(rule^1) .. list .. new_kern(delta/2) .. new_hlist(rule)
elseif alignstate == 3 then
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dm")
- setfield(current,"list",linked_nodes(list,new_kern(delta),new_hlist(rule)))
+ current.list = list .. new_kern(delta) .. new_hlist(rule)
else
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dg")
- setfield(current,"list",linked_nodes(list,new_kern(delta),new_hlist(rule)))
+ current.list = list .. new_kern(delta) .. new_hlist(rule)
end
end
end
diff --git a/tex/context/base/trac-par.lua b/tex/context/base/trac-par.lua
index aab57ce5c..262a9cc33 100644
--- a/tex/context/base/trac-par.lua
+++ b/tex/context/base/trac-par.lua
@@ -1,25 +1,8 @@
-if not modules then modules = { } end modules ['trac-par'] = {
- version = 1.001,
- comment = "companion to node-par.mkiv",
- author = "Hans Hagen",
- copyright = "ConTeXt Development Team",
- license = "see context related readme files",
- comment = "a translation of the built in parbuilder, initial convertsin by Taco Hoekwater",
-}
+-- for the moment here:
local utfchar = utf.char
local concat = table.concat
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getid = nuts.getid
-local getnext = nuts.getnext
-local getlist = nuts.getlist
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
@@ -59,14 +42,14 @@ local function colorize(n)
-- tricky: the built-in method creates dummy fonts and the last line normally has the
-- original font and that one then has ex.auto set
while n do
- local id = getid(n)
+ local id = n.id
if id == glyph_code then
- local ne = getfield(n,"expansion_factor")
+ local ne = n.expansion_factor
if ne == 0 then
if length > 0 then flush() end
setnodecolor(n,"hz:zero")
else
- local f = getfont(n)
+ local f = n.font
if f ~= font then
if length > 0 then
flush()
@@ -96,8 +79,8 @@ local function colorize(n)
end
if trace_verbose then
length = length + 1
- list[length] = utfchar(getchar(n))
- width = width + getfield(n,"width") -- no kerning yet
+ list[length] = utfchar(n.char)
+ width = width + n.width -- no kerning yet
end
end
end
@@ -105,13 +88,13 @@ local function colorize(n)
if length > 0 then
flush()
end
- colorize(getlist(n),flush)
+ colorize(n.list,flush)
else -- nothing to show on kerns
if length > 0 then
flush()
end
end
- n = getnext(n)
+ n = n.next
end
if length > 0 then
flush()
@@ -121,14 +104,14 @@ end
builders.paragraphs.expansion = builders.paragraphs.expansion or { }
function builders.paragraphs.expansion.trace(head)
- colorize(tonut(head),true)
+ colorize(head,true)
return head
end
local tasks = nodes.tasks
--- tasks.prependaction("shipouts","normalizers","builders.paragraphs.expansion.trace")
--- tasks.disableaction("shipouts","builders.paragraphs.expansion.trace")
+tasks.prependaction("shipouts","normalizers","builders.paragraphs.expansion.trace")
+tasks.disableaction("shipouts","builders.paragraphs.expansion.trace")
local function set(v)
if v then
diff --git a/tex/context/base/trac-pro.lua b/tex/context/base/trac-pro.lua
index 897b6a15c..d6e0d0339 100644
--- a/tex/context/base/trac-pro.lua
+++ b/tex/context/base/trac-pro.lua
@@ -26,8 +26,7 @@ local registered = { }
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
else
report_system("reference to %a in protected namespace %a",k,name)
end
@@ -35,8 +34,7 @@ end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name)
- debugger.showtraceback(report_system)
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
else
report_system("assignment to %a in protected namespace %a",k,name)
end
diff --git a/tex/context/base/trac-tim.lua b/tex/context/base/trac-tim.lua
index b4744291c..15ac9bf1b 100644
--- a/tex/context/base/trac-tim.lua
+++ b/tex/context/base/trac-tim.lua
@@ -88,7 +88,7 @@ local function convert(name)
delta = factor/delta
end
for k=1,#s do
- s[k] = format("(%.3f,%.3f)",k,(s[k]-b)*delta)
+ s[k] = format("(%s,%s)",k,(s[k]-b)*delta)
end
paths[tagname] = concat(s,"--")
end
diff --git a/tex/context/base/trac-vis.lua b/tex/context/base/trac-vis.lua
index 420e9a00d..dc8bcc5e7 100644
--- a/tex/context/base/trac-vis.lua
+++ b/tex/context/base/trac-vis.lua
@@ -34,7 +34,6 @@ local formatters = string.formatters
-- todo: inline concat (more efficient)
local nodecodes = nodes.nodecodes
-local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
@@ -59,41 +58,21 @@ local rightskip_code = gluecodes.rightskip
local whatsitcodes = nodes.whatsitcodes
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getbox = nuts.getbox
-local getlist = nuts.getlist
-local getleader = nuts.getleader
-
-local hpack_nodes = nuts.hpack
-local vpack_nodes = nuts.vpack
-local copy_node = nuts.copy
-local copy_list = nuts.copy_list
-local free_node = nuts.free
-local free_node_list = nuts.flush_list
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local traverse_nodes = nuts.traverse
-local linked_nodes = nuts.linked
-
-local fast_hpack = nuts.fasthpack
-local fast_hpack_string = nuts.typesetters.fast_hpack
+local hpack_nodes = node.hpack
+local vpack_nodes = node.vpack
+local fast_hpack_string = nodes.typesetters.fast_hpack
+local copy_node = node.copy
+local copy_list = node.copy_list
+local free_node = node.free
+local free_node_list = node.flush_list
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local fast_hpack = nodes.fasthpack
+local traverse_nodes = node.traverse
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
-
+local texgetbox = tex.getbox
local unsetvalue = attributes.unsetvalue
local current_font = font.current
@@ -102,7 +81,7 @@ local exheights = fonts.hashes.exheights
local emwidths = fonts.hashes.emwidths
local pt_factor = number.dimenfactors.pt
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_rule = nodepool.rule
local new_kern = nodepool.kern
local new_glue = nodepool.glue
@@ -314,39 +293,39 @@ local c_white_d = "trace:dw"
local function sometext(str,layer,color,textcolor) -- we can just paste verbatim together .. no typesetting needed
local text = fast_hpack_string(str,usedfont)
- local size = getfield(text,"width")
+ local size = text.width
local rule = new_rule(size,2*exheight,exheight/2)
local kern = new_kern(-size)
if color then
setcolor(rule,color)
end
if textcolor then
- setlistcolor(getlist(text),textcolor)
+ setlistcolor(text.list,textcolor)
end
- local info = linked_nodes(rule,kern,text)
+ local info = rule .. kern .. text
setlisttransparency(info,c_zero)
info = fast_hpack(info)
if layer then
- setattr(info,a_layer,layer)
+ info[a_layer] = layer
end
- local width = getfield(info,"width")
- setfield(info,"width",0)
- setfield(info,"height",0)
- setfield(info,"depth",0)
+ local width = info.width
+ info.width = 0
+ info.height = 0
+ info.depth = 0
return info, width
end
local f_cache = { }
local function fontkern(head,current)
- local kern = getfield(current,"kern")
+ local kern = current.kern
local info = f_cache[kern]
if info then
-- print("hit fontkern")
else
local text = fast_hpack_string(formatters[" %0.3f"](kern*pt_factor),usedfont)
local rule = new_rule(emwidth/10,6*exheight,2*exheight)
- local list = getlist(text)
+ local list = text.list
if kern > 0 then
setlistcolor(list,c_positive_d)
elseif kern < 0 then
@@ -356,12 +335,13 @@ local function fontkern(head,current)
end
setlisttransparency(list,c_text_d)
settransparency(rule,c_text_d)
- setfield(text,"shift",-5 * exheight)
- info = fast_hpack(linked_nodes(rule,text))
- setattr(info,a_layer,l_fontkern)
- setfield(info,"width",0)
- setfield(info,"height",0)
- setfield(info,"depth",0)
+ text.shift = -5 * exheight
+ info = rule .. text
+ info = fast_hpack(info)
+ info[a_layer] = l_fontkern
+ info.width = 0
+ info.height = 0
+ info.depth = 0
f_cache[kern] = info
end
head = insert_node_before(head,current,copy_list(info))
@@ -402,7 +382,7 @@ local tags = {
}
local function whatsit(head,current)
- local what = getsubtype(current)
+ local what = current.subtype
local info = w_cache[what]
if info then
-- print("hit whatsit")
@@ -410,7 +390,7 @@ local function whatsit(head,current)
local tag = whatsitcodes[what]
-- maybe different text colors per tag
info = sometext(formatters["W:%s"](tag and tags[tag] or what),usedfont,nil,c_white)
- setattr(info,a_layer,l_whatsit)
+ info[a_layer] = l_whatsit
w_cache[what] = info
end
head, current = insert_node_after(head,current,copy_list(info))
@@ -418,13 +398,13 @@ local function whatsit(head,current)
end
local function user(head,current)
- local what = getsubtype(current)
+ local what = current.subtype
local info = w_cache[what]
if info then
-- print("hit user")
else
info = sometext(formatters["U:%s"](what),usedfont)
- setattr(info,a_layer,l_user)
+ info[a_layer] = l_user
w_cache[what] = info
end
head, current = insert_node_after(head,current,copy_list(info))
@@ -434,14 +414,14 @@ end
local b_cache = { }
local function ruledbox(head,current,vertical,layer,what,simple,previous)
- local wd = getfield(current,"width")
+ local wd = current.width
if wd ~= 0 then
- local ht = getfield(current,"height")
- local dp = getfield(current,"depth")
- local next = getnext(current)
- local prev = previous -- getprev(current) ... prev can be wrong in math mode
- setfield(current,"next",nil)
- setfield(current,"prev",nil)
+ local ht = current.height
+ local dp = current.depth
+ local next = current.next
+ local prev = previous -- current.prev ... prev can be wrong in math mode
+ current.next = nil
+ current.prev = nil
local linewidth = emwidth/10
local baseline, baseskip
if dp ~= 0 and ht ~= 0 then
@@ -450,16 +430,16 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
if not baseline then
-- due to an optimized leader color/transparency we need to set the glue node in order
-- to trigger this mechanism
- local leader = linked_nodes(new_glue(2*linewidth),new_rule(6*linewidth,linewidth,0),new_glue(2*linewidth))
+ local leader = new_glue(2*linewidth) .. new_rule(6*linewidth,linewidth,0) .. new_glue(2*linewidth)
-- setlisttransparency(leader,c_text)
leader = fast_hpack(leader)
-- setlisttransparency(leader,c_text)
baseline = new_glue(0)
- setfield(baseline,"leader",leader)
- setfield(baseline,"subtype",cleaders_code)
- local spec = getfield(baseline,"spec")
- setfield(spec,"stretch",65536)
- setfield(spec,"stretch_order",2)
+ baseline.leader = leader
+ baseline.subtype = cleaders_code
+ local spec = baseline.spec
+ spec.stretch = 65536
+ spec.stretch_order = 2
setlisttransparency(baseline,c_text)
b_cache.baseline = baseline
end
@@ -481,49 +461,47 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
this = b_cache[what]
if not this then
local text = fast_hpack_string(what,usedfont)
- this = linked_nodes(new_kern(-getfield(text,"width")),text)
+ this = new_kern(-text.width) .. text
setlisttransparency(this,c_text)
this = fast_hpack(this)
- setfield(this,"width",0)
- setfield(this,"height",0)
- setfield(this,"depth",0)
+ this.width = 0
+ this.height = 0
+ this.depth = 0
b_cache[what] = this
end
end
-- we need to trigger the right mode (else sometimes no whatsits)
- local info = linked_nodes(
- this and copy_list(this) or nil,
- new_rule(linewidth,ht,dp),
- new_rule(wd-2*linewidth,-dp+linewidth,dp),
- new_rule(linewidth,ht,dp),
- new_kern(-wd+linewidth),
+ local info =
+ (this and copy_list(this) or nil) ..
+ new_rule(linewidth,ht,dp) ..
+ new_rule(wd-2*linewidth,-dp+linewidth,dp) ..
+ new_rule(linewidth,ht,dp) ..
+ new_kern(-wd+linewidth) ..
new_rule(wd-2*linewidth,ht,-ht+linewidth)
- )
if baseskip then
- info = linked_nodes(info,baseskip,baseline)
+ info = info .. baseskip .. baseline
end
setlisttransparency(info,c_text)
info = fast_hpack(info)
- setfield(info,"width",0)
- setfield(info,"height",0)
- setfield(info,"depth",0)
- setattr(info,a_layer,layer)
- local info = linked_nodes(current,new_kern(-wd),info)
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ info[a_layer] = layer
+ local info = current .. new_kern(-wd) .. info
info = fast_hpack(info,wd)
if vertical then
info = vpack_nodes(info)
end
if next then
- setfield(info,"next",next)
- setfield(next,"prev",info)
+ info.next = next
+ next.prev = info
end
if prev then
- if getid(prev) == gluespec_code then
- report_visualize("ignoring invalid prev")
- -- weird, how can this happen, an inline glue-spec, probably math
+ if prev.id == gluespec_code then
+ -- weird, how can this happen, an inline glue-spec
else
- setfield(info,"prev",prev)
- setfield(prev,"next",info)
+ info.prev = prev
+ prev.next = info
end
end
if head == current then
@@ -537,14 +515,14 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
end
local function ruledglyph(head,current,previous)
- local wd = getfield(current,"width")
+ local wd = current.width
if wd ~= 0 then
- local ht = getfield(current,"height")
- local dp = getfield(current,"depth")
- local next = getnext(current)
+ local ht = current.height
+ local dp = current.depth
+ local next = current.next
local prev = previous
- setfield(current,"next",nil)
- setfield(current,"prev",nil)
+ current.next = nil
+ current.prev = nil
local linewidth = emwidth/20
local baseline
if dp ~= 0 and ht ~= 0 then
@@ -552,32 +530,31 @@ local function ruledglyph(head,current,previous)
end
local doublelinewidth = 2*linewidth
-- could be a pdf rule
- local info = linked_nodes(
- new_rule(linewidth,ht,dp),
- new_rule(wd-doublelinewidth,-dp+linewidth,dp),
- new_rule(linewidth,ht,dp),
- new_kern(-wd+linewidth),
- new_rule(wd-doublelinewidth,ht,-ht+linewidth),
- new_kern(-wd+doublelinewidth),
+ local info =
+ new_rule(linewidth,ht,dp) ..
+ new_rule(wd-doublelinewidth,-dp+linewidth,dp) ..
+ new_rule(linewidth,ht,dp) ..
+ new_kern(-wd+linewidth) ..
+ new_rule(wd-doublelinewidth,ht,-ht+linewidth) ..
+ new_kern(-wd+doublelinewidth) ..
baseline
- )
setlistcolor(info,c_glyph)
setlisttransparency(info,c_glyph_d)
info = fast_hpack(info)
- setfield(info,"width",0)
- setfield(info,"height",0)
- setfield(info,"depth",0)
- setattr(info,a_layer,l_glyph)
- local info = linked_nodes(current,new_kern(-wd),info)
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ info[a_layer] = l_glyph
+ local info = current .. new_kern(-wd) .. info
info = fast_hpack(info)
- setfield(info,"width",wd)
+ info.width = wd
if next then
- setfield(info,"next",next)
- setfield(next,"prev",info)
+ info.next = next
+ next.prev = info
end
if prev then
- setfield(info,"prev",prev)
- setfield(prev,"next",info)
+ info.prev = prev
+ prev.next = info
end
if head == current then
return info, info
@@ -622,9 +599,9 @@ local tags = {
-- we sometimes pass previous as we can have issues in math (not watertight for all)
local function ruledglue(head,current,vertical)
- local spec = getfield(current,"spec")
- local width = getfield(spec,"width")
- local subtype = getsubtype(current)
+ local spec = current.spec
+ local width = spec.width
+ local subtype = current.subtype
local amount = formatters["%s:%0.3f"](tags[subtype] or (vertical and "VS") or "HS",width*pt_factor)
local info = g_cache[amount]
if info then
@@ -652,13 +629,13 @@ local function ruledglue(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, getnext(current)
+ return head, current.next
end
local k_cache = { }
local function ruledkern(head,current,vertical)
- local kern = getfield(current,"kern")
+ local kern = current.kern
local info = k_cache[kern]
if info then
-- print("kern hit")
@@ -678,13 +655,13 @@ local function ruledkern(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, getnext(current)
+ return head, current.next
end
local p_cache = { }
local function ruledpenalty(head,current,vertical)
- local penalty = getfield(current,"penalty")
+ local penalty = current.penalty
local info = p_cache[penalty]
if info then
-- print("penalty hit")
@@ -704,7 +681,7 @@ local function ruledpenalty(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, getnext(current)
+ return head, current.next
end
local function visualize(head,vertical)
@@ -725,8 +702,8 @@ local function visualize(head,vertical)
local attr = unsetvalue
local prev_trace_fontkern = nil
while current do
- local id = getid(current)
- local a = getattr(current,a_visual) or unsetvalue
+ local id = current.id
+ local a = current[a_visual] or unsetvalue
if a ~= attr then
prev_trace_fontkern = trace_fontkern
if a == unsetvalue then
@@ -759,30 +736,30 @@ local function visualize(head,vertical)
attr = a
end
if trace_strut then
- setattr(current,a_layer,l_strut)
+ current[a_layer] = l_strut
elseif id == glyph_code then
if trace_glyph then
head, current = ruledglyph(head,current,previous)
end
elseif id == disc_code then
if trace_glyph then
- local pre = getfield(current,"pre")
+ local pre = current.pre
if pre then
- setfield(current,"pre",ruledglyph(pre,pre))
+ current.pre = ruledglyph(pre,pre)
end
- local post = getfield(current,"post")
+ local post = current.post
if post then
- setfield(current,"post",ruledglyph(post,post))
+ current.post = ruledglyph(post,post)
end
- local replace = getfield(current,"replace")
+ local replace = current.replace
if replace then
- setfield(current,"replace",ruledglyph(replace,replace))
+ current.replace = ruledglyph(replace,replace)
end
end
elseif id == kern_code then
- local subtype = getsubtype(current)
+ local subtype = current.subtype
-- tricky ... we don't copy the trace attribute in node-inj (yet)
- if subtype == font_kern_code or getattr(current,a_fontkern) then
+ if subtype == font_kern_code or current[a_fontkern] then
if trace_fontkern or prev_trace_fontkern then
head, current = fontkern(head,current)
end
@@ -792,9 +769,9 @@ local function visualize(head,vertical)
end
end
elseif id == glue_code then
- local content = getleader(current)
+ local content = current.leader
if content then
- setfield(current,"leader",visualize(content,false))
+ current.leader = visualize(content,false)
elseif trace_glue then
head, current = ruledglue(head,current,vertical)
end
@@ -803,21 +780,21 @@ local function visualize(head,vertical)
head, current = ruledpenalty(head,current,vertical)
end
elseif id == disc_code then
- setfield(current,"pre",visualize(getfield(current,"pre")))
- setfield(current,"post",isualize(getfield(current,"post")))
- setfield(current,"replace",visualize(getfield(current,"replace")))
+ current.pre = visualize(current.pre)
+ current.post = visualize(current.post)
+ current.replace = visualize(current.replace)
elseif id == hlist_code then
- local content = getlist(current)
+ local content = current.list
if content then
- setfield(current,"list",visualize(content,false))
+ current.list = visualize(content,false)
end
if trace_hbox then
head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple,previous)
end
elseif id == vlist_code then
- local content = getlist(current)
+ local content = current.list
if content then
- setfield(current,"list",visualize(content,true))
+ current.list = visualize(content,true)
end
if trace_vtop then
head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple,previous)
@@ -834,7 +811,7 @@ local function visualize(head,vertical)
end
end
previous = current
- current = getnext(current)
+ current = current.next
end
return head
end
@@ -863,36 +840,25 @@ local function cleanup()
-- report_visualize("cache: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb)
end
-local function handler(head)
+function visualizers.handler(head)
if usedfont then
starttiming(visualizers)
-- local l = texgetattribute(a_layer)
-- local v = texgetattribute(a_visual)
-- texsetattribute(a_layer,unsetvalue)
-- texsetattribute(a_visual,unsetvalue)
- head = visualize(tonut(head))
+ head = visualize(head)
-- texsetattribute(a_layer,l)
-- texsetattribute(a_visual,v)
-- -- cleanup()
stoptiming(visualizers)
- return tonode(head), true
- else
- return head, false
end
+ return head, false
end
-visualizers.handler = handler
-
function visualizers.box(n)
- if usedfont then
- starttiming(visualizers)
- local box = getbox(n)
- setfield(box,"list",visualize(getlist(box)))
- stoptiming(visualizers)
- return head, true
- else
- return head, false
- end
+ local box = texgetbox(n)
+ box.list = visualizers.handler(box.list)
end
local last = nil
@@ -906,9 +872,9 @@ local mark = {
local function markfonts(list)
for n in traverse_nodes(list) do
- local id = getid(n)
+ local id = n.id
if id == glyph_code then
- local font = getfont(n)
+ local font = n.font
local okay = used[font]
if not okay then
last = last + 1
@@ -917,14 +883,14 @@ local function markfonts(list)
end
setcolor(n,okay)
elseif id == hlist_code or id == vlist_code then
- markfonts(getlist(n))
+ markfonts(n.list)
end
end
end
function visualizers.markfonts(list)
last, used = 0, { }
- markfonts(type(n) == "number" and getlist(getbox(n)) or n)
+ markfonts(type(n) == "number" and texgetbox(n).list or n)
end
function commands.markfonts(n)
diff --git a/tex/context/base/type-imp-buy.mkiv b/tex/context/base/type-imp-buy.mkiv
index dbfffe57c..9815cc44b 100644
--- a/tex/context/base/type-imp-buy.mkiv
+++ b/tex/context/base/type-imp-buy.mkiv
@@ -11,125 +11,27 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% monotype sabon
-
\starttypescriptcollection[sabon]
- \starttypescript [serif] [sabon] [name]
- %
- \definefontsynonym [Serif] [SabonMT]
- \definefontsynonym [SerifItalic] [SabonMT-Italic]
- \definefontsynonym [SerifSlanted] [SabonMT-Italic]
- \definefontsynonym [SerifBold] [SabonMT-SemiBold]
- \definefontsynonym [SerifBoldItalic] [SabonMT-SemiBoldItalic]
- \definefontsynonym [SerifBoldSlanted][SabonMT-SemiBoldItalic]
- \definefontsynonym [SerifCaps] [SabonMT-RegularSC]
- %
- \definefontsynonym[SabonMT] [sab_____]
- \definefontsynonym[SabonMT-Italic] [sabi____]
- \definefontsynonym[SabonMT-ItalicOsF] [saboi___]
- \definefontsynonym[SabonMT-SemiBoldOsF] [sabos___]
- \definefontsynonym[SabonMT-SemiBold] [sabs____]
- \definefontsynonym[SabonMT-RegularSC] [sabsc___]
- \definefontsynonym[SabonMT-SemiBoldItalic] [sabsi___]
- \definefontsynonym[SabonMT-SemiBoldItalicOsF][sasio___]
- %
- \stoptypescript
+\starttypescript [serif] [sabon] [name]
+
+ \definefontsynonym [Serif] [SabonMT]
+ \definefontsynonym [SerifItalic] [SabonMT-Italic]
+ \definefontsynonym [SerifSlanted] [SabonMT-Italic]
+ \definefontsynonym [SerifBold] [SabonMT-SemiBold]
+ \definefontsynonym [SerifBoldItalic] [SabonMT-SemiBoldItalic]
+ \definefontsynonym [SerifBoldSlanted] [SabonMT-SemiBoldItalic]
+ \definefontsynonym [SerifCaps] [SabonMT-RegularSC]
+
+ \definefontsynonym[SabonMT] [sab_____]
+ \definefontsynonym[SabonMT-Italic] [sabi____]
+ \definefontsynonym[SabonMT-ItalicOsF] [saboi___]
+ \definefontsynonym[SabonMT-SemiBoldOsF] [sabos___]
+ \definefontsynonym[SabonMT-SemiBold] [sabs____]
+ \definefontsynonym[SabonMT-RegularSC] [sabsc___]
+ \definefontsynonym[SabonMT-SemiBoldItalic] [sabsi___]
+ \definefontsynonym[SabonMT-SemiBoldItalicOsF][sasio___]
+\stoptypescript
\stoptypescriptcollection
-% itc stone
-
-\starttypescriptcollection[stone]
-
- \starttypescript [sans] [stone] [name]
- %
- \definefontsynonym [Sans] [StoneSansITC-Medium]
- \definefontsynonym [SansItalic] [StoneSansITC-MediumItalic]
- \definefontsynonym [SansSlanted] [StoneSansITC-MediumItalic]
- \definefontsynonym [SansBold] [StoneSansITC-Bold]
- \definefontsynonym [SansBoldItalic] [StoneSansITC-BoldItalic]
- \definefontsynonym [SansBoldSlanted][StoneSansITC-BoldItalic]
- \definefontsynonym [SansCaps] [StoneSansSCITC-Medium]
- %
- \definefontsynonym[StoneSansITC-Bold] [stosnb]
- \definefontsynonym[StoneSansITC-BoldItalic] [stosnbi]
- \definefontsynonym[StoneSansITC-Medium] [stosnm]
- \definefontsynonym[StoneSansITC-MediumItalic][stosnmi]
- \definefontsynonym[StoneSansSemITC-Semi] [stosns]
- \definefontsynonym[StoneSansSemITC-SemiIta] [stosnsi]
- \definefontsynonym[StoneSansSCITC-Medium] [stosnscm]
- \definefontsynonym[StoneSansSemSCITC-Semi] [stosnscs]
- %
- \stoptypescript
-
- \starttypescript [serif] [stone] [name]
- %
- \definefontsynonym [Serif] [StoneSerifITC-Medium]
- \definefontsynonym [SerifItalic] [StoneSerifITC-MediumItalic]
- \definefontsynonym [SerifSlanted] [StoneSerifITC-MediumItalic]
- \definefontsynonym [SerifBold] [StoneSerifITC-Bold]
- \definefontsynonym [SerifBoldItalic] [StoneSerifITC-BoldItalic]
- \definefontsynonym [SerifBoldSlanted][StoneSerifITC-BoldItalic]
- \definefontsynonym [SerifCaps] [StoneSerifSCITC-Medium]
- %
- \definefontsynonym[StoneSerifITC-Bold] [stosfb]
- \definefontsynonym[StoneSerifITC-BoldItalic] [stosfbi]
- \definefontsynonym[StoneSerifITC-Medium] [stosfm]
- \definefontsynonym[StoneSerifITC-MediumItalic][stosfmi]
- \definefontsynonym[StoneSerifSemITC-Semi] [stosfs]
- \definefontsynonym[StoneSerifSemITC-SemiIta] [stosfsi]
- \definefontsynonym[StoneSerifSCITC-Medium] [stosfscm]
- \definefontsynonym[StoneSerifSemSCITC-Semi] [stosfscs]
- %
- \stoptypescript
-
- \starttypescript [sans] [stone-oldstyle] [name]
- %
- \definefontsynonym [Sans] [StoneSansOSITC-Medium]
- \definefontsynonym [SansItalic] [StoneSansOSITC-MediumItalic]
- \definefontsynonym [SansSlanted] [StoneSansOSITC-MediumItalic]
- \definefontsynonym [SansBold] [StoneSansOSITC-Bold]
- \definefontsynonym [SansBoldItalic] [StoneSansOSITC-BoldItalic]
- \definefontsynonym [SansBoldSlanted][StoneSansOSITC-BoldItalic]
- \definefontsynonym [SansCaps] [StoneSansSCITC-Medium]
- %
- \definefontsynonym[StoneSansOSITC-Bold] [stosnob]
- \definefontsynonym[StoneSansOSITC-BoldItalic] [stosnobi]
- \definefontsynonym[StoneSansOSITC-Medium] [stosnom]
- \definefontsynonym[StoneSansOSITC-MediumItalic][stosnomi]
- \definefontsynonym[StoneSansSemOSITC-Semi] [stosnos]
- \definefontsynonym[StoneSansSemOSITC-SemiIta] [stosnosi]
- %
- \stoptypescript
-
- \starttypescript [serif] [stone-oldstyle] [name]
- %
- \definefontsynonym [Serif] [StoneSerifOSITC-Medium]
- \definefontsynonym [SerifItalic] [StoneSerifOSITC-MediumItalic]
- \definefontsynonym [SerifSlanted] [StoneSerifOSITC-MediumItalic]
- \definefontsynonym [SerifBold] [StoneSerifOSITC-Bold]
- \definefontsynonym [SerifBoldItalic] [StoneSerifOSITC-BoldItalic]
- \definefontsynonym [SerifBoldSlanted] [StoneSerifOSITC-BoldItalic]
- \definefontsynonym [SerifCaps] [StoneSerifSCITC-Medium]
- %
- \definefontsynonym[StoneSerifOSITC-Bold] [stosfob]
- \definefontsynonym[StoneSerifOSITC-BoldItalic] [stosfobi]
- \definefontsynonym[StoneSerifOSITC-Medium] [stosfom]
- \definefontsynonym[StoneSerifOSITC-MediumItalic][stosfomi]
- \definefontsynonym[StoneSerifSemOSITC-Semi] [stosfos]
- \definefontsynonym[StoneSerifSemOSITC-SemiIta] [stosfosi]
- %
- \stoptypescript
-
-\stoptypescriptcollection
-
-% linotype industria
-
-\starttypescriptcollection[industria]
-
- \starttypescript [sans] [industria] [name]
- \definefontsynonym[Industria-Solid][lt_50545]
- \stoptypescript
-
-\stoptypescriptcollection
diff --git a/tex/context/base/type-ini.lua b/tex/context/base/type-ini.lua
index 4f53fbf40..9ee97acae 100644
--- a/tex/context/base/type-ini.lua
+++ b/tex/context/base/type-ini.lua
@@ -35,9 +35,7 @@ end
local function failure_one(name)
name_two = gsub(name,"%-.*$","")
- if name == "loc" then
- -- ignore
- elseif name_two == name then
+ if name_two == name then
report_typescripts("unknown library %a",name_one)
else
commands.uselibrary {
diff --git a/tex/context/base/typo-bld.lua b/tex/context/base/typo-bld.lua
index ad37c36f4..bc9f66ee4 100644
--- a/tex/context/base/typo-bld.lua
+++ b/tex/context/base/typo-bld.lua
@@ -6,12 +6,9 @@ if not modules then modules = { } end modules ['typo-bld'] = { -- was node-par
license = "see context related readme files"
}
--- no need for nuts in the one-line demo (that might move anyway)
-
local insert, remove = table.insert, table.remove
-builders = builders or { }
-local builders = builders
+local builders, nodes, node = builders, nodes, node
builders.paragraphs = builders.paragraphs or { }
local parbuilders = builders.paragraphs
@@ -36,12 +33,11 @@ local texsetattribute = tex.setattribute
local texnest = tex.nest
local texlists = tex.lists
-local nodes = nodes
local nodepool = nodes.pool
local new_baselineskip = nodepool.baselineskip
local new_lineskip = nodepool.lineskip
-local insert_node_before = nodes.insert_before
-local hpack_node = nodes.hpack
+local insert_node_before = node.insert_before
+local hpack_node = node.hpack
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
@@ -173,6 +169,7 @@ function constructors.disable()
enabled = false
end
+
callbacks.register('linebreak_filter', processor, "breaking paragraphs into lines")
statistics.register("linebreak processing time", function()
@@ -229,16 +226,7 @@ local function report(groupcode,head)
report_page_builder(" list : %s",head and nodeidstostring(head) or "<empty>")
end
--- use tex.[sg]etlist
-
function builders.buildpage_filter(groupcode)
- -- -- this needs checking .. gets called too often
- -- if group_code ~= "after_output" then
- -- if trace_page_builder then
- -- report(groupcode)
- -- end
- -- return nil, false
- -- end
local head, done = texlists.contrib_head, false
if head then
starttiming(builders)
@@ -249,16 +237,14 @@ function builders.buildpage_filter(groupcode)
stoptiming(builders)
-- -- doesn't work here (not passed on?)
-- tex.pagegoal = tex.vsize - tex.dimen.d_page_floats_inserted_top - tex.dimen.d_page_floats_inserted_bottom
- texlists.contrib_head = head or nil -- needs checking
--- tex.setlist("contrib_head",head,head and nodes.tail(head))
- return done and head or true -- no return value needed
+ texlists.contrib_head = head
+ return done and head or true
else
if trace_page_builder then
report(groupcode)
end
- return nil, false -- no return value needed
+ return nil, false
end
-
end
callbacks.register('vpack_filter', builders.vpack_filter, "vertical spacing etc")
diff --git a/tex/context/base/typo-brk.lua b/tex/context/base/typo-brk.lua
index be11da9c3..3558efa8e 100644
--- a/tex/context/base/typo-brk.lua
+++ b/tex/context/base/typo-brk.lua
@@ -20,36 +20,19 @@ local report_breakpoints = logs.reporter("typesetting","breakpoints")
local nodes, node = nodes, node
local settings_to_array = utilities.parsers.settings_to_array
+local copy_node = node.copy
+local copy_nodelist = node.copy_list
+local free_node = node.free
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove -- ! nodes
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-local getfont = nuts.getfont
-local getid = nuts.getid
-local getfield = nuts.getfield
-local getattr = nuts.getattr
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local copy_node = nuts.copy
-local copy_nodelist = nuts.copy_list
-local free_node = nuts.free
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-
-local tonodes = nuts.tonodes
+local tonodes = nodes.tonodes
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local tasks = nodes.tasks
local v_reset = interfaces.variables.reset
@@ -97,82 +80,74 @@ local function insert_break(head,start,before,after)
end
methods[1] = function(head,start)
- if getprev(start) and getnext(start) then
+ if start.prev and start.next then
insert_break(head,start,10000,0)
end
return head, start
end
methods[2] = function(head,start) -- ( => (-
- if getprev(start) and getnext(start) then
+ if start.prev and start.next then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr")))
- setfield(start,"replace",tmp)
- local tmp = copy_node(tmp)
- local hyphen = copy_node(tmp)
- setfield(hyphen,"char",languages.prehyphenchar(getfield(tmp,"lang")))
- setfield(tmp,"next",hyphen)
- setfield(hyphen,"prev",tmp)
- setfield(start,"post",tmp)
+ start.attr = copy_nodelist(tmp.attr) -- todo: critical only
+ start.replace = tmp
+ local tmp, hyphen = copy_node(tmp), copy_node(tmp)
+ hyphen.char = languages.prehyphenchar(tmp.lang)
+ tmp.next, hyphen.prev = hyphen, tmp
+ start.post = tmp
insert_break(head,start,10000,10000)
end
return head, start
end
methods[3] = function(head,start) -- ) => -)
- if getprev(start) and getnext(start) then
+ if start.prev and start.next then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr")))
- setfield(start,"replace",tmp)
- local tmp = copy_node(tmp)
- local hyphen = copy_node(tmp)
- setfield(hyphen,"char",languages.prehyphenchar(getfield(tmp,"lang")))
- setfield(tmp,"prev",hyphen)
- setfield(hyphen,"next",tmp)
- setfield(start,"pre",hyphen)
+ start.attr = copy_nodelist(tmp.attr) -- todo: critical only
+ start.replace = tmp
+ local tmp, hyphen = copy_node(tmp), copy_node(tmp)
+ hyphen.char = languages.prehyphenchar(tmp.lang)
+ tmp.prev, hyphen.next = hyphen, tmp
+ start.pre = hyphen
insert_break(head,start,10000,10000)
end
return head, start
end
methods[4] = function(head,start) -- - => - - -
- if getprev(start) and getnext(start) then
+ if start.prev and start.next then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr")))
- setfield(start,"pre",copy_node(tmp))
- setfield(start,"post",copy_node(tmp))
- setfield(start,"replace",tmp)
+ start.attr = copy_nodelist(tmp.attr) -- todo: critical only
+ start.pre, start.post, start.replace = copy_node(tmp), copy_node(tmp), tmp
insert_break(head,start,10000,10000)
end
return head, start
end
methods[5] = function(head,start,settings) -- x => p q r
- if getprev(start) and getnext(start) then
+ if start.prev and start.next then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- local attr = getfield(tmp,"attr")
- local font = getfont(tmp)
- local left = settings.left
- local right = settings.right
- local middle = settings.middle
+ local attr = tmp.attr
+ local font = tmp.font
+ start.attr = copy_nodelist(attr) -- todo: critical only
+ local left, right, middle = settings.left, settings.right, settings.middle
if left then
- setfield(start,"pre",(tonodes(tostring(left),font,attr))) -- was right
+ start.pre = tonodes(tostring(left),font,attr) -- was right
end
if right then
- setfield(start,"post",(tonodes(tostring(right),font,attr))) -- was left
+ start.post = tonodes(tostring(right),font,attr) -- was left
end
if middle then
- setfield(start,"replace",(tonodes(tostring(middle),font,attr)))
+ start.replace = tonodes(tostring(middle),font,attr)
end
- setfield(start,"attr",copy_nodelist(attr)) -- todo: critical only
free_node(tmp)
insert_break(head,start,10000,10000)
end
@@ -180,32 +155,31 @@ methods[5] = function(head,start,settings) -- x => p q r
end
function breakpoints.handler(head)
- head = tonut(head)
local done, numbers = false, languages.numbers
local start, n = head, 0
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- local attr = getattr(start,a_breakpoints)
+ local attr = start[a_breakpoints]
if attr and attr > 0 then
- setattr(start,a_breakpoints,unsetvalue) -- maybe test for subtype > 256 (faster)
+ start[a_breakpoints] = unsetvalue -- maybe test for subtype > 256 (faster)
-- look ahead and back n chars
local data = mapping[attr]
if data then
local map = data.characters
- local cmap = map[getchar(start)]
+ local cmap = map[start.char]
if cmap then
- local lang = getfield(start,"lang")
+ local lang = start.lang
-- we do a sanity check for language
local smap = lang and lang >= 0 and lang < 0x7FFF and (cmap[numbers[lang]] or cmap[""])
if smap then
if n >= smap.nleft then
local m = smap.nright
- local next = getnext(start)
+ local next = start.next
while next do -- gamble on same attribute (not that important actually)
- local id = getid(next)
+ local id = next.id
if id == glyph_code then -- gamble on same attribute (not that important actually)
- if map[getchar(next)] then
+ if map[next.char] then
break
elseif m == 1 then
local method = methods[smap.type]
@@ -216,10 +190,10 @@ function breakpoints.handler(head)
break
else
m = m - 1
- next = getnext(next)
+ next = next.next
end
- elseif id == kern_code and getsubtype(next) == kerning_code then
- next = getnext(next)
+ elseif id == kern_code and next.subtype == kerning_code then
+ next = next.next
-- ignore intercharacter kerning, will go away
else
-- we can do clever and set n and jump ahead but ... not now
@@ -240,14 +214,14 @@ function breakpoints.handler(head)
else
-- n = n + 1 -- if we want single char handling (|-|) then we will use grouping and then we need this
end
- elseif id == kern_code and getsubtype(start) == kerning_code then
+ elseif id == kern_code and start.subtype == kerning_code then
-- ignore intercharacter kerning, will go away
else
n = 0
end
- start = getnext(start)
+ start = start.next
end
- return tonode(head), done
+ return head, done
end
local enabled = false
diff --git a/tex/context/base/typo-cap.lua b/tex/context/base/typo-cap.lua
index 78ed8700a..0fc1a3093 100644
--- a/tex/context/base/typo-cap.lua
+++ b/tex/context/base/typo-cap.lua
@@ -16,23 +16,9 @@ local report_casing = logs.reporter("typesetting","casing")
local nodes, node = nodes, node
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local copy_node = nuts.copy
-local end_of_math = nuts.end_of_math
+local copy_node = nodes.copy
+local end_of_math = nodes.end_of_math
+
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
@@ -110,14 +96,14 @@ local lccodes = characters.lccodes
-- true false true == mixed
local function helper(start,attr,lastfont,n,codes,special,once,keepother)
- local char = getchar(start)
+ local char = start.char
local dc = codes[char]
if dc then
- local fnt = getfont(start)
+ local fnt = start.font
if keepother and dc == char then
local lfa = lastfont[n]
if lfa then
- setfield(start,"font",lfa)
+ start.font = lfa
return start, true
else
return start, false
@@ -126,10 +112,10 @@ local function helper(start,attr,lastfont,n,codes,special,once,keepother)
if special then
local lfa = lastfont[n]
if lfa then
- local previd = getid(getprev(start))
+ local previd = start.prev.id
if previd ~= glyph_code and previd ~= disc_code then
fnt = lfa
- setfield(start,"font",lfa)
+ start.font = lfa
end
end
end
@@ -151,18 +137,18 @@ local function helper(start,attr,lastfont,n,codes,special,once,keepother)
local chr = dc[i]
prev = start
if i == 1 then
- setfield(start,"char",chr)
+ start.char = chr
else
local g = copy_node(original)
- setfield(g,"char",chr)
- local next = getnext(start)
- setfield(g,"prev",start)
+ g.char = chr
+ local next = start.next
+ g.prev = start
if next then
- setfield(g,"next",next)
- setfield(start,"next",g)
- setfield(next,"prev",g)
+ g.next = next
+ start.next = g
+ next.prev = g
end
- start = g
+ start = g
end
end
if once then
@@ -175,7 +161,7 @@ local function helper(start,attr,lastfont,n,codes,special,once,keepother)
end
return start, false
elseif ifc[dc] then
- setfield(start,"char",dc)
+ start.char = dc
if once then
lastfont[n] = false
end
@@ -217,29 +203,29 @@ local function word(start,attr,lastfont,n)
end
local function blockrest(start)
- local n = getnext(start)
+ local n = start.next
while n do
- local id = getid(n)
- if id == glyph_code or id == disc_node and getattr(n,a_cases) == attr then
- setattr(n,a_cases,unsetvalue)
+ local id = n.id
+ if id == glyph_code or id == disc_node and n[a_cases] == attr then
+ n[a_cases] = unsetvalue
else
-- break -- we can have nested mess
end
- n = getnext(n)
+ n = n.next
end
end
local function Word(start,attr,lastfont,n) -- looks quite complex
lastfont[n] = false
- local prev = getprev(start)
- if prev and getid(prev) == kern_code and getsubtype(prev) == kerning_code then
- prev = getprev(prev)
+ local prev = start.prev
+ if prev and prev.id == kern_code and prev.subtype == kerning_code then
+ prev = prev.prev
end
if not prev then
blockrest(start)
return helper(start,attr,lastfont,n,uccodes)
end
- local previd = getid(prev)
+ local previd = prev.id
if previd ~= glyph_code and previd ~= disc_code then
-- only the first character is treated
blockrest(start)
@@ -253,14 +239,14 @@ end
local function Words(start,attr,lastfont,n)
lastfont[n] = false
- local prev = getprev(start)
- if prev and getid(prev) == kern_code and getsubtype(prev) == kerning_code then
- prev = getprev(prev)
+ local prev = start.prev
+ if prev and prev.id == kern_code and prev.subtype == kerning_code then
+ prev = prev.prev
end
if not prev then
return helper(start,attr,lastfont,n,uccodes)
end
- local previd = getid(prev)
+ local previd = prev.id
if previd ~= glyph_code and previd ~= disc_code then
return helper(start,attr,lastfont,n,uccodes)
else
@@ -286,15 +272,15 @@ end
local function random(start,attr,lastfont,n)
lastfont[n] = false
- local ch = getchar(start)
- local tfm = fontchar[getfont(start)]
+ local ch = start.char
+ local tfm = fontchar[start.font]
if lccodes[ch] then
while true do
local d = chardata[randomnumber(1,0xFFFF)]
if d then
local uc = uccodes[d]
if uc and tfm[uc] then -- this also intercepts tables
- setfield(start,"char",uc)
+ start.char = uc
return start, true
end
end
@@ -305,7 +291,7 @@ local function random(start,attr,lastfont,n)
if d then
local lc = lccodes[d]
if lc and tfm[lc] then -- this also intercepts tables
- setfield(start,"char",lc)
+ start.char = lc
return start, true
end
end
@@ -328,20 +314,19 @@ register(variables.cap, variables.capital) -- clone
register(variables.Cap, variables.Capital) -- clone
function cases.handler(head) -- not real fast but also not used on much data
- head = tonut(head)
local lastfont = { }
local lastattr = nil
local done = false
local start = head
while start do -- while because start can jump ahead
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- local attr = getattr(start,a_cases)
+ local attr = start[a_cases]
if attr and attr > 0 then
if attr ~= lastattr then
lastattr = attr
end
- setattr(start,a_cases,unsetvalue)
+ start[a_cases] = unsetvalue
local n, id, m = get(attr)
if lastfont[n] == nil then
lastfont[n] = id
@@ -360,27 +345,27 @@ function cases.handler(head) -- not real fast but also not used on much data
end
end
elseif id == disc_code then
- local attr = getattr(start,a_cases)
+ local attr = start[a_cases]
if attr and attr > 0 then
if attr ~= lastattr then
lastattr = attr
end
- setattr(start,a_cases,unsetvalue)
+ start[a_cases] = unsetvalue
local n, id, m = get(attr)
if lastfont[n] == nil then
lastfont[n] = id
end
local action = actions[n] -- map back to low number
if action then
- local replace = getfield(start,"replace")
+ local replace = start.replace
if replace then
action(replace,attr,lastfont,n)
end
- local pre = getfield(start,"pre")
+ local pre = start.pre
if pre then
action(pre,attr,lastfont,n)
end
- local post = getfield(start,"post")
+ local post = start.post
if post then
action(post,attr,lastfont,n)
end
@@ -390,10 +375,10 @@ function cases.handler(head) -- not real fast but also not used on much data
start = end_of_math(start)
end
if start then -- why test
- start = getnext(start)
+ start = start.next
end
end
- return tonode(head), done
+ return head, done
end
local enabled = false
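
The hunks above, and the ones that follow for the other typo-* modules, all perform the same mechanical rewrite: the nuts accessor layer (tonut/tonode plus getchar, getfont, getattr, setfield and friends) is dropped and the handlers read and write node fields directly, so no conversion is needed at the handler boundaries. A minimal sketch of the resulting handler shape, assuming a LuaTeX glyph list, a hypothetical attribute number a_example and an uccodes table like the one the character data provides:

    local glyph_code = node.id("glyph")

    -- walk a node list and uppercase every glyph that carries the
    -- (hypothetical) attribute a_example; fields are accessed directly
    -- instead of through nuts.getchar/nuts.setfield
    local function handler(head, a_example, uccodes)
        local done = false
        for n in node.traverse_id(glyph_code, head) do
            if n[a_example] then                 -- attribute lookup by number
                local uc = uccodes[n.char]
                if type(uc) == "number" and uc ~= n.char then
                    n.char = uc                  -- direct field assignment
                    done = true
                end
            end
        end
        return head, done                        -- no tonode() round trip
    end
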
diff --git a/tex/context/base/typo-cln.lua b/tex/context/base/typo-cln.lua
index b7e337662..2aa05b6d1 100644
--- a/tex/context/base/typo-cln.lua
+++ b/tex/context/base/typo-cln.lua
@@ -28,14 +28,7 @@ local tasks = nodes.tasks
local texsetattribute = tex.setattribute
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local setfield = nuts.setfield
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-
-local traverse_id = nuts.traverse_id
+local traverse_id = node.traverse_id
local unsetvalue = attributes.unsetvalue
@@ -55,18 +48,18 @@ local resetter = { -- this will become an entry in char-def
function cleaners.handler(head)
local inline, done = false, false
- for n in traverse_id(glyph_code,tonut(head)) do
- local char = getchar(n)
+ for n in traverse_id(glyph_code,head) do
+ local char = n.char
if resetter[char] then
inline = false
elseif not inline then
- local a = getattr(n,a_cleaner)
+ local a = n[a_cleaner]
if a == 1 then -- currently only one cleaner so no need to be fancy
local upper = uccodes[char]
if type(upper) == "table" then
-- some day, not much change that \SS ends up here
else
- setfield(n,"char",upper)
+ n.char = upper
done = true
if trace_autocase then
report_autocase("")
diff --git a/tex/context/base/typo-dha.lua b/tex/context/base/typo-dha.lua
index 15e345ff8..d5ad66e7e 100644
--- a/tex/context/base/typo-dha.lua
+++ b/tex/context/base/typo-dha.lua
@@ -49,30 +49,13 @@ local trace_directions = false trackers.register("typesetters.directions.defa
local report_directions = logs.reporter("typesetting","text directions")
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-local nutstring = nuts.tostring
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-local end_of_math = nuts.end_of_math
-
-
-local nodepool = nuts.pool
+
+local insert_node_before = nodes.insert_before
+local insert_node_after = nodes.insert_after
+local remove_node = nodes.remove
+local end_of_math = nodes.end_of_math
+
+local nodepool = nodes.pool
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -125,7 +108,7 @@ end
local function process(start)
- local head = tonut(start) -- we have a global head
+ local head = start
local current = head
local inserted = nil
@@ -197,31 +180,31 @@ local function process(start)
end
local function nextisright(current)
- current = getnext(current)
- local id = getid(current)
+ current = current.next
+ local id = current.id
if id == glyph_code then
- local character = getchar(current)
+ local character = current.char
local direction = chardirections[character]
return direction == "r" or direction == "al" or direction == "an"
end
end
local function previsright(current)
- current = getprev(current)
- local id = getid(current)
+ current = current.prev
+ local id = current.id
if id == glyph_code then
- local character = getchar(current)
+        local character = current.char
local direction = chardirections[character]
return direction == "r" or direction == "al" or direction == "an"
end
end
while current do
- local id = getid(current)
+ local id = current.id
if id == math_code then
- current = getnext(end_of_math(getnext(current)))
+ current = end_of_math(current.next).next
else
- local attr = getattr(current,a_directions)
+ local attr = current[a_directions]
if attr and attr > 0 and attr ~= prevattr then
if not getglobal(a) then
lro, rlo = false, false
@@ -230,7 +213,7 @@ local function process(start)
end
if id == glyph_code then
if attr and attr > 0 then
- local character = getchar(current)
+ local character = current.char
local direction = chardirections[character]
local reversed = false
if rlo or override > 0 then
@@ -240,24 +223,24 @@ local function process(start)
end
elseif lro or override < 0 then
if direction == "r" or direction == "al" then
- setattr(current,a_state,s_isol)
+ current[a_state] = s_isol
direction = "l"
reversed = true
end
end
if direction == "on" then
local mirror = charmirrors[character]
- if mirror and fontchar[getfont(current)][mirror] then
+ if mirror and fontchar[current.font][mirror] then
local class = charclasses[character]
if class == "open" then
if nextisright(current) then
if autodir >= 0 then
force_auto_right_before(direction)
end
- setfield(current,"char",mirror)
+ current.char = mirror
done = true
elseif autodir < 0 then
- setfield(current,"char",mirror)
+ current.char = mirror
done = true
else
mirror = false
@@ -268,14 +251,14 @@ local function process(start)
local fencedir = fences[#fences]
fences[#fences] = nil
if fencedir < 0 then
- setfield(current,"char",mirror)
+ current.char = mirror
done = true
force_auto_right_before(direction)
else
mirror = false
end
elseif autodir < 0 then
- setfield(current,"char",mirror)
+ current.char = mirror
done = true
else
mirror = false
@@ -353,9 +336,9 @@ local function process(start)
-- we do nothing
end
elseif id == whatsit_code then
- local subtype = getsubtype(current)
+ local subtype = current.subtype
if subtype == localpar_code then
- local dir = getfield(current,"dir")
+ local dir = current.dir
if dir == 'TRT' then
autodir = -1
elseif dir == 'TLT' then
@@ -368,7 +351,7 @@ local function process(start)
if finish then
finish_auto_before()
end
- local dir = getfield(current,"dir")
+ local dir = current.dir
if dir == "+TRT" then
finish, autodir = "TRT", -1
elseif dir == "-TRT" then
@@ -387,7 +370,7 @@ local function process(start)
elseif finish then
finish_auto_before()
end
- local cn = getnext(current)
+ local cn = current.next
if cn then
-- we're okay
elseif finish then
@@ -407,7 +390,7 @@ local function process(start)
end
end
- return tonode(head), done
+ return head, done
end
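
The typo-dha pass keeps its direction heuristics; only the field access changes. Its treatment of neutral characters boils down to: when a glyph with bidi class "on" sits in a right-to-left stretch and a mirrored counterpart exists, swap the character. A reduced sketch of just that substitution, assuming chardirections and charmirrors tables indexed by unicode as ConTeXt builds them from its character data:

    local glyph_code = node.id("glyph")

    -- swap mirrored glyphs (e.g. parentheses) for neutral characters
    -- inside a right-to-left stretch
    local function mirror_neutrals(head, chardirections, charmirrors, rtl)
        local done = false
        for n in node.traverse_id(glyph_code, head) do
            if rtl and chardirections[n.char] == "on" then
                local mirror = charmirrors[n.char]
                if mirror then
                    n.char = mirror
                    done = true
                end
            end
        end
        return head, done
    end
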
diff --git a/tex/context/base/typo-dig.lua b/tex/context/base/typo-dig.lua
index 67849c6d4..ef05e62da 100644
--- a/tex/context/base/typo-dig.lua
+++ b/tex/context/base/typo-dig.lua
@@ -19,24 +19,10 @@ local report_digits = logs.reporter("typesetting","digits")
local nodes, node = nodes, node
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-
-local hpack_node = nuts.hpack
-local traverse_id = nuts.traverse_id
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
+local hpack_node = node.hpack
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
@@ -44,7 +30,7 @@ local unsetvalue = attributes.unsetvalue
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local tasks = nodes.tasks
local new_glue = nodepool.glue
@@ -80,20 +66,16 @@ function nodes.aligned(head,start,stop,width,how)
if how == "flushleft" or how == "middle" then
head, stop = insert_node_after(head,stop,new_glue(0,65536,65536))
end
- local prv = getprev(start)
- local nxt = getnext(stop)
- setfield(start,"prev",nil)
- setfield(stop,"next",nil)
+ local prv, nxt = start.prev, stop.next
+ start.prev, stop.next = nil, nil
local packed = hpack_node(start,width,"exactly") -- no directional mess here, just lr
if prv then
- setfield(prv,"next",packed)
- setfield(packed,"prev",prv)
+ prv.next, packed.prev = packed, prv
end
if nxt then
- setfield(nxt,"prev",packed)
- setfield(packed,"next",nxt)
+ nxt.prev, packed.next = packed, nxt
end
- if getprev(packed) then
+ if packed.prev then
return head, packed
else
return packed, packed
@@ -101,13 +83,12 @@ function nodes.aligned(head,start,stop,width,how)
end
actions[1] = function(head,start,attr)
- local font = getfont(start)
- local char = getchar(start)
+ local font = start.font
+ local char = start.char
local unic = chardata[font][char].tounicode
local what = unic and tonumber(unic,16) or char
if charbase[what].category == "nd" then
- local oldwidth = getfield(start,"width")
- local newwidth = getdigitwidth(font)
+ local oldwidth, newwidth = start.width, getdigitwidth(font)
if newwidth ~= oldwidth then
if trace_digits then
report_digits("digit trigger %a, instance %a, char %C, unicode %U, delta %s",
@@ -121,13 +102,12 @@ actions[1] = function(head,start,attr)
end
function digits.handler(head)
- head = tonut(head)
local done, current, ok = false, head, false
while current do
- if getid(current) == glyph_code then
- local attr = getattr(current,a_digits)
+ if current.id == glyph_code then
+ local attr = current[a_digits]
if attr and attr > 0 then
- setattr(current,a_digits,unsetvalue)
+ current[a_digits] = unsetvalue
local action = actions[attr%100] -- map back to low number
if action then
head, current, ok = action(head,current,attr)
@@ -137,11 +117,9 @@ function digits.handler(head)
end
end
end
- if current then
- current = getnext(current)
- end
+ current = current and current.next
end
- return tonode(head), done
+ return head, done
end
local m, enabled = 0, false -- a trick to make neighbouring ranges work
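
The reworked nodes.aligned above shows the splice idiom in its direct-field form: detach a sublist, hpack it to the requested width, and relink the resulting hlist where the sublist used to be. A standalone sketch of that splice (glue insertion and error handling left out):

    -- pack the sublist from start to stop into a box of the given width
    -- and put the box back where the sublist was; returns the new head
    local function repack(head, start, stop, width)
        local prv, nxt = start.prev, stop.next
        start.prev, stop.next = nil, nil              -- detach the sublist
        local packed = node.hpack(start, width, "exactly")
        if prv then
            prv.next, packed.prev = packed, prv
        else
            head = packed                             -- sublist started the list
        end
        if nxt then
            nxt.prev, packed.next = packed, nxt
        end
        return head, packed
    end
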
diff --git a/tex/context/base/typo-dir.lua b/tex/context/base/typo-dir.lua
index fbca0f024..a04028452 100644
--- a/tex/context/base/typo-dir.lua
+++ b/tex/context/base/typo-dir.lua
@@ -40,35 +40,21 @@ local trace_directions = false trackers.register("typesetters.directions",
local report_textdirections = logs.reporter("typesetting","text directions")
local report_mathdirections = logs.reporter("typesetting","math directions")
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-local nutstring = nuts.tostring
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local hasbit = number.hasbit
-local traverse_id = nuts.traverse_id
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-local end_of_math = nuts.end_of_math
+
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove
+local end_of_math = nodes.end_of_math
local texsetattribute = tex.setattribute
local texsetcount = tex.setcount
local unsetvalue = attributes.unsetvalue
+local hasbit = number.hasbit
+
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
local mathcodes = nodes.mathcodes
@@ -90,7 +76,7 @@ local vlist_code = nodecodes.vlist
local localpar_code = whatcodes.localpar
local dir_code = whatcodes.dir
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_textdir = nodepool.textdir
diff --git a/tex/context/base/typo-drp.lua b/tex/context/base/typo-drp.lua
index 3a87d94b3..903140dae 100644
--- a/tex/context/base/typo-drp.lua
+++ b/tex/context/base/typo-drp.lua
@@ -11,7 +11,9 @@ if not modules then modules = { } end modules ['typo-drp'] = {
local tonumber, type, next = tonumber, type, next
local ceil = math.ceil
-local settings_to_hash = utilities.parsers.settings_to_hash
+
+local utfbyte = utf.byte
+local utfchar = utf.char
local trace_initials = false trackers.register("typesetters.initials", function(v) trace_initials = v end)
local report_initials = logs.reporter("nodes","initials")
@@ -22,42 +24,19 @@ typesetters.initials = initials or { }
local nodes = nodes
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getfield = nuts.getfield
-local getattr = nuts.getattr
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
-local hpack_nodes = nuts.hpack
-
+local hpack_nodes = nodes.hpack
local nodecodes = nodes.nodecodes
local whatsitcodes = nodes.whatsitcodes
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_kern = nodepool.kern
-local insert_before = nuts.insert_before
-local insert_after = nuts.insert_after
-local remove_node = nuts.remove
-local traverse_id = nuts.traverse_id
-local traverse = nuts.traverse
-local free_node = nuts.free
+local insert_before = nodes.insert_before
+local insert_after = nodes.insert_after
local variables = interfaces.variables
local v_default = variables.default
local v_margin = variables.margin
-local v_auto = variables.auto
-local v_first = variables.first
-local v_last = variables.last
local texget = tex.get
local texsetattribute = tex.setattribute
@@ -65,8 +44,7 @@ local unsetvalue = attributes.unsetvalue
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
-local glue_code = nodecodes.glue
-local kern_code = nodecodes.kern
+local kern_node = nodecodes.kern
local whatsit_code = nodecodes.whatsit
local localpar_code = whatsitcodes.localpar
@@ -78,8 +56,6 @@ local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
-local category = characters.category
-
local settings = nil
function initials.set(specification)
@@ -108,288 +84,74 @@ commands.setinitial = initials.set
-- todo: prevent linebreak .. but normally a initial ends up at the top of
-- a page so this has a low priority
--- actions[v_default] = function(head,setting)
--- local done = false
--- if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
--- -- begin of par
--- local first = getnext(head)
--- -- parbox .. needs to be set at 0
--- if first and getid(first) == hlist_code then
--- first = getnext(first)
--- end
--- -- we need to skip over kerns and glues (signals)
--- while first and getid(first) ~= glyph_code do
--- first = getnext(first)
--- end
--- if first and getid(first) == glyph_code then
--- local char = getchar(first)
--- local prev = getprev(first)
--- local next = getnext(first)
--- -- if getid(prev) == hlist_code then
--- -- -- set the width to 0
--- -- end
--- if next and getid(next) == kern_code then
--- setfield(next,"kern",0)
--- end
--- if setting.font then
--- setfield(first,"font",setting.font)
--- end
--- if setting.dynamic > 0 then
--- setattr(first,0,setting.dynamic)
--- end
--- -- can be a helper
--- local ma = setting.ma or 0
--- local ca = setting.ca
--- local ta = setting.ta
--- if ca and ca > 0 then
--- setattr(first,a_colorspace,ma == 0 and 1 or ma)
--- setattr(first,a_color,ca)
--- end
--- if ta and ta > 0 then
--- setattr(first,a_transparency,ta)
--- end
--- --
--- local width = getfield(first,"width")
--- local height = getfield(first,"height")
--- local depth = getfield(first,"depth")
--- local distance = setting.distance or 0
--- local voffset = setting.voffset or 0
--- local hoffset = setting.hoffset or 0
--- local parindent = tex.parindent
--- local baseline = texget("baselineskip").width
--- local lines = tonumber(setting.n) or 0
--- --
--- setfield(first,"xoffset",- width - hoffset - distance - parindent)
--- setfield(first,"yoffset",- voffset) -- no longer - height here
--- -- We pack so that successive handling cannot touch the dropped cap. Packaging
--- -- in a hlist is also needed because we cannot locally adapt e.g. parindent (not
--- -- yet stored in with localpar).
--- setfield(first,"prev",nil)
--- setfield(first,"next",nil)
--- local h = hpack_nodes(first)
--- setfield(h,"width",0)
--- setfield(h,"height",0)
--- setfield(h,"depth",0)
--- setfield(prev,"next",h)
--- setfield(next,"prev",h)
--- setfield(h,"next",next)
--- setfield(h,"prev",prev)
--- first = h
--- -- end of packaging
--- if setting.location == v_margin then
--- -- okay
--- else
--- if lines == 0 then -- safeguard, not too precise
--- lines = ceil((height+voffset) / baseline)
--- end
--- -- We cannot set parshape yet ... when we can I'll add a slope
--- -- option (positive and negative, in emwidth).
--- local hangafter = - lines
--- local hangindent = width + distance + parindent
--- if trace_initials then
--- report_initials("setting hangafter to %i and hangindent to %p",hangafter,hangindent)
--- end
--- tex.hangafter = hangafter
--- tex.hangindent = hangindent
--- if parindent ~= 0 then
--- insert_after(first,first,new_kern(-parindent))
--- end
--- end
--- done = true
--- end
--- end
--- return head, done
--- end
-
actions[v_default] = function(head,setting)
local done = false
- if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
+ if head.id == whatsit_code and head.subtype == localpar_code then
-- begin of par
- local first = getnext(head)
- local indent = false
+ local first = head.next
-- parbox .. needs to be set at 0
- if first and getid(first) == hlist_code then
- first = getnext(first)
- indent = true
+ if first and first.id == hlist_code then
+ first = first.next
end
-- we need to skip over kerns and glues (signals)
- while first and getid(first) ~= glyph_code do
- first = getnext(first)
+ while first and first.id ~= glyph_code do
+ first = first.next
end
- if first and getid(first) == glyph_code then
- local ma = setting.ma or 0
- local ca = setting.ca
- local ta = setting.ta
- local last = first
+ if first and first.id == glyph_code then
+ local char = first.char
+ local prev = first.prev
+ local next = first.next
+ -- if prev.id == hlist_code then
+ -- -- set the width to 0
+ -- end
+ if next and next.id == kern_node then
+ next.kern = 0
+ end
+ if setting.font then
+ first.font = setting.font
+ end
+ if setting.dynamic > 0 then
+ first[0] = setting.dynamic
+ end
+ -- can be a helper
+ local ma = setting.ma or 0
+ local ca = setting.ca
+ local ta = setting.ta
+ if ca and ca > 0 then
+ first[a_colorspace] = ma == 0 and 1 or ma
+ first[a_color] = ca
+ end
+ if ta and ta > 0 then
+ first[a_transparency] = ta
+ end
+ --
+ local width = first.width
+ local height = first.height
+ local depth = first.depth
local distance = setting.distance or 0
local voffset = setting.voffset or 0
local hoffset = setting.hoffset or 0
local parindent = tex.parindent
local baseline = texget("baselineskip").width
local lines = tonumber(setting.n) or 0
- local dynamic = setting.dynamic
- local font = setting.font
- local method = settings_to_hash(setting.method)
- local length = tonumber(setting.m) or 1
- --
- -- 1 char | n chars | skip first quote | ignore punct | keep punct
--
- if getattr(first,a_initial) then
- for current in traverse(getnext(first)) do
- if getattr(current,a_initial) then
- last = current
- else
- break
- end
- end
- elseif method[v_auto] then
- local char = getchar(first)
- local kind = category(char)
- if kind == "po" or kind == "pi" then
- if method[v_first] then
- -- remove quote etc before initial
- local next = getnext(first)
- if not next then
- -- don't start with a quote or so
- return head, false
- end
- last = nil
- for current in traverse_id(glyph_code,next) do
- head, first = remove_node(head,first,true)
- first = current
- last = first
- break
- end
- if not last then
- -- no following glyph or so
- return head, false
- end
- else
- -- keep quote etc with initial
- local next = getnext(first)
- if not next then
- -- don't start with a quote or so
- return head, false
- end
- for current in traverse_id(glyph_code,next) do
- last = current
- break
- end
- if last == first then
- return head, false
- end
- end
- elseif kind == "pf" then
- -- error: final quote
- else
- -- okay
- end
- -- maybe also: get all A. B. etc
- local next = getnext(first)
- if next then
- for current in traverse_id(glyph_code,next) do
- local char = getchar(current)
- local kind = category(char)
- if kind == "po" then
- if method[v_last] then
- -- remove period etc after initial
- remove_node(head,current,true)
- else
- -- keep period etc with initial
- last = current
- end
- end
- break
- end
- end
- else
- for current in traverse_id(glyph_code,first) do
- last = current
- if length <= 1 then
- break
- else
- length = length - 1
- end
- end
- end
- local current = first
- while true do
- local id = getid(current)
- if id == kern_code then
- setfield(current,"kern",0)
- elseif id == glyph_code then
- local next = getnext(current)
- if font then
- setfield(current,"font",font)
- end
- if dynamic > 0 then
- setattr(current,0,dynamic)
- end
- -- can be a helper
- if ca and ca > 0 then
- setattr(current,a_colorspace,ma == 0 and 1 or ma)
- setattr(current,a_color,ca)
- end
- if ta and ta > 0 then
- setattr(current,a_transparency,ta)
- end
- --
- end
- if current == last then
- break
- else
- current = getnext(current)
- end
- end
+ first.xoffset = - width - hoffset - distance - parindent
+ first.yoffset = - voffset -- no longer - height here
-- We pack so that successive handling cannot touch the dropped cap. Packaging
-- in a hlist is also needed because we cannot locally adapt e.g. parindent (not
-- yet stored in with localpar).
- local prev = getprev(first)
- local next = getnext(last)
- --
- setfield(first,"prev",nil)
- setfield(last,"next",nil)
- local dropper = hpack_nodes(first)
- local width = getfield(dropper,"width")
- local height = getfield(dropper,"height")
- local depth = getfield(dropper,"depth")
- setfield(dropper,"width",0)
- setfield(dropper,"height",0)
- setfield(dropper,"depth",0)
- --
- setfield(prev,"next",dropper)
- if next then
- setfield(next,"prev",dropper)
- end
- setfield(dropper,"next",next)
- setfield(dropper,"prev",prev)
- --
- if next then
- local current = next
- while current do
- local id = getid(current)
- if id == glue_code or id == kern_code then
- local next = getnext(current)
- -- remove_node(current,current,true) -- created an invalid next link and dangling remains
- remove_node(head,current,true)
- current = next
- else
- break
- end
- end
- end
- --
- local hoffset = width + hoffset + distance + (indent and parindent or 0)
- for current in traverse_id(glyph_code,first) do
- setfield(current,"xoffset",- hoffset )
- setfield(current,"yoffset",- voffset) -- no longer - height here
- if current == last then
- break
- end
- end
- --
- first = dropper
- --
+ first.prev = nil
+ first.next = nil
+ local h = hpack_nodes(first)
+ h.width = 0
+ h.height = 0
+ h.depth = 0
+ prev.next = h
+ next.prev = h
+ h.next = next
+ h.prev = prev
+
+ -- end of packaging
if setting.location == v_margin then
-- okay
else
@@ -399,15 +161,15 @@ actions[v_default] = function(head,setting)
-- We cannot set parshape yet ... when we can I'll add a slope
-- option (positive and negative, in emwidth).
local hangafter = - lines
- local hangindent = width + distance
+ local hangindent = width + distance + parindent
if trace_initials then
report_initials("setting hangafter to %i and hangindent to %p",hangafter,hangindent)
end
tex.hangafter = hangafter
tex.hangindent = hangindent
- end
- if indent then
- insert_after(first,first,new_kern(-parindent))
+ if parindent ~= 0 then
+ insert_after(first,first,new_kern(-parindent))
+ end
end
done = true
end
@@ -416,17 +178,16 @@ actions[v_default] = function(head,setting)
end
function initials.handler(head)
- head = tonut(head)
local start = head
local attr = nil
while start do
- attr = getattr(start,a_initial)
+ attr = start[a_initial]
if attr then
break
- elseif getid(start) == glyph then
+        elseif start.id == glyph_code then
break
else
- start = getnext(start)
+ start = start.next
end
end
if attr then
@@ -440,8 +201,8 @@ function initials.handler(head)
report_initials("processing initials, alternative %a",alternative)
end
local head, done = action(head,settings)
- return tonode(head), done
+ return head, done
end
end
- return tonode(head), false
+ return head, false
end
diff --git a/tex/context/base/typo-drp.mkiv b/tex/context/base/typo-drp.mkiv
index 3ac47466f..78f6df0a2 100644
--- a/tex/context/base/typo-drp.mkiv
+++ b/tex/context/base/typo-drp.mkiv
@@ -57,8 +57,6 @@
\setupinitial
[\c!location=\v!text,
\c!n=3,
- \c!m=1,
- \c!method=\v!none,
% \s!font=Bold sa 4,
% \s!font=Bold ht \measure{initial:n},
\s!font=Bold cp \measure{initial:n},
@@ -69,25 +67,24 @@
\c!color=,
\c!before=\blank]
-\unexpanded\def\placeinitial % we cannot group so no settings
+\unexpanded\def\placeinitial
{\dosingleempty\typo_initials_place}
\def\typo_initials_place[#1]% old command
{\par
\namedinitialparameter{#1}\c!before
- \setinitial[#1]\relax}
+ \setinitial[#1]}
\unexpanded\def\setinitial
- {\dodoubleempty\typo_initials_set}
+ {\dosingleempty\typo_initials_set}
-\unexpanded\def\typo_initials_set[#1][#2]%
- {\edef\typo_initial_handle{\typo_initial_handle_indeed{#1}{#2}}}
+\unexpanded\def\typo_initials_set[#1]%
+ {\edef\typo_initial_handle{\typo_initial_handle_indeed{#1}}}
-\unexpanded\def\typo_initial_handle_indeed#1#2%
+\unexpanded\def\typo_initial_handle_indeed#1%
{\dontleavehmode
\begingroup
\edef\currentinitial{#1}%
- \setupcurrentinitial[#2]%
\scratchcounter \initialparameter\c!n\relax
\scratchdistance\initialparameter\c!distance\relax
\scratchhoffset \initialparameter\c!hoffset \relax
@@ -98,43 +95,24 @@
{\definedfont[\initialparameter\s!font]}
{\useinitialstyleparameter\c!style}%
\useinitialcolorparameter\c!color
- \edef\p_text{\initialparameter\c!text}% optional
\ctxcommand{setinitial{
- location = "\initialparameter\c!location",
- enabled = true,
- n = \number\scratchcounter,
- m = \number\initialparameter\c!m,
- method = "\initialparameter\c!method",
- distance = \number\scratchdistance,
- hoffset = \number\scratchhoffset,
- voffset = \number\scratchvoffset,
- ma = \the\attribute\colormodelattribute,
- ca = \the\attribute\colorattribute,
- ta = \the\attribute\transparencyattribute,
- font = \fontid\font,
- dynamic = \number\attribute\zerocount, % it's a bit over the top to support this here
+ location = "\initialparameter\c!location",
+ enabled = true,
+ n = \number\scratchcounter,
+ distance = \number\scratchdistance,
+ hoffset = \number\scratchhoffset,
+ voffset = \number\scratchvoffset,
+ ma = \the\attribute\colormodelattribute ,
+ ca = \the\attribute\colorattribute ,
+ ta = \the\attribute\transparencyattribute,
+ font = \fontid\font,
+ dynamic = \number\attribute\zerocount, % it's a bit over the top to support this here
}}%
\stopluacode
\kern\zeropoint % we need a node
- \p_text
\endgroup
\globallet\typo_initial_handle\relax}
\let\typo_initial_handle\relax
-% \setupbodyfont[dejavu,9pt]
-%
-% \startbuffer
-% \setinitial[two] D. E. Knuth \ignorespaces\input knuth \par
-% \setinitial[two] Knuth \ignorespaces\input knuth \par
-% \setinitial[two] \quotation{D. E. Knuth} \ignorespaces\input knuth \par
-% \setinitial[two] \quotation {Knuth} \ignorespaces\input knuth \par
-% \setinitial[two] [text={D.E. Knuth}] \ignorespaces\input knuth \par
-% \setinitial[two] [m=4] D. E. Knuth \ignorespaces\input knuth \par
-% \stopbuffer
-%
-% \type{m=2} \start \defineinitial[two][m=2,method=none] \getbuffer \page \stop
-% \type{m=1,method=auto} \start \defineinitial[two][m=1,method=auto] \getbuffer \page \stop
-% \type{m=1,method={auto,first,last}} \start \defineinitial[two][m=1,method={first,auto,last}] \getbuffer \page \stop
-
\protect \endinput
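
The restored initial handler sizes the hanging indentation from the packed initial: when no explicit number of lines is given it derives one from the glyph height and the baseline distance, then sets \hangafter and \hangindent. A small sketch of that computation, assuming all dimensions are scaled points:

    local ceil = math.ceil

    -- derive hangafter/hangindent for a dropped initial; lines may be 0,
    -- meaning "compute from the glyph height" (a safeguard, not precise)
    local function hang(height, voffset, width, distance, parindent, baseline, lines)
        if lines == 0 then
            lines = ceil((height + voffset) / baseline)
        end
        tex.hangafter  = -lines                      -- indent the first 'lines' lines
        tex.hangindent = width + distance + parindent
    end
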
diff --git a/tex/context/base/typo-dua.lua b/tex/context/base/typo-dua.lua
index 91a27a30e..ec85a3d9f 100644
--- a/tex/context/base/typo-dua.lua
+++ b/tex/context/base/typo-dua.lua
@@ -66,24 +66,11 @@ local formatters = string.formatters
local directiondata = characters.directions
local mirrordata = characters.mirrors
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-local nutstring = nuts.tostring
-
-local getnext = nuts.getnext
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-
-local remove_node = nuts.remove
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-
-local nodepool = nuts.pool
+local remove_node = nodes.remove
+local insert_node_after = nodes.insert_after
+local insert_node_before = nodes.insert_before
+
+local nodepool = nodes.pool
local new_textdir = nodepool.textdir
local nodecodes = nodes.nodecodes
@@ -202,17 +189,17 @@ local function build_list(head) -- todo: store node pointer ... saves loop
local size = 0
while current do
size = size + 1
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local chr = getchar(current)
+ local chr = current.char
local dir = directiondata[chr]
list[size] = { char = chr, direction = dir, original = dir, level = 0 }
- current = getnext(current)
+ current = current.next
elseif id == glue_code then
list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 }
- current = getnext(current)
- elseif id == whatsit_code and getsubtype(current) == dir_code then
- local dir = getfield(current,"dir")
+ current = current.next
+ elseif id == whatsit_code and current.subtype == dir_code then
+ local dir = current.dir
if dir == "+TLT" then
list[size] = { char = 0x202A, direction = "lre", original = "lre", level = 0 }
elseif dir == "+TRT" then
@@ -222,27 +209,27 @@ local function build_list(head) -- todo: store node pointer ... saves loop
else
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id } -- object replacement character
end
- current = getnext(current)
+ current = current.next
elseif id == math_code then
local skip = 0
- current = getnext(current)
- while getid(current) ~= math_code do
+ current = current.next
+ while current.id ~= math_code do
skip = skip + 1
- current = getnext(current)
+ current = current.next
end
- skip = skip + 1
- current = getnext(current)
+ skip = skip + 1
+ current = current.next
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id }
else
local skip = 0
local last = id
- current = getnext(current)
+ current = current.next
while n do
- local id = getid(current)
- if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and getsubtype(current) == dir_code) then
+ local id = current.id
+ if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and current.subtype == dir_code) then
skip = skip + 1
last = id
- current = getnext(current)
+ current = current.next
else
break
end
@@ -302,8 +289,8 @@ local function find_run_limit_b_s_ws_on(list,start,limit)
end
local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for local_par)
- if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
- if getfield(head,"dir") == "TRT" then
+ if head.id == whatsit_code and head.subtype == localpar_code then
+ if head.dir == "TRT" then
return 1, "TRT", true
else
return 0, "TLT", true
@@ -690,30 +677,30 @@ local function apply_to_list(list,size,head,pardir)
report_directions("fatal error, size mismatch")
break
end
- local id = getid(current)
+ local id = current.id
local entry = list[index]
local begindir = entry.begindir
local enddir = entry.enddir
if id == glyph_code then
local mirror = entry.mirror
if mirror then
- setfield(current,"char",mirror)
+ current.char = mirror
end
if trace_directions then
local direction = entry.direction
setcolor(current,direction,direction ~= entry.original,mirror)
end
elseif id == hlist_code or id == vlist_code then
- setfield(current,"dir",pardir) -- is this really needed?
+ current.dir = pardir -- is this really needed?
elseif id == glue_code then
- if enddir and getsubtype(current) == parfillskip_code then
+ if enddir and current.subtype == parfillskip_code then
-- insert the last enddir before \parfillskip glue
head = insert_node_before(head,current,new_textdir(enddir))
enddir = false
done = true
end
elseif id == whatsit_code then
- if begindir and getsubtype(current) == localpar_code then
+ if begindir and current.subtype == localpar_code then
-- local_par should always be the 1st node
head, current = insert_node_after(head,current,new_textdir(begindir))
begindir = nil
@@ -727,7 +714,7 @@ local function apply_to_list(list,size,head,pardir)
local skip = entry.skip
if skip and skip > 0 then
for i=1,skip do
- current = getnext(current)
+ current = current.next
end
end
if enddir then
@@ -735,13 +722,13 @@ local function apply_to_list(list,size,head,pardir)
done = true
end
if not entry.remove then
- current = getnext(current)
+ current = current.next
elseif remove_controls then
-- X9
head, current = remove_node(head,current,true)
done = true
else
- current = getnext(current)
+ current = current.next
end
index = index + 1
end
@@ -749,7 +736,6 @@ local function apply_to_list(list,size,head,pardir)
end
local function process(head)
- head = tonut(head)
local list, size = build_list(head)
local baselevel, pardir, dirfound = get_baselevel(head,list,size) -- we always have an inline dir node in context
if not dirfound and trace_details then
@@ -766,7 +752,7 @@ local function process(head)
report_directions("result : %s",show_done(list,size))
end
head, done = apply_to_list(list,size,head,pardir)
- return tonode(head), done
+ return head, done
end
directions.installhandler(interfaces.variables.one,process)
diff --git a/tex/context/base/typo-dub.lua b/tex/context/base/typo-dub.lua
index 4dc0f21fb..3ecfce364 100644
--- a/tex/context/base/typo-dub.lua
+++ b/tex/context/base/typo-dub.lua
@@ -54,25 +54,11 @@ local directiondata = characters.directions
local mirrordata = characters.mirrors
local textclassdata = characters.textclasses
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-local nutstring = nuts.tostring
-
-local getnext = nuts.getnext
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getsubtype = nuts.getsubtype
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-
-local remove_node = nuts.remove
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-
-local nodepool = nuts.pool
+local remove_node = nodes.remove
+local insert_node_after = nodes.insert_after
+local insert_node_before = nodes.insert_before
+
+local nodepool = nodes.pool
local new_textdir = nodepool.textdir
local nodecodes = nodes.nodecodes
@@ -256,17 +242,17 @@ local function build_list(head) -- todo: store node pointer ... saves loop
local size = 0
while current do
size = size + 1
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local chr = getchar(current)
+ local chr = current.char
local dir = directiondata[chr]
list[size] = { char = chr, direction = dir, original = dir, level = 0 }
- current = getnext(current)
+ current = current.next
elseif id == glue_code then
list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 }
- current = getnext(current)
- elseif id == whatsit_code and getsubtype(current) == dir_code then
- local dir = getfield(current,"dir")
+ current = current.next
+ elseif id == whatsit_code and current.subtype == dir_code then
+ local dir = current.dir
if dir == "+TLT" then
list[size] = { char = 0x202A, direction = "lre", original = "lre", level = 0 }
elseif dir == "+TRT" then
@@ -276,27 +262,27 @@ local function build_list(head) -- todo: store node pointer ... saves loop
else
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id } -- object replacement character
end
- current = getnext(current)
+ current = current.next
elseif id == math_code then
local skip = 0
- current = getnext(current)
- while getid(current) ~= math_code do
+ current = current.next
+ while current.id ~= math_code do
skip = skip + 1
- current = getnext(current)
+ current = current.next
end
skip = skip + 1
- current = getnext(current)
+ current = current.next
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id }
else
local skip = 0
local last = id
- current = getnext(current)
+ current = current.next
while n do
- local id = getid(current)
- if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and getsubtype(current) == dir_code) then
+ local id = current.id
+ if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and current.subtype == dir_code) then
skip = skip + 1
last = id
- current = getnext(current)
+ current = current.next
else
break
end
@@ -379,8 +365,8 @@ end
-- the action
local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for local_par)
- if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
- if getfield(head,"dir") == "TRT" then
+ if head.id == whatsit_code and head.subtype == localpar_code then
+ if head.dir == "TRT" then
return 1, "TRT", true
else
return 0, "TLT", true
@@ -799,30 +785,30 @@ local function apply_to_list(list,size,head,pardir)
report_directions("fatal error, size mismatch")
break
end
- local id = getid(current)
+ local id = current.id
local entry = list[index]
local begindir = entry.begindir
local enddir = entry.enddir
if id == glyph_code then
local mirror = entry.mirror
if mirror then
- setfield(current,"char",mirror)
+ current.char = mirror
end
if trace_directions then
local direction = entry.direction
setcolor(current,direction,direction ~= entry.original,mirror)
end
elseif id == hlist_code or id == vlist_code then
- setfield(current,"dir",pardir) -- is this really needed?
+ current.dir = pardir -- is this really needed?
elseif id == glue_code then
- if enddir and getsubtype(current) == parfillskip_code then
+ if enddir and current.subtype == parfillskip_code then
-- insert the last enddir before \parfillskip glue
head = insert_node_before(head,current,new_textdir(enddir))
enddir = false
done = true
end
elseif id == whatsit_code then
- if begindir and getsubtype(current) == localpar_code then
+ if begindir and current.subtype == localpar_code then
-- local_par should always be the 1st node
head, current = insert_node_after(head,current,new_textdir(begindir))
begindir = nil
@@ -836,7 +822,7 @@ local function apply_to_list(list,size,head,pardir)
local skip = entry.skip
if skip and skip > 0 then
for i=1,skip do
- current = getnext(current)
+ current = current.next
end
end
if enddir then
@@ -844,13 +830,13 @@ local function apply_to_list(list,size,head,pardir)
done = true
end
if not entry.remove then
- current = getnext(current)
+ current = current.next
elseif remove_controls then
-- X9
head, current = remove_node(head,current,true)
done = true
else
- current = getnext(current)
+ current = current.next
end
index = index + 1
end
@@ -858,9 +844,8 @@ local function apply_to_list(list,size,head,pardir)
end
local function process(head)
- head = tonut(head)
-- for the moment a whole paragraph property
- local attr = getattr(head,a_directions)
+ local attr = head[a_directions]
local analyze_fences = getfences(attr)
--
local list, size = build_list(head)
@@ -879,7 +864,7 @@ local function process(head)
report_directions("result : %s",show_done(list,size))
end
head, done = apply_to_list(list,size,head,pardir)
- return tonode(head), done
+ return head, done
end
directions.installhandler(interfaces.variables.two,process)
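
Both bidi implementations (typo-dua and typo-dub) first flatten the node list into a plain Lua array and resolve the embedding levels on that, mapping dir whatsits onto the corresponding explicit controls (U+202A, U+202B, U+202C) and anything they do not analyse onto the object replacement character. A condensed sketch of that flattening step, assuming a directiondata table indexed by unicode:

    local glyph_code = node.id("glyph")
    local glue_code  = node.id("glue")

    -- flatten a node list into { char, direction, level } records so the
    -- bidi levels can be resolved on an array instead of on nodes
    local function build_list(head, directiondata)
        local list, size = { }, 0
        local current = head
        while current do
            size = size + 1
            local id = current.id
            if id == glyph_code then
                local chr = current.char
                local dir = directiondata[chr]
                list[size] = { char = chr, direction = dir, original = dir, level = 0 }
            elseif id == glue_code then
                list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 }
            else
                -- object replacement character for unanalysed material
                list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id }
            end
            current = current.next
        end
        return list, size
    end
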
diff --git a/tex/context/base/typo-fln.lua b/tex/context/base/typo-fln.lua
index 7ce41cd81..4c97af450 100644
--- a/tex/context/base/typo-fln.lua
+++ b/tex/context/base/typo-fln.lua
@@ -23,38 +23,25 @@ local firstlines = typesetters.firstlines
local nodes = nodes
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getfield = nuts.getfield
-local getlist = nuts.getlist
-local getattr = nuts.getattr
-local getbox = nuts.getbox
-
-local setfield = nuts.setfield
-local setattr = nuts.setattr
-
+local getbox = nodes.getbox
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
-local traverse_id = nuts.traverse_id
-local free_node_list = nuts.flush_list
-local free_node = nuts.flush_node
-local copy_node_list = nuts.copy_list
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local hpack_node_list = nuts.hpack
-local remove_node = nuts.remove
+local traverse_id = nodes.traverse_id
+local free_node_list = nodes.flush_list
+local free_node = nodes.flush_node
+local copy_node_list = nodes.copy_list
+local insert_node_after = nodes.insert_after
+local insert_node_before = nodes.insert_before
+local hpack_node_list = nodes.hpack
+local remove_node = nodes.remove
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local newpenalty = nodepool.penalty
local newkern = nodepool.kern
-local tracerrule = nodes.tracers.pool.nuts.rule
+local tracerrule = nodes.tracers.pool.nodes.rule
local actions = { }
firstlines.actions = actions
@@ -105,9 +92,9 @@ actions[v_line] = function(head,setting)
local linebreaks = { }
for g in traverse_id(glyph_code,temp) do
if dynamic > 0 then
- setattr(g,0,dynamic)
+ g[0] = dynamic
end
- setfield(g,"font",font)
+ g.font = font
end
local start = temp
local list = temp
@@ -121,7 +108,7 @@ actions[v_line] = function(head,setting)
hsize = hsize - hangindent
end
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
n = n + 1
elseif id == disc_code then
@@ -130,7 +117,7 @@ actions[v_line] = function(head,setting)
-- this could be an option
elseif n > 0 then
local pack = hpack_node_list(copy_node_list(list,start))
- if getfield(pack,"width") > hsize then
+ if pack.width > hsize then
free_node_list(pack)
list = prev
break
@@ -141,7 +128,7 @@ actions[v_line] = function(head,setting)
nofchars = n
end
end
- start = getnext(start)
+ start = start.next
end
if not linebreaks[i] then
linebreaks[i] = n
@@ -152,18 +139,18 @@ actions[v_line] = function(head,setting)
for i=1,noflines do
local linebreak = linebreaks[i]
while start and n < nofchars do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then -- or id == disc_code then
if dynamic > 0 then
- setattr(start,0,dynamic)
+ start[0] = dynamic
end
- setfield(start,"font",font)
+ start.font = font
if ca and ca > 0 then
- setattr(start,a_colorspace,ma == 0 and 1 or ma)
- setattr(start,a_color,ca)
+ start[a_colorspace] = ma == 0 and 1 or ma
+ start[a_color] = ca
end
if ta and ta > 0 then
- setattr(start,a_transparency,ta)
+ start[a_transparency] = ta
end
n = n + 1
end
@@ -176,7 +163,7 @@ actions[v_line] = function(head,setting)
head, start = insert_node_after(head,start,newpenalty(-10000)) -- break
break
end
- start = getnext(start)
+ start = start.next
end
end
free_node_list(temp)
@@ -195,7 +182,7 @@ actions[v_word] = function(head,setting)
local ca = setting.ca
local ta = setting.ta
while start do
- local id = getid(start)
+ local id = start.id
-- todo: delete disc nodes
if id == glyph_code then
if not ok then
@@ -203,16 +190,16 @@ actions[v_word] = function(head,setting)
ok = true
end
if ca and ca > 0 then
- setattr(start,a_colorspace,ma == 0 and 1 or ma)
- setattr(start,a_color,ca)
+ start[a_colorspace] = ma == 0 and 1 or ma
+ start[a_color] = ca
end
if ta and ta > 0 then
- setattr(start,a_transparency,ta)
+ start[a_transparency] = ta
end
if dynamic > 0 then
- setattr(start,0,dynamic)
+ start[0] = dynamic
end
- setfield(start,"font",font)
+ start.font = font
elseif id == disc_code then
-- continue
elseif id == kern_code then -- todo: fontkern
@@ -223,7 +210,7 @@ actions[v_word] = function(head,setting)
break
end
end
- start = getnext(start)
+ start = start.next
end
return head, true
end
@@ -231,17 +218,16 @@ end
actions[v_default] = actions[v_line]
function firstlines.handler(head)
- head = tonut(head)
local start = head
local attr = nil
while start do
- attr = getattr(start,a_firstline)
+ attr = start[a_firstline]
if attr then
break
- elseif getid(start) == glyph_code then
+        elseif start.id == glyph_code then
break
else
- start = getnext(start)
+ start = start.next
end
end
if attr then
@@ -254,18 +240,17 @@ function firstlines.handler(head)
if trace_firstlines then
report_firstlines("processing firstlines, alternative %a",alternative)
end
- local head, done = action(head,settings)
- return tonode(head), done
+ return action(head,settings)
end
end
- return tonode(head), false
+ return head, false
end
-- goodie
function commands.applytofirstcharacter(box,what)
local tbox = getbox(box) -- assumes hlist
- local list = getlist(tbox)
+ local list = tbox.list
local done = nil
for n in traverse_id(glyph_code,list) do
list = remove_node(list,n)
@@ -273,10 +258,10 @@ function commands.applytofirstcharacter(box,what)
break
end
if done then
- setfield(tbox,"list",list)
+ tbox.list = list
local kind = type(what)
if kind == "string" then
- context[what](tonode(done))
+ context[what](done)
elseif kind == "function" then
what(done)
else
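
The first-line handler restyles a prefix of the paragraph by rewriting glyph fields in place: the font, optionally a dynamic feature set via attribute 0, and color attributes, stopping at the first glue once a word has been seen. A trimmed sketch of the per-glyph part, with a_color standing in as a hypothetical attribute number:

    local glyph_code = node.id("glyph")
    local glue_code  = node.id("glue")

    -- restyle the first word of a list: switch the font, optionally set
    -- a dynamic feature (attribute 0) and a color attribute, stop at glue
    local function style_first_word(head, font, dynamic, a_color, color)
        local seen = false
        local start = head
        while start do
            local id = start.id
            if id == glyph_code then
                seen = true
                start.font = font
                if dynamic and dynamic > 0 then
                    start[0] = dynamic
                end
                if a_color and color and color > 0 then
                    start[a_color] = color
                end
            elseif id == glue_code and seen then
                break                              -- end of the first word
            end
            start = start.next
        end
        return head, seen
    end
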
diff --git a/tex/context/base/typo-itc.lua b/tex/context/base/typo-itc.lua
index db94c5c54..452b623c8 100644
--- a/tex/context/base/typo-itc.lua
+++ b/tex/context/base/typo-itc.lua
@@ -9,9 +9,8 @@ if not modules then modules = { } end modules ['typo-itc'] = {
local utfchar = utf.char
local trace_italics = false trackers.register("typesetters.italics", function(v) trace_italics = v end)
-local report_italics = logs.reporter("nodes","italics")
-local threshold = 0.5 trackers.register("typesetters.threshold", function(v) threshold = v == true and 0.5 or tonumber(v) end)
+local report_italics = logs.reporter("nodes","italics")
typesetters.italics = typesetters.italics or { }
local italics = typesetters.italics
@@ -25,35 +24,21 @@ local math_code = nodecodes.math
local tasks = nodes.tasks
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getid = nuts.getid
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-
-local insert_node_after = nuts.insert_after
-local delete_node = nuts.delete
-local end_of_math = nuts.end_of_math
+local insert_node_after = node.insert_after
+local delete_node = nodes.delete
+local end_of_math = node.end_of_math
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
local a_italics = attributes.private("italics")
local unsetvalue = attributes.unsetvalue
-local new_correction_kern = nodepool.fontkern
-local new_correction_glue = nodepool.glue
+local new_correction_kern = nodes.pool.fontkern
+local new_correction_glue = nodes.pool.glue
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
local italicsdata = fonthashes.italics
-local exheights = fonthashes.exheights
local forcedvariant = false
@@ -98,7 +83,6 @@ end
-- todo: clear attribute
function italics.handler(head)
- head = tonut(head)
local done = false
local italic = 0
local lastfont = nil
@@ -108,10 +92,10 @@ function italics.handler(head)
local current = head
local inserted = nil
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local font = getfont(current)
- local char = getchar(current)
+ local font = current.font
+ local char = current.char
local data = italicsdata[font]
if font ~= lastfont then
if italic ~= 0 then
@@ -120,25 +104,11 @@ function italics.handler(head)
report_italics("ignoring %p between italic %C and italic %C",italic,prevchar,char)
end
else
- local okay = true
- if threshold then
- local ht = getfield(current,"height")
- local ex = exheights[font]
- local th = threshold * ex
- if ht <= th then
- if trace_italics then
- report_italics("ignoring correction between italic %C and regular %C, height %p less than threshold %p",prevchar,char,ht,th)
- end
- okay = false
- end
- end
- if okay then
- if trace_italics then
- report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char)
- end
- insert_node_after(head,previous,new_correction_kern(italic))
- done = true
+ if trace_italics then
+ report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char)
end
+ insert_node_after(head,previous,new_correction_kern(italic))
+ done = true
end
elseif inserted and data then
if trace_italics then
@@ -151,7 +121,7 @@ function italics.handler(head)
lastfont = font
end
if data then
- local attr = forcedvariant or getattr(current,a_italics)
+ local attr = forcedvariant or current[a_italics]
if attr and attr > 0 then
local cd = data[char]
if not cd then
@@ -203,7 +173,7 @@ function italics.handler(head)
italic = 0
done = true
end
- current = getnext(current)
+ current = current.next
end
if italic ~= 0 and lastattr > 1 then -- more control is needed here
if trace_italics then
@@ -212,7 +182,7 @@ function italics.handler(head)
insert_node_after(head,previous,new_correction_kern(italic))
done = true
end
- return tonode(head), done
+ return head, done
end
local enable
@@ -254,7 +224,6 @@ function commands.setupitaliccorrection(option) -- no grouping !
elseif options[variables.always] then
variant = 2
end
- -- maybe also keywords for threshold
if options[variables.global] then
forcedvariant = variant
texsetattribute(a_italics,unsetvalue)
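
The italics handler that remains after this change keeps a running correction while scanning glyphs and flushes it as a kern when the scan leaves an italic font for one without italics data (the ex-height threshold test is gone). A reduced sketch of that flush, with italicsdata assumed to map font id and slot to a correction in scaled points:

    local glyph_code = node.id("glyph")

    -- insert pending italic corrections as kerns at italic-to-upright
    -- font transitions; italicsdata[font][char] gives the correction in sp
    local function correct_italics(head, italicsdata)
        local italic, lastfont, previous, done = 0, nil, nil, false
        for current in node.traverse_id(glyph_code, head) do
            local font = current.font
            if font ~= lastfont then
                if italic ~= 0 and previous and not italicsdata[font] then
                    local kern = node.new("kern")
                    kern.kern = italic
                    node.insert_after(head, previous, kern)
                    done = true
                end
                italic, lastfont = 0, font
            end
            local data = italicsdata[font]
            italic = data and data[current.char] or 0
            previous = current
        end
        return head, done
    end
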
diff --git a/tex/context/base/typo-krn.lua b/tex/context/base/typo-krn.lua
index a8ffe557b..56f58bb73 100644
--- a/tex/context/base/typo-krn.lua
+++ b/tex/context/base/typo-krn.lua
@@ -13,36 +13,21 @@ local utfchar = utf.char
local nodes, node, fonts = nodes, node, fonts
-local tasks = nodes.tasks
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local find_node_tail = nuts.tail
-local free_node = nuts.free
-local free_nodelist = nuts.flush_list
-local copy_node = nuts.copy
-local copy_nodelist = nuts.copy_list
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local end_of_math = nuts.end_of_math
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
+local find_node_tail = node.tail or node.slide
+local free_node = node.free
+local free_nodelist = node.flush_list
+local copy_node = node.copy
+local copy_nodelist = node.copy_list
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local end_of_math = node.end_of_math
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
local new_gluespec = nodepool.gluespec
local new_kern = nodepool.kern
local new_glue = nodepool.glue
@@ -122,10 +107,10 @@ kerns.keeptogether = false -- just for fun (todo: control setting with key/value
-- blue : keep by goodie
function kerns.keepligature(n) -- might become default
- local f = getfont(n)
- local a = getattr(n,0) or 0
+ local f = n.font
+ local a = n[0] or 0
if trace_ligatures then
- local c = getchar(n)
+ local c = n.char
local d = fontdescriptions[f][c].name
if a > 0 and contextsetups[a].keepligatures == v_auto then
report("font %!font:name!, glyph %a, slot %X -> ligature %s, by %s feature %a",f,d,c,"kept","dynamic","keepligatures")
@@ -184,9 +169,9 @@ end
local function kern_injector(fillup,kern)
if fillup then
local g = new_glue(kern)
- local s = getfield(g,"spec")
- setfield(s,"stretch",kern)
- setfield(s,"stretch_order",1)
+ local s = g.spec
+ s.stretch = kern
+ s.stretch_order = 1
return g
else
return new_kern(kern)
@@ -196,7 +181,7 @@ end
local function spec_injector(fillup,width,stretch,shrink)
if fillup then
local s = new_gluespec(width,2*stretch,2*shrink)
- setfield(s,"stretch_order",1)
+ s.stretch_order = 1
return s
else
return new_gluespec(width,stretch,shrink)
@@ -212,9 +197,9 @@ local function do_process(head,force) -- todo: glue so that we can fully stretch
local fillup = false
while start do
-- faster to test for attr first
- local attr = force or getattr(start,a_kerns)
+ local attr = force or start[a_kerns]
if attr and attr > 0 then
- setattr(start,a_kerns,unsetvalue)
+ start[a_kerns] = unsetvalue
local krn = mapping[attr]
if krn == v_max then
krn = .25
@@ -223,10 +208,10 @@ local function do_process(head,force) -- todo: glue so that we can fully stretch
fillup = false
end
if krn and krn ~= 0 then
- local id = getid(start)
- if id == glyph_code then -- we could use the subtype ligature
- lastfont = getfont(start)
- local c = getfield(start,"components")
+ local id = start.id
+ if id == glyph_code then
+ lastfont = start.font
+ local c = start.components
if not c then
-- fine
elseif keepligature and keepligature(start) then
@@ -234,47 +219,47 @@ local function do_process(head,force) -- todo: glue so that we can fully stretch
else
c = do_process(c,attr)
local s = start
- local p, n = getprev(s), getnext(s)
+ local p, n = s.prev, s.next
local tail = find_node_tail(c)
if p then
- setfield(p,"next",c)
- setfield(c,"prev",p)
+ p.next = c
+ c.prev = p
else
head = c
end
if n then
- setfield(n,"prev",tail)
+ n.prev = tail
end
- setfield(tail,"next",n)
+ tail.next = n
start = c
- setfield(s,"components",nil)
+ s.components = nil
-- we now leak nodes !
- -- free_node(s)
+ -- free_node(s)
done = true
end
- local prev = getprev(start)
+ local prev = start.prev
if not prev then
-- skip
- elseif markdata[lastfont][getchar(start)] then
+ elseif markdata[lastfont][start.char] then
-- skip
else
- local pid = getid(prev)
+ local pid = prev.id
if not pid then
-- nothing
elseif pid == kern_code then
- if getsubtype(prev) == kerning_code or getattr(prev,a_fontkern) then
- if keeptogether and getid(getprev(prev)) == glyph_code and keeptogether(getprev(prev),start) then -- we could also pass start
+ if prev.subtype == kerning_code or prev[a_fontkern] then
+ if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then -- we could also pass start
-- keep 'm
else
-- not yet ok, as injected kerns can be overlays (from node-inj.lua)
- setfield(prev,"subtype",userkern_code)
- setfield(prev,"kern",getfield(prev,"kern") + quaddata[lastfont]*krn) -- here
+ prev.subtype = userkern_code
+ prev.kern = prev.kern + quaddata[lastfont]*krn -- here
done = true
end
end
elseif pid == glyph_code then
- if getfont(prev) == lastfont then
- local prevchar, lastchar = getchar(prev), getchar(start)
+ if prev.font == lastfont then
+ local prevchar, lastchar = prev.char, start.char
if keeptogether and keeptogether(prev,start) then
-- keep 'm
else
@@ -293,102 +278,102 @@ local function do_process(head,force) -- todo: glue so that we can fully stretch
-- a bit too complicated, we can best not copy and just calculate
-- but we could have multiple glyphs involved so ...
local disc = prev -- disc
- local prv, nxt = getprev(disc), getnext(disc)
- if getsubtype(disc) == discretionary_code then
+ local prv, nxt = disc.prev, disc.next
+ if disc.subtype == discretionary_code then
-- maybe we should forget about this variant as there is no glue
-- possible
- local pre, post, replace = getfield(disc,"pre"), getfield(disc,"post"), getfield(disc,"replace")
- if pre and prv then -- must pair with getprev(start)
+ local pre, post, replace = disc.pre, disc.post, disc.replace
+ if pre and prv then -- must pair with start.prev
+ -- this one happens in most cases
local before = copy_node(prv)
- setfield(pre,"prev",before)
- setfield(before,"next",pre)
- setfield(before,"prev",nil)
+ pre.prev = before
+ before.next = pre
+ before.prev = nil
pre = do_process(before,attr)
- pre = getnext(pre)
- setfield(pre,"prev",nil)
- setfield(disc,"pre",pre)
+ pre = pre.next
+ pre.prev = nil
+ disc.pre = pre
free_node(before)
end
if post and nxt then -- must pair with start
local after = copy_node(nxt)
local tail = find_node_tail(post)
- setfield(tail,"next",after)
- setfield(after,"prev",tail)
- setfield(after,"next",nil)
+ tail.next = after
+ after.prev = tail
+ after.next = nil
post = do_process(post,attr)
- setfield(tail,"next",nil)
- setfield(disc,"post",post)
+ tail.next = nil
+ disc.post = post
free_node(after)
end
if replace and prv and nxt then -- must pair with start and start.prev
local before = copy_node(prv)
local after = copy_node(nxt)
local tail = find_node_tail(replace)
- setfield(replace,"prev",before)
- setfield(before,"next",replace)
- setfield(before,"prev",nil)
- setfield(tail,"next",after)
- setfield(after,"prev",tail)
- setfield(after,"next",nil)
+ replace.prev = before
+ before.next = replace
+ before.prev = nil
+ tail.next = after
+ after.prev = tail
+ after.next = nil
replace = do_process(before,attr)
- replace = getnext(replace)
- setfield(replace,"prev",nil)
- setfield(getfield(after,"prev"),"next",nil)
- setfield(disc,"replace",replace)
+ replace = replace.next
+ replace.prev = nil
+ after.prev.next = nil
+ disc.replace = replace
free_node(after)
free_node(before)
- elseif prv and getid(prv) == glyph_code and getfont(prv) == lastfont then
- local prevchar, lastchar = getchar(prv), getchar(start)
+ elseif prv and prv.id == glyph_code and prv.font == lastfont then
+ local prevchar, lastchar = prv.char, start.char
local kerns = chardata[lastfont][prevchar].kerns
local kern = kerns and kerns[lastchar] or 0
krn = kern + quaddata[lastfont]*krn -- here
- setfield(disc,"replace",kern_injector(false,krn)) -- only kerns permitted, no glue
+ disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
else
krn = quaddata[lastfont]*krn -- here
- setfield(disc,"replace",kern_injector(false,krn)) -- only kerns permitted, no glue
+ disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
end
else
-- this one happens in most cases: automatic (-), explicit (\-), regular (patterns)
- if prv and getid(prv) == glyph_code and getfont(prv) == lastfont then
- -- the normal case
- local prevchar, lastchar = getchar(prv), getchar(start)
+ if prv and prv.id == glyph_code and prv.font == lastfont then
+ local prevchar, lastchar = prv.char, start.char
local kerns = chardata[lastfont][prevchar].kerns
local kern = kerns and kerns[lastchar] or 0
- krn = kern + quaddata[lastfont]*krn
+ krn = kern + quaddata[lastfont]*krn -- here
else
- krn = quaddata[lastfont]*krn
+ krn = quaddata[lastfont]*krn -- here
end
insert_node_before(head,start,kern_injector(fillup,krn))
end
end
end
elseif id == glue_code then
- local subtype = getsubtype(start)
+ local subtype = start.subtype
if subtype == userskip_code or subtype == xspaceskip_code or subtype == spaceskip_code then
- local s = getfield(start,"spec")
- local w = getfield(s,"width")
+ local s = start.spec
+ local w = s.width
if w > 0 then
- local width, stretch, shrink = w+gluefactor*w*krn, getfield(s,"stretch"), getfield(s,"shrink")
- setfield(start,"spec",spec_injector(fillup,width,stretch*width/w,shrink*width/w))
+ local width, stretch, shrink = w+gluefactor*w*krn, s.stretch, s.shrink
+ start.spec = spec_injector(fillup,width,stretch*width/w,shrink*width/w)
done = true
end
end
elseif id == kern_code then
- -- if getsubtype(start) == kerning_code then -- handle with glyphs
- -- local sk = getfield(start,"kern")
+ -- if start.subtype == kerning_code then -- handle with glyphs
+ -- local sk = start.kern
-- if sk > 0 then
- -- setfield(start,"kern",sk*krn)
+ -- start.kern = sk*krn
-- done = true
-- end
-- end
elseif lastfont and (id == hlist_code or id == vlist_code) then -- todo: lookahead
- local p = getprev(start)
- if p and getid(p) ~= glue_code then
+ local p = start.prev
+ if p and p.id ~= glue_code then
insert_node_before(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
done = true
end
- local n = getnext(start)
- if n and getid(n) ~= glue_code then
+ local n = start.next
+ if n and n.id ~= glue_code then
insert_node_after(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
done = true
end
@@ -398,7 +383,7 @@ local function do_process(head,force) -- todo: glue so that we can fully stretch
end
end
if start then
- start = getnext(start)
+ start = start.next
end
end
return head, done
@@ -429,8 +414,7 @@ function kerns.set(factor)
end
function kerns.handler(head)
- local head, done = do_process(tonut(head)) -- no direct map, because else fourth argument is tail == true
- return tonode(head), done
+ return do_process(head) -- no direct map, because else fourth argument is tail == true
end
-- interface
diff --git a/tex/context/base/typo-mar.lua b/tex/context/base/typo-mar.lua
index 4bfc107ad..85d5c85a8 100644
--- a/tex/context/base/typo-mar.lua
+++ b/tex/context/base/typo-mar.lua
@@ -115,30 +115,13 @@ local v_first = variables.first
local v_text = variables.text
local v_column = variables.column
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local copy_node_list = nuts.copy_list
-local hpack_nodes = nuts.hpack -- nodes.fasthpack not really faster here
-local traverse_id = nuts.traverse_id
-local free_node_list = nuts.flush_list
-local insert_node_after = nuts.insert_after
-local insert_node_before = nuts.insert_before
-local linked_nodes = nuts.linked
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getsubtype = nuts.getsubtype
-local getbox = nuts.getbox
-local getlist = nuts.getlist
+local copy_node_list = node.copy_list
+local slide_nodes = node.slide
+local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
+local traverse_id = node.traverse_id
+local free_node_list = node.flush_list
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -161,7 +144,7 @@ local userdefined_code = whatsitcodes.userdefined
local dir_code = whatsitcodes.dir
local localpar_code = whatsitcodes.localpar
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_kern = nodepool.kern
local new_glue = nodepool.glue
@@ -172,12 +155,13 @@ local new_latelua = nodepool.latelua
local texgetcount = tex.getcount
local texgetdimen = tex.getdimen
+local texgetbox = tex.getbox
local texget = tex.get
local points = number.points
local isleftpage = layouts.status.isleftpage
-local registertogether = builders.paragraphs.registertogether -- tonode
+local registertogether = builders.paragraphs.registertogether
local jobpositions = job.positions
local getposition = jobpositions.position
@@ -186,7 +170,7 @@ local a_margindata = attributes.private("margindata")
local inline_mark = nodepool.userids["margins.inline"]
-local margins = { }
+local margins = { }
typesetters.margins = margins
local locations = { v_left, v_right, v_inner, v_outer } -- order might change
@@ -249,7 +233,7 @@ local function showstore(store,banner,location)
if next(store) then
for i, si in table.sortedpairs(store) do
local si =store[i]
- report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(getlist(si.box)))
+ report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(si.box.list))
end
else
report_margindata("%s: nothing stored in location %a",banner,location)
@@ -258,7 +242,7 @@ end
function margins.save(t)
setmetatable(t,defaults)
- local content = getbox(t.number)
+ local content = texgetbox(t.number)
local location = t.location
local category = t.category
local inline = t.inline
@@ -326,11 +310,11 @@ function margins.save(t)
-- nice is to make a special status table mechanism
local leftmargindistance = texgetdimen("naturalleftmargindistance")
local rightmargindistance = texgetdimen("naturalrightmargindistance")
- local strutbox = getbox("strutbox")
- t.strutdepth = getfield(strutbox,"depth")
- t.strutheight = getfield(strutbox,"height")
- t.leftskip = getfield(texget("leftskip"),"width") -- we're not in forgetall
- t.rightskip = getfield(texget("rightskip"),"width") -- we're not in forgetall
+ local strutbox = texgetbox("strutbox")
+ t.strutdepth = strutbox.depth
+ t.strutheight = strutbox.height
+ t.leftskip = texget("leftskip").width -- we're not in forgetall
+ t.rightskip = texget("rightskip").width -- we're not in forgetall
t.leftmargindistance = leftmargindistance -- todo:layoutstatus table
t.rightmargindistance = rightmargindistance
t.leftedgedistance = texgetdimen("naturalleftedgedistance")
@@ -343,7 +327,7 @@ function margins.save(t)
--
-- t.realpageno = texgetcount("realpageno")
if inline then
- context(tonode(new_usernumber(inline_mark,nofsaved))) -- or use a normal node
+ context(new_usernumber(inline_mark,nofsaved))
store[nofsaved] = t -- no insert
nofinlined = nofinlined + 1
else
@@ -420,7 +404,7 @@ local function realign(current,candidate)
-- we assume that list is a hbox, otherwise we had to take the whole current
-- in order to get it right
- setfield(current,"width",0)
+ current.width = 0
local anchornode, move_x
-- this mess is needed for alignments (combinations) so we use that
@@ -462,9 +446,9 @@ local function realign(current,candidate)
report_margindata("realigned %a, location %a, margin %a",candidate.n,location,margin)
end
end
- local list = hpack_nodes(linked_nodes(anchornode,new_kern(-delta),getlist(current),new_kern(delta)))
- setfield(current,"list",list)
- setfield(current,"width",0)
+
+ current.list = hpack_nodes(anchornode .. new_kern(-delta) .. current.list .. new_kern(delta))
+ current.width = 0
end
local function realigned(current,a)
@@ -506,8 +490,7 @@ local function markovershoot(current)
v_anchors = v_anchors + 1
cache[v_anchors] = stacked
local anchor = new_latelua(format("typesetters.margins.ha(%s)",v_anchors)) -- todo: alleen als offset > line
- local list = hpack_nodes(linked_nodes(anchor,getlist(current)))
- setfield(current,"list",list)
+ current.list = hpack_nodes(anchor .. current.list)
end
local function getovershoot(location)
@@ -529,10 +512,10 @@ end
local function inject(parent,head,candidate)
local box = candidate.box
- local width = getfield(box,"width")
- local height = getfield(box,"height")
- local depth = getfield(box,"depth")
- local shift = getfield(box,"shift")
+ local width = box.width
+ local height = box.height
+ local depth = box.depth
+ local shift = box.shift
local stack = candidate.stack
local location = candidate.location
local method = candidate.method
@@ -541,7 +524,7 @@ local function inject(parent,head,candidate)
local baseline = candidate.baseline
local strutheight = candidate.strutheight
local strutdepth = candidate.strutdepth
- local psubtype = getsubtype(parent)
+ local psubtype = parent.subtype
local offset = stacked[location]
local firstonstack = offset == false or offset == nil
nofstatus = nofstatus + 1
@@ -563,7 +546,7 @@ local function inject(parent,head,candidate)
end
end
candidate.width = width
- candidate.hsize = getfield(parent,"width") -- we can also pass textwidth
+ candidate.hsize = parent.width -- we can also pass textwidth
candidate.psubtype = psubtype
if trace_margindata then
report_margindata("processing, index %s, height %p, depth %p, parent %s",candidate.n,height,depth,listcodes[psubtype])
@@ -590,7 +573,7 @@ local function inject(parent,head,candidate)
-- experimental.
-- -- --
if method == v_top then
- local delta = height - getfield(parent,"height")
+ local delta = height - parent.height
if trace_margindata then
report_margindata("top aligned by %p",delta)
end
@@ -633,23 +616,22 @@ local function inject(parent,head,candidate)
shift = shift + delta
offset = offset + delta
end
- setfield(box,"shift",shift)
- setfield(box,"width",0)
+ box.shift = shift
+ box.width = 0
if not head then
head = box
- elseif getid(head) == whatsit_code and getsubtype(head) == localpar_code then
+ elseif head.id == whatsit_code and head.subtype == localpar_code then
-- experimental
- if getfield(head,"dir") == "TRT" then
- local list = hpack_nodes(linked_nodes(new_kern(candidate.hsize),getlist(box),new_kern(-candidate.hsize)))
- setfield(box,"list",list)
+ if head.dir == "TRT" then
+ box.list = hpack_nodes(new_kern(candidate.hsize) .. box.list .. new_kern(-candidate.hsize))
end
insert_node_after(head,head,box)
else
- setfield(head,"prev",box)
- setfield(box,"next",head)
+ head.prev = box
+ box.next = head
head = box
end
- setattr(box,a_margindata,nofstatus)
+ box[a_margindata] = nofstatus
if trace_margindata then
report_margindata("injected, location %a, shift %p",location,shift)
end
@@ -674,12 +656,12 @@ local function flushinline(parent,head)
local current = head
local done = false
local continue = false
- local room, don, con, list
+ local room, don, con
while current and nofinlined > 0 do
- local id = getid(current)
+ local id = current.id
if id == whatsit_code then
- if getsubtype(current) == userdefined_code and getfield(current,"user_id") == inline_mark then
- local n = getfield(current,"value")
+ if current.subtype == userdefined_code and current.user_id == inline_mark then
+ local n = current.value
local candidate = inlinestore[n]
if candidate then -- no vpack, as we want to realign
inlinestore[n] = nil
@@ -692,12 +674,11 @@ local function flushinline(parent,head)
end
elseif id == hlist_code or id == vlist_code then
-- optional (but sometimes needed)
- list, don, con = flushinline(current,getlist(current))
- setfield(current,"list",list)
+ current.list, don, con = flushinline(current,current.list)
continue = continue or con
done = done or don
end
- current = getnext(current)
+ current = current.next
end
return head, done, continue
end
@@ -705,7 +686,7 @@ end
local a_linenumber = attributes.private('linenumber')
local function flushed(scope,parent) -- current is hlist
- local head = getlist(parent)
+ local head = parent.list
local done = false
local continue = false
local room, con, don
@@ -721,7 +702,7 @@ local function flushed(scope,parent) -- current is hlist
done = true
continue = continue or con
nofstored = nofstored - 1
- registertogether(tonode(parent),room) -- !! tonode
+ registertogether(parent,room)
else
break
end
@@ -730,18 +711,17 @@ local function flushed(scope,parent) -- current is hlist
end
if nofinlined > 0 then
if done then
- setfield(parent,"list",head)
+ parent.list = head
end
head, don, con = flushinline(parent,head)
continue = continue or con
done = done or don
end
if done then
- local a = getattr(head,a_linenumber) -- hack .. we need a more decent critical attribute inheritance mechanism
- local l = hpack_nodes(head,getfield(parent,"width"),"exactly")
- setfield(parent,"list",l)
+ local a = head[a_linenumber] -- hack .. we need a more decent critical attribute inheritance mechanism
+ parent.list = hpack_nodes(head,parent.width,"exactly")
if a then
- setattr(l,a_linenumber,a)
+ parent.list[a_linenumber] = a
end
-- resetstacked()
end
@@ -756,15 +736,14 @@ local function handler(scope,head,group)
if trace_margindata then
report_margindata("flushing stage one, stored %s, scope %s, delayed %s, group %a",nofstored,scope,nofdelayed,group)
end
- head = tonut(head)
local current = head
local done = false
while current do
- local id = getid(current)
- if (id == vlist_code or id == hlist_code) and not getattr(current,a_margindata) then
+ local id = current.id
+ if (id == vlist_code or id == hlist_code) and not current[a_margindata] then
local don, continue = flushed(scope,current)
if don then
- setattr(current,a_margindata,0) -- signal to prevent duplicate processing
+ current[a_margindata] = 0 -- signal to prevent duplicate processing
if continue then
markovershoot(current)
end
@@ -774,12 +753,12 @@ local function handler(scope,head,group)
done = true
end
end
- current = getnext(current)
+ current = current.next
end
-- if done then
resetstacked() -- why doesn't done work ok here?
-- end
- return tonode(head), done
+ return head, done
else
return head, false
end
@@ -832,11 +811,11 @@ local function finalhandler(head)
local current = head
local done = false
while current do
- local id = getid(current)
+ local id = current.id
if id == hlist_code then
- local a = getattr(current,a_margindata)
+ local a = current[a_margindata]
if not a or a == 0 then
- finalhandler(getlist(current))
+ finalhandler(current.list)
elseif realigned(current,a) then
done = true
if nofdelayed == 0 then
@@ -844,9 +823,9 @@ local function finalhandler(head)
end
end
elseif id == vlist_code then
- finalhandler(getlist(current))
+ finalhandler(current.list)
end
- current = getnext(current)
+ current = current.next
end
return head, done
else
@@ -859,10 +838,7 @@ function margins.finalhandler(head)
-- if trace_margindata then
-- report_margindata("flushing stage two, instore: %s, delayed: %s",nofstored,nofdelayed)
-- end
-head = tonut(head)
-local head, done = finalhandler(head)
-head = tonode(head)
- return head, done
+ return finalhandler(head)
else
return head, false
end
diff --git a/tex/context/base/typo-pag.lua b/tex/context/base/typo-pag.lua
index 5b96e9c21..0dd75ddf9 100644
--- a/tex/context/base/typo-pag.lua
+++ b/tex/context/base/typo-pag.lua
@@ -6,14 +6,6 @@ if not modules then modules = { } end modules ['typo-pag'] = {
license = "see context related readme files"
}
-
-builders = builders or { }
-local builders = builders
-
-builders.paragraphs = builders.paragraphs or { }
-local parbuilders = builders.paragraphs
-
-local nodes = nodes
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
@@ -22,22 +14,12 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local penalty_code = nodecodes.penalty
-local unsetvalue = attributes.unsetvalue
-local a_keeptogether = attributes.private("keeptogether")
-
-local nuts = nodes.nuts
-local tonut = nuts.tonut
+local insert_node_after = node.insert_after
+local new_penalty = nodes.pool.penalty
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
+local unsetvalue = attributes.unsetvalue
-local insert_node_after = nuts.insert_after
-local new_penalty = nuts.pool.penalty
+local a_keeptogether = attributes.private("keeptogether")
local trace_keeptogether = false
local report_keeptogether = logs.reporter("parbuilders","keeptogether")
@@ -51,11 +33,11 @@ trackers.register("parbuilders.keeptogether", function(v) trace_keeptogether =
-- todo: also support lines = 3 etc (e.g. dropped caps) but how to set that
-- when no hlists are there ? ... maybe the local_par
-function parbuilders.registertogether(line,specification) -- might change
+function builders.paragraphs.registertogether(line,specification) -- might change
if not enabled then
nodes.tasks.enableaction("finalizers","builders.paragraphs.keeptogether")
end
- local a = getattr(line,a_keeptogether)
+ local a = line[a_keeptogether]
local c = a and cache[a]
if c then
local height = specification.height
@@ -82,7 +64,7 @@ function parbuilders.registertogether(line,specification) -- might change
if not specification.slack then
specification.slack = 0
end
- setattr(line,a_keeptogether,last)
+ line[a_keeptogether] = last
end
if trace_keeptogether then
local a = a or last
@@ -106,24 +88,24 @@ local function keeptogether(start,a)
if start then
local specification = cache[a]
if a then
- local current = getnext(start)
+ local current = start.next
local previous = start
- local total = getfield(previous,"depth")
+ local total = previous.depth
local slack = specification.slack
local threshold = specification.depth - slack
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p, slack %p","list",a,total,threshold,slack)
end
while current do
- local id = getid(current)
+ local id = current.id
if id == vlist_code or id == hlist_code then
- total = total + getfield(current,"height") + getfield(current,"depth")
+ total = total + current.height + current.depth
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p","list",a,total,threshold)
end
if total <= threshold then
- if getid(previous) == penalty_code then
- setfield(previous,"penalty",10000)
+ if previous.id == penalty_code then
+ previous.penalty = 10000
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -132,13 +114,13 @@ local function keeptogether(start,a)
end
elseif id == glue_code then
-- hm, breakpoint, maybe turn this into kern
- total = total + getfield(getfield(current,"spec"),"width")
+ total = total + current.spec.width
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p","glue",a,total,threshold)
end
if total <= threshold then
- if getid(previous) == penalty_code then
- setfield(previous,"penalty",10000)
+ if previous.id == penalty_code then
+ previous.penalty = 10000
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -146,13 +128,13 @@ local function keeptogether(start,a)
break
end
elseif id == kern_code then
- total = total + getfield(current,"kern")
+ total = total + current.kern
if trace_keeptogether then
report_keeptogether("%s, index %s, total %s, threshold %s","kern",a,total,threshold)
end
if total <= threshold then
- if getid(previous) == penalty_code then
- setfield(previous,"penalty",10000)
+ if previous.id == penalty_code then
+ previous.penalty = 10000
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -161,16 +143,16 @@ local function keeptogether(start,a)
end
elseif id == penalty_code then
if total <= threshold then
- if getid(previous) == penalty_code then
- setfield(previous,"penalty",10000)
+ if previous.id == penalty_code then
+ previous.penalty = 10000
end
- setfield(current,"penalty",10000)
+ current.penalty = 10000
else
break
end
end
previous = current
- current = getnext(current)
+ current = current.next
end
end
end
@@ -178,20 +160,20 @@ end
-- also look at first non glue/kern node e.g for a dropped caps
-function parbuilders.keeptogether(head)
+function builders.paragraphs.keeptogether(head)
local done = false
- local current = tonut(head)
+ local current = head
while current do
- if getid(current) == hlist_code then
- local a = getattr(current,a_keeptogether)
+ if current.id == hlist_code then
+ local a = current[a_keeptogether]
if a and a > 0 then
keeptogether(current,a)
- setattr(current,a_keeptogether,unsetvalue)
+ current[a_keeptogether] = unsetvalue
cache[a] = nil
done = true
end
end
- current = getnext(current)
+ current = current.next
end
return head, done
end
diff --git a/tex/context/base/typo-par.mkiv b/tex/context/base/typo-par.mkiv
deleted file mode 100644
index 8572f31b8..000000000
--- a/tex/context/base/typo-par.mkiv
+++ /dev/null
@@ -1,29 +0,0 @@
-%D \module
-%D [ file=typo-par,
-%D version=2008.09.30,
-%D title=\CONTEXT\ Typesetting Macros,
-%D subtitle=Paragraph Building,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Node Macros / Paragraph Building}
-
-%D This is very experimental, undocumented, subjected to changes, etc. just as
-%D the underlying interfaces. But at least it's cleaned as part of the status-mkiv
-%D cleanup.
-
-% \startparbuilder[basic]
-% \input tufte \par
-% \stopparbuilder
-
-\unprotect
-
-\registerctxluafile{node-ltp}{1.001}
-\registerctxluafile{trac-par}{1.001}
-
-\protect \endinput
diff --git a/tex/context/base/typo-rep.lua b/tex/context/base/typo-rep.lua
index 95b801e2e..01868f490 100644
--- a/tex/context/base/typo-rep.lua
+++ b/tex/context/base/typo-rep.lua
@@ -10,44 +10,31 @@ if not modules then modules = { } end modules ['typo-rep'] = {
-- endure it by listening to a couple cd's by The Scene and The Lau
-- on the squeezebox on my desk.
-local next, type, tonumber = next, type, tonumber
-
local trace_stripping = false trackers.register("nodes.stripping", function(v) trace_stripping = v end)
trackers.register("fonts.stripping", function(v) trace_stripping = v end)
local report_stripping = logs.reporter("fonts","stripping")
-local nodes = nodes
-local tasks = nodes.tasks
-
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getattr = nuts.getid
+local nodes, node = nodes, node
-local setattr = nuts.setattr
-
-local delete_node = nuts.delete
-local replace_node = nuts.replace
-local copy_node = nuts.copy
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
+local delete_node = nodes.delete
+local replace_node = nodes.replace
+local copy_node = node.copy
local chardata = characters.data
local collected = false
+local a_stripping = attributes.private("stripping")
local fontdata = fonts.hashes.identifiers
+local tasks = nodes.tasks
-local a_stripping = attributes.private("stripping")
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
local v_reset = interfaces.variables.reset
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+
-- todo: other namespace -> typesetters
nodes.stripping = nodes.stripping or { } local stripping = nodes.stripping
@@ -72,13 +59,13 @@ local function process(what,head,current,char)
head, current = delete_node(head,current)
elseif type(what) == "function" then
head, current = what(head,current)
- current = getnext(current)
+ current = current.next
if trace_stripping then
report_stripping("processing %C in text",char)
end
elseif what then -- assume node
head, current = replace_node(head,current,copy_node(what))
- current = getnext(current)
+ current = current.next
if trace_stripping then
report_stripping("replacing %C in text",char)
end
@@ -87,29 +74,28 @@ local function process(what,head,current,char)
end
function nodes.handlers.stripping(head)
- head = tonut(head)
local current, done = head, false
while current do
- if getid(current) == glyph_code then
+ if current.id == glyph_code then
-- it's more efficient to keep track of what needs to be kept
- local todo = getattr(current,a_stripping)
+ local todo = current[a_stripping]
if todo == 1 then
- local char = getchar(current)
+ local char = current.char
local what = glyphs[char]
if what then
head, current = process(what,head,current,char)
done = true
else -- handling of spacing etc has to be done elsewhere
- current = getnext(current)
+ current = current.next
end
else
- current = getnext(current)
+ current = current.next
end
else
- current = getnext(current)
+ current = current.next
end
end
- return tonode(head), done
+ return head, done
end
local enabled = false
diff --git a/tex/context/base/typo-spa.lua b/tex/context/base/typo-spa.lua
index 5cf9ab837..c3f50fe98 100644
--- a/tex/context/base/typo-spa.lua
+++ b/tex/context/base/typo-spa.lua
@@ -15,7 +15,10 @@ local report_spacing = logs.reporter("typesetting","spacing")
local nodes, fonts, node = nodes, fonts, node
-local tasks = nodes.tasks
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove
+local end_of_math = node.end_of_math
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -26,28 +29,6 @@ local unsetvalue = attributes.unsetvalue
local v_reset = interfaces.variables.reset
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getchar = nuts.getchar
-local getid = nuts.getid
-local getfont = nuts.getfont
-local getattr = nuts.getattr
-
-local setattr = nuts.setattr
-
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local remove_node = nuts.remove
-local end_of_math = nuts.end_of_math
-
-local nodepool = nuts.pool
-local new_penalty = nodepool.penalty
-local new_glue = nodepool.glue
-
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local math_code = nodecodes.math
@@ -55,6 +36,12 @@ local math_code = nodecodes.math
local somespace = nodes.somespace
local somepenalty = nodes.somepenalty
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
+local new_penalty = nodepool.penalty
+local new_glue = nodepool.glue
+
typesetters = typesetters or { }
local typesetters = typesetters
@@ -65,6 +52,7 @@ spacings.mapping = spacings.mapping or { }
spacings.numbers = spacings.numbers or { }
local a_spacings = attributes.private("spacing")
+spacings.attribute = a_spacings
storage.register("typesetters/spacings/mapping", spacings.mapping, "typesetters.spacings.mapping")
@@ -79,30 +67,29 @@ end
-- todo cache lastattr
function spacings.handler(head)
- head = tonut(head)
local done = false
local start = head
-- head is always begin of par (whatsit), so we have at least two prev nodes
-- penalty followed by glue
while start do
- local id = getid(start)
+ local id = start.id
if id == glyph_code then
- local attr = getattr(start,a_spacings)
+ local attr = start[a_spacings]
if attr and attr > 0 then
local data = mapping[attr]
if data then
- local char = getchar(start)
+ local char = start.char
local map = data.characters[char]
- setattr(start,a_spacings,unsetvalue) -- needed?
+ start[a_spacings] = unsetvalue -- needed?
if map then
local left = map.left
local right = map.right
local alternative = map.alternative
- local quad = quaddata[getfont(start)]
- local prev = getprev(start)
+ local quad = quaddata[start.font]
+ local prev = start.prev
if left and left ~= 0 and prev then
local ok = false
- local prevprev = getprev(prev)
+ local prevprev = prev.prev
if alternative == 1 then
local somespace = somespace(prev,true)
if somespace then
@@ -133,10 +120,10 @@ function spacings.handler(head)
done = true
end
end
- local next = getnext(start)
+ local next = start.next
if right and right ~= 0 and next then
local ok = false
- local nextnext = getnext(next)
+ local nextnext = next.next
if alternative == 1 then
local somepenalty = somepenalty(next,10000)
if somepenalty then
@@ -177,10 +164,10 @@ function spacings.handler(head)
start = end_of_math(start) -- weird, can return nil .. no math end?
end
if start then
- start = getnext(start)
+ start = start.next
end
end
- return tonode(head), done
+ return head, done
end
local enabled = false
diff --git a/tex/context/base/typo-tal.lua b/tex/context/base/typo-tal.lua
index debcedfd3..63a66d037 100644
--- a/tex/context/base/typo-tal.lua
+++ b/tex/context/base/typo-tal.lua
@@ -20,34 +20,19 @@ local fontcharacters = fonts.hashes.characters
local unicodes = fonts.hashes.unicodes
local categories = characters.categories -- nd
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-local tonode = nuts.tonode
+local insert_node_before = nodes.insert_before
+local insert_node_after = nodes.insert_after
+local traverse_list_by_id = nodes.traverse_id
+local dimensions_of_list = nodes.dimensions
+local first_glyph = nodes.first_glyph
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getfont = nuts.getfont
-local getchar = nuts.getchar
-local getattr = nuts.getattr
-local getfield = nuts.getfield
-
-local setattr = nuts.setattr
-local setfield = nuts.setfield
-
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local traverse_list_by_id = nuts.traverse_id
-local dimensions_of_list = nuts.dimensions
-local first_glyph = nuts.first_glyph
-
-local nodepool = nuts.pool
+local nodepool = nodes.pool
local new_kern = nodepool.kern
local new_gluespec = nodepool.gluespec
local tracers = nodes.tracers
local setcolor = tracers.colors.set
-local tracedrule = tracers.pool.nuts.rule
+local tracedrule = tracers.pool.nodes.rule
local characteralign = { }
typesetters.characteralign = characteralign
@@ -84,11 +69,10 @@ local function traced_kern(w)
return tracedrule(w,nil,nil,"darkgray")
end
-function characteralign.handler(originalhead,where)
+function characteralign.handler(head,where)
if not datasets then
- return originalhead, false
+ return head, false
end
- local head = tonut(originalhead)
-- local first = first_glyph(head) -- we could do that once
local first
for n in traverse_list_by_id(glyph_code,head) do
@@ -96,11 +80,11 @@ function characteralign.handler(originalhead,where)
break
end
if not first then
- return originalhead, false
+ return head, false
end
- local a = getattr(first,a_characteralign)
+ local a = first[a_characteralign]
if not a or a == 0 then
- return originalhead, false
+ return head, false
end
local column = div(a,100)
local row = a % 100
@@ -116,10 +100,10 @@ function characteralign.handler(originalhead,where)
local sign = nil
-- we can think of constraints
while current do
- local id = getid(current)
+ local id = current.id
if id == glyph_code then
- local char = getchar(current)
- local font = getfont(current)
+ local char = current.char
+ local font = current.font
local unicode = unicodes[font][char]
if not unicode then
-- no unicode so forget about it
@@ -142,13 +126,13 @@ function characteralign.handler(originalhead,where)
if not b_start then
if sign then
b_start = sign
- local new = validsigns[getchar(sign)]
- if char == new or not fontcharacters[getfont(sign)][new] then
+ local new = validsigns[sign.char]
+ if char == new or not fontcharacters[sign.font][new] then
if trace_split then
setcolor(sign,"darkyellow")
end
else
- setfield(sign,"char",new)
+ sign.char = new
if trace_split then
setcolor(sign,"darkmagenta")
end
@@ -174,14 +158,14 @@ function characteralign.handler(originalhead,where)
end
elseif (b_start or a_start) and id == glue_code then
-- somewhat inefficient
- local next = getnext(current)
- local prev = getprev(current)
- if next and prev and getid(next) == glyph_code and getid(prev) == glyph_code then -- too much checking
- local width = fontcharacters[getfont(b_start)][separator or period].width
- -- local spec = getfield(current,"spec")
- -- free_spec(spec)
- setfield(current,"spec",new_gluespec(width))
- setattr(current,a_character,punctuationspace)
+ local next = current.next
+ local prev = current.prev
+ if next and prev and next.id == glyph_code and prev.id == glyph_code then -- too much checking
+ local width = fontcharacters[b_start.font][separator or period].width
+ -- local spec = current.spec
+ -- nodes.free(spec) -- hm, we leak but not that many specs
+ current.spec = new_gluespec(width)
+ current[a_character] = punctuationspace
if a_start then
a_stop = current
elseif b_start then
@@ -189,7 +173,7 @@ function characteralign.handler(originalhead,where)
end
end
end
- current = getnext(current)
+ current = current.next
end
local entry = list[row]
if entry then
@@ -223,7 +207,7 @@ function characteralign.handler(originalhead,where)
if not c then
-- print("[before]")
if dataset.hasseparator then
- local width = fontcharacters[getfont(b_stop)][separator].width
+ local width = fontcharacters[b_stop.font][separator].width
insert_node_after(head,b_stop,new_kern(maxafter+width))
end
elseif a_start then
@@ -245,7 +229,7 @@ function characteralign.handler(originalhead,where)
end
else
-- print("[after]")
- local width = fontcharacters[getfont(b_stop)][separator].width
+ local width = fontcharacters[b_stop.font][separator].width
head = insert_node_before(head,a_start,new_kern(maxbefore+width))
end
if after < maxafter then
@@ -262,12 +246,12 @@ function characteralign.handler(originalhead,where)
end
else
entry = {
- before = b_start and dimensions_of_list(b_start,getnext(b_stop)) or 0,
- after = a_start and dimensions_of_list(a_start,getnext(a_stop)) or 0,
+ before = b_start and dimensions_of_list(b_start,b_stop.next) or 0,
+ after = a_start and dimensions_of_list(a_start,a_stop.next) or 0,
}
list[row] = entry
end
- return tonode(head), true
+ return head, true
end
function setcharacteralign(column,separator)
diff --git a/tex/context/base/util-deb.lua b/tex/context/base/util-deb.lua
index ee732b3b5..785373f86 100644
--- a/tex/context/base/util-deb.lua
+++ b/tex/context/base/util-deb.lua
@@ -92,41 +92,37 @@ end
function debugger.disable()
debug.sethook()
- -- counters[debug.getinfo(2,"f").func] = nil
+--~ counters[debug.getinfo(2,"f").func] = nil
end
--- debugger.enable()
---
--- print(math.sin(1*.5))
--- print(math.sin(1*.5))
--- print(math.sin(1*.5))
--- print(math.sin(1*.5))
--- print(math.sin(1*.5))
---
--- debugger.disable()
---
--- print("")
--- debugger.showstats()
--- print("")
--- debugger.showstats(print,3)
---
+--~ debugger.enable()
+
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+
+--~ debugger.disable()
+
+--~ print("")
+--~ debugger.showstats()
+--~ print("")
+--~ debugger.showstats(print,3)
+
-- from the lua book:
-local function showtraceback(rep) -- from lua site / adapted
- local level = 2 -- we don't want this function to be reported
- local reporter = rep or report
+function traceback()
+ local level = 1
while true do
- local info = getinfo(level, "Sl")
+ local info = debug.getinfo(level, "Sl")
if not info then
break
elseif info.what == "C" then
- reporter("%2i : %s",level-1,"C function")
+ print(format("%3i : C function",level))
else
- reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
end
level = level + 1
end
end
-
-debugger.showtraceback = showtraceback
--- debug.showtraceback = showtraceback
diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua
index 4ecaed7d3..af8b1651e 100644
--- a/tex/context/base/util-str.lua
+++ b/tex/context/base/util-str.lua
@@ -20,24 +20,8 @@ local utfchar, utfbyte = utf.char, utf.byte
----- loadstripped = utilities.lua.loadstripped
----- setmetatableindex = table.setmetatableindex
-local loadstripped = nil
-
-if _LUAVERSION < 5.2 then
-
- loadstripped = function(str,shortcuts)
- return load(str)
- end
-
-else
-
- loadstripped = function(str,shortcuts)
- if shortcuts then
- return load(dump(load(str),true),nil,nil,shortcuts)
- else
- return load(dump(load(str),true))
- end
- end
-
+local loadstripped = _LUAVERSION < 5.2 and load or function(str)
+ return load(dump(load(str),true)) -- it only makes sense in luajit and luatex where we have a stripped load
end
-- todo: make a special namespace for the formatter
@@ -307,67 +291,33 @@ function number.sparseexponent(f,n)
return tostring(n)
end
+local preamble = [[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+local formattednumber = number.formatted
+local sparseexponent = number.sparseexponent
+]]
+
local template = [[
%s
%s
return function(%s) return %s end
]]
-local preamble, environment = "", { }
-
-if _LUAVERSION < 5.2 then
-
- preamble = [[
-local lpeg=lpeg
-local type=type
-local tostring=tostring
-local tonumber=tonumber
-local format=string.format
-local concat=table.concat
-local signed=number.signed
-local points=number.points
-local basepoints= number.basepoints
-local utfchar=utf.char
-local utfbyte=utf.byte
-local lpegmatch=lpeg.match
-local nspaces=string.nspaces
-local tracedchar=string.tracedchar
-local autosingle=string.autosingle
-local autodouble=string.autodouble
-local sequenced=table.sequenced
-local formattednumber=number.formatted
-local sparseexponent=number.sparseexponent
- ]]
-
-else
-
- environment = {
- global = global or _G,
- lpeg = lpeg,
- type = type,
- tostring = tostring,
- tonumber = tonumber,
- format = string.format,
- concat = table.concat,
- signed = number.signed,
- points = number.points,
- basepoints = number.basepoints,
- utfchar = utf.char,
- utfbyte = utf.byte,
- lpegmatch = lpeg.match,
- nspaces = string.nspaces,
- tracedchar = string.tracedchar,
- autosingle = string.autosingle,
- autodouble = string.autodouble,
- sequenced = table.sequenced,
- formattednumber = number.formatted,
- sparseexponent = number.sparseexponent,
- }
-
-end
-
--- -- --
-
local arguments = { "a1" } -- faster than previously used (select(n,...))
setmetatable(arguments, { __index =
@@ -790,37 +740,28 @@ local builder = Cs { "start",
-- we can be clever and only alias what is needed
--- local direct = Cs (
--- P("%")/""
--- * Cc([[local format = string.format return function(str) return format("%]])
--- * (S("+- .") + R("09"))^0
--- * S("sqidfgGeExXo")
--- * Cc([[",str) end]])
--- * P(-1)
--- )
-
local direct = Cs (
- P("%")
- * (S("+- .") + R("09"))^0
- * S("sqidfgGeExXo")
- * P(-1) / [[local format = string.format return function(str) return format("%0",str) end]]
-)
+ P("%")/""
+ * Cc([[local format = string.format return function(str) return format("%]])
+ * (S("+- .") + R("09"))^0
+ * S("sqidfgGeExXo")
+ * Cc([[",str) end]])
+ * P(-1)
+ )
local function make(t,str)
local f
local p
local p = lpegmatch(direct,str)
if p then
- -- f = loadstripped(p)()
- -- print("builder 1 >",p)
f = loadstripped(p)()
else
n = 0
p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
if n > 0 then
p = format(template,preamble,t._preamble_,arguments[n],p)
- -- print("builder 2 >",p)
- f = loadstripped(p,t._environment_)() -- t._environment is not populated (was experiment)
+-- print("builder>",p)
+ f = loadstripped(p)()
else
f = function() return str end
end
@@ -875,26 +816,10 @@ strings.formatters = { }
-- table (metatable) in which case we could better keep a count and
-- clear that table when a threshold is reached
-if _LUAVERSION < 5.2 then
-
- function strings.formatters.new()
- local t = { _extensions_ = { }, _preamble_ = preamble, _environment_ = { }, _type_ = "formatter" }
- setmetatable(t, { __index = make, __call = use })
- return t
- end
-
-else
-
- function strings.formatters.new()
- local e = { } -- better make a copy as we can overload
- for k, v in next, environment do
- e[k] = v
- end
- local t = { _extensions_ = { }, _preamble_ = "", _environment_ = e, _type_ = "formatter" }
- setmetatable(t, { __index = make, __call = use })
- return t
- end
-
+function strings.formatters.new()
+ local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter" }
+ setmetatable(t, { __index = make, __call = use })
+ return t
end
-- function strings.formatters.new()
@@ -913,12 +838,8 @@ string.formatter = function(str,...) return formatters[str](...) end -- someti
local function add(t,name,template,preamble)
if type(t) == "table" and t._type_ == "formatter" then
t._extensions_[name] = template or "%s"
- if type(preamble) == "string" then
+ if preamble then
t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload !
- elseif type(preamble) == "table" then
- for k, v in next, preamble do
- t._environment_[k] = v
- end
end
end
end
@@ -935,23 +856,9 @@ patterns.luaquoted = Cs(Cc('"') * ((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"
-- escaping by lpeg is faster for strings without quotes, slower on a string with quotes, but
-- faster again when other q-escapables are found (the ones we don't need to escape)
--- add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
--- add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
--- add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
-
-if _LUAVERSION < 5.2 then
-
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
-
-else
-
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape = lpeg.patterns.xmlescape })
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape = lpeg.patterns.texescape })
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape = lpeg.patterns.luaescape })
-
-end
+add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
-- -- yes or no:
--
diff --git a/tex/context/base/util-tab.lua b/tex/context/base/util-tab.lua
index d235520c4..ae44269bb 100644
--- a/tex/context/base/util-tab.lua
+++ b/tex/context/base/util-tab.lua
@@ -316,7 +316,7 @@ function table.fastserialize(t,prefix)
-- not sorted
-- only number and string indices (currently)
- local r = { type(prefix) == "string" and prefix or "return" }
+ local r = { prefix or "return" }
local m = 1
local function fastserialize(t,outer) -- no mixes
@@ -376,6 +376,7 @@ function table.fastserialize(t,prefix)
end
return r
end
+
return concat(fastserialize(t,true))
end
diff --git a/tex/context/base/x-mathml.lua b/tex/context/base/x-mathml.lua
index baf839ad8..cd60e756d 100644
--- a/tex/context/base/x-mathml.lua
+++ b/tex/context/base/x-mathml.lua
@@ -82,9 +82,8 @@ local o_replacements = { -- in main table
-- [utfchar(0xF103C)] = "\\mmlleftdelimiter<",
[utfchar(0xF1026)] = "\\mmlchar{38}",
- [utfchar(0x02061)] = "", -- function applicator sometimes shows up in font
-- [utfchar(0xF103E)] = "\\mmlleftdelimiter>",
- -- [utfchar(0x000AF)] = '\\mmlchar{"203E}', -- 0x203E
+
}
local simpleoperatorremapper = utf.remapper(o_replacements)
@@ -480,7 +479,7 @@ end
function mathml.mo(id)
local str = xmlcontent(getid(id)) or ""
local rep = gsub(str,"&.-;","") -- todo
- context(simpleoperatorremapper(rep) or rep)
+ context(simpleoperatorremapper(rep))
end
function mathml.mi(id)
@@ -492,18 +491,13 @@ function mathml.mi(id)
if n == 0 then
-- nothing to do
elseif n == 1 then
- local first = str[1]
- if type(first) == "string" then
- local str = gsub(first,"&.-;","") -- bah
- local rep = i_replacements[str]
- if not rep then
- rep = gsub(str,".",i_replacements)
- end
- context(rep)
- -- context.mi(rep)
- else
- context.xmlflush(id) -- xmlsprint or so
+ local str = gsub(str[1],"&.-;","") -- bah
+ local rep = i_replacements[str]
+ if not rep then
+ rep = gsub(str,".",i_replacements)
end
+ context(rep)
+ -- context.mi(rep)
else
context.xmlflush(id) -- xmlsprint or so
end
@@ -834,13 +828,3 @@ function mathml.cpolar_a(root)
end
context.right(false,")")
end
-
--- crap .. maybe in char-def a mathml overload
-
-local mathmleq = {
- [utfchar(0x00AF)] = utfchar(0x203E),
-}
-
-function mathml.extensible(chr)
- context(mathmleq[chr] or chr)
-end
diff --git a/tex/context/base/x-mathml.mkiv b/tex/context/base/x-mathml.mkiv
index 5520dbbe6..ec8fd74e4 100644
--- a/tex/context/base/x-mathml.mkiv
+++ b/tex/context/base/x-mathml.mkiv
@@ -2283,7 +2283,7 @@
\unexpanded\def\mmloverbs#1{\mmlexecuteifdefined\mmlbasecommand\relax{\mmlunexpandedsecond{#1}}\relax}
\startxmlsetups mml:mover
- \edef\mmlovertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
\doifelseutfmathabove\mmlovertoken {
\edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
\mmloverof{#1}
@@ -2295,7 +2295,7 @@
} {
\edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
\edef\mmlovercommand{\utfmathfiller\mmlovertoken}
- \mmlundertriplet{\mmloverbf{#1}}{\mmloveros{#1}}{}%\relax
+ \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax
}
}
% \limits % spoils spacing
@@ -2321,18 +2321,13 @@
% % \limits % spoils spacing
% \stopxmlsetups
-% do this in lua
-
-\def\mmlextensible#1{\ctxmodulemathml{extensible(\!!bs#1\!!es)}}
-
\unexpanded\def\mmlunderuf#1{\mmlexecuteifdefined\mmlundercommand\relax {\mmlunexpandedfirst {#1}}\relax}
\unexpanded\def\mmlunderus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
\unexpanded\def\mmlunderbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-%unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax {\mmlunexpandedsecond{#1}}\relax}
+\unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\mmlunexpandedsecond{#1}}\relax}
\startxmlsetups mml:munder
- \edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \edef\mmlundertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
\doifelseutfmathbelow\mmlundertoken {%
\edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
\mmlunderuf{#1}
@@ -2344,7 +2339,7 @@
} {
\edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
\edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \mmlundertriplet{\mmlunderbf{#1}}{}{\mmlunderus{#1}}%\relax
+ \mmlundertriplet{\mmlunderus{#1}}{\mmlunderbf{#1}}\relax
}
}
% \limits % spoils spacing
diff --git a/tex/context/base/x-set-11.mkiv b/tex/context/base/x-set-11.mkiv
index 12854dc92..d4b43a9ee 100644
--- a/tex/context/base/x-set-11.mkiv
+++ b/tex/context/base/x-set-11.mkiv
@@ -448,18 +448,8 @@
% \def\showsetupindeed#1%
% {\xmlfilterlist{\loadedsetups}{interface/command[@name='#1']/command(xml:setups:typeset)}}
-% \def\showsetupindeed#1%
-% {\xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
-
-% \setelementnature[setup][display]
-% \setelementnature[setup][mixed]
-
\def\showsetupindeed#1%
- {\startelement[setup][name=#1]%
- \startelement[noexport][comment={setup definition #1}]
- \xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}%
- \stopelement
- \stopelement}
+ {\xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
\unexpanded\def\placesetup {\placelistofsorts[texcommand][\c!criterium=\v!used]}
\unexpanded\def\placeallsetups{\placelistofsorts[texcommand][\c!criterium=\v!all ]}
@@ -648,16 +638,11 @@
\xmlflush{#1}
\doifmode{interface:setup:defaults} {
\ifx\currentSETUPhash\empty \else
- \begingroup
- % todo, make a one level expansion of parameter
- \let\emwidth \relax
- \let\exheight\relax
- \edef\currentSETUPvalue{\csname named\currentSETUPhash parameter\endcsname\empty{\xmlatt{#1}{name}}}
- \ifx\currentSETUPvalue\empty \else
- =\space
- \detokenize\expandafter{\currentSETUPvalue}
- \fi
- \endgroup
+ \edef\currentSETUPvalue{\csname named\currentSETUPhash parameter\endcsname\empty{\xmlatt{#1}{name}}}
+ \ifx\currentSETUPvalue\empty
+ \space=\space
+ \detokenize\expandafter{\currentSETUPvalue}
+ \fi
\fi
}
\stopsecondSETUPcolumn
@@ -833,6 +818,7 @@
\stoptabulate
\stopxmlsetups
+
\starttexdefinition showrootvalues [#1]
\edef\currentsetupparametercategory{#1}
\edef\currentsetupparametercommand{setup#1}
diff --git a/tex/context/interface/keys-cs.xml b/tex/context/interface/keys-cs.xml
index 0a0b9b9a6..d36f969f3 100644
--- a/tex/context/interface/keys-cs.xml
+++ b/tex/context/interface/keys-cs.xml
@@ -1051,8 +1051,6 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
- <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
- <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml
index 28b21b915..c5ba364e3 100644
--- a/tex/context/interface/keys-de.xml
+++ b/tex/context/interface/keys-de.xml
@@ -1051,8 +1051,6 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
- <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
- <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml
index da433cdee..be59542e7 100644
--- a/tex/context/interface/keys-en.xml
+++ b/tex/context/interface/keys-en.xml
@@ -1051,8 +1051,6 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
- <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
- <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml
index 6a8eaa9c5..43c47d578 100644
--- a/tex/context/interface/keys-fr.xml
+++ b/tex/context/interface/keys-fr.xml
@@ -1051,8 +1051,6 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
- <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
- <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml
index c7c996318..95c2d8aa5 100644
--- a/tex/context/interface/keys-it.xml
+++ b/tex/context/interface/keys-it.xml
@@ -1051,8 +1051,6 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
- <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
- <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml
index 21536214a..bc940ebc4 100644
--- a/tex/context/interface/keys-nl.xml
+++ b/tex/context/interface/keys-nl.xml
@@ -1051,8 +1051,6 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixscheider'/>
<cd:constant name='suffixstopper' value='suffixafsluiter'/>
- <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
- <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
diff --git a/tex/context/interface/keys-pe.xml b/tex/context/interface/keys-pe.xml
index 8e4d412d5..75e3a17c2 100644
--- a/tex/context/interface/keys-pe.xml
+++ b/tex/context/interface/keys-pe.xml
@@ -1051,8 +1051,6 @@
<cd:constant name='suffix' value='پسوند'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
- <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
- <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml
index f9ef01b9f..e83d145d0 100644
--- a/tex/context/interface/keys-ro.xml
+++ b/tex/context/interface/keys-ro.xml
@@ -1051,8 +1051,6 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
- <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
- <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
diff --git a/tex/generic/context/luatex/luatex-fonts-inj.lua b/tex/generic/context/luatex/luatex-fonts-inj.lua
deleted file mode 100644
index ae48150a6..000000000
--- a/tex/generic/context/luatex/luatex-fonts-inj.lua
+++ /dev/null
@@ -1,526 +0,0 @@
-if not modules then modules = { } end modules ['node-inj'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- This is very experimental (this will change when we have luatex > .50 and
--- a few pending thingies are available. Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help. Some optimizations can go away when we have faster machines.
-
--- todo: make a special one for context
-
-local next = next
-local utfchar = utf.char
-
-local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
-
-local report_injections = logs.reporter("nodes","injections")
-
-local attributes, nodes, node = attributes, nodes, node
-
-fonts = fonts
-local fontdata = fonts.hashes.identifiers
-
-nodes.injections = nodes.injections or { }
-local injections = nodes.injections
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local kern_code = nodecodes.kern
-local nodepool = nodes.pool
-local newkern = nodepool.kern
-
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local a_kernpair = attributes.private('kernpair')
-local a_ligacomp = attributes.private('ligacomp')
-local a_markbase = attributes.private('markbase')
-local a_markmark = attributes.private('markmark')
-local a_markdone = attributes.private('markdone')
-local a_cursbase = attributes.private('cursbase')
-local a_curscurs = attributes.private('curscurs')
-local a_cursdone = attributes.private('cursdone')
-
--- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
--- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
--- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
--- that this code is not 100% okay but examples are needed to figure things out.
-
-function injections.installnewkern(nk)
- newkern = nk or newkern
-end
-
-local cursives = { }
-local marks = { }
-local kerns = { }
-
--- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in
--- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
--- can share tables.
-
--- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
--- checking with husayni (volt and fontforge).
-
-function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
- local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
- local ws, wn = tfmstart.width, tfmnext.width
- local bound = #cursives + 1
- start[a_cursbase] = bound
- nxt[a_curscurs] = bound
- cursives[bound] = { rlmode, dx, dy, ws, wn }
- return dx, dy, bound
-end
-
-function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
- local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
- -- dy = y - h
- if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = current[a_kernpair]
- if bound then
- local kb = kerns[bound]
- -- inefficient but singles have less, but weird anyway, needs checking
- kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
- else
- bound = #kerns + 1
- current[a_kernpair] = bound
- kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
- end
- return x, y, w, h, bound
- end
- return x, y, w, h -- no bound
-end
-
-function injections.setkern(current,factor,rlmode,x,tfmchr)
- local dx = factor*x
- if dx ~= 0 then
- local bound = #kerns + 1
- current[a_kernpair] = bound
- kerns[bound] = { rlmode, dx }
- return dx, bound
- else
- return 0, 0
- end
-end
-
-function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = base[a_markbase] -- fails again we should pass it
- local index = 1
- if bound then
- local mb = marks[bound]
- if mb then
- -- if not index then index = #mb + 1 end
- index = #mb + 1
- mb[index] = { dx, dy, rlmode }
- start[a_markmark] = bound
- start[a_markdone] = index
- return dx, dy, bound
- else
- report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
- end
- end
--- index = index or 1
- index = index or 1
- bound = #marks + 1
- base[a_markbase] = bound
- start[a_markmark] = bound
- start[a_markdone] = index
- marks[bound] = { [index] = { dx, dy, rlmode, baseismark } }
- return dx, dy, bound
-end
-
-local function dir(n)
- return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
-end
-
-local function trace(head)
- report_injections("begin run")
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local kp = n[a_kernpair]
- local mb = n[a_markbase]
- local mm = n[a_markmark]
- local md = n[a_markdone]
- local cb = n[a_cursbase]
- local cc = n[a_curscurs]
- local char = n.char
- report_injections("font %s, char %U, glyph %c",n.font,char,char)
- if kp then
- local k = kerns[kp]
- if k[3] then
- report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
- else
- report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
- end
- end
- if mb then
- report_injections(" markbase: bound %a",mb)
- end
- if mm then
- local m = marks[mm]
- if mb then
- local m = m[mb]
- if m then
- report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
- else
- report_injections(" markmark: bound %a, missing index",mm)
- end
- else
- m = m[1]
- report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
- end
- end
- if cb then
- report_injections(" cursbase: bound %a",cb)
- end
- if cc then
- local c = cursives[cc]
- report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
- end
- end
- end
- report_injections("end run")
-end
-
--- todo: reuse tables (i.e. no collection), but will be extra fields anyway
--- todo: check for attribute
-
--- We can have a fast test on a font being processed, so we can check faster for marks etc
--- but I'll make a context variant anyway.
-
-local function show_result(head)
- local current = head
- local skipping = false
- while current do
- local id = current.id
- if id == glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
- skipping = false
- elseif id == kern_code then
- report_injections("kern: %p",current.kern)
- skipping = false
- elseif not skipping then
- report_injections()
- skipping = true
- end
- current = current.next
- end
-end
-
-function injections.handler(head,where,keep)
- local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
- if has_marks or has_cursives then
- if trace_injections then
- trace(head)
- end
- -- in the future variant we will not copy items but refs to tables
- local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
- if has_kerns then -- move outside loop
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- local k = n[a_kernpair]
- if k then
- local kk = kerns[k]
- if kk then
- local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
- local dy = y - h
- if dy ~= 0 then
- ky[n] = dy
- end
- if w ~= 0 or x ~= 0 then
- wx[n] = kk
- end
- rl[n] = kk[1] -- could move in test
- end
- end
- end
- end
- else
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- end
- end
- end
- if nofvalid > 0 then
- -- we can assume done == true because we have cursives and marks
- local cx = { }
- if has_kerns and next(ky) then
- for n, k in next, ky do
- n.yoffset = k
- end
- end
- -- todo: reuse t and use maxt
- if has_cursives then
- local p_cursbase, p = nil, nil
- -- since we need valid[n+1] we can also use a "while true do"
- local t, d, maxt = { }, { }, 0
- for i=1,nofvalid do -- valid == glyphs
- local n = valid[i]
- if not mk[n] then
- local n_cursbase = n[a_cursbase]
- if p_cursbase then
- local n_curscurs = n[a_curscurs]
- if p_cursbase == n_curscurs then
- local c = cursives[n_curscurs]
- if c then
- local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
- if rlmode >= 0 then
- dx = dx - ws
- else
- dx = dx + wn
- end
- if dx ~= 0 then
- cx[n] = dx
- rl[n] = rlmode
- end
- -- if rlmode and rlmode < 0 then
- dy = -dy
- -- end
- maxt = maxt + 1
- t[maxt] = p
- d[maxt] = dy
- else
- maxt = 0
- end
- end
- elseif maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ti.yoffset + ny
- end
- maxt = 0
- end
- if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- p_cursbase, p = n_cursbase, n
- end
- end
- if maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- if not keep then
- cursives = { }
- end
- end
- if has_marks then
- for i=1,nofvalid do
- local p = valid[i]
- local p_markbase = p[a_markbase]
- if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark = n[a_markmark]
- if p_markbase == n_markmark then
- local index = n[a_markdone] or 1
- local d = mrks[index]
- if d then
- local rlmode = d[3]
- --
- local k = wx[p]
- if k then
- local x = k[2]
- local w = k[4]
- if w then
- if rlmode and rlmode >= 0 then
- -- kern(x) glyph(p) kern(w-x) mark(n)
- n.xoffset = p.xoffset - p.width + d[1] - (w-x)
- else
- -- kern(w-x) glyph(p) kern(x) mark(n)
- n.xoffset = p.xoffset - d[1] - x
- end
- else
- if rlmode and rlmode >= 0 then
- -- okay for husayni
- n.xoffset = p.xoffset - p.width + d[1]
- else
- -- needs checking: is x ok here?
- n.xoffset = p.xoffset - d[1] - x
- end
- end
- else
- if rlmode and rlmode >= 0 then
- n.xoffset = p.xoffset - p.width + d[1]
- else
- n.xoffset = p.xoffset - d[1]
- end
- local w = n.width
- if w ~= 0 then
- insert_node_before(head,n,newkern(-w/2))
- insert_node_after(head,n,newkern(-w/2))
- end
- end
- -- --
- if mk[p] then
- n.yoffset = p.yoffset + d[2]
- else
- n.yoffset = n.yoffset + p.yoffset + d[2]
- end
- --
- if nofmarks == 1 then
- break
- else
- nofmarks = nofmarks - 1
- end
- end
- else
- -- KE: there can be <mark> <mkmk> <mark> sequences in ligatures
- end
- end
- end
- end
- if not keep then
- marks = { }
- end
- end
- -- todo : combine
- if next(wx) then
- for n, k in next, wx do
- -- only w can be nil (kernclasses), can be sped up when w == nil
- local x = k[2]
- local w = k[4]
- if w then
- local rl = k[1] -- r2l = k[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx)) -- type 0/2
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x)) -- type 0/2
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x)) -- type 0/2
- end
- if wx ~= 0 then
- insert_node_after (head,n,newkern(wx)) -- type 0/2
- end
- end
- elseif x ~= 0 then
- -- this needs checking for rl < 0 but it is unlikely that a r2l script
- -- uses kernclasses between glyphs so we're probably safe (KE has a
- -- problematic font where marks interfere with rl < 0 in the previous
- -- case)
- insert_node_before(head,n,newkern(x)) -- a real font kern, type 0
- end
- end
- end
- if next(cx) then
- for n, k in next, cx do
- if k ~= 0 then
- local rln = rl[n]
- if rln and rln < 0 then
- insert_node_before(head,n,newkern(-k)) -- type 0/2
- else
- insert_node_before(head,n,newkern(k)) -- type 0/2
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- -- if trace_injections then
- -- show_result(head)
- -- end
- return head, true
- elseif not keep then
- kerns, cursives, marks = { }, { }, { }
- end
- elseif has_kerns then
- if trace_injections then
- trace(head)
- end
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local k = n[a_kernpair]
- if k then
- local kk = kerns[k]
- if kk then
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
- if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
- end
- if w then
- -- copied from above
- -- local r2l = kk[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- else
- -- simple (e.g. kernclass kerns)
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- -- if trace_injections then
- -- show_result(head)
- -- end
- return head, true
- else
- -- no tracing needed
- end
- return head, false
-end
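
The injector deleted above keeps positioning data out of band: the `set*` helpers (`setkern`, `setpair`, `setmark`, `setcursive`) only record an index, the "bound", on a node via a private attribute and store the actual values in the module-level `kerns`, `marks` and `cursives` tables; `injections.handler` later walks the glyph nodes, looks the bounds up again, and turns them into kern nodes and x/y offsets. A minimal plain-Lua sketch of that bookkeeping, with ordinary tables standing in for glyph nodes and attributes (so it runs outside LuaTeX), could look like this:

```lua
-- Simplified model of the bound/attribute bookkeeping: plain Lua tables
-- stand in for glyph nodes, and a field stands in for the private attribute.
local kerns = { }                          -- bound -> { rlmode, dx }

local function setkern(glyph, factor, rlmode, x)
  local dx = factor * x
  if dx ~= 0 then
    local bound = #kerns + 1
    glyph.kernpair = bound                 -- real code: a private node attribute
    kerns[bound] = { rlmode, dx }
    return dx, bound
  end
  return 0, 0
end

local function handler(glyphs, keep)       -- stands in for the node-list pass
  for _, g in ipairs(glyphs) do
    local bound = g.kernpair
    local kk = bound and kerns[bound]
    if kk then
      g.injected = kk[2]                   -- real code: insert_node_before(head,n,newkern(dx))
    end
  end
  if not keep then
    kerns = { }                            -- state is reset between runs
  end
end

-- usage: a made-up "AV"-like pair gets a 120 unit kern resolved by the handler
local A, V = { char = 0x41 }, { char = 0x56 }
setkern(V, 1, 0, 120)
handler({ A, V })
print(V.injected)                          --> 120
```
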
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 3f408b96f..24e49308c 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 02/14/14 17:07:59
+-- merge date : 01/03/14 00:40:35
do -- begin closure to overcome local limits and interference
@@ -82,9 +82,6 @@ function optionalrequire(...)
return result
end
end
-if lua then
- lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
-end
end -- closure
@@ -104,9 +101,7 @@ local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.forma
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-if setinspector then
- setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-end
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -175,11 +170,9 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
-local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
patterns.stripper=stripper
-patterns.fullstripper=fullstripper
patterns.collapser=collapser
patterns.lowercase=lowercase
patterns.uppercase=uppercase
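
The `stripper` kept in this hunk skips leading spacers and captures up to the last non-spacer, which drops trailing spacers as well (the removed `fullstripper` is the same shape over all whitespace). A self-contained check of that lpeg shape, with a made-up sample string (requires the lpeg module):

```lua
local lpeg = require("lpeg")
local S, C = lpeg.S, lpeg.C

local spacer    = S(" \t")
local nonspacer = 1 - spacer
-- same shape as patterns.stripper above: skip leading spacers, then capture
-- runs of non-spacers with interior spacers; trailing spacers never match
local stripper  = spacer^0 * C((spacer^0 * nonspacer^1)^0)

print(lpeg.match(stripper, "  strip me  ") .. "|")   --> strip me|
```
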
@@ -402,7 +395,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction,isutf)
+function lpeg.finder(lst,makefunction)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -418,11 +411,7 @@ function lpeg.finder(lst,makefunction,isutf)
else
pattern=P(lst)
end
- if isutf then
- pattern=((utf8char or 1)-pattern)^0*pattern
- else
- pattern=(1-pattern)^0*pattern
- end
+ pattern=(1-pattern)^0*pattern
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -759,15 +748,11 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
-local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
-function string.fullstrip(str)
- return lpegmatch(fullstripper,str) or ""
-end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -1651,9 +1636,7 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-if setinspector then
- setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
-end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
@@ -2525,19 +2508,8 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=nil
-if _LUAVERSION<5.2 then
- loadstripped=function(str,shortcuts)
- return load(str)
- end
-else
- loadstripped=function(str,shortcuts)
- if shortcuts then
- return load(dump(load(str),true),nil,nil,shortcuts)
- else
- return load(dump(load(str),true))
- end
- end
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
end
if not number then number={} end
local stripper=patterns.stripzeros
@@ -2687,58 +2659,31 @@ function number.sparseexponent(f,n)
end
return tostring(n)
end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+local formattednumber = number.formatted
+local sparseexponent = number.sparseexponent
+]]
local template=[[
%s
%s
return function(%s) return %s end
]]
-local preamble,environment="",{}
-if _LUAVERSION<5.2 then
- preamble=[[
-local lpeg=lpeg
-local type=type
-local tostring=tostring
-local tonumber=tonumber
-local format=string.format
-local concat=table.concat
-local signed=number.signed
-local points=number.points
-local basepoints= number.basepoints
-local utfchar=utf.char
-local utfbyte=utf.byte
-local lpegmatch=lpeg.match
-local nspaces=string.nspaces
-local tracedchar=string.tracedchar
-local autosingle=string.autosingle
-local autodouble=string.autodouble
-local sequenced=table.sequenced
-local formattednumber=number.formatted
-local sparseexponent=number.sparseexponent
- ]]
-else
- environment={
- global=global or _G,
- lpeg=lpeg,
- type=type,
- tostring=tostring,
- tonumber=tonumber,
- format=string.format,
- concat=table.concat,
- signed=number.signed,
- points=number.points,
- basepoints=number.basepoints,
- utfchar=utf.char,
- utfbyte=utf.byte,
- lpegmatch=lpeg.match,
- nspaces=string.nspaces,
- tracedchar=string.tracedchar,
- autosingle=string.autosingle,
- autodouble=string.autodouble,
- sequenced=table.sequenced,
- formattednumber=number.formatted,
- sparseexponent=number.sparseexponent,
- }
-end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -3060,8 +3005,8 @@ local builder=Cs { "start",
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
-)
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
local function make(t,str)
local f
local p
@@ -3073,7 +3018,7 @@ local function make(t,str)
p=lpegmatch(builder,str,1,"..",t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p,t._environment_)()
+ f=loadstripped(p)()
else
f=function() return str end
end
@@ -3085,22 +3030,10 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-if _LUAVERSION<5.2 then
- function strings.formatters.new()
- local t={ _extensions_={},_preamble_=preamble,_environment_={},_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
-else
- function strings.formatters.new()
- local e={}
- for k,v in next,environment do
- e[k]=v
- end
- local t={ _extensions_={},_preamble_="",_environment_=e,_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
- end
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
end
local formatters=strings.formatters.new()
string.formatters=formatters
@@ -3108,12 +3041,8 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if type(preamble)=="string" then
+ if preamble then
t._preamble_=preamble.."\n"..t._preamble_
- elseif type(preamble)=="table" then
- for k,v in next,preamble do
- t._environment_[k]=v
- end
end
end
end
@@ -3122,15 +3051,9 @@ patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;
patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-if _LUAVERSION<5.2 then
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
-else
- add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
- add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
- add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
-end
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
end -- closure
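
The formatter hunks above replace the Lua 5.2 environment-table variant with a single preamble string of local aliases: the preamble, the generated body, and the argument list are spliced into a template and compiled once with `load` (via `loadstripped`). A stripped-down, self-contained illustration of that generate-and-load idea, with a made-up preamble and body:

```lua
-- Minimal sketch of the generate-and-load formatter pattern: a preamble of
-- local aliases plus a generated body are spliced into a template and the
-- resulting chunk is compiled once, then reused.
local template = [[
%s
return function(%s) return %s end
]]

local preamble = "local format = string.format"    -- aliases hoisted as locals
local args     = "a1,a2"
local body     = [[format("%s=%s",a1,a2) .. "pt"]] -- hypothetical generated body

local source   = string.format(template, preamble, args, body)
local fmt      = assert(load(source))()            -- compile once, call many times

print(fmt("margin", 12))                           --> margin=12pt
```
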
@@ -6482,7 +6405,7 @@ local type,next,tonumber,tostring=type,next,tonumber,tostring
local abs=math.abs
local insert=table.insert
local lpegmatch=lpeg.match
-local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys
+local reversed,concat,remove=table.reversed,table.concat,table.remove
local ioflush=io.flush
local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
local formatters=string.formatters
@@ -6504,7 +6427,7 @@ local report_otf=logs.reporter("fonts","otf loading")
local fonts=fonts
local otf=fonts.handlers.otf
otf.glists={ "gsub","gpos" }
-otf.version=2.751
+otf.version=2.749
otf.cache=containers.define("fonts","otf",otf.version,true)
local fontdata=fonts.hashes.identifiers
local chardata=characters and characters.data
@@ -6656,7 +6579,6 @@ local valid_fields=table.tohash {
"upos",
"use_typo_metrics",
"uwidth",
- "validation_state",
"version",
"vert_base",
"weight",
@@ -7123,6 +7045,7 @@ actions["prepare glyphs"]=function(data,filename,raw)
}
local altuni=glyph.altuni
if altuni then
+ local d
for i=1,#altuni do
local a=altuni[i]
local u=a.unicode
@@ -7135,8 +7058,15 @@ actions["prepare glyphs"]=function(data,filename,raw)
vv={ [u]=unicode }
variants[v]=vv
end
+ elseif d then
+ d[#d+1]=u
+ else
+ d={ u }
end
end
+ if d then
+ duplicates[unicode]=d
+ end
end
else
report_otf("potential problem: glyph %U is used but empty",index)
@@ -7154,49 +7084,47 @@ actions["check encoding"]=function(data,filename,raw)
local duplicates=resources.duplicates
local mapdata=raw.map or {}
local unicodetoindex=mapdata and mapdata.map or {}
- local indextounicode=mapdata and mapdata.backmap or {}
local encname=lower(data.enc_name or mapdata.enc_name or "")
local criterium=0xFFFF
if find(encname,"unicode") then
if trace_loading then
report_otf("checking embedded unicode map %a",encname)
end
- local hash={}
- for index,unicode in next,indices do
- hash[index]=descriptions[unicode]
- end
- local reported={}
- for unicode,index in next,unicodetoindex do
- if not descriptions[unicode] then
- local d=hash[index]
- if d then
- if d.unicode~=unicode then
- local c=d.copies
- if c then
- c[unicode]=true
- else
- d.copies={ [unicode]=true }
+ for unicode,index in next,unicodetoindex do
+ if unicode<=criterium and not descriptions[unicode] then
+ local parent=indices[index]
+ if not parent then
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ else
+ local parentdescription=descriptions[parent]
+ if parentdescription then
+ local altuni=parentdescription.altuni
+ if not altuni then
+ altuni={ { unicode=unicode } }
+ parentdescription.altuni=altuni
+ duplicates[parent]={ unicode }
+ else
+ local done=false
+ for i=1,#altuni do
+ if altuni[i].unicode==unicode then
+ done=true
+ break
+ end
end
+ if not done then
+ insert(altuni,{ unicode=unicode })
+ insert(duplicates[parent],unicode)
+ end
+ end
+ if trace_loading then
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
end
- elseif not reported[i] then
- report_otf("missing index %i",index)
- reported[i]=true
+ else
+ report_otf("weird, unicode %U points to %U with index %H",unicode,index)
end
end
end
- for index,data in next,hash do
- data.copies=sortedkeys(data.copies)
- end
- for index,unicode in next,indices do
- local description=hash[index]
- local copies=description.copies
- if copies then
- duplicates[unicode]=copies
- description.copies=nil
- else
- report_otf("copies but no unicode parent %U",unicode)
- end
- end
+ end
elseif properties.cidinfo then
report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
else
@@ -7204,7 +7132,6 @@ actions["check encoding"]=function(data,filename,raw)
end
if mapdata then
mapdata.map={}
- mapdata.backmap={}
end
end
actions["add duplicates"]=function(data,filename,raw)
@@ -7215,37 +7142,28 @@ actions["add duplicates"]=function(data,filename,raw)
local indices=resources.indices
local duplicates=resources.duplicates
for unicode,d in next,duplicates do
- local nofduplicates=#d
- if nofduplicates>4 then
- if trace_loading then
- report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
- end
- else
- for i=1,nofduplicates do
- local u=d[i]
- if not descriptions[u] then
- local description=descriptions[unicode]
- local n=0
- for _,description in next,descriptions do
- if kerns then
- local kerns=description.kerns
- for _,k in next,kerns do
- local ku=k[unicode]
- if ku then
- k[u]=ku
- n=n+1
- end
+ for i=1,#d do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ local n=0
+ for _,description in next,descriptions do
+ if kerns then
+ local kerns=description.kerns
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
end
end
end
- if u>0 then
- local duplicate=table.copy(description)
- duplicate.comment=format("copy of U+%05X",unicode)
- descriptions[u]=duplicate
- if trace_loading then
- report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
- end
- end
+ end
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
end
end
end
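
The "add duplicates" hunk above copies a parent glyph's description to each alias code point and extends kern pairs that target the parent so they also target the alias. A small runnable sketch of that propagation, with made-up code points and kern values (the real loader uses `table.copy` and a trace report):

```lua
-- Each alias gets a copy of its parent's description, and kerns pointing at
-- the parent are duplicated for the alias.
local descriptions = {
  [0x2D] = { width = 333, kerns = { } },              -- HYPHEN-MINUS (parent)
  [0x41] = { width = 722, kerns = { [0x2D] = -40 } }, -- A, kerned against hyphen
}
local duplicates = { [0x2D] = { 0x2010 } }            -- HYPHEN aliases HYPHEN-MINUS

local function shallowcopy(t)
  local c = { }
  for k, v in pairs(t) do c[k] = v end
  return c                                            -- shallow is enough here
end

for parent, list in pairs(duplicates) do
  for i = 1, #list do
    local u = list[i]
    if not descriptions[u] then
      local copy = shallowcopy(descriptions[parent])
      copy.comment = string.format("copy of U+%05X", parent)
      descriptions[u] = copy
      for _, d in pairs(descriptions) do              -- extend kerns to the alias
        local k = d.kerns
        if k and k[parent] then k[u] = k[parent] end
      end
    end
  end
end

print(descriptions[0x2010].width, descriptions[0x41].kerns[0x2010])  --> 333  -40
```
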
@@ -7969,11 +7887,6 @@ actions["check metadata"]=function(data,filename,raw)
ttftables[i].data="deleted"
end
end
- if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
- local name=file.nameonly(filename)
- metadata.fontname="bad-fontname-"..name
- metadata.fullname="bad-fullname-"..name
- end
end
actions["cleanup tables"]=function(data,filename,raw)
data.resources.indices=nil
@@ -8271,24 +8184,6 @@ local function otftotfm(specification)
local features=specification.features.normal
local rawdata=otf.load(filename,sub,features and features.featurefile)
if rawdata and next(rawdata) then
- local descriptions=rawdata.descriptions
- local duplicates=rawdata.resources.duplicates
- if duplicates then
- local nofduplicates,nofduplicated=0,0
- for parent,list in next,duplicates do
- for i=1,#list do
- local unicode=list[i]
- if not descriptions[unicode] then
- descriptions[unicode]=descriptions[parent]
- nofduplicated=nofduplicated+1
- end
- end
- nofduplicates=nofduplicates+#list
- end
- if trace_otf and nofduplicated~=nofduplicates then
- report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
- end
- end
rawdata.lookuphash={}
tfmdata=copytotfm(rawdata,cache_id)
if tfmdata and next(tfmdata) then
@@ -8986,12 +8881,26 @@ nodes.injections=nodes.injections or {}
local injections=nodes.injections
local nodecodes=nodes.nodecodes
local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
local kern_code=nodecodes.kern
-local nodepool=nodes.pool
+local nuts=nodes.nuts
+local nodepool=nuts.pool
local newkern=nodepool.kern
-local traverse_id=node.traverse_id
-local insert_node_before=node.insert_before
-local insert_node_after=node.insert_after
+local tonode=nuts.tonode
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getattr=nuts.getattr
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local setfield=nuts.setfield
+local setattr=nuts.setattr
+local traverse_id=nuts.traverse_id
+local insert_node_before=nuts.insert_before
+local insert_node_after=nuts.insert_after
local a_kernpair=attributes.private('kernpair')
local a_ligacomp=attributes.private('ligacomp')
local a_markbase=attributes.private('markbase')
@@ -9010,21 +8919,21 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2])
local ws,wn=tfmstart.width,tfmnext.width
local bound=#cursives+1
- start[a_cursbase]=bound
- nxt[a_curscurs]=bound
+ setattr(start,a_cursbase,bound)
+ setattr(nxt,a_curscurs,bound)
cursives[bound]={ rlmode,dx,dy,ws,wn }
return dx,dy,bound
end
function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
if x~=0 or w~=0 or y~=0 or h~=0 then
- local bound=current[a_kernpair]
+ local bound=getattr(current,a_kernpair)
if bound then
local kb=kerns[bound]
kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h
else
bound=#kerns+1
- current[a_kernpair]=bound
+ setattr(current,a_kernpair,bound)
kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width }
end
return x,y,w,h,bound
@@ -9035,7 +8944,7 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx=factor*x
if dx~=0 then
local bound=#kerns+1
- current[a_kernpair]=bound
+ setattr(current,a_kernpair,bound)
kerns[bound]={ rlmode,dx }
return dx,bound
else
@@ -9044,25 +8953,25 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
end
function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark)
local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
- local bound=base[a_markbase]
+ local bound=getattr(base,a_markbase)
local index=1
if bound then
local mb=marks[bound]
if mb then
index=#mb+1
mb[index]={ dx,dy,rlmode }
- start[a_markmark]=bound
- start[a_markdone]=index
+ setattr(start,a_markmark,bound)
+ setattr(start,a_markdone,index)
return dx,dy,bound
else
- report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound)
end
end
index=index or 1
bound=#marks+1
- base[a_markbase]=bound
- start[a_markmark]=bound
- start[a_markdone]=index
+ setattr(base,a_markbase,bound)
+ setattr(start,a_markmark,bound)
+ setattr(start,a_markdone,index)
marks[bound]={ [index]={ dx,dy,rlmode,baseismark } }
return dx,dy,bound
end
@@ -9072,15 +8981,15 @@ end
local function trace(head)
report_injections("begin run")
for n in traverse_id(glyph_code,head) do
- if n.subtype<256 then
- local kp=n[a_kernpair]
- local mb=n[a_markbase]
- local mm=n[a_markmark]
- local md=n[a_markdone]
- local cb=n[a_cursbase]
- local cc=n[a_curscurs]
- local char=n.char
- report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if getsubtype(n)<256 then
+ local kp=getattr(n,a_kernpair)
+ local mb=getattr(n,a_markbase)
+ local mm=getattr(n,a_markmark)
+ local md=getattr(n,a_markdone)
+ local cb=getattr(n,a_cursbase)
+ local cc=getattr(n,a_curscurs)
+ local char=getchar(n)
+ report_injections("font %s, char %U, glyph %c",getfont(n),char,char)
if kp then
local k=kerns[kp]
if k[3] then
@@ -9121,21 +9030,23 @@ local function show_result(head)
local current=head
local skipping=false
while current do
- local id=current.id
+ local id=getid(current)
if id==glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
skipping=false
elseif id==kern_code then
- report_injections("kern: %p",current.kern)
+ report_injections("kern: %p",getfield(current,"kern"))
skipping=false
elseif not skipping then
report_injections()
skipping=true
end
- current=current.next
+ current=getnext(current)
end
end
function injections.handler(head,where,keep)
+ head=tonut(head)
local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns)
if has_marks or has_cursives then
if trace_injections then
@@ -9145,17 +9056,18 @@ function injections.handler(head,where,keep)
if has_kerns then
local nf,tm=nil,nil
for n in traverse_id(glyph_code,head) do
- if n.subtype<256 then
+ if getsubtype(n)<256 then
nofvalid=nofvalid+1
valid[nofvalid]=n
- if n.font~=nf then
- nf=n.font
- tm=fontdata[nf].resources.marks
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources.marks
end
if tm then
- mk[n]=tm[n.char]
+ mk[n]=tm[getchar(n)]
end
- local k=n[a_kernpair]
+ local k=getattr(n,a_kernpair)
if k then
local kk=kerns[k]
if kk then
@@ -9175,15 +9087,16 @@ function injections.handler(head,where,keep)
else
local nf,tm=nil,nil
for n in traverse_id(glyph_code,head) do
- if n.subtype<256 then
+ if getsubtype(n)<256 then
nofvalid=nofvalid+1
valid[nofvalid]=n
- if n.font~=nf then
- nf=n.font
- tm=fontdata[nf].resources.marks
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources.marks
end
if tm then
- mk[n]=tm[n.char]
+ mk[n]=tm[getchar(n)]
end
end
end
@@ -9192,7 +9105,7 @@ function injections.handler(head,where,keep)
local cx={}
if has_kerns and next(ky) then
for n,k in next,ky do
- n.yoffset=k
+ setfield(n,"yoffset",k)
end
end
if has_cursives then
@@ -9201,9 +9114,9 @@ function injections.handler(head,where,keep)
for i=1,nofvalid do
local n=valid[i]
if not mk[n] then
- local n_cursbase=n[a_cursbase]
+ local n_cursbase=getattr(n,a_cursbase)
if p_cursbase then
- local n_curscurs=n[a_curscurs]
+ local n_curscurs=getattr(n,a_curscurs)
if p_cursbase==n_curscurs then
local c=cursives[n_curscurs]
if c then
@@ -9226,20 +9139,20 @@ function injections.handler(head,where,keep)
end
end
elseif maxt>0 then
- local ny=n.yoffset
+ local ny=getfield(n,"yoffset")
for i=maxt,1,-1 do
ny=ny+d[i]
local ti=t[i]
- ti.yoffset=ti.yoffset+ny
+ setfield(ti,"yoffset",getfield(ti,"yoffset")+ny)
end
maxt=0
end
if not n_cursbase and maxt>0 then
- local ny=n.yoffset
+ local ny=getfield(n,"yoffset")
for i=maxt,1,-1 do
ny=ny+d[i]
local ti=t[i]
- ti.yoffset=ny
+ setfield(ti,"yoffset",ny)
end
maxt=0
end
@@ -9247,11 +9160,11 @@ function injections.handler(head,where,keep)
end
end
if maxt>0 then
- local ny=n.yoffset
+ local ny=getfield(n,"yoffset")
for i=maxt,1,-1 do
ny=ny+d[i]
local ti=t[i]
- ti.yoffset=ny
+ setfield(ti,"yoffset",ny)
end
maxt=0
end
@@ -9262,57 +9175,66 @@ function injections.handler(head,where,keep)
if has_marks then
for i=1,nofvalid do
local p=valid[i]
- local p_markbase=p[a_markbase]
+ local p_markbase=getattr(p,a_markbase)
if p_markbase then
local mrks=marks[p_markbase]
local nofmarks=#mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark=n[a_markmark]
+ for n in traverse_id(glyph_code,getnext(p)) do
+ local n_markmark=getattr(n,a_markmark)
if p_markbase==n_markmark then
- local index=n[a_markdone] or 1
+ local index=getattr(n,a_markdone) or 1
local d=mrks[index]
if d then
local rlmode=d[3]
local k=wx[p]
+ local px=getfield(p,"xoffset")
+ local ox=0
if k then
local x=k[2]
local w=k[4]
if w then
if rlmode and rlmode>=0 then
- n.xoffset=p.xoffset-p.width+d[1]-(w-x)
+ ox=px-getfield(p,"width")+d[1]-(w-x)
else
- n.xoffset=p.xoffset-d[1]-x
+ ox=px-d[1]-x
end
else
if rlmode and rlmode>=0 then
- n.xoffset=p.xoffset-p.width+d[1]
+ ox=px-getfield(p,"width")+d[1]
else
- n.xoffset=p.xoffset-d[1]-x
+ ox=px-d[1]-x
end
end
else
+ local wp=getfield(p,"width")
+ local wn=getfield(n,"width")
if rlmode and rlmode>=0 then
- n.xoffset=p.xoffset-p.width+d[1]
+ ox=px-wp+d[1]
else
- n.xoffset=p.xoffset-d[1]
+ ox=px-d[1]
end
- local w=n.width
- if w~=0 then
- insert_node_before(head,n,newkern(-w/2))
- insert_node_after(head,n,newkern(-w/2))
+ if wn~=0 then
+ insert_node_before(head,n,newkern(-wn/2))
+ insert_node_after(head,n,newkern(-wn/2))
end
end
+ setfield(n,"xoffset",ox)
+ local py=getfield(p,"yoffset")
+ local oy=0
if mk[p] then
- n.yoffset=p.yoffset+d[2]
+ oy=py+d[2]
else
- n.yoffset=n.yoffset+p.yoffset+d[2]
+ oy=getfield(n,"yoffset")+py+d[2]
end
+ setfield(n,"yoffset",oy)
if nofmarks==1 then
break
else
nofmarks=nofmarks-1
end
end
+ elseif not n_markmark then
+ break
else
end
end
@@ -9364,6 +9286,7 @@ function injections.handler(head,where,keep)
if not keep then
kerns={}
end
+head=tonode(head)
return head,true
elseif not keep then
kerns,cursives,marks={},{},{}
@@ -9373,14 +9296,14 @@ function injections.handler(head,where,keep)
trace(head)
end
for n in traverse_id(glyph_code,head) do
- if n.subtype<256 then
- local k=n[a_kernpair]
+ if getsubtype(n)<256 then
+ local k=getattr(n,a_kernpair)
if k then
local kk=kerns[k]
if kk then
local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4]
if y and y~=0 then
- n.yoffset=y
+ setfield(n,"yoffset",y)
end
if w then
local wx=w-x
@@ -9411,10 +9334,10 @@ function injections.handler(head,where,keep)
if not keep then
kerns={}
end
- return head,true
+ return tonode(head),true
else
end
- return head,false
+ return tonode(head),false
end
end -- closure
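
The mark-positioning hunks in the injection handler above compute the mark's x offset from the base glyph's xoffset, its width, and the anchor delta, with the sign of `rlmode` deciding whether the base width is crossed. A condensed sketch of the two main cases, leaving out the pair-kern corrections that the real code also applies, with made-up numbers:

```lua
-- px/wp are the base glyph's xoffset and width, d1 the anchor delta d[1].
local function mark_xoffset(rlmode, px, wp, d1)
  if rlmode and rlmode >= 0 then
    return px - wp + d1          -- left-to-right: step back across the base width
  else
    return px - d1               -- right-to-left: the base width is not crossed
  end
end

print(mark_xoffset( 1, 0, 600, 250))   --> -350
print(mark_xoffset(-1, 0, 600, 250))   --> -250
```
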
@@ -9829,12 +9752,25 @@ registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
registertracker("otf.actions","otf.replacements,otf.positions")
registertracker("otf.injections","nodes.injections")
registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-local insert_node_after=node.insert_after
-local delete_node=nodes.delete
-local copy_node=node.copy
-local find_node_tail=node.tail or node.slide
-local flush_node_list=node.flush_list
-local end_of_math=node.end_of_math
+local nuts=nodes.nuts
+local tonode=nuts.tonode
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getattr=nuts.getattr
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local setfield=nuts.setfield
+local setattr=nuts.setattr
+local insert_node_after=nuts.insert_after
+local delete_node=nuts.delete
+local copy_node=nuts.copy
+local find_node_tail=nuts.tail
+local flush_node_list=nuts.flush_list
+local end_of_math=nuts.end_of_math
local setmetatableindex=table.setmetatableindex
local zwnj=0x200C
local zwj=0x200D
@@ -9945,83 +9881,83 @@ local function pref(kind,lookupname)
return formatters["feature %a, lookup %a"](kind,lookupname)
end
local function copy_glyph(g)
- local components=g.components
+ local components=getfield(g,"components")
if components then
- g.components=nil
+ setfield(g,"components",nil)
local n=copy_node(g)
- g.components=components
+ setfield(g,"components",components)
return n
else
return copy_node(g)
end
end
local function markstoligature(kind,lookupname,head,start,stop,char)
- if start==stop and start.char==char then
+ if start==stop and getchar(start)==char then
return head,start
else
- local prev=start.prev
- local next=stop.next
- start.prev=nil
- stop.next=nil
+ local prev=getprev(start)
+ local next=getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local base=copy_glyph(start)
if head==start then
head=base
end
- base.char=char
- base.subtype=ligature_code
- base.components=start
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
if prev then
- prev.next=base
+ setfield(prev,"next",base)
end
if next then
- next.prev=base
+ setfield(next,"prev",base)
end
- base.next=next
- base.prev=prev
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
return head,base
end
end
local function getcomponentindex(start)
- if start.id~=glyph_code then
+ if getid(start)~=glyph_code then
return 0
- elseif start.subtype==ligature_code then
+ elseif getsubtype(start)==ligature_code then
local i=0
- local components=start.components
+ local components=getfield(start,"components")
while components do
i=i+getcomponentindex(components)
- components=components.next
+ components=getnext(components)
end
return i
- elseif not marks[start.char] then
+ elseif not marks[getchar(start)] then
return 1
else
return 0
end
end
local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
- if start==stop and start.char==char then
- start.char=char
+ if start==stop and getchar(start)==char then
+ setfield(start,"char",char)
return head,start
end
- local prev=start.prev
- local next=stop.next
- start.prev=nil
- stop.next=nil
+ local prev=getprev(start)
+ local next=getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local base=copy_glyph(start)
if start==head then
head=base
end
- base.char=char
- base.subtype=ligature_code
- base.components=start
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
if prev then
- prev.next=base
+ setfield(prev,"next",base)
end
if next then
- next.prev=base
+ setfield(next,"prev",base)
end
- base.next=next
- base.prev=prev
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
if not discfound then
local deletemarks=markflag~="mark"
local components=start
@@ -10030,42 +9966,42 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
local head=base
local current=base
while start do
- local char=start.char
+ local char=getchar(start)
if not marks[char] then
baseindex=baseindex+componentindex
componentindex=getcomponentindex(start)
elseif not deletemarks then
- start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ setattr(start,a_ligacomp,baseindex+(getattr(start,a_ligacomp) or componentindex))
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp))
end
head,current=insert_node_after(head,current,copy_node(start))
elseif trace_marks then
logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
end
- start=start.next
+ start=getnext(start)
end
- local start=current.next
- while start and start.id==glyph_code do
- local char=start.char
+ local start=getnext(current)
+ while start and getid(start)==glyph_code do
+ local char=getchar(start)
if marks[char] then
- start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ setattr(start,a_ligacomp,baseindex+(getattr(start,a_ligacomp) or componentindex))
if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp))
end
else
break
end
- start=start.next
+ start=getnext(start)
end
end
return head,base
end
function handlers.gsub_single(head,start,kind,lookupname,replacement)
if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
end
- start.char=replacement
+ setfield(start,"char",replacement)
return head,start,true
end
local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
@@ -10091,7 +10027,7 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
end
elseif value==0 then
- return start.char,trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
elseif value<1 then
return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
else
@@ -10102,25 +10038,25 @@ end
local function multiple_glyphs(head,start,multiple,ignoremarks)
local nofmultiples=#multiple
if nofmultiples>0 then
- start.char=multiple[1]
+ setfield(start,"char",multiple[1])
if nofmultiples>1 then
- local sn=start.next
+ local sn=getnext(start)
for k=2,nofmultiples do
local n=copy_node(start)
- n.char=multiple[k]
- n.next=sn
- n.prev=start
+ setfield(n,"char",multiple[k])
+ setfield(n,"next",sn)
+ setfield(n,"prev",start)
if sn then
- sn.prev=n
+ setfield(sn,"prev",n)
end
- start.next=n
+ setfield(start,"next",n)
start=n
end
end
return head,start,true
else
if trace_multiples then
- logprocess("no multiple for %s",gref(start.char))
+ logprocess("no multiple for %s",gref(getchar(start)))
end
return head,start,false
end
@@ -10130,34 +10066,34 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence
local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives)
if choice then
if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
end
- start.char=choice
+ setfield(start,"char",choice)
else
if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
end
end
return head,start,true
end
function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
end
return multiple_glyphs(head,start,multiple,sequence.flags[1])
end
function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s,stop,discfound=start.next,nil,false
- local startchar=start.char
+ local s,stop,discfound=getnext(start),nil,false
+ local startchar=getchar(start)
if marks[startchar] then
while s do
- local id=s.id
- if id==glyph_code and s.font==currentfont and s.subtype<256 then
- local lg=ligature[s.char]
+ local id=getid(s)
+ if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then
+ local lg=ligature[getchar(s)]
if lg then
stop=s
ligature=lg
- s=s.next
+ s=getnext(s)
else
break
end
@@ -10169,9 +10105,9 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
local lig=ligature.ligature
if lig then
if trace_ligatures then
- local stopchar=stop.char
+ local stopchar=getchar(stop)
head,start=markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
else
head,start=markstoligature(kind,lookupname,head,start,stop,lig)
end
@@ -10182,18 +10118,18 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
else
local skipmark=sequence.flags[1]
while s do
- local id=s.id
- if id==glyph_code and s.subtype<256 then
- if s.font==currentfont then
- local char=s.char
+ local id=getid(s)
+ if id==glyph_code and getsubtype(s)<256 then
+ if getfont(s)==currentfont then
+ local char=getchar(s)
if skipmark and marks[char] then
- s=s.next
+ s=getnext(s)
else
local lg=ligature[char]
if lg then
stop=s
ligature=lg
- s=s.next
+ s=getnext(s)
else
break
end
@@ -10203,7 +10139,7 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
end
elseif id==disc_code then
discfound=true
- s=s.next
+ s=getnext(s)
else
break
end
@@ -10212,36 +10148,35 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
if lig then
if stop then
if trace_ligatures then
- local stopchar=stop.char
+ local stopchar=getchar(stop)
head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
else
head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
end
- return head,start,true
else
- start.char=lig
+ setfield(start,"char",lig)
if trace_ligatures then
logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
end
- return head,start,true
end
+ return head,start,true
else
end
end
return head,start,false
end
function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
- local base=start.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
if marks[basechar] then
while true do
- base=base.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- basechar=base.char
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
if not marks[basechar] then
break
end
@@ -10290,16 +10225,16 @@ function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence
return head,start,false
end
function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
- local base=start.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
if marks[basechar] then
while true do
- base=base.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- basechar=base.char
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
if not marks[basechar] then
break
end
@@ -10311,7 +10246,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
end
end
- local index=start[a_ligacomp]
+ local index=getattr(start,a_ligacomp)
local baseanchors=descriptions[basechar]
if baseanchors then
baseanchors=baseanchors.anchors
@@ -10356,22 +10291,22 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
return head,start,false
end
function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
- local base=start.prev
- local slc=start[a_ligacomp]
+ local base=getprev(start)
+ local slc=getattr(start,a_ligacomp)
if slc then
while base do
- local blc=base[a_ligacomp]
+ local blc=getattr(base,a_ligacomp)
if blc and blc~=slc then
- base=base.prev
+ base=getprev(base)
else
break
end
end
end
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
local baseanchors=descriptions[basechar]
if baseanchors then
baseanchors=baseanchors.anchors
@@ -10409,20 +10344,20 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
return head,start,false
end
function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
- local alreadydone=cursonce and start[a_cursbase]
+ local alreadydone=cursonce and getattr(start,a_cursbase)
if not alreadydone then
local done=false
- local startchar=start.char
+ local startchar=getchar(start)
if marks[startchar] then
if trace_cursive then
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt=start.next
- while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
- local nextchar=nxt.char
+ local nxt=getnext(start)
+ while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do
+ local nextchar=getchar(nxt)
if marks[nextchar] then
- nxt=nxt.next
+ nxt=getnext(nxt)
else
local entryanchors=descriptions[nextchar]
if entryanchors then
@@ -10456,13 +10391,13 @@ function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
return head,start,done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
end
return head,start,false
end
end
function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
- local startchar=start.char
+ local startchar=getchar(start)
local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
@@ -10470,33 +10405,33 @@ function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
return head,start,false
end
function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
- local snext=start.next
+ local snext=getnext(start)
if not snext then
return head,start,false
else
local prev,done=start,false
local factor=tfmdata.parameters.factor
local lookuptype=lookuptypes[lookupname]
- while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
- local nextchar=snext.char
+ while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do
+ local nextchar=getchar(snext)
local krn=kerns[nextchar]
if not krn and marks[nextchar] then
prev=snext
- snext=snext.next
+ snext=getnext(snext)
else
if not krn then
elseif type(krn)=="table" then
if lookuptype=="pair" then
local a,b=krn[2],krn[3]
if a and #a>0 then
- local startchar=start.char
+ local startchar=getchar(start)
local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b>0 then
- local startchar=start.char
+ local startchar=getchar(start)
local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
@@ -10509,7 +10444,7 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
elseif krn~=0 then
local k=setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
end
done=true
end
@@ -10544,13 +10479,13 @@ function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,looku
return head,start,false
end
function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char=start.char
+ local char=getchar(start)
local replacement=replacements[char]
if replacement then
if trace_singles then
logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
end
- start.char=replacement
+ setfield(start,"char",replacement)
return head,start,true
else
return head,start,false
@@ -10563,8 +10498,8 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
end
while current do
- if current.id==glyph_code then
- local currentchar=current.char
+ if getid(current)==glyph_code then
+ local currentchar=getchar(current)
local lookupname=subtables[1]
local replacement=lookuphash[lookupname]
if not replacement then
@@ -10581,21 +10516,21 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
if trace_singles then
logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
end
- current.char=replacement
+ setfield(current,"char",replacement)
end
end
return head,start,true
elseif current==stop then
break
else
- current=current.next
+ current=getnext(current)
end
end
return head,start,false
end
chainmores.gsub_single=chainprocs.gsub_single
function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local startchar=start.char
+ local startchar=getchar(start)
local subtables=currentlookup.subtables
local lookupname=subtables[1]
local replacements=lookuphash[lookupname]
@@ -10624,8 +10559,8 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
local subtables=currentlookup.subtables
local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
while current do
- if current.id==glyph_code then
- local currentchar=current.char
+ if getid(current)==glyph_code then
+ local currentchar=getchar(current)
local lookupname=subtables[1]
local alternatives=lookuphash[lookupname]
if not alternatives then
@@ -10640,7 +10575,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
end
- start.char=choice
+ setfield(start,"char",choice)
else
if trace_alternatives then
logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
@@ -10654,14 +10589,14 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
elseif current==stop then
break
else
- current=current.next
+ current=getnext(current)
end
end
return head,start,false
end
chainmores.gsub_alternate=chainprocs.gsub_alternate
function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar=start.char
+ local startchar=getchar(start)
local subtables=currentlookup.subtables
local lookupname=subtables[1]
local ligatures=lookuphash[lookupname]
@@ -10676,20 +10611,20 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
end
else
- local s=start.next
+ local s=getnext(start)
local discfound=false
local last=stop
local nofreplacements=0
local skipmark=currentlookup.flags[1]
while s do
- local id=s.id
+ local id=getid(s)
if id==disc_code then
- s=s.next
+ s=getnext(s)
discfound=true
else
- local schar=s.char
+ local schar=getchar(s)
if skipmark and marks[schar] then
- s=s.next
+ s=getnext(s)
else
local lg=ligatures[schar]
if lg then
@@ -10697,7 +10632,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if s==stop then
break
else
- s=s.next
+ s=getnext(s)
end
else
break
@@ -10714,7 +10649,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start==stop then
logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
end
end
head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
@@ -10723,7 +10658,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start==stop then
logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
end
end
end
@@ -10732,7 +10667,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
end
chainmores.gsub_ligature=chainprocs.gsub_ligature
function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
local subtables=currentlookup.subtables
local lookupname=subtables[1]
@@ -10741,14 +10676,14 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
markanchors=markanchors[markchar]
end
if markanchors then
- local base=start.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
if marks[basechar] then
while true do
- base=base.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- basechar=base.char
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
if not marks[basechar] then
break
end
@@ -10795,7 +10730,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
return head,start,false
end
function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
local subtables=currentlookup.subtables
local lookupname=subtables[1]
@@ -10804,14 +10739,14 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
markanchors=markanchors[markchar]
end
if markanchors then
- local base=start.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ local base=getprev(start)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
if marks[basechar] then
while true do
- base=base.prev
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- basechar=base.char
+ base=getprev(base)
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ basechar=getchar(base)
if not marks[basechar] then
break
end
@@ -10823,7 +10758,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
end
end
- local index=start[a_ligacomp]
+ local index=getattr(start,a_ligacomp)
local baseanchors=descriptions[basechar].anchors
if baseanchors then
local baseanchors=baseanchors['baselig']
@@ -10862,7 +10797,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
return head,start,false
end
function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar=start.char
+ local markchar=getchar(start)
if marks[markchar] then
local subtables=currentlookup.subtables
local lookupname=subtables[1]
@@ -10871,20 +10806,20 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
markanchors=markanchors[markchar]
end
if markanchors then
- local base=start.prev
- local slc=start[a_ligacomp]
+ local base=getprev(start)
+ local slc=getattr(start,a_ligacomp)
if slc then
while base do
- local blc=base[a_ligacomp]
+ local blc=getattr(base,a_ligacomp)
if blc and blc~=slc then
- base=base.prev
+ base=getprev(base)
else
break
end
end
end
- if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
- local basechar=base.char
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
local baseanchors=descriptions[basechar].anchors
if baseanchors then
baseanchors=baseanchors['basemark']
@@ -10920,9 +10855,9 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
return head,start,false
end
function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone=cursonce and start[a_cursbase]
+ local alreadydone=cursonce and getattr(start,a_cursbase)
if not alreadydone then
- local startchar=start.char
+ local startchar=getchar(start)
local subtables=currentlookup.subtables
local lookupname=subtables[1]
local exitanchors=lookuphash[lookupname]
@@ -10936,11 +10871,11 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt=start.next
- while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
- local nextchar=nxt.char
+ local nxt=getnext(start)
+ while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do
+ local nextchar=getchar(nxt)
if marks[nextchar] then
- nxt=nxt.next
+ nxt=getnext(nxt)
else
local entryanchors=descriptions[nextchar]
if entryanchors then
@@ -10974,7 +10909,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
return head,start,done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
end
return head,start,false
end
@@ -10982,7 +10917,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
return head,start,false
end
function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local startchar=start.char
+ local startchar=getchar(start)
local subtables=currentlookup.subtables
local lookupname=subtables[1]
local kerns=lookuphash[lookupname]
@@ -10999,9 +10934,9 @@ function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lo
end
chainmores.gpos_single=chainprocs.gpos_single
function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local snext=start.next
+ local snext=getnext(start)
if snext then
- local startchar=start.char
+ local startchar=getchar(start)
local subtables=currentlookup.subtables
local lookupname=subtables[1]
local kerns=lookuphash[lookupname]
@@ -11011,26 +10946,26 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
local lookuptype=lookuptypes[lookupname]
local prev,done=start,false
local factor=tfmdata.parameters.factor
- while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
- local nextchar=snext.char
+ while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do
+ local nextchar=getchar(snext)
local krn=kerns[nextchar]
if not krn and marks[nextchar] then
prev=snext
- snext=snext.next
+ snext=getnext(snext)
else
if not krn then
elseif type(krn)=="table" then
if lookuptype=="pair" then
local a,b=krn[2],krn[3]
if a and #a>0 then
- local startchar=start.char
+ local startchar=getchar(start)
local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b>0 then
- local startchar=start.char
+ local startchar=getchar(start)
local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
@@ -11042,7 +10977,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
if a and a~=0 then
local k=setkern(snext,factor,rlmode,a)
if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
end
end
if b and b~=0 then
@@ -11053,7 +10988,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
elseif krn~=0 then
local k=setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
end
done=true
end
@@ -11074,6 +11009,10 @@ local function show_skip(kind,chainname,char,ck,class)
logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
end
end
+local quit_on_no_replacement=true
+directives.register("otf.chain.quitonnoreplacement",function(value)
+ quit_on_no_replacement=value
+end)
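The four added lines above register a directive that feeds the `done=quit_on_no_replacement` assignment later in this hunk, replacing a hard-wired `done=true`: when a contextual chain rule matches but carries no replacement, the result can now be reported as not handled. A minimal usage sketch, assuming ConTeXt's usual setters interface (`directives.enable`/`directives.disable` are not part of this patch):

    -- hypothetical toggle of the directive registered above
    directives.enable ("otf.chain.quitonnoreplacement")  -- default: such a match still counts as done
    directives.disable("otf.chain.quitonnoreplacement")  -- report the rule as not applied instead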
local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
local flags=sequence.flags
local done=false
@@ -11091,7 +11030,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
local seq=ck[3]
local s=#seq
if s==1 then
- match=current.id==glyph_code and current.font==currentfont and current.subtype<256 and seq[1][current.char]
+ match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
else
local f,l=ck[4],ck[5]
if f==1 and f==l then
@@ -11099,13 +11038,13 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if f==l then
else
local n=f+1
- last=last.next
+ last=getnext(last)
while n<=l do
if last then
- local id=last.id
+ local id=getid(last)
if id==glyph_code then
- if last.font==currentfont and last.subtype<256 then
- local char=last.char
+ if getfont(last)==currentfont and getsubtype(last)<256 then
+ local char=getchar(last)
local ccd=descriptions[char]
if ccd then
local class=ccd.class
@@ -11114,10 +11053,10 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if trace_skips then
show_skip(kind,chainname,char,ck,class)
end
- last=last.next
+ last=getnext(last)
elseif seq[n][char] then
if n<l then
- last=last.next
+ last=getnext(last)
end
n=n+1
else
@@ -11133,7 +11072,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
elseif id==disc_code then
- last=last.next
+ last=getnext(last)
else
match=false
break
@@ -11146,15 +11085,15 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
if match and f>1 then
- local prev=start.prev
+ local prev=getprev(start)
if prev then
local n=f-1
while n>=1 do
if prev then
- local id=prev.id
+ local id=getid(prev)
if id==glyph_code then
- if prev.font==currentfont and prev.subtype<256 then
- local char=prev.char
+ if getfont(prev)==currentfont and getsubtype(prev)<256 then
+ local char=getchar(prev)
local ccd=descriptions[char]
if ccd then
local class=ccd.class
@@ -11184,7 +11123,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
match=false
break
end
- prev=prev.prev
+ prev=getprev(prev)
elseif seq[n][32] then
n=n -1
else
@@ -11204,15 +11143,15 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
if match and s>l then
- local current=last and last.next
+ local current=last and getnext(last)
if current then
local n=l+1
while n<=s do
if current then
- local id=current.id
+ local id=getid(current)
if id==glyph_code then
- if current.font==currentfont and current.subtype<256 then
- local char=current.char
+ if getfont(current)==currentfont and getsubtype(current)<256 then
+ local char=getchar(current)
local ccd=descriptions[char]
if ccd then
local class=ccd.class
@@ -11242,7 +11181,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
match=false
break
end
- current=current.next
+ current=getnext(current)
elseif seq[n][32] then
n=n+1
else
@@ -11265,7 +11204,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if match then
if trace_contexts then
local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
- local char=start.char
+ local char=getchar(start)
if ck[9] then
logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
@@ -11299,12 +11238,12 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
repeat
if skipped then
while true do
- local char=start.char
+ local char=getchar(start)
local ccd=descriptions[char]
if ccd then
local class=ccd.class
if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
- start=start.next
+ start=getnext(start)
else
break
end
@@ -11334,7 +11273,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
if start then
- start=start.next
+ start=getnext(start)
else
end
until i>nofchainlookups
@@ -11344,7 +11283,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if replacements then
head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements)
else
- done=true
+ done=quit_on_no_replacement
if trace_contexts then
logprocess("%s: skipping match",cref(kind,chainname))
end
@@ -11461,6 +11400,7 @@ local function featuresprocessor(head,font,attr)
if not lookuphash then
return head,false
end
+ head=tonut(head)
if trace_steps then
checkstep(head)
end
@@ -11493,10 +11433,10 @@ local function featuresprocessor(head,font,attr)
local handler=handlers[typ]
local start=find_node_tail(head)
while start do
- local id=start.id
+ local id=getid(start)
if id==glyph_code then
- if start.font==font and start.subtype<256 then
- local a=start[0]
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
if a then
a=a==attr
else
@@ -11507,7 +11447,7 @@ local function featuresprocessor(head,font,attr)
local lookupname=subtables[i]
local lookupcache=lookuphash[lookupname]
if lookupcache then
- local lookupmatch=lookupcache[start.char]
+ local lookupmatch=lookupcache[getchar(start)]
if lookupmatch then
head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
if success then
@@ -11518,15 +11458,15 @@ local function featuresprocessor(head,font,attr)
report_missing_cache(typ,lookupname)
end
end
- if start then start=start.prev end
+ if start then start=getprev(start) end
else
- start=start.prev
+ start=getprev(start)
end
else
- start=start.prev
+ start=getprev(start)
end
else
- start=start.prev
+ start=getprev(start)
end
end
else
@@ -11544,16 +11484,16 @@ local function featuresprocessor(head,font,attr)
local head=start
local done=false
while start do
- local id=start.id
- if id==glyph_code and start.font==font and start.subtype<256 then
- local a=start[0]
+ local id=getid(start)
+ if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
else
- a=not attribute or start[a_state]==attribute
+ a=not attribute or getattr(start,a_state)==attribute
end
if a then
- local lookupmatch=lookupcache[start.char]
+ local lookupmatch=lookupcache[getchar(start)]
if lookupmatch then
local ok
head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
@@ -11561,12 +11501,12 @@ local function featuresprocessor(head,font,attr)
done=true
end
end
- if start then start=start.next end
+ if start then start=getnext(start) end
else
- start=start.next
+ start=getnext(start)
end
else
- start=start.next
+ start=getnext(start)
end
end
if done then
@@ -11575,18 +11515,18 @@ local function featuresprocessor(head,font,attr)
end
end
local function kerndisc(disc)
- local prev=disc.prev
- local next=disc.next
+ local prev=getprev(disc)
+ local next=getnext(disc)
if prev and next then
- prev.next=next
- local a=prev[0]
+ setfield(prev,"next",next)
+ local a=getattr(prev,0)
if a then
- a=(a==attr) and (not attribute or prev[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(prev,a_state)==attribute)
else
- a=not attribute or prev[a_state]==attribute
+ a=not attribute or getattr(prev,a_state)==attribute
end
if a then
- local lookupmatch=lookupcache[prev.char]
+ local lookupmatch=lookupcache[getchar(prev)]
if lookupmatch then
local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
if ok then
@@ -11595,22 +11535,22 @@ local function featuresprocessor(head,font,attr)
end
end
end
- prev.next=disc
+ setfield(prev,"next",disc)
end
return next
end
while start do
- local id=start.id
+ local id=getid(start)
if id==glyph_code then
- if start.font==font and start.subtype<256 then
- local a=start[0]
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
else
- a=not attribute or start[a_state]==attribute
+ a=not attribute or getattr(start,a_state)==attribute
end
if a then
- local lookupmatch=lookupcache[start.char]
+ local lookupmatch=lookupcache[getchar(start)]
if lookupmatch then
local ok
head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
@@ -11618,38 +11558,38 @@ local function featuresprocessor(head,font,attr)
success=true
end
end
- if start then start=start.next end
+ if start then start=getnext(start) end
else
- start=start.next
+ start=getnext(start)
end
else
- start=start.next
+ start=getnext(start)
end
elseif id==disc_code then
- if start.subtype==discretionary_code then
- local pre=start.pre
+ if getsubtype(start)==discretionary_code then
+ local pre=getfield(start,"pre")
if pre then
local new=subrun(pre)
- if new then start.pre=new end
+ if new then setfield(start,"pre",new) end
end
- local post=start.post
+ local post=getfield(start,"post")
if post then
local new=subrun(post)
- if new then start.post=new end
+ if new then setfield(start,"post",new) end
end
- local replace=start.replace
+ local replace=getfield(start,"replace")
if replace then
local new=subrun(replace)
- if new then start.replace=new end
+ if new then setfield(start,"replace",new) end
end
elseif typ=="gpos_single" or typ=="gpos_pair" then
kerndisc(start)
end
- start=start.next
+ start=getnext(start)
elseif id==whatsit_code then
- local subtype=start.subtype
+ local subtype=getsubtype(start)
if subtype==dir_code then
- local dir=start.dir
+ local dir=getfield(start,"dir")
if dir=="+TRT" or dir=="+TLT" then
topstack=topstack+1
dirstack[topstack]=dir
@@ -11668,7 +11608,7 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype==localpar_code then
- local dir=start.dir
+ local dir=getfield(start,"dir")
if dir=="TRT" then
rlparmode=-1
elseif dir=="TLT" then
@@ -11681,11 +11621,11 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
- start=start.next
+ start=getnext(start)
elseif id==math_code then
- start=end_of_math(start).next
+ start=getnext(end_of_math(start))
else
- start=start.next
+ start=getnext(start)
end
end
end
@@ -11694,20 +11634,20 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
local head=start
local done=false
while start do
- local id=start.id
- if id==glyph_code and start.id==font and start.subtype<256 then
- local a=start[0]
+ local id=getid(start)
+ if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
else
- a=not attribute or start[a_state]==attribute
+ a=not attribute or getattr(start,a_state)==attribute
end
if a then
for i=1,ns do
local lookupname=subtables[i]
local lookupcache=lookuphash[lookupname]
if lookupcache then
- local lookupmatch=lookupcache[start.char]
+ local lookupmatch=lookupcache[getchar(start)]
if lookupmatch then
local ok
head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
@@ -11722,12 +11662,12 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_missing_cache(typ,lookupname)
end
end
- if start then start=start.next end
+ if start then start=getnext(start) end
else
- start=start.next
+ start=getnext(start)
end
else
- start=start.next
+ start=getnext(start)
end
end
if done then
@@ -11736,22 +11676,22 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
end
end
local function kerndisc(disc)
- local prev=disc.prev
- local next=disc.next
+ local prev=getprev(disc)
+ local next=getnext(disc)
if prev and next then
- prev.next=next
- local a=prev[0]
+ setfield(prev,"next",next)
+ local a=getattr(prev,0)
if a then
- a=(a==attr) and (not attribute or prev[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(prev,a_state)==attribute)
else
- a=not attribute or prev[a_state]==attribute
+ a=not attribute or getattr(prev,a_state)==attribute
end
if a then
for i=1,ns do
local lookupname=subtables[i]
local lookupcache=lookuphash[lookupname]
if lookupcache then
- local lookupmatch=lookupcache[prev.char]
+ local lookupmatch=lookupcache[getchar(prev)]
if lookupmatch then
local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
if ok then
@@ -11764,26 +11704,26 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
end
end
end
- prev.next=disc
+ setfield(prev,"next",disc)
end
return next
end
while start do
- local id=start.id
+ local id=getid(start)
if id==glyph_code then
- if start.font==font and start.subtype<256 then
- local a=start[0]
+ if getfont(start)==font and getsubtype(start)<256 then
+ local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or start[a_state]==attribute)
+ a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
else
- a=not attribute or start[a_state]==attribute
+ a=not attribute or getattr(start,a_state)==attribute
end
if a then
for i=1,ns do
local lookupname=subtables[i]
local lookupcache=lookuphash[lookupname]
if lookupcache then
- local lookupmatch=lookupcache[start.char]
+ local lookupmatch=lookupcache[getchar(start)]
if lookupmatch then
local ok
head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
@@ -11798,38 +11738,38 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_missing_cache(typ,lookupname)
end
end
- if start then start=start.next end
+ if start then start=getnext(start) end
else
- start=start.next
+ start=getnext(start)
end
else
- start=start.next
+ start=getnext(start)
end
elseif id==disc_code then
- if start.subtype==discretionary_code then
- local pre=start.pre
+ if getsubtype(start)==discretionary_code then
+ local pre=getfield(start,"pre")
if pre then
local new=subrun(pre)
- if new then start.pre=new end
+ if new then setfield(start,"pre",new) end
end
- local post=start.post
+ local post=getfield(start,"post")
if post then
local new=subrun(post)
- if new then start.post=new end
+ if new then setfield(start,"post",new) end
end
- local replace=start.replace
+ local replace=getfield(start,"replace")
if replace then
local new=subrun(replace)
- if new then start.replace=new end
+ if new then setfield(start,"replace",new) end
end
elseif typ=="gpos_single" or typ=="gpos_pair" then
kerndisc(start)
end
- start=start.next
+ start=getnext(start)
elseif id==whatsit_code then
- local subtype=start.subtype
+ local subtype=getsubtype(start)
if subtype==dir_code then
- local dir=start.dir
+ local dir=getfield(start,"dir")
if dir=="+TRT" or dir=="+TLT" then
topstack=topstack+1
dirstack[topstack]=dir
@@ -11848,7 +11788,7 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype==localpar_code then
- local dir=start.dir
+ local dir=getfield(start,"dir")
if dir=="TRT" then
rlparmode=-1
elseif dir=="TLT" then
@@ -11861,11 +11801,11 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
- start=start.next
+ start=getnext(start)
elseif id==math_code then
- start=end_of_math(start).next
+ start=getnext(end_of_math(start))
else
- start=start.next
+ start=getnext(start)
end
end
end
@@ -11877,6 +11817,7 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
registerstep(head)
end
end
+ head=tonode(head)
return head,done
end
local function generic(lookupdata,lookupname,unicode,lookuphash)
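This closes the main rewrite of `featuresprocessor`: the list is converted with `head=tonut(head)` on entry and `head=tonode(head)` on exit, and every field access in between goes through accessors (`getid`, `getchar`, `getnext`, `getattr`, `setfield`, ...), i.e. LuaTeX's direct nodes ("nuts"). A minimal, self-contained sketch of that pattern, mapping the accessors onto `node.direct` as an assumption (in ConTeXt they come from its own nuts layer):

    -- sketch only: walk a node list as direct nodes, convert back at the end
    local direct     = node.direct
    local tonut      = direct.todirect
    local tonode     = direct.tonode
    local getid      = direct.getid
    local getnext    = direct.getnext
    local getchar    = direct.getchar
    local setfield   = direct.setfield
    local glyph_code = node.id("glyph")

    local function uppercase_ascii(head)        -- head is a regular node list
        local nut = tonut(head)                 -- convert once on entry
        local n   = nut
        while n do
            if getid(n) == glyph_code then
                local c = getchar(n)
                if c >= 0x61 and c <= 0x7A then
                    setfield(n,"char",c-0x20)   -- write through the accessor, not n.char
                end
            end
            n = getnext(n)
        end
        return tonode(nut)                      -- convert back once on exit
    end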
diff --git a/tex/generic/context/luatex/luatex-fonts-otn.lua b/tex/generic/context/luatex/luatex-fonts-otn.lua
deleted file mode 100644
index c57be5f02..000000000
--- a/tex/generic/context/luatex/luatex-fonts-otn.lua
+++ /dev/null
@@ -1,2848 +0,0 @@
-if not modules then modules = { } end modules ['font-otn'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- preprocessors = { "nodes" }
-
--- this is still somewhat preliminary and it will get better in due time;
--- much functionality could only be implemented thanks to the husayni font
--- of Idris Samawi Hamid to whom we dedicate this module.
-
--- in retrospect it always looks easy but believe it or not, it took a lot
--- of work to get proper open type support done: buggy fonts, fuzzy specs,
--- specially made test fonts, many skype sessions between taco, idris and me,
--- torture tests etc etc ... unfortunately the code does not show how much
--- time it took ...
-
--- todo:
---
--- kerning is probably not yet ok for latin around disc nodes (interesting challenge)
--- extension infrastructure (for usage out of context)
--- sorting features according to vendors/renderers
--- alternative loop quitters
--- check cursive and r2l
--- find out where ignore-mark-classes went
--- default features (per language, script)
--- handle positions (we need example fonts)
--- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
--- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
--- remove some optimizations (when I have a faster machine)
---
--- maybe redo the lot some way (more context specific)
-
---[[ldx--
-<p>This module is a bit more split up than I'd like but since we also want to test
-with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
-and discussion about improvements and functionality mostly happens on the
-<l n='context'/> mailing list.</p>
-
-<p>The specification of OpenType is kind of vague. Apart from the lack of a proper
-free specification there's also the problem that Microsoft and Adobe
-may have their own interpretation of how and in what order to apply features.
-In general the Microsoft website has more detailed specifications and is a
-better reference. There is also some information in the FontForge help files.</p>
-
-<p>Because so much is possible, fonts might contain bugs and/or be made to
-work with certain renderers. These may evolve over time which may have the side
-effect that suddenly fonts behave differently.</p>
-
-<p>After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
-implementation. Of course all errors are mine and of course the code can be
-improved. There are quite some optimizations going on here and processing speed
-is currently acceptable. Not all functions are implemented yet, often because I
-lack the fonts for testing. Many scripts are not yet supported either, but I will
-look into them as soon as <l n='context'/> users ask for it.</p>
-
-<p>Because there are different interpretations possible, I will extend the code
-with more (configurable) variants. I can also add hooks for users so that they can
-write their own extensions.</p>
-
-<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
-relationship with unicode at all, apart from the fact that a font might cover certain
-ranges of characters. One character can have multiple shapes. However, at the
-<l n='tex'/> end we use unicode, so all extra glyphs are mapped into a private
-space. This is needed because we need to access them and <l n='tex'/> has to include
-them in the output eventually.</p>
-
-<p>The raw table as it comes from <l n='fontforge'/> gets reorganized to fit our needs.
-In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
-so that successive runs can use the optimized table (after loading the table is
-unpacked). The flattening code used later is a prelude to an even more compact table
-format (and as such it keeps evolving).</p>
-
-<p>This module is sparsely documented because it is a moving target. The table format
-of the reader changes and we experiment a lot with different methods for supporting
-features.</p>
-
-<p>As with the <l n='afm'/> code, we may decide to store more information in the
-<l n='otf'/> table.</p>
-
-<p>Incrementing the version number will force a re-cache. We jump the number by one
-when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
-results in different tables.</p>
---ldx]]--
-
--- action                     handler  chainproc  chainmore            comment
---
--- gsub_single                ok       ok         ok
--- gsub_multiple              ok       ok         not implemented yet
--- gsub_alternate             ok       ok         not implemented yet
--- gsub_ligature              ok       ok         ok
--- gsub_context               ok       --
--- gsub_contextchain          ok       --
--- gsub_reversecontextchain   ok       --
--- chainsub                   --       ok
--- reversesub                 --       ok
--- gpos_mark2base             ok       ok
--- gpos_mark2ligature         ok       ok
--- gpos_mark2mark             ok       ok
--- gpos_cursive               ok       untested
--- gpos_single                ok       ok
--- gpos_pair                  ok       ok
--- gpos_context               ok       --
--- gpos_contextchain          ok       --
---
--- todo: contextpos and contextsub and class stuff
---
--- actions:
---
--- handler : actions triggered by lookup
--- chainproc : actions triggered by contextual lookup
--- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
---
--- remark: the 'not implemented yet' variants will be done when we have fonts that use them
--- remark: we need to check what to do with discretionaries
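These three names are dispatch tables keyed by lookup type; a driver function (in this code, `featuresprocessor`) picks the entry for the current lookup type and calls it with the node list. A tiny illustrative sketch of that shape (the small driver below is an assumption for illustration, not the real one):

    -- illustrative only: how a lookup type selects its action
    local handlers   = { }  -- direct lookups, e.g. handlers.gsub_single
    local chainprocs = { }  -- lookups triggered from a contextual chain
    local chainmores = { }  -- extra substitutions inside a chain match

    local function apply(head,start,kind,lookupname,lookuptype,lookupdata,sequence)
        local action = handlers[lookuptype]
        if action then
            -- every action returns head, the node to continue at, and a done flag
            return action(head,start,kind,lookupname,lookupdata,sequence)
        end
        return head,start,false
    end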
-
--- We used to have independent hashes for lookups but as the tags are unique
--- we now use only one hash. If needed we can have multiple again but in that
--- case I will probably prefix (i.e. rename) the lookups in the cached font file.
-
--- Todo: make plugin feature that operates on char/glyphnode arrays
-
-local concat, insert, remove = table.concat, table.insert, table.remove
-local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-local random = math.random
-local formatters = string.formatters
-
-local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
-
-local registertracker = trackers.register
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
-local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
-local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
-local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
-local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
-local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
-local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
-local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
-local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
-local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
-local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
-local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
-
-local report_direct = logs.reporter("fonts","otf direct")
-local report_subchain = logs.reporter("fonts","otf subchain")
-local report_chain = logs.reporter("fonts","otf chain")
-local report_process = logs.reporter("fonts","otf process")
-local report_prepare = logs.reporter("fonts","otf prepare")
-local report_warning = logs.reporter("fonts","otf warning")
-
-registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
-registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
-
-registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
-registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
-registertracker("otf.actions","otf.replacements,otf.positions")
-registertracker("otf.injections","nodes.injections")
-
-registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local copy_node = node.copy
-local find_node_tail = node.tail or node.slide
-local flush_node_list = node.flush_list
-local end_of_math = node.end_of_math
-
-local setmetatableindex = table.setmetatableindex
-
-local zwnj = 0x200C
-local zwj = 0x200D
-local wildcard = "*"
-local default = "dflt"
-
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-local glyphcodes = nodes.glyphcodes
-local disccodes = nodes.disccodes
-
-local glyph_code = nodecodes.glyph
-local glue_code = nodecodes.glue
-local disc_code = nodecodes.disc
-local whatsit_code = nodecodes.whatsit
-local math_code = nodecodes.math
-
-local dir_code = whatcodes.dir
-local localpar_code = whatcodes.localpar
-
-local discretionary_code = disccodes.discretionary
-
-local ligature_code = glyphcodes.ligature
-
-local privateattribute = attributes.private
-
--- Something is messed up: we have two mark / ligature indices, one at the injection
--- end and one here ... this is bases in KE's patches but there is something fishy
--- there as I'm pretty sure that for husayni we need some connection (as it's much
--- more complex than an average font) but I need proper examples of all cases, not
--- of only some.
-
-local a_state = privateattribute('state')
-local a_markbase = privateattribute('markbase')
-local a_markmark = privateattribute('markmark')
-local a_markdone = privateattribute('markdone') -- assigned at the injection end
-local a_cursbase = privateattribute('cursbase')
-local a_curscurs = privateattribute('curscurs')
-local a_cursdone = privateattribute('cursdone')
-local a_kernpair = privateattribute('kernpair')
-local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
-
-local injections = nodes.injections
-local setmark = injections.setmark
-local setcursive = injections.setcursive
-local setkern = injections.setkern
-local setpair = injections.setpair
-
-local markonce = true
-local cursonce = true
-local kernonce = true
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local onetimemessage = fonts.loggers.onetimemessage or function() end
-
-otf.defaultnodealternate = "none" -- first last
-
--- we share some vars here, after all, we have no nested lookups and less code
-
-local tfmdata = false
-local characters = false
-local descriptions = false
-local resources = false
-local marks = false
-local currentfont = false
-local lookuptable = false
-local anchorlookups = false
-local lookuptypes = false
-local handlers = { }
-local rlmode = 0
-local featurevalue = false
-
--- head is always a whatsit so we can safely assume that head is not changed
-
--- we use this for special testing and documentation
-
-local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
-local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
-local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_direct(...)
-end
-
-local function logwarning(...)
- report_direct(...)
-end
-
-local f_unicode = formatters["%U"]
-local f_uniname = formatters["%U (%s)"]
-local f_unilist = formatters["% t (% t)"]
-
-local function gref(n) -- currently the same as in font-otb
- if type(n) == "number" then
- local description = descriptions[n]
- local name = description and description.name
- if name then
- return f_uniname(n,name)
- else
- return f_unicode(n)
- end
- elseif n then
- local num, nam = { }, { }
- for i=1,#n do
- local ni = n[i]
- if tonumber(ni) then -- later we will start at 2
- local di = descriptions[ni]
- num[i] = f_unicode(ni)
- nam[i] = di and di.name or "-"
- end
- end
- return f_unilist(num,nam)
- else
- return "<error in node mode tracing>"
- end
-end
-
-local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
- if index then
- return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
- elseif lookupname then
- return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
- elseif chainlookupname then
- return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
- elseif chainname then
- return formatters["feature %a, chain %a"](kind,chainname)
- else
- return formatters["feature %a"](kind)
- end
-end
-
-local function pref(kind,lookupname)
- return formatters["feature %a, lookup %a"](kind,lookupname)
-end
-
--- We can assume that languages that use marks are not hyphenated. We can also assume
--- that at most one discretionary is present.
-
--- We do need components in funny kerning mode but maybe I can better reconstruct them
--- as we do have the font components info available; removing components makes the
--- previous code much simpler. Also, later on copying and freeing becomes easier.
--- However, for arabic we need to keep them around for the sake of mark placement
--- and indices.
-
-local function copy_glyph(g) -- next and prev are untouched !
- local components = g.components
- if components then
- g.components = nil
- local n = copy_node(g)
- g.components = components
- return n
- else
- return copy_node(g)
- end
-end
-
--- start is a mark and we need to keep that one
-
-local function markstoligature(kind,lookupname,head,start,stop,char)
- if start == stop and start.char == char then
- return head, start
- else
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
- local base = copy_glyph(start)
- if head == start then
- head = base
- end
- base.char = char
- base.subtype = ligature_code
- base.components = start
- if prev then
- prev.next = base
- end
- if next then
- next.prev = base
- end
- base.next = next
- base.prev = prev
- return head, base
- end
-end
-
--- The next code is somewhat complicated by the fact that some fonts can have ligatures made
--- from ligatures that themselves have marks. This was identified by Kai in for instance
--- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
-KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In the next
--- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
--- third component.
-
-local function getcomponentindex(start)
- if start.id ~= glyph_code then
- return 0
- elseif start.subtype == ligature_code then
- local i = 0
- local components = start.components
- while components do
- i = i + getcomponentindex(components)
- components = components.next
- end
- return i
- elseif not marks[start.char] then
- return 1
- else
- return 0
- end
-end
-
--- eventually we will do positioning in another way (needs additional w/h/d fields)
-
-local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
- if start == stop and start.char == char then
- start.char = char
- return head, start
- end
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
- local base = copy_glyph(start)
- if start == head then
- head = base
- end
- base.char = char
- base.subtype = ligature_code
- base.components = start -- start can have components
- if prev then
- prev.next = base
- end
- if next then
- next.prev = base
- end
- base.next = next
- base.prev = prev
- if not discfound then
- local deletemarks = markflag ~= "mark"
- local components = start
- local baseindex = 0
- local componentindex = 0
- local head = base
- local current = base
- -- first we loop over the glyphs in start .. stop
- while start do
- local char = start.char
- if not marks[char] then
- baseindex = baseindex + componentindex
- componentindex = getcomponentindex(start)
- elseif not deletemarks then -- quite fishy
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
- end
- head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
- elseif trace_marks then
- logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
- end
- start = start.next
- end
- -- we can have one accent as part of a lookup and another following
- -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari were added)
- local start = current.next
- while start and start.id == glyph_code do
- local char = start.char
- if marks[char] then
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
- if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
- end
- else
- break
- end
- start = start.next
- end
- end
- return head, base
-end
-
-function handlers.gsub_single(head,start,kind,lookupname,replacement)
- if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
- end
- start.char = replacement
- return head, start, true
-end
-
-local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
- local n = #alternatives
- if value == "random" then
- local r = random(1,n)
- return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
- elseif value == "first" then
- return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
- elseif value == "last" then
- return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
- else
- value = tonumber(value)
- if type(value) ~= "number" then
- return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
- elseif value > n then
- local defaultalt = otf.defaultnodealternate
- if defaultalt == "first" then
- return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
- elseif defaultalt == "last" then
- return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
- else
- return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
- end
- elseif value == 0 then
- return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
- elseif value < 1 then
- return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
- else
- return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
- end
- end
-end
-
-local function multiple_glyphs(head,start,multiple,ignoremarks)
- local nofmultiples = #multiple
- if nofmultiples > 0 then
- start.char = multiple[1]
- if nofmultiples > 1 then
- local sn = start.next
- for k=2,nofmultiples do -- todo: use insert_node
--- untested:
---
--- while ignoremarks and marks[sn.char] do
---    sn = sn.next
--- end
- local n = copy_node(start) -- ignore components
- n.char = multiple[k]
- n.next = sn
- n.prev = start
- if sn then
- sn.prev = n
- end
- start.next = n
- start = n
- end
- end
- return head, start, true
- else
- if trace_multiples then
- logprocess("no multiple for %s",gref(start.char))
- end
- return head, start, false
- end
-end
-
-function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
- end
- start.char = choice
- else
- if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
- end
- end
- return head, start, true
-end
-
-function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
- if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
- end
- return multiple_glyphs(head,start,multiple,sequence.flags[1])
-end
-
-function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s, stop, discfound = start.next, nil, false
- local startchar = start.char
- if marks[startchar] then
- while s do
- local id = s.id
- if id == glyph_code and s.font == currentfont and s.subtype<256 then
- local lg = ligature[s.char]
- if lg then
- stop = s
- ligature = lg
- s = s.next
- else
- break
- end
- else
- break
- end
- end
- if stop then
- local lig = ligature.ligature
- if lig then
- if trace_ligatures then
- local stopchar = stop.char
- head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- end
- return head, start, true
- else
- -- ok, goto next lookup
- end
- end
- else
- local skipmark = sequence.flags[1]
- while s do
- local id = s.id
- if id == glyph_code and s.subtype<256 then
- if s.font == currentfont then
- local char = s.char
- if skipmark and marks[char] then
- s = s.next
- else
- local lg = ligature[char]
- if lg then
- stop = s
- ligature = lg
- s = s.next
- else
- break
- end
- end
- else
- break
- end
- elseif id == disc_code then
- discfound = true
- s = s.next
- else
- break
- end
- end
- local lig = ligature.ligature
- if lig then
- if stop then
- if trace_ligatures then
- local stopchar = stop.char
- head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- end
- return head, start, true
- else
- -- weird but happens (in some arabic font)
- start.char = lig
- if trace_ligatures then
- logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
- end
- return head, start, true
- end
- else
- -- weird but happens
- end
- end
- return head, start, false
-end
-
---[[ldx--
-<p>We get hits on a mark, but we're not sure if it has to be applied so
-we need to explicitly test for basechar, baselig and basemark entries.</p>
---ldx]]--
-
-function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- end
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
- -- check chainpos variant
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local index = start[a_ligacomp]
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor, ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
- if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head, start, true
- else
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
- end
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = base[a_ligacomp]
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
- end
- end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and start[a_cursbase]
- if not alreadydone then
- local done = false
- local startchar = start.char
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return head, start, false
- end
-end
-
-function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
- local startchar = start.char
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
- end
- return head, start, false
-end
-
-function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
- -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
- -- todo: kerns in components of ligatures
- local snext = start.next
- if not snext then
- return head, start, false
- else
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- local lookuptype = lookuptypes[lookupname]
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then -- probably not needed
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else -- wrong ... position has different entries
- report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
- -- local a, b = krn[2], krn[6]
- -- if a and a ~= 0 then
- -- local k = setkern(snext,factor,rlmode,a)
- -- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- -- end
- -- end
- -- if b and b ~= 0 then
- -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
- -- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return head, start, done
- end
-end
-
---[[ldx--
-<p>I will implement multiple chain replacements once I run into a font that uses
-them. It's not that complex to handle.</p>
---ldx]]--
-
-local chainmores = { }
-local chainprocs = { }
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_subchain(...)
-end
-
-local logwarning = report_subchain
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_chain(...)
-end
-
-local logwarning = report_chain
-
--- We could share functions but that would lead to extra function calls with many
--- arguments, redundant tests and confusing messages.
-
-function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
- logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return head, start, false
-end
-
-function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
- logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return head, start, false
-end
-
--- The reversesub is a special case, which is why we need to store the replacements
--- in a somewhat odd way. There is no separate lookup: the replacements come with the
--- rule itself. It is meant mostly for dealing with Urdu.
-
-function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = start.char
- local replacement = replacements[char]
- if replacement then
- if trace_singles then
- logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
- end
- start.char = replacement
- return head, start, true
- else
- return head, start, false
- end
-end
-
---[[ldx--
-<p>This chain stuff is somewhat tricky since we can have a sequence of actions to
-be applied: single, alternate, multiple or ligature, where ligature can be an
-invalid one in the sense that it will replace multiple glyphs by one but not
-necessarily one that looks like the combination (i.e. it is then the counterpart
-of multiple). For example, the following is valid:</p>
-
-<typing>
-<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
-</typing>
-
-<p>Therefore we don't really do the replacement here yet unless we have the
-single lookup case. The efficiency of the replacements can be improved by
-deleting as little as needed, but that would also make the code even more messy.</p>
---ldx]]--
-
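--- Purely to visualize the example above (not part of this module; the real code
--- operates on node lists, not strings): applying the three actions in order.
---
--- local s = "xxxabcdexxx"
--- s = s:gsub("a","A")    -- single   : a   -> A
--- s = s:gsub("b","BCD")  -- multiple : b   -> B C D
--- s = s:gsub("cde","E")  -- ligature : cde -> E
--- print(s)               -- xxxABCDExxx
-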
--- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start
--- local n = 1
--- if start == stop then
--- -- done
--- elseif ignoremarks then
--- repeat -- start x x m x x stop => start m
--- local next = start.next
--- if not marks[next.char] then
--- local components = next.components
--- if components then -- probably not needed
--- flush_node_list(components)
--- end
--- head = delete_node(head,next)
--- end
--- n = n + 1
--- until next == stop
--- else -- start x x x stop => start
--- repeat
--- local next = start.next
--- local components = next.components
--- if components then -- probably not needed
--- flush_node_list(components)
--- end
--- head = delete_node(head,next)
--- n = n + 1
--- until next == stop
--- end
--- return head, n
--- end
-
---[[ldx--
-<p>Here we replace start by a single variant. First we delete the rest of the
-match.</p>
---ldx]]--
-
-function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- -- todo: marks ?
- local current = start
- local subtables = currentlookup.subtables
- if #subtables > 1 then
- logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
- end
- while current do
- if current.id == glyph_code then
- local currentchar = current.char
- local lookupname = subtables[1] -- only 1
- local replacement = lookuphash[lookupname]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- replacement = replacement[currentchar]
- if not replacement or replacement == "" then
- if trace_bugs then
- logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
- end
- else
- if trace_singles then
- logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
- end
- current.char = replacement
- end
- end
- return head, start, true
- elseif current == stop then
- break
- else
- current = current.next
- end
- end
- return head, start, false
-end
-
-chainmores.gsub_single = chainprocs.gsub_single
-
---[[ldx--
-<p>Here we replace start by a sequence of new glyphs. First we delete the rest of
-the match.</p>
---ldx]]--
-
-function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- -- local head, n = delete_till_stop(head,start,stop)
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local replacements = lookuphash[lookupname]
- if not replacements then
- if trace_bugs then
- logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- replacements = replacements[startchar]
-  if not replacements or replacements == "" then
- if trace_bugs then
- logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
- end
- else
- if trace_multiples then
- logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
- end
- return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
- end
- end
- return head, start, false
-end
-
-chainmores.gsub_multiple = chainprocs.gsub_multiple
-
---[[ldx--
-<p>Here we replace start by a new glyph. First we delete the rest of the match.</p>
---ldx]]--
-
--- char_1 mark_1 -> char_x mark_1 (ignore marks)
--- char_1 mark_1 -> char_x
-
--- to be checked: do we always have just one glyph?
--- we can also have alternates for marks
--- marks come last anyway
--- are there cases where we need to delete the mark
-
-function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local current = start
- local subtables = currentlookup.subtables
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- while current do
- if current.id == glyph_code then -- is this check needed?
- local currentchar = current.char
- local lookupname = subtables[1]
- local alternatives = lookuphash[lookupname]
- if not alternatives then
- if trace_bugs then
- logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- alternatives = alternatives[currentchar]
- if alternatives then
- local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives)
- if choice then
- if trace_alternatives then
-       logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),choice,gref(choice),comment)
- end
- start.char = choice
- else
- if trace_alternatives then
-       logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(currentchar),comment)
- end
- end
- elseif trace_bugs then
- logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
- end
- end
- return head, start, true
- elseif current == stop then
- break
- else
- current = current.next
- end
- end
- return head, start, false
-end
-
-chainmores.gsub_alternate = chainprocs.gsub_alternate
-
---[[ldx--
-<p>When we replace ligatures we use a helper that handles the marks. I might change
-this function (move code inline and handle the marks by a separate function). We
-assume rather stupid ligatures (no complex disc nodes).</p>
---ldx]]--
-
-function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local ligatures = lookuphash[lookupname]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- ligatures = ligatures[startchar]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- end
- else
- local s = start.next
- local discfound = false
- local last = stop
- local nofreplacements = 0
- local skipmark = currentlookup.flags[1]
- while s do
- local id = s.id
- if id == disc_code then
- s = s.next
- discfound = true
- else
- local schar = s.char
- if skipmark and marks[schar] then -- marks
- s = s.next
- else
- local lg = ligatures[schar]
- if lg then
- ligatures, last, nofreplacements = lg, s, nofreplacements + 1
- if s == stop then
- break
- else
- s = s.next
- end
- else
- break
- end
- end
- end
- end
- local l2 = ligatures.ligature
- if l2 then
- if chainindex then
- stop = last
- end
- if trace_ligatures then
- if start == stop then
- logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
- else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
- end
- end
- head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
- return head, start, true, nofreplacements
- elseif trace_bugs then
- if start == stop then
- logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
- end
- end
- end
- end
- return head, start, false, 0
-end
-
-chainmores.gsub_ligature = chainprocs.gsub_ligature
-
-function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
- end
- return head, start, false
- end
- end
- end
-    -- todo: like marks, use a ligatures hash
- local index = start[a_ligacomp]
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
-            cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head, start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- -- local alreadydone = markonce and start[a_markmark]
- -- if not alreadydone then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = base[a_ligacomp]
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
- end
- end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
-       logwarning("%s: no matching anchors for mark %s and basemark %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- -- elseif trace_marks and trace_details then
- -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
- -- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and start[a_cursbase]
- if not alreadydone then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local exitanchors = lookuphash[lookupname]
- if exitanchors then
- exitanchors = exitanchors[startchar]
- end
- if exitanchors then
- local done = false
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return head, start, false
- end
- end
- return head, start, false
-end
-
-function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- -- untested .. needs checking for the new model
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar] -- needed ?
- if kerns then
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
- end
- end
- end
- return head, start, false
-end
-
-chainmores.gpos_single = chainprocs.gpos_single -- okay?
-
--- when machines become faster I will make a shared function
-
-function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local snext = start.next
- if snext then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar]
- if kerns then
- local lookuptype = lookuptypes[lookupname]
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
- local a, b = krn[2], krn[6]
- if a and a ~= 0 then
- local k = setkern(snext,factor,rlmode,a)
- if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- end
- if b and b ~= 0 then
- logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return head, start, done
- end
- end
- end
- return head, start, false
-end
-
-chainmores.gpos_pair = chainprocs.gpos_pair -- okay?
-
--- what pointer to return, spec says stop
--- to be discussed ... is bidi changer a space?
--- elseif char == zwnj and sequence[n][32] then -- brrr
-
--- somehow l or f is global
--- we don't need to pass the currentcontext, saves a bit
--- make a slow variant that can be activated but with more tracing
-
-local function show_skip(kind,chainname,char,ck,class)
- if ck[9] then
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
- else
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
- end
-end
-
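--- A sketch (not part of this module) of one context rule (ck) as used below, put
--- together from the comment at the top of the function and the slots referenced in
--- the code:
---
---   ck[1] the rule (used in tracing)     ck[2] lookuptype
---   ck[3] list of character sets         ck[4]/ck[5] first and last index (f,l) of the current part
---   ck[6] chain lookups to apply         ck[7] replacements (reversesub only)
---   ck[9]/ck[10] extra info that only shows up in trace messages
-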
-local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
- -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
- local flags = sequence.flags
- local done = false
- local skipmark = flags[1]
- local skipligature = flags[2]
- local skipbase = flags[3]
- local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
- local markclass = sequence.markclass -- todo, first we need a proper test
- local skipped = false
- for k=1,#contexts do
- local match = true
- local current = start
- local last = start
- local ck = contexts[k]
- local seq = ck[3]
- local s = #seq
- -- f..l = mid string
- if s == 1 then
- -- never happens
- match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
- else
- -- maybe we need a better space check (maybe check for glue or category or combination)
- -- we cannot optimize for n=2 because there can be disc nodes
- local f, l = ck[4], ck[5]
- -- current match
- if f == 1 and f == l then -- current only
- -- already a hit
- -- match = true
- else -- before/current/after | before/current | current/after
- -- no need to test first hit (to be optimized)
- if f == l then -- new, else last out of sync (f is > 1)
- -- match = true
- else
- local n = f + 1
- last = last.next
- while n <= l do
- if last then
- local id = last.id
- if id == glyph_code then
- if last.font == currentfont and last.subtype<256 then
- local char = last.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- last = last.next
- elseif seq[n][char] then
- if n < l then
- last = last.next
- end
- n = n + 1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- last = last.next
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- end
- end
- end
- -- before
- if match and f > 1 then
- local prev = start.prev
- if prev then
- local n = f-1
- while n >= 1 do
- if prev then
- local id = prev.id
- if id == glyph_code then
- if prev.font == currentfont and prev.subtype<256 then -- normal char
- local char = prev.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n -1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- -- skip 'm
- elseif seq[n][32] then
- n = n -1
- else
- match = false
- break
- end
- prev = prev.prev
- elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
- n = n -1
- else
- match = false
- break
- end
- end
- elseif f == 2 then
- match = seq[1][32]
- else
-     for n=f-1,1,-1 do
- if not seq[n][32] then
- match = false
- break
- end
- end
- end
- end
- -- after
- if match and s > l then
- local current = last and last.next
- if current then
- -- removed optimization for s-l == 1, we have to deal with marks anyway
- local n = l + 1
- while n <= s do
- if current then
- local id = current.id
- if id == glyph_code then
- if current.font == currentfont and current.subtype<256 then -- normal char
- local char = current.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n + 1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- -- skip 'm
- elseif seq[n][32] then -- brrr
- n = n + 1
- else
- match = false
- break
- end
- current = current.next
- elseif seq[n][32] then
- n = n + 1
- else
- match = false
- break
- end
- end
- elseif s-l == 1 then
- match = seq[s][32]
- else
- for n=l+1,s do
- if not seq[n][32] then
- match = false
- break
- end
- end
- end
- end
- end
- if match then
- -- ck == currentcontext
- if trace_contexts then
- local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = start.char
- if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
- else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
- end
- end
- local chainlookups = ck[6]
- if chainlookups then
- local nofchainlookups = #chainlookups
- -- we can speed this up if needed
- if nofchainlookups == 1 then
- local chainlookupname = chainlookups[1]
- local chainlookup = lookuptable[chainlookupname]
- if chainlookup then
- local cp = chainprocs[chainlookup.type]
- if cp then
- local ok
- head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- if ok then
- done = true
- end
- else
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- end
- else -- shouldn't happen
- logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
- end
- else
- local i = 1
- repeat
- if skipped then
- while true do
- local char = start.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = start.next
- else
- break
- end
- else
- break
- end
- end
- end
- local chainlookupname = chainlookups[i]
- local chainlookup = lookuptable[chainlookupname]
- if not chainlookup then
- -- okay, n matches, < n replacements
- i = i + 1
- else
- local cp = chainmores[chainlookup.type]
- if not cp then
- -- actually an error
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- i = i + 1
- else
- local ok, n
- head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
- -- messy since last can be changed !
- if ok then
- done = true
- -- skip next one(s) if ligature
- i = i + (n or 1)
- else
- i = i + 1
- end
- end
- end
- if start then
- start = start.next
- else
- -- weird
- end
- until i > nofchainlookups
- end
- else
- local replacements = ck[7]
- if replacements then
- head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
- else
- done = true -- can be meant to be skipped
- if trace_contexts then
- logprocess("%s: skipping match",cref(kind,chainname))
- end
- end
- end
- end
- end
- return head, start, done
-end
-
--- Because we want to keep this elsewhere (and because speed is less of an issue) we
--- pass the font id so that the verbose variant can access the relevant helper tables.
-
-local verbose_handle_contextchain = function(font,...)
- logwarning("no verbose handler installed, reverting to 'normal'")
- otf.setcontextchain()
- return normal_handle_contextchain(...)
-end
-
-otf.chainhandlers = {
- normal = normal_handle_contextchain,
- verbose = verbose_handle_contextchain,
-}
-
-function otf.setcontextchain(method)
- if not method or method == "normal" or not otf.chainhandlers[method] then
- if handlers.contextchain then -- no need for a message while making the format
- logwarning("installing normal contextchain handler")
- end
- handlers.contextchain = normal_handle_contextchain
- else
- logwarning("installing contextchain handler %a",method)
- local handler = otf.chainhandlers[method]
- handlers.contextchain = function(...)
- return handler(currentfont,...) -- hm, get rid of ...
- end
- end
- handlers.gsub_context = handlers.contextchain
- handlers.gsub_contextchain = handlers.contextchain
- handlers.gsub_reversecontextchain = handlers.contextchain
- handlers.gpos_contextchain = handlers.contextchain
- handlers.gpos_context = handlers.contextchain
-end
-
-otf.setcontextchain()
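-
--- Usage note: calling otf.setcontextchain() with no argument (as above) or with an
--- unknown method name installs the normal handler; a registered name such as
--- "verbose" installs a wrapper that also passes the current font id to the handler.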
-
-local missing = { } -- we only report once
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_process(...)
-end
-
-local logwarning = report_process
-
-local function report_missing_cache(typ,lookup)
- local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
- local t = f[typ] if not t then t = { } f[typ] = t end
- if not t[lookup] then
- t[lookup] = true
- logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
- end
-end
-
-local resolved = { } -- we only resolve a font,script,language pair once
-
--- todo: pass all these 'locals' in a table
-
-local lookuphashes = { }
-
-setmetatableindex(lookuphashes, function(t,font)
- local lookuphash = fontdata[font].resources.lookuphash
- if not lookuphash or not next(lookuphash) then
- lookuphash = false
- end
- t[font] = lookuphash
- return lookuphash
-end)
-
--- fonts.hashes.lookups = lookuphashes
-
-local autofeatures = fonts.analyzers.features -- was: constants
-
-local function initialize(sequence,script,language,enabled)
- local features = sequence.features
- if features then
- for kind, scripts in next, features do
- local valid = enabled[kind]
- if valid then
- local languages = scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
- end
- end
- end
- end
- return false
-end
-
-function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
- local shared = tfmdata.shared
- local properties = tfmdata.properties
- local language = properties.language or "dflt"
- local script = properties.script or "dflt"
- local enabled = shared.features
- local res = resolved[font]
- if not res then
- res = { }
- resolved[font] = res
- end
- local rs = res[script]
- if not rs then
- rs = { }
- res[script] = rs
- end
- local rl = rs[language]
- if not rl then
- rl = {
- -- indexed but we can also add specific data by key
- }
- rs[language] = rl
- local sequences = tfmdata.resources.sequences
--- setmetatableindex(rl, function(t,k)
--- if type(k) == "number" then
--- local v = enabled and initialize(sequences[k],script,language,enabled)
--- t[k] = v
--- return v
--- end
--- end)
-  for s=1,#sequences do
-   local v = enabled and initialize(sequences[s],script,language,enabled)
-   if v then
-    rl[#rl+1] = v
-   end
-  end
- end
- return rl
-end
-
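--- A sketch (not part of this module, values invented) of one entry in the list that
--- otf.dataset returns, as built by initialize() above and consumed by the processor
--- below:
---
--- local sequence = { type = "gsub_ligature", chain = 0, subtables = { "demo" } }
--- local dataset  = { true, false, sequence.chain or 0, "liga", sequence }
--- -- [1] feature value, [2] analyzer attribute or false, [3] chain, [4] feature name (kind), [5] the sequence
-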
--- elseif id == glue_code then
--- if p[5] then -- chain
--- local pc = pp[32]
--- if pc then
--- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
--- if ok then
--- done = true
--- end
--- if start then start = start.next end
--- else
--- start = start.next
--- end
--- else
--- start = start.next
--- end
-
--- there will be a new direction parser (pre-parsed etc)
-
--- less bytecode: 290 -> 254
---
--- attr = attr or false
---
--- local a = getattr(start,0)
--- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then
--- -- the action
--- end
-
-local function featuresprocessor(head,font,attr)
-
- local lookuphash = lookuphashes[font] -- we can also check sequences here
-
- if not lookuphash then
- return head, false
- end
-
- if trace_steps then
- checkstep(head)
- end
-
- tfmdata = fontdata[font]
- descriptions = tfmdata.descriptions
- characters = tfmdata.characters
- resources = tfmdata.resources
-
- marks = resources.marks
- anchorlookups = resources.lookup_to_anchor
- lookuptable = resources.lookups
- lookuptypes = resources.lookuptypes
-
- currentfont = font
- rlmode = 0
-
- local sequences = resources.sequences
- local done = false
- local datasets = otf.dataset(tfmdata,font,attr)
-
- local dirstack = { } -- could move outside function
-
- -- We could work on sub start-stop ranges instead but I wonder if there is that
- -- much speed gain (experiments showed that it didn't make much sense) and we need
- -- to keep track of directions anyway. Also at some point I want to play with
- -- font interactions and then we do need the full sweeps.
-
- -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
- -- so that multiple cases are also covered.)
-
- for s=1,#datasets do
- local dataset = datasets[s]
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
-
- local sequence = dataset[5] -- sequences[s] -- also dataset[5]
- local rlparmode = 0
- local topstack = 0
- local success = false
- local attribute = dataset[2]
- local chain = dataset[3] -- sequence.chain or 0
- local typ = sequence.type
- local subtables = sequence.subtables
- if chain < 0 then
- -- this is a limited case, no special treatments like 'init' etc
- local handler = handlers[typ]
- -- we need to get rid of this slide! probably no longer needed in latest luatex
- local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = a == attr
- else
- a = true
- end
- if a then
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.prev end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- end
- else
- local handler = handlers[typ]
- local ns = #subtables
- local start = head -- local ?
- rlmode = 0 -- to be checked ?
- if ns == 1 then -- happens often
- local lookupname = subtables[1]
- local lookupcache = lookuphash[lookupname]
- if not lookupcache then -- also check for empty cache
- report_missing_cache(typ,lookupname)
- else
-
- local function subrun(start)
- -- mostly for gsub, gpos would demand a more clever approach
- local head = start
- local done = false
- while start do
- local id = start.id
- if id == glyph_code and start.font == font and start.subtype <256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
-          -- sequence can be dropped
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done = true
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- end
- if done then
- success = true
- return head
- end
- end
-
- local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = disc.prev
- local next = disc.next
- if prev and next then
- prev.next = next
- -- next.prev = prev
- local a = prev[0]
- if a then
- a = (a == attr) and (not attribute or prev[a_state] == attribute)
- else
- a = not attribute or prev[a_state] == attribute
- end
- if a then
- local lookupmatch = lookupcache[prev.char]
- if lookupmatch then
-         -- sequence can be dropped
- local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done = true
- success = true
- end
- end
- end
- prev.next = disc
- -- next.prev = disc
- end
- return next
- end
-
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
-          -- sequence can be dropped
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success = true
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == disc_code then
- -- mostly for gsub
- if start.subtype == discretionary_code then
- local pre = start.pre
- if pre then
- local new = subrun(pre)
- if new then start.pre = new end
- end
- local post = start.post
- if post then
- local new = subrun(post)
- if new then start.post = new end
- end
- local replace = start.replace
- if replace then
- local new = subrun(replace)
- if new then start.replace = new end
- end
-       elseif typ == "gpos_single" or typ == "gpos_pair" then
-        kerndisc(start)
- end
- start = start.next
- elseif id == whatsit_code then -- will be function
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- -- one might wonder if the par dir should be looked at, so we might as well drop the next line
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- end
- start = start.next
- elseif id == math_code then
- start = end_of_math(start).next
- else
- start = start.next
- end
- end
- end
- else
-
- local function subrun(start)
- -- mostly for gsub, gpos would demand a more clever approach
- local head = start
- local done = false
- while start do
- local id = start.id
-      if id == glyph_code and start.font == font and start.subtype <256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- end
- if done then
- success = true
- return head
- end
- end
-
- local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = disc.prev
- local next = disc.next
- if prev and next then
- prev.next = next
- -- next.prev = prev
- local a = prev[0]
- if a then
- a = (a == attr) and (not attribute or prev[a_state] == attribute)
- else
- a = not attribute or prev[a_state] == attribute
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[prev.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done = true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- end
- prev.next = disc
- -- next.prev = disc
- end
- return next
- end
-
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == disc_code then
- -- mostly for gsub
- if start.subtype == discretionary_code then
- local pre = start.pre
- if pre then
- local new = subrun(pre)
- if new then start.pre = new end
- end
- local post = start.post
- if post then
- local new = subrun(post)
- if new then start.post = new end
- end
- local replace = start.replace
- if replace then
- local new = subrun(replace)
- if new then start.replace = new end
- end
-      elseif typ == "gpos_single" or typ == "gpos_pair" then
-       kerndisc(start)
- end
- start = start.next
- elseif id == whatsit_code then
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- end
- start = start.next
- elseif id == math_code then
- start = end_of_math(start).next
- else
- start = start.next
- end
- end
- end
- end
- if success then
- done = true
- end
- if trace_steps then -- ?
- registerstep(head)
- end
- end
- return head, done
-end
-
-local function generic(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if target then
- target[unicode] = lookupdata
- else
- lookuphash[lookupname] = { [unicode] = lookupdata }
- end
-end
-
-local action = {
-
- substitution = generic,
- multiple = generic,
- alternate = generic,
- position = generic,
-
- ligature = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- for i=1,#lookupdata do
- local li = lookupdata[i]
- local tu = target[li]
- if not tu then
- tu = { }
- target[li] = tu
- end
- target = tu
- end
- target.ligature = unicode
- end,
-
- pair = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- local others = target[unicode]
- local paired = lookupdata[1]
- if others then
- others[paired] = lookupdata
- else
- others = { [paired] = lookupdata }
- target[unicode] = others
- end
- end,
-
-}
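-- A minimal standalone sketch of the nested table that the ligature action
-- above builds; the lookup name "s_s_l_1" and the codepoints for f, i and the
-- f_i ligature are invented example values.

local lookuphash = { }

local function add_ligature(lookupname,components,ligature)
    local target = lookuphash[lookupname]
    if not target then
        target = { }
        lookuphash[lookupname] = target
    end
    for i=1,#components do
        local c  = components[i]
        local tu = target[c]
        if not tu then
            tu = { }
            target[c] = tu
        end
        target = tu
    end
    target.ligature = ligature
end

add_ligature("s_s_l_1",{ 0x0066, 0x0069 },0xFB01) -- f + i -> fi

-- at runtime the tree is walked glyph by glyph until a .ligature is found
assert(lookuphash["s_s_l_1"][0x0066][0x0069].ligature == 0xFB01)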
-
-local function prepare_lookups(tfmdata)
-
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local anchor_to_lookup = resources.anchor_to_lookup
- local lookup_to_anchor = resources.lookup_to_anchor
- local lookuptypes = resources.lookuptypes
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
-
- -- we cannot free the entries in the descriptions as sometimes we access
- -- them directly (for instance anchors) ... selectively freeing doesn't save
- -- much memory as it's only a reference to a table and the slot in the
- -- description hash is not freed anyway
-
- for unicode, character in next, characters do -- we cannot loop over descriptions !
-
- local description = descriptions[unicode]
-
- if description then
-
- local lookups = description.slookups
- if lookups then
- for lookupname, lookupdata in next, lookups do
- action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
- end
- end
-
- local lookups = description.mlookups
- if lookups then
- for lookupname, lookuplist in next, lookups do
- local lookuptype = lookuptypes[lookupname]
- for l=1,#lookuplist do
- local lookupdata = lookuplist[l]
- action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
- end
- end
- end
-
- local list = description.kerns
- if list then
- for lookup, krn in next, list do -- ref to glyph, saves lookup
- local target = lookuphash[lookup]
- if target then
- target[unicode] = krn
- else
- lookuphash[lookup] = { [unicode] = krn }
- end
- end
- end
-
- local list = description.anchors
- if list then
- for typ, anchors in next, list do -- types
- if typ == "mark" or typ == "cexit" then -- or entry?
- for name, anchor in next, anchors do
- local lookups = anchor_to_lookup[name]
- if lookups then
- for lookup, _ in next, lookups do
- local target = lookuphash[lookup]
- if target then
- target[unicode] = anchors
- else
- lookuphash[lookup] = { [unicode] = anchors }
- end
- end
- end
- end
- end
- end
- end
-
- end
-
- end
-
-end
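-- A sketch (lookup names invented) of what lookuphash holds after the loop
-- above: each lookup name maps to a table keyed by unicode, and the value
-- depends on the lookup type handled by the action table and the kern and
-- anchor branches:
--
--   lookuphash["s_s_s_1"][0x0041] = <substitution data>                       -- generic gsub/gpos
--   lookuphash["s_s_l_1"][0x0066] = { [0x0069] = { ligature = 0xFB01 } }      -- ligature tree
--   lookuphash["p_s_p_1"][0x0041] = { [0x0056] = <pair data> }                -- pair positioning
--   lookuphash["k_k_k_1"][0x0041] = <kern table>                              -- description.kerns
--   lookuphash["m_m_m_1"][0x0041] = <anchor set>                              -- mark/cexit anchors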
-
-local function split(replacement,original)
- local result = { }
- for i=1,#replacement do
- result[original[i]] = replacement[i]
- end
- return result
-end
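-- For example (invented codepoints): split({ 0x0061, 0x0062 },{ 0x0041, 0x0042 })
-- yields { [0x0041] = 0x0061, [0x0042] = 0x0062 }, i.e. each original glyph
-- keys its positional replacement.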
-
-local valid = {
- coverage = { chainsub = true, chainpos = true, contextsub = true },
- reversecoverage = { reversesub = true },
- glyphs = { chainsub = true, chainpos = true },
-}
-
-local function prepare_contextchains(tfmdata)
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local lookups = rawdata.lookups
- if lookups then
- for lookupname, lookupdata in next, rawdata.lookups do
- local lookuptype = lookupdata.type
- if lookuptype then
- local rules = lookupdata.rules
- if rules then
- local format = lookupdata.format
- local validformat = valid[format]
- if not validformat then
- report_prepare("unsupported format %a",format)
- elseif not validformat[lookuptype] then
- -- todo: dejavu-serif has one (but I need to see what use it has)
- report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current = rule.current
- local before = rule.before
- local after = rule.after
- local replacements = rule.replacements
- local sequence = { }
- local nofsequences = 0
- -- Eventually we can store start, stop and sequence in the cached file,
- -- but then less sharing takes place, so it's best not to do that without
- -- a lot of profiling; for now let's forget about it.
- if before then
- for n=1,#before do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = before[n]
- end
- end
- local start = nofsequences + 1
- for n=1,#current do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = current[n]
- end
- local stop = nofsequences
- if after then
- for n=1,#after do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = after[n]
- end
- end
- if sequence[1] then
- -- Replacements only happen with reverse lookups, as those are single
- -- substitutions. We could pack them into current (replacement value instead
- -- of true) and then use sequence[start] instead, but it's somewhat ugly.
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
- end
- else
- -- no rules
- end
- else
- report_prepare("missing lookuptype for lookupname %a",lookupname)
- end
- end
- end
-end
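-- A sketch (with invented glyph values) of one packed rule as stored in t
-- above; the slots follow the constructor
-- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }:
--
--   {
--     1,                      -- rule number within the lookup
--     "chainsub",             -- lookuptype
--     {                       -- sequence: before .. current .. after coverage
--       { [0x0066] = true },  --   before
--       { [0x0069] = true },  --   current (this is sequence[start])
--       { [0x006C] = true },  --   after
--     },
--     2,                      -- start: index of the first current slot
--     2,                      -- stop:  index of the last current slot
--     rule.lookups,           -- sub lookups applied to the current glyphs
--     replacements,           -- only set for reverse lookups
--   }
--
-- every unicode covered by sequence[start] then shares the same rule list t
-- via contexts[unic] = t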
-
--- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
-
-local function featuresinitializer(tfmdata,value)
- if true then -- value then
- -- beware we need to use the topmost properties table
- local rawdata = tfmdata.shared.rawdata
- local properties = rawdata.properties
- if not properties.initialized then
- local starttime = trace_preparing and os.clock()
- local resources = rawdata.resources
- resources.lookuphash = resources.lookuphash or { }
- prepare_contextchains(tfmdata)
- prepare_lookups(tfmdata)
- properties.initialized = true
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
- end
- end
- end
-end
-
-registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- position = 1,
- node = featuresinitializer,
- },
- processors = {
- node = featuresprocessor,
- }
-}
-
--- This can be used for extra handlers, but should be used with care!
-
-otf.handlers = handlers
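-- A sketch, under the caveat above, of what hooking in an extra handler could
-- look like; the key "gsub_example" is invented and the parameter list simply
-- mirrors the call handler(head,start,dataset[4],lookupname,lookupmatch,
-- sequence,lookuphash,i) made in featuresprocessor, returning head, start and
-- a success flag.

otf.handlers.gsub_example = function(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
    -- inspect or rewrite the node list at 'start' here and return true as the
    -- third value when something was actually done
    return head, start, false
end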
diff --git a/tex/generic/context/luatex/luatex-fonts.lua b/tex/generic/context/luatex/luatex-fonts.lua
index 5e5c9a4cf..7995be33e 100644
--- a/tex/generic/context/luatex/luatex-fonts.lua
+++ b/tex/generic/context/luatex/luatex-fonts.lua
@@ -210,9 +210,9 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
loadmodule('font-oti.lua')
loadmodule('font-otf.lua')
loadmodule('font-otb.lua')
- loadmodule('luatex-fonts-inj.lua') -- will be replaced (luatex >= .80)
+ loadmodule('node-inj.lua') -- will be replaced (luatex >= .70)
loadmodule('font-ota.lua')
- loadmodule('luatex-fonts-otn.lua')
+ loadmodule('font-otn.lua')
loadmodule('font-otp.lua') -- optional
loadmodule('luatex-fonts-lua.lua')
loadmodule('font-def.lua')