From c36e19abdfd15bf6cae6fa379c6ce51f3ef5332d Mon Sep 17 00:00:00 2001
From: Marius
Date: Fri, 19 Oct 2012 01:21:22 +0300
Subject: beta 2012.10.19 00:06
---
bibtex/bst/context/cont-ab.bst | 1 +
bibtex/bst/context/cont-au.bst | 1 +
bibtex/bst/context/cont-no.bst | 1 +
bibtex/bst/context/cont-ti.bst | 1 +
.../lexers/data/scite-context-data-context.lua | 4 +-
.../lexers/data/scite-context-data-metafun.lua | 2 +-
.../data/scite/lexers/scite-context-lexer-tex.lua | 12 +-
.../data/scite/lexers/scite-context-lexer-txt.lua | 38 +-
.../data/scite/lexers/scite-context-lexer-xml.lua | 17 +-
context/data/scite/lexers/scite-context-lexer.lua | 47 +-
.../scite/scite-context-data-context.properties | 237 +-
.../scite/scite-context-data-metafun.properties | 34 +-
context/data/scite/scite-context-readme.pdf | Bin 205340 -> 206760 bytes
context/data/scite/scite-context-readme.tex | 2 +-
context/data/scite/scite-context.properties | 16 +-
metapost/context/base/metafun.mpiv | 7 +-
metapost/context/base/mp-abck.mpiv | 2 +-
metapost/context/base/mp-apos.mpiv | 2 +-
metapost/context/base/mp-asnc.mpiv | 2 +-
metapost/context/base/mp-butt.mpiv | 2 +-
metapost/context/base/mp-char.mpiv | 2 +-
metapost/context/base/mp-chem.mpiv | 350 +-
metapost/context/base/mp-core.mpiv | 2 +-
metapost/context/base/mp-crop.mpiv | 14 +-
metapost/context/base/mp-figs.mpiv | 2 +-
metapost/context/base/mp-form.mpiv | 2 +-
metapost/context/base/mp-func.mpiv | 2 +-
metapost/context/base/mp-grap.mpiv | 204 +
metapost/context/base/mp-grid.mpiv | 2 +-
metapost/context/base/mp-grph.mpiv | 2 +-
metapost/context/base/mp-mlib.mpiv | 29 +-
metapost/context/base/mp-page.mpiv | 2 +-
metapost/context/base/mp-shap.mpiv | 2 +-
metapost/context/base/mp-step.mpiv | 2 +-
metapost/context/base/mp-text.mpiv | 2 +-
metapost/context/base/mp-tool.mpiv | 4 +-
scripts/context/lua/mtx-context.lua | 1518 ++--
scripts/context/lua/mtx-convert.lua | 4 +-
scripts/context/lua/mtx-epub.lua | 113 +-
scripts/context/lua/mtx-fcd.lua | 366 +
scripts/context/lua/mtx-flac.lua | 6 -
scripts/context/lua/mtx-fonts.lua | 4 +-
scripts/context/lua/mtx-grep.lua | 4 +-
scripts/context/lua/mtx-metapost.lua | 2 +-
scripts/context/lua/mtx-pdf.lua | 142 +-
scripts/context/lua/mtx-scite.lua | 25 +-
scripts/context/lua/mtx-server-ctx-help.lua | 215 +-
scripts/context/lua/mtx-server.lua | 41 +-
scripts/context/lua/mtx-tools.lua | 2 +-
scripts/context/lua/mtx-update.lua | 4 +-
scripts/context/lua/mtx-watch.lua | 11 -
scripts/context/lua/mtxrun.lua | 2679 ++++---
scripts/context/ruby/fcd_start.rb | 472 --
scripts/context/stubs/mswin/mtxrun.lua | 2679 ++++---
scripts/context/stubs/unix/mtxrun | 2679 ++++---
tex/context/base/anch-bar.mkiv | 150 +-
tex/context/base/anch-bck.mkvi | 14 +-
tex/context/base/anch-pgr.lua | 248 +-
tex/context/base/anch-pgr.mkiv | 120 +-
tex/context/base/anch-pos.lua | 18 +-
tex/context/base/anch-pos.mkiv | 8 +-
tex/context/base/anch-tab.mkiv | 186 +-
tex/context/base/attr-col.lua | 71 +-
tex/context/base/attr-eff.lua | 39 +-
tex/context/base/attr-eff.mkiv | 43 +-
tex/context/base/attr-ini.lua | 36 +-
tex/context/base/attr-ini.mkiv | 12 +-
tex/context/base/attr-lay.lua | 98 +-
tex/context/base/attr-lay.mkiv | 82 +-
tex/context/base/attr-neg.lua | 5 +-
tex/context/base/attr-neg.mkiv | 10 +-
tex/context/base/back-exp.lua | 24 +-
tex/context/base/back-exp.mkiv | 26 +-
tex/context/base/back-ini.lua | 4 +-
tex/context/base/back-ini.mkiv | 6 +-
tex/context/base/back-pdf.mkiv | 28 +-
tex/context/base/back-swf.mkiv | 14 +-
tex/context/base/back-u3d.mkiv | 7 +-
tex/context/base/bibl-bib.mkiv | 2 +-
tex/context/base/bibl-tra.lua | 4 +-
tex/context/base/bibl-tra.mkiv | 25 +-
tex/context/base/blob-ini.lua | 2 +-
tex/context/base/buff-ini.lua | 39 +-
tex/context/base/buff-ini.mkiv | 24 +
tex/context/base/buff-par.lua | 27 +-
tex/context/base/buff-par.mkiv | 151 -
tex/context/base/buff-par.mkvi | 131 +
tex/context/base/buff-ver.lua | 6 +-
tex/context/base/buff-ver.mkiv | 30 +-
tex/context/base/catc-ctx.mkiv | 11 +-
tex/context/base/catc-def.mkiv | 7 +-
tex/context/base/catc-ini.lua | 21 +-
tex/context/base/catc-ini.mkiv | 64 +-
tex/context/base/char-cjk.lua | 23 +-
tex/context/base/char-def.lua | 80 +-
tex/context/base/char-enc.lua | 41 +-
tex/context/base/char-ent.lua | 5 +-
tex/context/base/char-ini.lua | 86 +-
tex/context/base/char-map.lua | 3 +-
tex/context/base/char-utf.lua | 21 +-
tex/context/base/chem-ini.lua | 8 +-
tex/context/base/chem-str.lua | 134 +-
tex/context/base/chem-str.mkiv | 216 +-
tex/context/base/cldf-bas.lua | 52 +-
tex/context/base/cldf-com.lua | 4 +-
tex/context/base/cldf-ini.lua | 26 +-
tex/context/base/cldf-int.lua | 11 +-
tex/context/base/cldf-ver.lua | 6 +-
tex/context/base/colo-ext.mkiv | 10 +-
tex/context/base/colo-icc.lua | 2 +-
tex/context/base/colo-ini.lua | 124 +-
tex/context/base/colo-ini.mkiv | 34 +-
tex/context/base/colo-run.lua | 6 +-
tex/context/base/colo-run.mkiv | 28 +-
tex/context/base/cont-log.mkiv | 10 +-
tex/context/base/cont-new.mkii | 2 +-
tex/context/base/cont-new.mkiv | 497 +-
tex/context/base/cont-nop.mkiv | 22 +
tex/context/base/cont-yes.mkiv | 80 +
tex/context/base/context-base.lmx | 4 +-
tex/context/base/context-help.lmx | 1 +
tex/context/base/context-version.pdf | Bin 4128 -> 4140 bytes
tex/context/base/context-version.png | Bin 105721 -> 105499 bytes
tex/context/base/context.mkii | 2 +-
tex/context/base/context.mkiv | 57 +-
tex/context/base/context.rme | 7 +-
tex/context/base/context.todo | 6 -
tex/context/base/core-con.lua | 153 +-
tex/context/base/core-con.mkiv | 177 +-
tex/context/base/core-ctx.ctx | 23 +
tex/context/base/core-ctx.lua | 331 +-
tex/context/base/core-ctx.mkiv | 18 +-
tex/context/base/core-dat.lua | 60 +-
tex/context/base/core-def.mkiv | 126 +-
tex/context/base/core-env.lua | 7 +-
tex/context/base/core-env.mkiv | 115 +-
tex/context/base/core-fnt.mkiv | 158 -
tex/context/base/core-ini.mkiv | 228 +-
tex/context/base/core-mis.mkiv | 738 --
tex/context/base/core-sys.lua | 90 +-
tex/context/base/core-sys.mkiv | 80 +-
tex/context/base/core-two.lua | 2 +-
tex/context/base/core-uti.lua | 22 +-
tex/context/base/core-var.mkiv | 239 -
tex/context/base/data-env.lua | 6 +-
tex/context/base/data-exp.lua | 8 +-
tex/context/base/data-ini.lua | 2 +-
tex/context/base/data-lua.lua | 263 +-
tex/context/base/data-pre.lua | 44 +-
tex/context/base/data-res.lua | 59 +-
tex/context/base/data-sch.lua | 68 +-
tex/context/base/data-tex.lua | 7 +
tex/context/base/data-vir.lua | 8 +-
tex/context/base/data-zip.lua | 16 +-
tex/context/base/enco-ini.mkiv | 111 +-
tex/context/base/file-ini.lua | 5 +-
tex/context/base/file-ini.mkvi | 20 +-
tex/context/base/file-job.lua | 348 +-
tex/context/base/file-job.mkvi | 25 +-
tex/context/base/file-lib.lua | 7 +-
tex/context/base/file-mod.lua | 16 +-
tex/context/base/file-res.lua | 6 +-
tex/context/base/file-syn.lua | 7 +-
tex/context/base/font-afk.lua | 200 +
tex/context/base/font-afm.lua | 372 +-
tex/context/base/font-age.lua | 1 +
tex/context/base/font-agl.lua | 2 +-
tex/context/base/font-aux.lua | 2 +-
tex/context/base/font-aux.mkvi | 26 +
tex/context/base/font-chk.lua | 2 +-
tex/context/base/font-chk.mkiv | 22 +
tex/context/base/font-cid.lua | 17 +-
tex/context/base/font-col.lua | 96 +-
tex/context/base/font-con.lua | 30 +-
tex/context/base/font-ctx.lua | 272 +-
tex/context/base/font-def.lua | 23 +-
tex/context/base/font-ext.lua | 38 +-
tex/context/base/font-fbk.lua | 23 +-
tex/context/base/font-gds.lua | 59 +-
tex/context/base/font-gds.mkiv | 83 -
tex/context/base/font-gds.mkvi | 83 +
tex/context/base/font-ini.lua | 24 +-
tex/context/base/font-ldr.lua | 4 +-
tex/context/base/font-lib.mkvi | 4 +-
tex/context/base/font-map.lua | 12 +-
tex/context/base/font-mat.mkvi | 1 +
tex/context/base/font-mis.lua | 2 +-
tex/context/base/font-ota.lua | 83 +-
tex/context/base/font-otb.lua | 37 +-
tex/context/base/font-otc.lua | 3 +-
tex/context/base/font-otd.lua | 31 +-
tex/context/base/font-otf.lua | 96 +-
tex/context/base/font-oti.lua | 13 +-
tex/context/base/font-otn.lua | 338 +-
tex/context/base/font-otp.lua | 53 +-
tex/context/base/font-ott.lua | 171 +-
tex/context/base/font-pat.lua | 14 +-
tex/context/base/font-pre.mkiv | 30 +-
tex/context/base/font-run.mkiv | 47 +-
tex/context/base/font-set.mkvi | 2 +-
tex/context/base/font-sol.lua | 898 +++
tex/context/base/font-sol.mkvi | 123 +
tex/context/base/font-sty.mkvi | 42 +-
tex/context/base/font-syn.lua | 11 +-
tex/context/base/font-tfm.lua | 15 +-
tex/context/base/font-tra.mkiv | 46 +-
tex/context/base/font-vf.lua | 3 +-
tex/context/base/grph-epd.lua | 1 -
tex/context/base/grph-epd.mkiv | 12 +-
tex/context/base/grph-fig.mkiv | 729 +-
tex/context/base/grph-fil.lua | 11 +-
tex/context/base/grph-inc.lua | 100 +-
tex/context/base/grph-inc.mkiv | 895 ++-
tex/context/base/grph-raw.lua | 5 +-
tex/context/base/grph-raw.mkiv | 14 +-
tex/context/base/grph-swf.lua | 15 +-
tex/context/base/grph-trf.mkiv | 492 +-
tex/context/base/grph-u3d.lua | 8 +-
tex/context/base/java-ini.lua | 37 +-
tex/context/base/l-boolean.lua | 45 +-
tex/context/base/l-dir.lua | 18 +
tex/context/base/l-file.lua | 36 +-
tex/context/base/l-io.lua | 47 +-
tex/context/base/l-lpeg.lua | 228 +-
tex/context/base/l-md5.lua | 33 +-
tex/context/base/l-number.lua | 10 +-
tex/context/base/l-os.lua | 67 +-
tex/context/base/l-string.lua | 17 +
tex/context/base/l-table.lua | 131 +-
tex/context/base/l-unicode.lua | 356 +-
tex/context/base/l-url.lua | 124 +-
tex/context/base/lang-def.lua | 47 +-
tex/context/base/lang-def.mkiv | 34 +-
tex/context/base/lang-frd.mkiv | 141 +
tex/context/base/lang-frq.mkiv | 233 +
tex/context/base/lang-ini.lua | 91 +-
tex/context/base/lang-ini.mkiv | 31 +-
tex/context/base/lang-lab.lua | 67 +-
tex/context/base/lang-lab.mkiv | 70 +-
tex/context/base/lang-mis.mkiv | 31 +
tex/context/base/lang-txt.lua | 183 +-
tex/context/base/lang-url.lua | 7 +-
tex/context/base/lang-wrd.lua | 97 +-
tex/context/base/lpdf-ano.lua | 6 +-
tex/context/base/lpdf-col.lua | 8 +-
tex/context/base/lpdf-epa.lua | 6 +-
tex/context/base/lpdf-epd.lua | 24 +-
tex/context/base/lpdf-fmt.lua | 40 +-
tex/context/base/lpdf-nod.lua | 71 +-
tex/context/base/lpdf-ren.lua | 120 +-
tex/context/base/lpdf-u3d.lua | 2 +-
tex/context/base/luat-bwc.lua | 2 +-
tex/context/base/luat-cbk.lua | 29 +-
tex/context/base/luat-cnf.lua | 7 +-
tex/context/base/luat-cod.lua | 28 +-
tex/context/base/luat-cod.mkiv | 6 -
tex/context/base/luat-env.lua | 44 +-
tex/context/base/luat-fmt.lua | 1 -
tex/context/base/luat-ini.lua | 44 +-
tex/context/base/luat-ini.mkiv | 8 +-
tex/context/base/luat-lib.mkiv | 5 +
tex/context/base/luat-mac.lua | 92 +-
tex/context/base/luat-run.lua | 12 +-
tex/context/base/luat-soc.lua | 16 +-
tex/context/base/luat-sto.lua | 32 +-
tex/context/base/lxml-css.lua | 48 +-
tex/context/base/lxml-ctx.mkiv | 3 -
tex/context/base/lxml-ini.mkiv | 4 +-
tex/context/base/lxml-lpt.lua | 51 +-
tex/context/base/lxml-tab.lua | 42 +-
tex/context/base/lxml-tex.lua | 25 +-
tex/context/base/m-barcodes.mkiv | 8 +-
tex/context/base/m-chart.lua | 159 +-
tex/context/base/m-chart.mkii | 2 +-
tex/context/base/m-chart.mkvi | 154 +-
tex/context/base/m-database.mkiv | 7 +-
tex/context/base/m-graph.mkiv | 124 +-
tex/context/base/m-ipsum.mkiv | 198 +
tex/context/base/m-json.mkiv | 30 +
tex/context/base/m-morse.mkvi | 12 +-
tex/context/base/m-oldfun.mkiv | 714 ++
tex/context/base/m-oldnum.mkiv | 416 ++
tex/context/base/m-pstricks.lua | 8 +-
tex/context/base/m-spreadsheet.lua | 331 +
tex/context/base/m-spreadsheet.mkiv | 295 +-
tex/context/base/m-timing.mkiv | 10 +-
tex/context/base/m-translate.mkiv | 27 +-
tex/context/base/m-visual.mkiv | 581 +-
tex/context/base/math-act.lua | 2 +
tex/context/base/math-ali.mkiv | 101 +-
tex/context/base/math-arr.mkiv | 405 +-
tex/context/base/math-def.mkiv | 10 +-
tex/context/base/math-dim.lua | 6 +-
tex/context/base/math-ext.lua | 5 +-
tex/context/base/math-fbk.lua | 281 +
tex/context/base/math-for.mkiv | 15 -
tex/context/base/math-ini.lua | 141 +-
tex/context/base/math-ini.mkiv | 41 +-
tex/context/base/math-int.mkiv | 15 +-
tex/context/base/math-noa.lua | 219 +-
tex/context/base/math-ttv.lua | 799 +++
tex/context/base/math-vfu.lua | 785 +--
tex/context/base/meta-grd.mkiv | 116 +
tex/context/base/meta-imp-dum.mkiv | 123 +-
tex/context/base/meta-ini.lua | 2 +
tex/context/base/meta-ini.mkiv | 67 +-
tex/context/base/meta-pag.mkiv | 10 +-
tex/context/base/meta-pdf.lua | 7 +-
tex/context/base/meta-pdh.mkiv | 2 +-
tex/context/base/meta-tex.lua | 2 +-
tex/context/base/mlib-ctx.lua | 2 +
tex/context/base/mlib-pdf.lua | 93 +-
tex/context/base/mlib-pdf.mkiv | 6 +-
tex/context/base/mlib-pps.mkiv | 16 +-
tex/context/base/mlib-run.lua | 261 +-
tex/context/base/mult-aux.lua | 2 +-
tex/context/base/mult-aux.mkiv | 180 +-
tex/context/base/mult-chk.lua | 5 +-
tex/context/base/mult-chk.mkiv | 14 +-
tex/context/base/mult-de.mkii | 8 +
tex/context/base/mult-def.lua | 45 +-
tex/context/base/mult-def.mkiv | 76 +-
tex/context/base/mult-dim.mkvi | 1 +
tex/context/base/mult-en.mkii | 8 +
tex/context/base/mult-fr.mkii | 8 +
tex/context/base/mult-fun.lua | 95 +
tex/context/base/mult-ini.lua | 122 +-
tex/context/base/mult-ini.mkiv | 72 +-
tex/context/base/mult-it.mkii | 8 +
tex/context/base/mult-low.lua | 50 +-
tex/context/base/mult-mes.lua | 3 +-
tex/context/base/mult-nl.mkii | 8 +
tex/context/base/mult-pe.mkii | 8 +
tex/context/base/mult-prm.mkiv | 15 +
tex/context/base/mult-ro.mkii | 8 +
tex/context/base/mult-sys.mkiv | 566 +-
tex/context/base/node-aux.lua | 44 +-
tex/context/base/node-dir.lua | 2 +-
tex/context/base/node-fin.lua | 591 +-
tex/context/base/node-ini.lua | 42 +-
tex/context/base/node-inj.lua | 31 +-
tex/context/base/node-par.lua | 118 -
tex/context/base/node-par.mkiv | 82 -
tex/context/base/node-ref.lua | 11 +-
tex/context/base/node-res.lua | 19 +-
tex/context/base/node-rul.lua | 32 +-
tex/context/base/node-rul.mkiv | 40 +-
tex/context/base/node-ser.lua | 27 +-
tex/context/base/node-spl.lua | 619 --
tex/context/base/node-spl.mkiv | 114 -
tex/context/base/node-tra.lua | 90 +-
tex/context/base/node-tsk.lua | 4 +-
tex/context/base/node-typ.lua | 9 +-
tex/context/base/norm-ctx.mkiv | 8 +-
tex/context/base/pack-bar.mkiv | 97 +-
tex/context/base/pack-bck.mkvi | 2 +-
tex/context/base/pack-box.mkiv | 881 ++-
tex/context/base/pack-com.mkiv | 14 +-
tex/context/base/pack-cut.mkiv | 163 +
tex/context/base/pack-mrl.mkiv | 920 ++-
tex/context/base/pack-obj.lua | 25 +-
tex/context/base/pack-obj.mkiv | 143 +-
tex/context/base/pack-pos.mkiv | 189 +-
tex/context/base/pack-rul.mkiv | 1021 ++-
tex/context/base/page-app.mkiv | 9 +-
tex/context/base/page-bck.mkiv | 394 +-
tex/context/base/page-brk.mkiv | 14 +-
tex/context/base/page-col.mkiv | 245 +-
tex/context/base/page-com.mkiv | 176 +-
tex/context/base/page-fac.mkiv | 48 +-
tex/context/base/page-flt.lua | 14 +-
tex/context/base/page-flt.mkiv | 214 +-
tex/context/base/page-flw.mkiv | 186 +-
tex/context/base/page-grd.mkiv | 6 +-
tex/context/base/page-imp.mkiv | 3 +-
tex/context/base/page-inf.mkiv | 6 +-
tex/context/base/page-ini.mkiv | 46 +-
tex/context/base/page-ins.lua | 97 +
tex/context/base/page-ins.mkiv | 197 +-
tex/context/base/page-lay.mkiv | 77 +-
tex/context/base/page-lin.lua | 9 +
tex/context/base/page-lin.mkiv | 597 +-
tex/context/base/page-mak.mkvi | 33 +-
tex/context/base/page-mbk.mkvi | 4 +-
tex/context/base/page-mis.lua | 77 -
tex/context/base/page-mis.mkiv | 95 -
tex/context/base/page-mix.lua | 642 ++
tex/context/base/page-mix.mkiv | 771 ++
tex/context/base/page-mul.mkiv | 2253 +++---
tex/context/base/page-not.mkiv | 20 +-
tex/context/base/page-one.mkiv | 259 +-
tex/context/base/page-otr.mkvi | 4 +-
tex/context/base/page-par.mkiv | 88 +-
tex/context/base/page-plg.mkiv | 110 +-
tex/context/base/page-pst.lua | 78 +
tex/context/base/page-pst.mkiv | 102 +
tex/context/base/page-run.mkiv | 8 +-
tex/context/base/page-sel.mkiv | 347 -
tex/context/base/page-sel.mkvi | 368 +
tex/context/base/page-set.mkiv | 598 +-
tex/context/base/page-sid.mkiv | 60 +-
tex/context/base/page-spr.mkiv | 90 +-
tex/context/base/page-str.mkiv | 39 +-
tex/context/base/page-txt.mkvi | 112 +-
tex/context/base/phys-dim.lua | 66 +-
tex/context/base/phys-dim.mkiv | 19 +-
tex/context/base/ppchtex.mkiv | 18 +-
tex/context/base/prop-ini.mkiv | 2 +-
tex/context/base/regi-ini.lua | 72 +-
tex/context/base/s-abr-01.tex | 554 +-
tex/context/base/s-art-01.mkiv | 4 +
tex/context/base/s-def-01.mkiv | 2 +-
tex/context/base/s-fnt-10.mkiv | 2 +-
tex/context/base/s-fnt-20.mkiv | 10 +-
tex/context/base/s-fnt-28.mkiv | 2 +-
tex/context/base/s-fnt-29.mkiv | 2 +-
tex/context/base/s-fnt-32.mkiv | 2 +-
tex/context/base/s-inf-01.mkvi | 2 +-
tex/context/base/s-inf-03.mkiv | 117 +-
tex/context/base/s-lan-04.mkiv | 2 +-
tex/context/base/s-mod.ctx | 1 -
tex/context/base/s-pre-60.mkiv | 134 +-
tex/context/base/s-pre-61.tex | 2 +-
tex/context/base/s-pre-62.tex | 2 +-
tex/context/base/s-pre-63.tex | 1 +
tex/context/base/s-pre-67.tex | 2 +-
tex/context/base/s-pre-69.mkiv | 2 +-
tex/context/base/scrn-fld.lua | 6 +-
tex/context/base/scrn-fld.mkvi | 10 +-
tex/context/base/scrn-ini.lua | 2 +-
tex/context/base/scrn-ini.mkvi | 14 +-
tex/context/base/scrn-pag.mkvi | 6 -
tex/context/base/scrn-ref.lua | 2 +-
tex/context/base/scrn-wid.lua | 12 +-
tex/context/base/scrn-wid.mkvi | 125 +-
tex/context/base/scrp-cjk.lua | 3 +
tex/context/base/scrp-ini.lua | 276 +-
tex/context/base/sort-ini.lua | 2 +-
tex/context/base/sort-lan.lua | 20 +-
tex/context/base/spac-ali.lua | 6 +-
tex/context/base/spac-ali.mkiv | 52 +-
tex/context/base/spac-cha.mkiv | 191 +
tex/context/base/spac-def.mkiv | 116 +-
tex/context/base/spac-grd.mkiv | 61 +-
tex/context/base/spac-hor.mkiv | 137 +-
tex/context/base/spac-par.mkiv | 7 +
tex/context/base/spac-ver.lua | 62 +-
tex/context/base/spac-ver.mkiv | 299 +-
tex/context/base/status-files.pdf | Bin 24413 -> 24585 bytes
tex/context/base/status-lua.pdf | Bin 180149 -> 195620 bytes
tex/context/base/status-mkiv.lua | 7384 ++++++++++++++++----
tex/context/base/status-mkiv.tex | 308 +-
tex/context/base/strc-bkm.mkiv | 150 +-
tex/context/base/strc-blk.lua | 7 +-
tex/context/base/strc-blk.mkiv | 96 +-
tex/context/base/strc-con.mkvi | 197 +-
tex/context/base/strc-def.mkiv | 59 +-
tex/context/base/strc-des.mkii | 2 +-
tex/context/base/strc-des.mkvi | 52 +-
tex/context/base/strc-doc.lua | 48 +-
tex/context/base/strc-doc.mkiv | 234 +-
tex/context/base/strc-enu.mkvi | 73 +-
tex/context/base/strc-flt.mkvi | 1061 +--
tex/context/base/strc-ind.mkiv | 12 +-
tex/context/base/strc-ini.lua | 25 +-
tex/context/base/strc-itm.mkvi | 89 +-
tex/context/base/strc-lab.mkiv | 241 +-
tex/context/base/strc-lnt.mkvi | 127 +-
tex/context/base/strc-lst.lua | 3 +-
tex/context/base/strc-lst.mkvi | 31 +-
tex/context/base/strc-mar.lua | 11 +-
tex/context/base/strc-mat.mkiv | 488 +-
tex/context/base/strc-not.lua | 180 +-
tex/context/base/strc-not.mkvi | 631 +-
tex/context/base/strc-num.lua | 207 +-
tex/context/base/strc-num.mkiv | 62 +-
tex/context/base/strc-pag.lua | 12 +
tex/context/base/strc-pag.mkiv | 193 +-
tex/context/base/strc-ref.lua | 127 +-
tex/context/base/strc-ref.mkvi | 346 +-
tex/context/base/strc-reg.lua | 11 +-
tex/context/base/strc-reg.mkiv | 454 +-
tex/context/base/strc-ren.mkiv | 866 ++-
tex/context/base/strc-sbe.mkiv | 102 +-
tex/context/base/strc-sec.mkiv | 828 ++-
tex/context/base/strc-syn.lua | 2 +-
tex/context/base/strc-syn.mkiv | 4 +-
tex/context/base/strc-tag.lua | 1 +
tex/context/base/strc-tag.mkiv | 9 +
tex/context/base/supp-ali.mkiv | 173 -
tex/context/base/supp-box.lua | 12 +-
tex/context/base/supp-box.mkiv | 68 +-
tex/context/base/supp-fun.mkiv | 716 --
tex/context/base/supp-mat.mkiv | 143 +-
tex/context/base/supp-num.mkiv | 416 --
tex/context/base/supp-ran.lua | 28 +-
tex/context/base/supp-ran.mkiv | 4 +-
tex/context/base/supp-vis.mkiv | 185 +-
tex/context/base/symb-run.mkiv | 8 +-
tex/context/base/syst-aux.lua | 33 +-
tex/context/base/syst-aux.mkiv | 3650 +++++-----
tex/context/base/syst-fnt.mkiv | 2 +-
tex/context/base/syst-gen.mkii | 5 +
tex/context/base/syst-ini.mkiv | 338 +-
tex/context/base/syst-lua.lua | 52 +-
tex/context/base/syst-lua.mkiv | 7 +
tex/context/base/tabl-ltb.mkiv | 8 +-
tex/context/base/tabl-mis.mkiv | 288 +
tex/context/base/tabl-ntb.mkiv | 1912 ++---
tex/context/base/tabl-nte.mkiv | 67 +-
tex/context/base/tabl-pln.mkiv | 155 +-
tex/context/base/tabl-tab.mkiv | 174 +-
tex/context/base/tabl-tbl.lua | 6 +-
tex/context/base/tabl-tbl.mkiv | 129 +-
tex/context/base/tabl-tsp.mkiv | 470 +-
tex/context/base/tabl-xnt.mkvi | 2 +-
tex/context/base/tabl-xtb.lua | 5 +-
tex/context/base/tabl-xtb.mkvi | 108 +-
tex/context/base/task-ini.lua | 9 +-
tex/context/base/trac-ctx.lua | 36 +
tex/context/base/trac-ctx.mkiv | 28 +
tex/context/base/trac-deb.lua | 13 +-
tex/context/base/trac-deb.mkiv | 6 +-
tex/context/base/trac-fil.lua | 214 +-
tex/context/base/trac-inf.lua | 4 +-
tex/context/base/trac-lmx.lua | 606 +-
tex/context/base/trac-set.lua | 36 +-
tex/context/base/trac-tex.lua | 2 +-
tex/context/base/trac-tim.lua | 2 +-
tex/context/base/trac-vis.lua | 859 +++
tex/context/base/trac-vis.mkiv | 810 +--
tex/context/base/type-imp-husayni.mkiv | 290 +-
tex/context/base/type-ini.lua | 17 +-
tex/context/base/type-ini.mkvi | 14 +-
tex/context/base/typo-bld.lua | 186 +
tex/context/base/typo-bld.mkiv | 64 +
tex/context/base/typo-brk.lua | 17 +-
tex/context/base/typo-brk.mkiv | 18 +-
tex/context/base/typo-cap.mkiv | 44 +-
tex/context/base/typo-del.mkiv | 31 +-
tex/context/base/typo-dir.lua | 10 +-
tex/context/base/typo-itm.mkiv | 273 +
tex/context/base/typo-krn.lua | 51 +-
tex/context/base/typo-krn.mkiv | 57 +-
tex/context/base/typo-mar.lua | 89 +-
tex/context/base/typo-mar.mkiv | 7 +-
tex/context/base/typo-scr.mkiv | 250 +-
tex/context/base/typo-spa.mkiv | 17 +-
tex/context/base/typo-txt.mkvi | 174 +-
tex/context/base/unic-ini.mkiv | 2 +-
tex/context/base/util-deb.lua | 3 +-
tex/context/base/util-dim.lua | 18 +-
tex/context/base/util-jsn.lua | 145 +
tex/context/base/util-lua.lua | 213 +-
tex/context/base/util-mrg.lua | 9 +-
tex/context/base/util-prs.lua | 73 +-
tex/context/base/util-sql-imp-client.lua | 253 +
tex/context/base/util-sql-imp-library.lua | 283 +
tex/context/base/util-sql-imp-swiglib.lua | 426 ++
tex/context/base/util-sql-loggers.lua | 277 +
tex/context/base/util-sql-sessions.lua | 349 +
tex/context/base/util-sql-tickets.lua | 698 ++
tex/context/base/util-sql-users.lua | 391 ++
tex/context/base/util-sql.lua | 425 ++
tex/context/base/util-sta.lua | 342 +
tex/context/base/util-str.lua | 7 +-
tex/context/base/util-tab.lua | 120 +-
tex/context/base/util-tpl.lua | 117 +
tex/context/base/x-asciimath.mkiv | 2 +-
tex/context/base/x-chemml.lua | 2 +-
tex/context/base/x-chemml.mkiv | 22 +-
tex/context/base/x-dir-05.mkiv | 4 +-
tex/context/base/x-mathml.lua | 24 +-
tex/context/base/x-mathml.mkiv | 2366 -------
tex/context/base/x-udhr.mkiv | 2 +-
tex/context/base/x-xfdf.mkiv | 72 +
tex/context/base/xtag-ini.mkii | 50 +-
tex/context/base/xtag-pre.mkii | 13 +
tex/context/fonts/demo.lfg | 29 +-
tex/context/fonts/husayni.lfg | 6 +-
tex/context/interface/keys-cs.xml | 8 +
tex/context/interface/keys-de.xml | 8 +
tex/context/interface/keys-en.xml | 8 +
tex/context/interface/keys-fr.xml | 8 +
tex/context/interface/keys-it.xml | 8 +
tex/context/interface/keys-nl.xml | 8 +
tex/context/interface/keys-pe.xml | 8 +
tex/context/interface/keys-ro.xml | 8 +
tex/context/sample/lorem.tex | 11 +
tex/context/test/pdf-x1a-2001.mkiv | 2 +-
tex/context/test/pdf-x1a-2003.mkiv | 2 +-
tex/context/test/pdf-x3-2002.mkiv | 2 +-
tex/context/test/pdf-x3-2003.mkiv | 2 +-
tex/context/test/pdf-x4.mkiv | 2 +-
tex/context/test/pdf-x4p.mkiv | 4 +-
tex/generic/context/luatex/luatex-basics-gen.lua | 4 +-
tex/generic/context/luatex/luatex-fonts-ext.lua | 16 +-
tex/generic/context/luatex/luatex-fonts-merged.lua | 7308 ++++++++++---------
tex/generic/context/luatex/luatex-fonts.lua | 2 +-
tex/generic/context/luatex/luatex-mplib.tex | 11 +-
600 files changed, 57496 insertions(+), 36685 deletions(-)
create mode 100644 metapost/context/base/mp-grap.mpiv
create mode 100644 scripts/context/lua/mtx-fcd.lua
delete mode 100644 scripts/context/ruby/fcd_start.rb
delete mode 100644 tex/context/base/buff-par.mkiv
create mode 100644 tex/context/base/buff-par.mkvi
create mode 100644 tex/context/base/cont-nop.mkiv
create mode 100644 tex/context/base/cont-yes.mkiv
delete mode 100644 tex/context/base/context.todo
create mode 100644 tex/context/base/core-ctx.ctx
delete mode 100644 tex/context/base/core-fnt.mkiv
delete mode 100644 tex/context/base/core-mis.mkiv
delete mode 100644 tex/context/base/core-var.mkiv
create mode 100644 tex/context/base/font-afk.lua
create mode 100644 tex/context/base/font-aux.mkvi
create mode 100644 tex/context/base/font-chk.mkiv
delete mode 100644 tex/context/base/font-gds.mkiv
create mode 100644 tex/context/base/font-gds.mkvi
create mode 100644 tex/context/base/font-sol.lua
create mode 100644 tex/context/base/font-sol.mkvi
create mode 100644 tex/context/base/lang-frd.mkiv
create mode 100644 tex/context/base/lang-frq.mkiv
create mode 100644 tex/context/base/m-ipsum.mkiv
create mode 100644 tex/context/base/m-json.mkiv
create mode 100644 tex/context/base/m-oldfun.mkiv
create mode 100644 tex/context/base/m-oldnum.mkiv
create mode 100644 tex/context/base/m-spreadsheet.lua
create mode 100644 tex/context/base/math-fbk.lua
create mode 100644 tex/context/base/math-ttv.lua
create mode 100644 tex/context/base/meta-grd.mkiv
create mode 100644 tex/context/base/mult-fun.lua
delete mode 100644 tex/context/base/node-par.lua
delete mode 100644 tex/context/base/node-par.mkiv
delete mode 100644 tex/context/base/node-spl.lua
delete mode 100644 tex/context/base/node-spl.mkiv
create mode 100644 tex/context/base/pack-cut.mkiv
create mode 100644 tex/context/base/page-ins.lua
delete mode 100644 tex/context/base/page-mis.lua
delete mode 100644 tex/context/base/page-mis.mkiv
create mode 100644 tex/context/base/page-mix.lua
create mode 100644 tex/context/base/page-mix.mkiv
create mode 100644 tex/context/base/page-pst.lua
create mode 100644 tex/context/base/page-pst.mkiv
delete mode 100644 tex/context/base/page-sel.mkiv
create mode 100644 tex/context/base/page-sel.mkvi
create mode 100644 tex/context/base/spac-cha.mkiv
delete mode 100644 tex/context/base/supp-ali.mkiv
delete mode 100644 tex/context/base/supp-fun.mkiv
delete mode 100644 tex/context/base/supp-num.mkiv
create mode 100644 tex/context/base/tabl-mis.mkiv
create mode 100644 tex/context/base/trac-ctx.lua
create mode 100644 tex/context/base/trac-ctx.mkiv
create mode 100644 tex/context/base/trac-vis.lua
create mode 100644 tex/context/base/typo-bld.lua
create mode 100644 tex/context/base/typo-bld.mkiv
create mode 100644 tex/context/base/typo-itm.mkiv
create mode 100644 tex/context/base/util-jsn.lua
create mode 100644 tex/context/base/util-sql-imp-client.lua
create mode 100644 tex/context/base/util-sql-imp-library.lua
create mode 100644 tex/context/base/util-sql-imp-swiglib.lua
create mode 100644 tex/context/base/util-sql-loggers.lua
create mode 100644 tex/context/base/util-sql-sessions.lua
create mode 100644 tex/context/base/util-sql-tickets.lua
create mode 100644 tex/context/base/util-sql-users.lua
create mode 100644 tex/context/base/util-sql.lua
create mode 100644 tex/context/base/util-sta.lua
create mode 100644 tex/context/base/util-tpl.lua
delete mode 100644 tex/context/base/x-mathml.mkiv
create mode 100644 tex/context/base/x-xfdf.mkiv
create mode 100644 tex/context/sample/lorem.tex
diff --git a/bibtex/bst/context/cont-ab.bst b/bibtex/bst/context/cont-ab.bst
index c524ff8ee..e09da7ca5 100644
--- a/bibtex/bst/context/cont-ab.bst
+++ b/bibtex/bst/context/cont-ab.bst
@@ -618,6 +618,7 @@ FUNCTION {misc}
format.t.title
format.key
"" "\city" address do.out
+ "" "\day" day do.out
"" "\month" month do.out
"" "\pubname" publisher do.out
"" "\howpublished" howpublished do.out
diff --git a/bibtex/bst/context/cont-au.bst b/bibtex/bst/context/cont-au.bst
index 2eba98b0a..d0b87186c 100644
--- a/bibtex/bst/context/cont-au.bst
+++ b/bibtex/bst/context/cont-au.bst
@@ -618,6 +618,7 @@ FUNCTION {misc}
format.t.title
format.key
"" "\city" address do.out
+ "" "\day" day do.out
"" "\month" month do.out
"" "\pubname" publisher do.out
"" "\howpublished" howpublished do.out
diff --git a/bibtex/bst/context/cont-no.bst b/bibtex/bst/context/cont-no.bst
index dbeb8a45d..393d13db8 100644
--- a/bibtex/bst/context/cont-no.bst
+++ b/bibtex/bst/context/cont-no.bst
@@ -618,6 +618,7 @@ FUNCTION {misc}
format.t.title
format.key
"" "\city" address do.out
+ "" "\day" day do.out
"" "\month" month do.out
"" "\pubname" publisher do.out
"" "\howpublished" howpublished do.out
diff --git a/bibtex/bst/context/cont-ti.bst b/bibtex/bst/context/cont-ti.bst
index 734cbb4b6..34175a0ba 100644
--- a/bibtex/bst/context/cont-ti.bst
+++ b/bibtex/bst/context/cont-ti.bst
@@ -618,6 +618,7 @@ FUNCTION {misc}
format.t.title
format.key
"" "\city" address do.out
+ "" "\day" day do.out
"" "\month" month do.out
"" "\pubname" publisher do.out
"" "\howpublished" howpublished do.out
diff --git a/context/data/scite/lexers/data/scite-context-data-context.lua b/context/data/scite/lexers/data/scite-context-data-context.lua
index aa36277b7..86a817b70 100644
--- a/context/data/scite/lexers/data/scite-context-data-context.lua
+++ b/context/data/scite/lexers/data/scite-context-data-context.lua
@@ -1,4 +1,4 @@
return {
- ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "etexversion", "pdftexversion", "xetexversion", "xetexrevision", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", "mathsupcramped", 
"mathsubnormal", "mathsubcombined", "mathaxisheight", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifmodeelse", "doifnotmode", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "startmodule", "stopmodule", "usemodule", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile" },
- ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "donothing", "dontcomplain", "donetrue", "donefalse", "htdp", "unvoidbox", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "tracingall", "tracingnone", "loggingall", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "empty", "null", "space", "quad", "enspace", "obeyspaces", "obeylines", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "wait", "writestatus", "define", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", 
"definemeasure", "measure", "getvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", "newmacro", "setnewmacro", "newfraction", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "begstrut", "endstrut" },
+ ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "etexversion", "pdftexversion", "xetexversion", "xetexrevision", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", 
"mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifmodeelse", "doifnotmode", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "startmodule", "stopmodule", "usemodule", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile" },
+ ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "doifelsecommandhandler", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "availablehsize", "localhsize", "setlocalhsize", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", 
"doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "obeyspaces", "obeylines", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "wait", "writestatus", "define", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "getvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", 
"strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight" },
}
\ No newline at end of file
diff --git a/context/data/scite/lexers/data/scite-context-data-metafun.lua b/context/data/scite/lexers/data/scite-context-data-metafun.lua
index 749be7663..eac24016c 100644
--- a/context/data/scite/lexers/data/scite-context-data-metafun.lua
+++ b/context/data/scite/lexers/data/scite-context-data-metafun.lua
@@ -1,4 +1,4 @@
return {
- ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "transparent", "withtransparency", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions" },
+ ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions" },
["internals"]={ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", "shadefactor", "textextoffset", "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent" },
}
\ No newline at end of file
diff --git a/context/data/scite/lexers/scite-context-lexer-tex.lua b/context/data/scite/lexers/scite-context-lexer-tex.lua
index 1ff68750f..f59624051 100644
--- a/context/data/scite/lexers/scite-context-lexer-tex.lua
+++ b/context/data/scite/lexers/scite-context-lexer-tex.lua
@@ -128,12 +128,13 @@ local checkedword = context.checkedword
local styleofword = context.styleofword
local setwordlist = context.setwordlist
local validwords = false
+local validminimum = 3
-- % language=uk
local knownpreamble = Cmt(#P("% "), function(input,i,_) -- todo : utfbomb
if i < 10 then
- validwords = false
+ validwords, validminimum = false, 3
local s, e, word = find(input,'^(.+)[\n\r]',i) -- combine with match
if word then
local interface = match(word,"interface=([a-z]+)")
@@ -141,7 +142,7 @@ local knownpreamble = Cmt(#P("% "), function(input,i,_) -- todo : utfbomb
currentcommands = commands[interface] or commands.en or { }
end
local language = match(word,"language=([a-z]+)")
- validwords = language and setwordlist(language)
+ validwords, validminimum = setwordlist(language)
end
end
return false
@@ -224,7 +225,7 @@ local p_unit = P("pt") + P("bp") + P("sp") + P("mm") + P("cm") +
--
-- local p_word = Cmt(iwordpattern, function(_,i,s)
-- if validwords then
--- return checkedword(validwords,s,i)
+-- return checkedword(validwords,validminimum,s,i)
-- else
-- return true, { "text", i }
-- end
@@ -232,7 +233,7 @@ local p_unit = P("pt") + P("bp") + P("sp") + P("mm") + P("cm") +
--
-- So we use this one instead:
-local p_word = Ct( iwordpattern / function(s) return styleofword(validwords,s) end * Cp() ) -- the function can be inlined
+local p_word = Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
----- p_text = (1 - p_grouping - p_special - p_extra - backslash - space + hspace)^1
@@ -389,7 +390,8 @@ local stoplua = P("\\stop") * Cmt(luaenvironment,stopdisplaylua)
local startluacode = token("embedded", startlua)
local stopluacode = #stoplua * token("embedded", stoplua)
-local metafuncall = ( P("reusable") + P("usable") + P("unique") + P("use") ) * ("MPgraphic")
+local metafuncall = ( P("reusable") + P("usable") + P("unique") + P("use") + P("reuse") ) * ("MPgraphic")
+ + P("uniqueMPpagegraphic")
local metafunenvironment = metafuncall -- ( P("use") + P("reusable") + P("unique") ) * ("MPgraphic")
+ P("MP") * ( P("code")+ P("page") + P("inclusions") + P("initializations") + P("definitions") + P("extensions") + P("graphic") )
diff --git a/context/data/scite/lexers/scite-context-lexer-txt.lua b/context/data/scite/lexers/scite-context-lexer-txt.lua
index 012167aeb..4c4742d54 100644
--- a/context/data/scite/lexers/scite-context-lexer-txt.lua
+++ b/context/data/scite/lexers/scite-context-lexer-txt.lua
@@ -13,19 +13,20 @@ local token = lexer.token
local P, S, Cmt, Cp, Ct = lpeg.P, lpeg.S, lpeg.Cmt, lpeg.Cp, lpeg.Ct
local find, match = string.find, string.match
-local textlexer = { _NAME = "txt", _FILENAME = "scite-context-lexer-txt" }
-local whitespace = lexer.WHITESPACE
-local context = lexer.context
+local textlexer = { _NAME = "txt", _FILENAME = "scite-context-lexer-txt" }
+local whitespace = lexer.WHITESPACE
+local context = lexer.context
-local space = lexer.space
-local any = lexer.any
+local space = lexer.space
+local any = lexer.any
-local wordtoken = context.patterns.wordtoken
-local wordpattern = context.patterns.wordpattern
-local checkedword = context.checkedword
-local styleofword = context.styleofword
-local setwordlist = context.setwordlist
-local validwords = false
+local wordtoken = context.patterns.wordtoken
+local wordpattern = context.patterns.wordpattern
+local checkedword = context.checkedword
+local styleofword = context.styleofword
+local setwordlist = context.setwordlist
+local validwords = false
+local validminimum = 3
-- local styleset = context.newstyleset {
-- "default",
@@ -37,12 +38,12 @@ local validwords = false
local p_preamble = Cmt(#(S("#!-%") * P(" ")), function(input,i,_) -- todo: utf bomb
if i == 1 then -- < 10 then
- validwords = false
+ validwords, validminimum = false, 3
local s, e, line = find(input,'^[#!%-%%](.+)[\n\r]',i)
if line then
local language = match(line,"language=([a-z]+)")
if language then
- validwords = setwordlist(language)
+ validwords, validminimum = setwordlist(language)
end
end
end
@@ -52,17 +53,8 @@ end)
local t_preamble =
token("preamble", p_preamble)
--- local t_word =
--- Cmt(wordpattern, function(_,i,s)
--- if validwords then
--- return checkedword(validwords,s,i)
--- else
--- return true, { "text", i }
--- end
--- end)
-
local t_word =
- Ct( wordpattern / function(s) return styleofword(validwords,s) end * Cp() ) -- the function can be inlined
+ Ct( wordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
local t_text =
token("default", wordtoken^1)
diff --git a/context/data/scite/lexers/scite-context-lexer-xml.lua b/context/data/scite/lexers/scite-context-lexer-xml.lua
index 34636127f..62cbb2d2c 100644
--- a/context/data/scite/lexers/scite-context-lexer-xml.lua
+++ b/context/data/scite/lexers/scite-context-lexer-xml.lua
@@ -71,7 +71,7 @@ local checkedword = context.checkedword
local styleofword = context.styleofword
local setwordlist = context.setwordlist
local validwords = false
-
+local validminimum = 3
--
--
@@ -79,29 +79,20 @@ local validwords = false
local p_preamble = Cmt(#P("]*%?>%s*<%?context%-directive%s+editor%s+language%s+(..)%s+%?>")
-- if not language then
-- language = match(input,'^<%?xml[^>]*language=[\"\'](..)[\"\'][^>]*%?>',i)
-- end
if language then
- validwords = setwordlist(language)
+ validwords, validminimum = setwordlist(language)
end
end
return false
end)
--- local p_word =
--- Cmt(iwordpattern, function(_,i,s)
--- if validwords then
--- return checkedword(validwords,s,i)
--- else
--- return true, { "text", i } -- or default
--- end
--- end)
-
local p_word =
- Ct( iwordpattern / function(s) return styleofword(validwords,s) end * Cp() ) -- the function can be inlined
+ Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
local p_rest =
token("default", any)
diff --git a/context/data/scite/lexers/scite-context-lexer.lua b/context/data/scite/lexers/scite-context-lexer.lua
index 2db37e26b..1c9f2cec0 100644
--- a/context/data/scite/lexers/scite-context-lexer.lua
+++ b/context/data/scite/lexers/scite-context-lexer.lua
@@ -210,8 +210,11 @@ function context.exact_match(words,word_chars,case_insensitive)
end
-- spell checking (we can only load lua files)
-
+--
-- return {
+-- min = 3,
+-- max = 40,
+-- n = 12345,
-- words = {
-- ["someword"] = "someword",
-- ["anotherword"] = "Anotherword",
@@ -220,42 +223,30 @@ end
local lists = { }
-local splitter = (Cf(Ct("") * (Cg(C((1-S(" \t\n\r"))^1 * Cc(true))) + P(1))^1,rawset) )^0
-local splitter = (Cf(Ct("") * (Cg(C(R("az","AZ","\127\255")^1) * Cc(true)) + P(1))^1,rawset) )^0
-
-local function splitwords(words)
- return lpegmatch(splitter,words)
-end
-
function context.setwordlist(tag,limit) -- returns hash (lowercase keys and original values)
if not tag or tag == "" then
- return false
- elseif lists[tag] ~= nil then
- return lists[tag]
- else
- local list = context.loaddefinitions("spell-" .. tag)
+ return false, 3
+ end
+ local list = lists[tag]
+ if not list then
+ list = context.loaddefinitions("spell-" .. tag)
if not list or type(list) ~= "table" then
- lists[tag] = false
- return false
- elseif type(list.words) == "string" then
- list = splitwords(list.words) or false
- lists[tag] = list
- return list
+ list = { words = false, min = 3 }
else
- list = list.words or false
- lists[tag] = list
- return list
+ list.words = list.words or false
+ list.min = list.min or 3
end
+ lists[tag] = list
end
+ return list.words, list.min
end
patterns.wordtoken = R("az","AZ","\127\255")
patterns.wordpattern = patterns.wordtoken^3 -- todo: if limit and #s < limit then
-function context.checkedword(validwords,s,i) -- ,limit
- if not validwords then
- return true, { "text", i }
--- return true, { "default", i }
+function context.checkedword(validwords,validminimum,s,i) -- ,limit
+ if not validwords then -- or #s < validminimum then
+ return true, { "text", i } -- { "default", i }
else
-- keys are lower
local word = validwords[s]
@@ -278,8 +269,8 @@ function context.checkedword(validwords,s,i) -- ,limit
end
end
-function context.styleofword(validwords,s) -- ,limit
- if not validwords then
+function context.styleofword(validwords,validminimum,s) -- ,limit
+ if not validwords or #s < validminimum then
return "text"
else
-- keys are lower
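
Two illustrative notes on the changed interface (standalone sketches, not part of the patch): the spell-<tag> definition files are expected to carry a minimum word length next to the word hash, and callers now receive both values from setwordlist. The names apply and style below are hypothetical; "lexer" is the LPeg lexer module already required by the files above.

    -- assumed shape of a loaded "spell-<tag>" definition (see the comment above):
    -- return {
    --     min   = 3,                      -- shortest word worth checking
    --     words = { ["word"] = "Word" },  -- lowercase key, original value
    -- }

    local context = lexer.context -- as in the lexers patched above

    local validwords, validminimum = false, 3

    local function apply(language)
        validwords, validminimum = context.setwordlist(language) -- words (or false) plus minimum
    end

    local function style(word)
        -- words shorter than the minimum are styled as plain "text" and not checked
        return context.styleofword(validwords, validminimum, word)
    end
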
diff --git a/context/data/scite/scite-context-data-context.properties b/context/data/scite/scite-context-data-context.properties
index 37a39e1e0..c6a0213cf 100644
--- a/context/data/scite/scite-context-data-context.properties
+++ b/context/data/scite/scite-context-data-context.properties
@@ -2,72 +2,87 @@ keywordclass.context.helpers=\
startsetups stopsetups startxmlsetups stopxmlsetups \
startluasetups stopluasetups starttexsetups stoptexsetups startrawsetups \
stoprawsetups startlocalsetups stoplocalsetups starttexdefinition stoptexdefinition \
-starttexcode stoptexcode doifsetupselse doifsetups doifnotsetups \
-setup setups texsetup xmlsetup luasetup \
-directsetup newmode setmode resetmode newsystemmode \
+starttexcode stoptexcode startcontextcode stopcontextcode doifsetupselse \
+doifsetups doifnotsetups setup setups texsetup \
+xmlsetup luasetup directsetup doifelsecommandhandler doifnotcommandhandler \
+doifcommandhandler newmode setmode resetmode newsystemmode \
setsystemmode resetsystemmode pushsystemmode popsystemmode booleanmodevalue \
newcount newdimen newskip newmuskip newbox \
newtoks newread newwrite newmarks newinsert \
newattribute newif newlanguage newfamily newfam \
-newhelp then donothing dontcomplain donetrue \
-donefalse htdp unvoidbox vfilll mathbox \
-mathlimop mathnolop mathnothing mathalpha currentcatcodetable \
-defaultcatcodetable catcodetablename newcatcodetable startcatcodetable stopcatcodetable \
-startextendcatcodetable stopextendcatcodetable pushcatcodetable popcatcodetable restorecatcodes \
-setcatcodetable letcatcodecommand defcatcodecommand uedcatcodecommand hglue \
-vglue hfillneg vfillneg hfilllneg vfilllneg \
-ruledhss ruledhfil ruledhfill ruledhfilneg ruledhfillneg \
-normalhfillneg ruledvss ruledvfil ruledvfill ruledvfilneg \
-ruledvfillneg normalvfillneg ruledhbox ruledvbox ruledvtop \
-ruledvcenter ruledhskip ruledvskip ruledkern ruledmskip \
-ruledmkern ruledhglue ruledvglue normalhglue normalvglue \
-ruledpenalty scratchcounter globalscratchcounter scratchdimen globalscratchdimen \
-scratchskip globalscratchskip scratchmuskip globalscratchmuskip scratchtoks \
-globalscratchtoks scratchbox globalscratchbox nextbox dowithnextbox \
-dowithnextboxcs dowithnextboxcontent dowithnextboxcontentcs scratchwidth scratchheight \
-scratchdepth scratchoffset scratchdistance scratchhsize scratchvsize \
-scratchcounterone scratchcountertwo scratchcounterthree scratchdimenone scratchdimentwo \
-scratchdimenthree scratchskipone scratchskiptwo scratchskipthree scratchmuskipone \
-scratchmuskiptwo scratchmuskipthree scratchtoksone scratchtokstwo scratchtoksthree \
-scratchboxone scratchboxtwo scratchboxthree doif doifnot \
-doifelse doifinset doifnotinset doifinsetelse doifnextcharelse \
-doifnextoptionalelse doifnextbgroupelse doifnextparenthesiselse doiffastoptionalcheckelse doifundefinedelse \
-doifdefinedelse doifundefined doifdefined doifelsevalue doifvalue \
-doifnotvalue doifnothing doifsomething doifelsenothing doifsomethingelse \
-doifvaluenothing doifvaluesomething doifelsevaluenothing doifdimensionelse doifnumberelse \
-doifcommonelse doifcommon doifnotcommon doifinstring doifnotinstring \
-doifinstringelse doifassignmentelse tracingall tracingnone loggingall \
-appendtoks prependtoks appendtotoks prependtotoks to \
-endgraf empty null space quad \
-enspace obeyspaces obeylines normalspace executeifdefined \
-singleexpandafter doubleexpandafter tripleexpandafter dontleavehmode removelastspace \
-removeunwantedspaces wait writestatus define redefine \
-setmeasure setemeasure setgmeasure setxmeasure definemeasure \
-measure getvalue setvalue setevalue setgvalue \
-setxvalue letvalue letgvalue resetvalue undefinevalue \
-ignorevalue setuvalue setuevalue setugvalue setuxvalue \
-globallet glet getparameters geteparameters getgparameters \
-getxparameters forgetparameters copyparameters processcommalist processcommacommand \
-quitcommalist quitprevcommalist processaction processallactions processfirstactioninset \
-processallactionsinset unexpanded expanded startexpanded stopexpanded \
-protected protect unprotect firstofoneargument firstoftwoarguments \
-secondoftwoarguments firstofthreearguments secondofthreearguments thirdofthreearguments firstoffourarguments \
-secondoffourarguments thirdoffourarguments fourthoffourarguments firstoffivearguments secondoffivearguments \
-thirdoffivearguments fourthoffivearguments fifthoffivearguments firstofsixarguments secondofsixarguments \
-thirdofsixarguments fourthofsixarguments fifthofsixarguments sixthofsixarguments firstofoneunexpanded \
-gobbleoneargument gobbletwoarguments gobblethreearguments gobblefourarguments gobblefivearguments \
-gobblesixarguments gobblesevenarguments gobbleeightarguments gobbleninearguments gobbletenarguments \
-gobbleoneoptional gobbletwooptionals gobblethreeoptionals gobblefouroptionals gobblefiveoptionals \
-dorecurse doloop exitloop dostepwiserecurse recurselevel \
-recursedepth dofastloopcs newconstant setnewconstant newconditional \
-settrue setfalse setconstant newmacro setnewmacro \
-newfraction dosingleempty dodoubleempty dotripleempty doquadrupleempty \
-doquintupleempty dosixtupleempty doseventupleempty dosingleargument dodoubleargument \
-dotripleargument doquadrupleargument dosinglegroupempty dodoublegroupempty dotriplegroupempty \
+newhelp then firstargumentfalse firstargumenttrue secondargumentfalse \
+secondargumenttrue thirdargumentfalse thirdargumenttrue fourthargumentfalse fourthargumenttrue \
+fifthargumentfalse fifthsargumenttrue sixthargumentfalse sixtsargumenttrue doglobal \
+dodoglobal redoglobal resetglobal donothing dontcomplain \
+forgetall donetrue donefalse htdp unvoidbox \
+hfilll vfilll mathbox mathlimop mathnolop \
+mathnothing mathalpha currentcatcodetable defaultcatcodetable catcodetablename \
+newcatcodetable startcatcodetable stopcatcodetable startextendcatcodetable stopextendcatcodetable \
+pushcatcodetable popcatcodetable restorecatcodes setcatcodetable letcatcodecommand \
+defcatcodecommand uedcatcodecommand hglue vglue hfillneg \
+vfillneg hfilllneg vfilllneg ruledhss ruledhfil \
+ruledhfill ruledhfilneg ruledhfillneg normalhfillneg ruledvss \
+ruledvfil ruledvfill ruledvfilneg ruledvfillneg normalvfillneg \
+ruledhbox ruledvbox ruledvtop ruledvcenter ruledhskip \
+ruledvskip ruledkern ruledmskip ruledmkern ruledhglue \
+ruledvglue normalhglue normalvglue ruledpenalty scratchcounter \
+globalscratchcounter scratchdimen globalscratchdimen scratchskip globalscratchskip \
+scratchmuskip globalscratchmuskip scratchtoks globalscratchtoks scratchbox \
+globalscratchbox availablehsize localhsize setlocalhsize nextbox \
+dowithnextbox dowithnextboxcs dowithnextboxcontent dowithnextboxcontentcs scratchwidth \
+scratchheight scratchdepth scratchoffset scratchdistance scratchhsize \
+scratchvsize scratchxoffset scratchyoffset scratchhoffset scratchvoffset \
+scratchxposition scratchyposition scratchtopoffset scratchbottomoffset scratchleftoffset \
+scratchrightoffset scratchcounterone scratchcountertwo scratchcounterthree scratchdimenone \
+scratchdimentwo scratchdimenthree scratchskipone scratchskiptwo scratchskipthree \
+scratchmuskipone scratchmuskiptwo scratchmuskipthree scratchtoksone scratchtokstwo \
+scratchtoksthree scratchboxone scratchboxtwo scratchboxthree scratchnx \
+scratchny scratchmx scratchmy scratchleftskip scratchrightskip \
+scratchtopskip scratchbottomskip doif doifnot doifelse \
+doifinset doifnotinset doifinsetelse doifnextcharelse doifnextoptionalelse \
+doifnextbgroupelse doifnextparenthesiselse doiffastoptionalcheckelse doifundefinedelse doifdefinedelse \
+doifundefined doifdefined doifelsevalue doifvalue doifnotvalue \
+doifnothing doifsomething doifelsenothing doifsomethingelse doifvaluenothing \
+doifvaluesomething doifelsevaluenothing doifdimensionelse doifnumberelse doifnumber \
+doifnotnumber doifcommonelse doifcommon doifnotcommon doifinstring \
+doifnotinstring doifinstringelse doifassignmentelse docheckassignment tracingall \
+tracingnone loggingall removetoks appendtoks prependtoks \
+appendtotoks prependtotoks to endgraf endpar \
+everyendpar reseteverypar finishpar empty null \
+space quad enspace obeyspaces obeylines \
+normalspace executeifdefined singleexpandafter doubleexpandafter tripleexpandafter \
+dontleavehmode removelastspace removeunwantedspaces keepunwantedspaces wait \
+writestatus define redefine setmeasure setemeasure \
+setgmeasure setxmeasure definemeasure freezemeasure measure \
+getvalue setvalue setevalue setgvalue setxvalue \
+letvalue letgvalue resetvalue undefinevalue ignorevalue \
+setuvalue setuevalue setugvalue setuxvalue globallet \
+glet udef ugdef uedef uxdef \
+getparameters geteparameters getgparameters getxparameters forgetparameters \
+copyparameters getdummyparameters dummyparameter directdummyparameter setdummyparameter \
+letdummyparameter usedummystyleandcolor usedummystyleparameter usedummycolorparameter processcommalist \
+processcommacommand quitcommalist quitprevcommalist processaction processallactions \
+processfirstactioninset processallactionsinset unexpanded expanded startexpanded \
+stopexpanded protected protect unprotect firstofoneargument \
+firstoftwoarguments secondoftwoarguments firstofthreearguments secondofthreearguments thirdofthreearguments \
+firstoffourarguments secondoffourarguments thirdoffourarguments fourthoffourarguments firstoffivearguments \
+secondoffivearguments thirdoffivearguments fourthoffivearguments fifthoffivearguments firstofsixarguments \
+secondofsixarguments thirdofsixarguments fourthofsixarguments fifthofsixarguments sixthofsixarguments \
+firstofoneunexpanded gobbleoneargument gobbletwoarguments gobblethreearguments gobblefourarguments \
+gobblefivearguments gobblesixarguments gobblesevenarguments gobbleeightarguments gobbleninearguments \
+gobbletenarguments gobbleoneoptional gobbletwooptionals gobblethreeoptionals gobblefouroptionals \
+gobblefiveoptionals dorecurse doloop exitloop dostepwiserecurse \
+recurselevel recursedepth dofastloopcs newconstant setnewconstant \
+newconditional settrue setfalse setconstant newmacro \
+setnewmacro newfraction newsignal dosingleempty dodoubleempty \
+dotripleempty doquadrupleempty doquintupleempty dosixtupleempty doseventupleempty \
+dosingleargument dodoubleargument dotripleargument doquadrupleargument doquintupleargument \
+dosixtupleargument doseventupleargument dosinglegroupempty dodoublegroupempty dotriplegroupempty \
doquadruplegroupempty doquintuplegroupempty nopdfcompression maximumpdfcompression normalpdfcompression \
modulonumber dividenumber getfirstcharacter doiffirstcharelse startnointerference \
-stopnointerference strut setstrut strutbox strutht \
-strutdp strutwd begstrut endstrut
+stopnointerference twodigits threedigits strut setstrut \
+strutbox strutht strutdp strutwd struthtdp \
+begstrut endstrut lineheight
keywordclass.context.constants=\
zerocount minusone minustwo plusone \
@@ -76,55 +91,55 @@ plusseven pluseight plusnine plusten plussixteen \
plushundred plusthousand plustenthousand plustwentythousand medcard \
maxcard zeropoint onepoint halfapoint onebasepoint \
maxdimen scaledpoint thousandpoint points halfpoint \
-zeroskip pluscxxvii pluscxxviii pluscclv pluscclvi \
-normalpagebox endoflinetoken outputnewlinechar emptytoks empty \
-undefined voidbox emptybox emptyvbox emptyhbox \
-bigskipamount medskipamount smallskipamount fmtname fmtversion \
-texengine texenginename texengineversion luatexengine pdftexengine \
-xetexengine unknownengine etexversion pdftexversion xetexversion \
-xetexrevision activecatcode bgroup egroup endline \
-conditionaltrue conditionalfalse attributeunsetvalue uprotationangle rightrotationangle \
-downrotationangle leftrotationangle inicatcodes ctxcatcodes texcatcodes \
-notcatcodes txtcatcodes vrbcatcodes prtcatcodes nilcatcodes \
-luacatcodes tpacatcodes tpbcatcodes xmlcatcodes escapecatcode \
-begingroupcatcode endgroupcatcode mathshiftcatcode alignmentcatcode endoflinecatcode \
-parametercatcode superscriptcatcode subscriptcatcode ignorecatcode spacecatcode \
-lettercatcode othercatcode activecatcode commentcatcode invalidcatcode \
-tabasciicode newlineasciicode formfeedasciicode endoflineasciicode endoffileasciicode \
-spaceasciicode hashasciicode dollarasciicode commentasciicode ampersandasciicode \
-colonasciicode backslashasciicode circumflexasciicode underscoreasciicode leftbraceasciicode \
-barasciicode rightbraceasciicode tildeasciicode delasciicode lessthanasciicode \
-morethanasciicode doublecommentsignal atsignasciicode exclamationmarkasciicode questionmarkasciicode \
-doublequoteasciicode singlequoteasciicode forwardslashasciicode primeasciicode activemathcharcode \
-activetabtoken activeformfeedtoken activeendoflinetoken batchmodecode nonstopmodecode \
-scrollmodecode errorstopmodecode bottomlevelgroupcode simplegroupcode hboxgroupcode \
-adjustedhboxgroupcode vboxgroupcode vtopgroupcode aligngroupcode noaligngroupcode \
-outputgroupcode mathgroupcode discretionarygroupcode insertgroupcode vcentergroupcode \
-mathchoicegroupcode semisimplegroupcode mathshiftgroupcode mathleftgroupcode vadjustgroupcode \
-charnodecode hlistnodecode vlistnodecode rulenodecode insertnodecode \
-marknodecode adjustnodecode ligaturenodecode discretionarynodecode whatsitnodecode \
-mathnodecode gluenodecode kernnodecode penaltynodecode unsetnodecode \
-mathsnodecode charifcode catifcode numifcode dimifcode \
-oddifcode vmodeifcode hmodeifcode mmodeifcode innerifcode \
-voidifcode hboxifcode vboxifcode xifcode eofifcode \
-trueifcode falseifcode caseifcode definedifcode csnameifcode \
-fontcharifcode fontslantperpoint fontinterwordspace fontinterwordstretch fontinterwordshrink \
-fontexheight fontemwidth fontextraspace slantperpoint interwordspace \
-interwordstretch interwordshrink exheight emwidth extraspace \
-mathsupdisplay mathsupnormal mathsupcramped mathsubnormal mathsubcombined \
-mathaxisheight startmode stopmode startnotmode stopnotmode \
-startmodeset stopmodeset doifmode doifmodeelse doifnotmode \
-startallmodes stopallmodes startnotallmodes stopnotallmodes doifallmodes \
-doifallmodeselse doifnotallmodes startenvironment stopenvironment environment \
-startcomponent stopcomponent component startproduct stopproduct \
-product startproject stopproject project starttext \
-stoptext startnotext stopnotext startdocument stopdocument \
-documentvariable startmodule stopmodule usemodule startTEXpage \
-stopTEXpage enablemode disablemode preventmode pushmode \
-popmode typescriptone typescripttwo typescriptthree mathsizesuffix \
-mathordcode mathopcode mathbincode mathrelcode mathopencode \
-mathclosecode mathpunctcode mathalphacode mathinnercode mathnothingcode \
-mathlimopcode mathnolopcode mathboxcode mathchoicecode mathaccentcode \
-mathradicalcode constantnumber constantnumberargument constantdimen constantdimenargument \
-constantemptyargument continueifinputfile
+zeroskip zeromuskip onemuskip pluscxxvii pluscxxviii \
+pluscclv pluscclvi normalpagebox endoflinetoken outputnewlinechar \
+emptytoks empty undefined voidbox emptybox \
+emptyvbox emptyhbox bigskipamount medskipamount smallskipamount \
+fmtname fmtversion texengine texenginename texengineversion \
+luatexengine pdftexengine xetexengine unknownengine etexversion \
+pdftexversion xetexversion xetexrevision activecatcode bgroup \
+egroup endline conditionaltrue conditionalfalse attributeunsetvalue \
+uprotationangle rightrotationangle downrotationangle leftrotationangle inicatcodes \
+ctxcatcodes texcatcodes notcatcodes txtcatcodes vrbcatcodes \
+prtcatcodes nilcatcodes luacatcodes tpacatcodes tpbcatcodes \
+xmlcatcodes escapecatcode begingroupcatcode endgroupcatcode mathshiftcatcode \
+alignmentcatcode endoflinecatcode parametercatcode superscriptcatcode subscriptcatcode \
+ignorecatcode spacecatcode lettercatcode othercatcode activecatcode \
+commentcatcode invalidcatcode tabasciicode newlineasciicode formfeedasciicode \
+endoflineasciicode endoffileasciicode spaceasciicode hashasciicode dollarasciicode \
+commentasciicode ampersandasciicode colonasciicode backslashasciicode circumflexasciicode \
+underscoreasciicode leftbraceasciicode barasciicode rightbraceasciicode tildeasciicode \
+delasciicode lessthanasciicode morethanasciicode doublecommentsignal atsignasciicode \
+exclamationmarkasciicode questionmarkasciicode doublequoteasciicode singlequoteasciicode forwardslashasciicode \
+primeasciicode activemathcharcode activetabtoken activeformfeedtoken activeendoflinetoken \
+batchmodecode nonstopmodecode scrollmodecode errorstopmodecode bottomlevelgroupcode \
+simplegroupcode hboxgroupcode adjustedhboxgroupcode vboxgroupcode vtopgroupcode \
+aligngroupcode noaligngroupcode outputgroupcode mathgroupcode discretionarygroupcode \
+insertgroupcode vcentergroupcode mathchoicegroupcode semisimplegroupcode mathshiftgroupcode \
+mathleftgroupcode vadjustgroupcode charnodecode hlistnodecode vlistnodecode \
+rulenodecode insertnodecode marknodecode adjustnodecode ligaturenodecode \
+discretionarynodecode whatsitnodecode mathnodecode gluenodecode kernnodecode \
+penaltynodecode unsetnodecode mathsnodecode charifcode catifcode \
+numifcode dimifcode oddifcode vmodeifcode hmodeifcode \
+mmodeifcode innerifcode voidifcode hboxifcode vboxifcode \
+xifcode eofifcode trueifcode falseifcode caseifcode \
+definedifcode csnameifcode fontcharifcode fontslantperpoint fontinterwordspace \
+fontinterwordstretch fontinterwordshrink fontexheight fontemwidth fontextraspace \
+slantperpoint interwordspace interwordstretch interwordshrink exheight \
+emwidth extraspace mathsupdisplay mathsupnormal mathsupcramped \
+mathsubnormal mathsubcombined mathaxisheight startmode stopmode \
+startnotmode stopnotmode startmodeset stopmodeset doifmode \
+doifmodeelse doifnotmode startallmodes stopallmodes startnotallmodes \
+stopnotallmodes doifallmodes doifallmodeselse doifnotallmodes startenvironment \
+stopenvironment environment startcomponent stopcomponent component \
+startproduct stopproduct product startproject stopproject \
+project starttext stoptext startnotext stopnotext \
+startdocument stopdocument documentvariable startmodule stopmodule \
+usemodule startTEXpage stopTEXpage enablemode disablemode \
+preventmode pushmode popmode typescriptone typescripttwo \
+typescriptthree mathsizesuffix mathordcode mathopcode mathbincode \
+mathrelcode mathopencode mathclosecode mathpunctcode mathalphacode \
+mathinnercode mathnothingcode mathlimopcode mathnolopcode mathboxcode \
+mathchoicecode mathaccentcode mathradicalcode constantnumber constantnumberargument \
+constantdimen constantdimenargument constantemptyargument continueifinputfile
diff --git a/context/data/scite/scite-context-data-metafun.properties b/context/data/scite/scite-context-data-metafun.properties
index e0413b2dd..099f8119b 100644
--- a/context/data/scite/scite-context-data-metafun.properties
+++ b/context/data/scite/scite-context-data-metafun.properties
@@ -33,21 +33,21 @@ grayed greyed onlayer along graphictext \
loadfigure externalfigure withmask figure register \
bitmapimage colordecimals ddecimal dddecimal ddddecimal \
textext thetextext rawtextext textextoffset verbatim \
-thelabel label transparent withtransparency asgroup \
-infont set_linear_vector linear_shade define_linear_shade define_circular_linear_shade \
-define_sampled_linear_shade set_circular_vector circular_shade define_circular_shade define_circular_linear_shade \
-define_sampled_circular_shade space CRLF grayscale greyscale \
-withgray withgrey colorpart readfile clearxy \
-unitvector center epsed anchored originpath \
-infinite break xstretched ystretched snapped \
-pathconnectors function constructedpath constructedpairs punkedfunction \
-curvedfunction tightfunction punkedpath curvedpath tightpath \
-punkedpairs curvedpairs tightpairs evenly oddly \
-condition pushcurrentpicture popcurrentpicture arrowpath tensecircle \
-roundedsquare colortype whitecolor blackcolor normalfill \
-normaldraw visualizepaths naturalizepaths drawboundary drawwholepath \
-visualizeddraw visualizedfill draworigin drawboundingbox drawpath \
-drawpoint drawpoints drawcontrolpoints drawcontrollines drawpointlabels \
-drawlineoptions drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions \
-drawboundoptions drawpathoptions resetdrawoptions
+thelabel label autoalign transparent withtransparency \
+asgroup infont set_linear_vector linear_shade define_linear_shade \
+define_circular_linear_shade define_sampled_linear_shade set_circular_vector circular_shade define_circular_shade \
+define_circular_linear_shade define_sampled_circular_shade space CRLF grayscale \
+greyscale withgray withgrey colorpart readfile \
+clearxy unitvector center epsed anchored \
+originpath infinite break xstretched ystretched \
+snapped pathconnectors function constructedpath constructedpairs \
+punkedfunction curvedfunction tightfunction punkedpath curvedpath \
+tightpath punkedpairs curvedpairs tightpairs evenly \
+oddly condition pushcurrentpicture popcurrentpicture arrowpath \
+tensecircle roundedsquare colortype whitecolor blackcolor \
+normalfill normaldraw visualizepaths naturalizepaths drawboundary \
+drawwholepath visualizeddraw visualizedfill draworigin drawboundingbox \
+drawpath drawpoint drawpoints drawcontrolpoints drawcontrollines \
+drawpointlabels drawlineoptions drawpointoptions drawcontroloptions drawlabeloptions \
+draworiginoptions drawboundoptions drawpathoptions resetdrawoptions
diff --git a/context/data/scite/scite-context-readme.pdf b/context/data/scite/scite-context-readme.pdf
index 32c9628b6..5d5113528 100644
Binary files a/context/data/scite/scite-context-readme.pdf and b/context/data/scite/scite-context-readme.pdf differ
diff --git a/context/data/scite/scite-context-readme.tex b/context/data/scite/scite-context-readme.tex
index 6221d7cde..f687c4fe1 100644
--- a/context/data/scite/scite-context-readme.tex
+++ b/context/data/scite/scite-context-readme.tex
@@ -444,7 +444,7 @@ But probably better is to use the next directive just below the
usual \XML\ marker line:
\starttyping
-
+
\stoptyping
\subject{Interface selection}
diff --git a/context/data/scite/scite-context.properties b/context/data/scite/scite-context.properties
index 1a4f0f5e8..739967d3c 100644
--- a/context/data/scite/scite-context.properties
+++ b/context/data/scite/scite-context.properties
@@ -56,7 +56,7 @@ textwrapper.length=68
file.patterns.tex=
file.patterns.latex=
-file.patterns.context=*.tex;*.mkii;*.mkiv;*.mkvi;
+file.patterns.context=*.tex;*.mkii;*.mkiv;*.mkvi;*.mkix;*.mkxi;
open.suffix.$(file.patterns.context)=.tex
@@ -73,7 +73,7 @@ lexer.$(file.patterns.example)=xml
# Lua : patterns
-file.patterns.lua=*.lua;*.luc;*.cld;*.tuc;*.luj;*.lum;*.tma;*.lfg
+file.patterns.lua=*.lua;*.luc;*.cld;*.tuc;*.luj;*.lum;*.tma;*.lfg;*.luv;*.lui
open.suffix.$(file.patterns.lua)=.lua
@@ -140,6 +140,8 @@ if PLAT_GTK
# Commands: help info, e:\websites\www.pragma-ade.com\showcase.pdf / todo: manuals
command.help.$(file.patterns.context)=$(name.context.texshow) $(CurrentWord)
+command.help.$(file.patterns.context)=mtxrun --gethelp --url="http://localhost:31415/mtx-server-ctx-help.lua?command=%command%" --command="$(CurrentWord)"
+command.help.$(file.patterns.context)=mtxrun --gethelp --url="http://www.contextgarden.net/Command/%command%" --command="$(CurrentWord)"
command.help.$(file.patterns.example)=
command.help.$(file.patterns.metafun)=
@@ -169,9 +171,9 @@ command.compile.$(file.patterns.example)=$(name.example.xmlcheck) $(FileNameExt)
command.compile.*.fo=$(name.example.xmlcheck) $(FileNameExt)
command.build.$(file.patterns.context)=$(name.context.run) $(FileNameExt)
-command.build.$(file.patterns.metafun)=$(name.metafun.mptopdf) $(FileNameExt)
-command.build.$(file.patterns.example)=$(name.context.run) --xml $(FileNameExt)
-command.build.*.fo=$(name.context.run) $(name.flag.pdfopen) --xml --use=foxet $(FileNameExt)
+command.build.$(file.patterns.metafun)=$(name.context.mtxrun) --script context $(name.flag.pdfopen) $(FileNameExt)
+command.build.$(file.patterns.example)=$(name.context.run) --forcexml $(FileNameExt)
+command.build.*.fo=$(name.context.run) $(name.flag.pdfopen) --forcexml --use=foxet $(FileNameExt)
command.build.subsystem.$(file.patterns.context)=1
command.build.subsystem.$(file.patterns.metafun)=1
@@ -560,7 +562,7 @@ filter.metafun=MetaFun|$(file.patterns.metafun)|
lexer.$(file.patterns.metafun)=metapost
command.compile.$(file.patterns.metafun)=
-command.build.$(file.patterns.metafun)=context $(FileNameExt)
+command.build.$(file.patterns.metafun)=$(name.context.mtxrun) --script context $(name.flag.pdfopen) $(FileNameExt)
command.go.$(file.patterns.metafun)=gv $(FileName).1
command.0.$(file.patterns.metafun)=
@@ -664,3 +666,5 @@ fold.margin.colour=#CCCCCC
# testing
#~ cache.layout=
+
+find.command=mtxrun --script grep "$(find.what)" "$(find.files)"
diff --git a/metapost/context/base/metafun.mpiv b/metapost/context/base/metafun.mpiv
index d600764a3..8247c121f 100644
--- a/metapost/context/base/metafun.mpiv
+++ b/metapost/context/base/metafun.mpiv
@@ -11,10 +11,9 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D First we input John Hobby's metapost plain file. However,
-%D because we want to prevent dependency problems and in the
-%D end even may use a patched version, we prefer to use a
-%D copy.
+%D First we input John Hobby's metapost plain file. However, because we want to
+%D prevent dependency problems and in the end even may use a patched version,
+%D we prefer to use a copy.
input "mp-base.mpiv" ;
input "mp-tool.mpiv" ;
diff --git a/metapost/context/base/mp-abck.mpiv b/metapost/context/base/mp-abck.mpiv
index 02fab0337..abd7d8848 100644
--- a/metapost/context/base/mp-abck.mpiv
+++ b/metapost/context/base/mp-abck.mpiv
@@ -5,7 +5,7 @@
%D subtitle=anchored background macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
diff --git a/metapost/context/base/mp-apos.mpiv b/metapost/context/base/mp-apos.mpiv
index f92efc5fd..7b7737754 100644
--- a/metapost/context/base/mp-apos.mpiv
+++ b/metapost/context/base/mp-apos.mpiv
@@ -5,7 +5,7 @@
%D subtitle=anchored background macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
diff --git a/metapost/context/base/mp-asnc.mpiv b/metapost/context/base/mp-asnc.mpiv
index dfd88317c..2626e4d58 100644
--- a/metapost/context/base/mp-asnc.mpiv
+++ b/metapost/context/base/mp-asnc.mpiv
@@ -5,7 +5,7 @@
%D subtitle=anchored background macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
diff --git a/metapost/context/base/mp-butt.mpiv b/metapost/context/base/mp-butt.mpiv
index df7e9e4fd..6f5b90a7e 100644
--- a/metapost/context/base/mp-butt.mpiv
+++ b/metapost/context/base/mp-butt.mpiv
@@ -5,7 +5,7 @@
%D subtitle=buttons,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/metapost/context/base/mp-char.mpiv b/metapost/context/base/mp-char.mpiv
index c293b7284..f604accd8 100644
--- a/metapost/context/base/mp-char.mpiv
+++ b/metapost/context/base/mp-char.mpiv
@@ -5,7 +5,7 @@
%D subtitle=charts,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/metapost/context/base/mp-chem.mpiv b/metapost/context/base/mp-chem.mpiv
index cb595f2c6..2b2d8e5bf 100644
--- a/metapost/context/base/mp-chem.mpiv
+++ b/metapost/context/base/mp-chem.mpiv
@@ -3,15 +3,15 @@
%D version=2009.05.13,
%D title=\CONTEXT\ \METAPOST\ graphics,
%D subtitle=chemicals,
-%D author=Hans Hagen,
+%D author=Hans Hagen \& Alan Braslau",
%D date=\currentdate,
-%D copyright=\PRAGMA]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
%C details.
-%D This module in incomplete and experimental.
+%D This module is incomplete and experimental.
% either consistent setting or not
@@ -20,7 +20,7 @@ if known context_chem : endinput ; fi ;
boolean context_chem ; context_chem := true ;
numeric
- chem_width, chem_radical_min, chem_radical_max, chem_text_max, chem_circle_radius,
+ chem_width, chem_radical_min, chem_radical_max, chem_text_min, chem_text_max, chem_circle_radius,
chem_rotation, chem_adjacent, chem_stack, chem_substituent, chem_direction, chem_setting_scale,
chem_setting_offset, chem_text_offset, chem_picture_offset, chem_center_offset, chem_substituent_offset,
chem_setting_l, chem_setting_r, chem_setting_t, chem_setting_b ;
@@ -30,6 +30,9 @@ boolean
chem_setting_fixedwidth, chem_setting_fixedheight,
chem_doing_pb, chem_text_trace ;
+color
+ chem_axis_color ;
+
path
chem_setting_bbox ;
@@ -38,24 +41,26 @@ pair
chem_adjacent_p, chem_substituent_p, chem_direction_p, chem_move_p ;
numeric
- chem_width[], chem_angle[], chem_start[], chem_initialrot[], chem_initialmov[] ;
+ chem_width[], chem_angle[], chem_dbl_offset[], chem_initialmov[] ;
pair
chem_stack_d[],
chem_b_zero[], chem_n_zero[],
- chem_r_max[], chem_r_min[],
+ chem_r_max[],
chem_r_zero[], chem_mr_zero[], chem_pr_zero[], chem_crz_zero[],
chem_rt_zero[], chem_rtt_zero[], chem_rbt_zero[],
+ chem_zbt_zero[], chem_ztt_zero[],
chem_mid_zero[] ;
path
chem_b_path[], chem_bx_path[], chem_eb_path[], chem_sr_path[], chem_br_path[],
chem_sb_path[], chem_msb_path[], chem_psb_path[],
chem_s_path[], chem_ss_path[], chem_mss_path[], chem_pss_path[],
- chem_e_path[], chem_sd_path[], chem_bb_path[], chem_oe_path[],
+ chem_e_path[], chem_bb_path[], chem_oe_path[],
chem_bd_path[], chem_bw_path[],
chem_ddt_path[], chem_ddb_path[], chem_ldt_path[], chem_ldb_path[], chem_rdt_path[], chem_rdb_path[],
chem_dbl_path[], chem_dbr_path[],
+ chem_tbl_path[], chem_tbr_path[],
chem_ad_path[], chem_au_path[],
chem_r_path[], chem_rl_path[], chem_rr_path[],
chem_rb_path[], chem_prb_path[], chem_mrb_path[],
@@ -66,23 +71,23 @@ path
chem_midt_path[], chem_midb_path[], chem_midst_path[], chem_midsb_path[] ;
chem_setting_scale := 1 ;
-chem_base_width := 40pt ;
-chem_text_offset := 3pt ;
+chem_base_width := 40pt ; % Should this rather follow the font size?
+chem_text_offset := 3pt ; % ?
chem_center_offset := 6pt ;
-chem_picture_offset := 10pt ;
-chem_substituent_offset := 10pt ;
+chem_picture_offset := 10pt ; % Should this follow chem_base_width (thus the font size)?
+chem_substituent_offset := 10pt ; % Should this follow chem_base_width (thus the font size)?
chem_radical_min := 1.25 ;
chem_radical_max := 1.50 ;
chem_text_min := 0.75 ;
-chem_text_max := 1.75 ;
+chem_text_max := 1.25 ;
chem_circle_radius := 0.80 ;
chem_circle_radius := 1.10 ;
-chem_rotation := 1 ;
+chem_rotation := 0 ;
chem_adjacent := 0 ;
chem_substituent := 0 ;
chem_direction := 0 ;
chem_stack_n := 0 ;
-chem_doing_pb := false ;
+chem_doing_pb := false ;
chem_shift := origin ;
chem_dot_factor := 4 ;
chem_text_trace := false ;
@@ -90,8 +95,10 @@ chem_bd_n := 4 ;
chem_bw_n := 4 ;
chem_bd_angle := 4 ;
chem_bb_angle := 4 ;
+chem_axis_color := blue ; % TODO: add "axiscolor=" option in lua...
vardef chem_start_structure(expr n, l, r, t, b, scale, axis, fixedwidth, fixedheight, offset) =
+ % note that "n" is not used...
chem_setting_axis := axis ;
chem_setting_l := l * scale ;
chem_setting_r := r * scale ;
@@ -104,7 +111,7 @@ vardef chem_start_structure(expr n, l, r, t, b, scale, axis, fixedwidth, fixedhe
chem_setting_scale := scale ;
chem_init_all ;
fi ;
- chem_rotation := 1 ;
+ chem_rotation := 0 ;
chem_adjacent := 0 ;
chem_substituent := 0 ;
chem_direction := 0 ;
@@ -127,17 +134,18 @@ def chem_stop_structure =
chem_setting_bbox :=
(-chem_setting_l,-chem_setting_b) -- ( chem_setting_r,-chem_setting_b) --
( chem_setting_r, chem_setting_t) -- (-chem_setting_l, chem_setting_t) -- cycle ;
- % maybe put it behind the picture
- if chem_setting_axis :
+ if chem_setting_axis : % put it behind the picture
+ picture chem_picture ; chem_picture := currentpicture ; currentpicture := nullpicture ;
save stp ; stp := chem_base_width/ 2 * chem_setting_scale ;
- save siz ; siz := chem_base_width/10 * chem_setting_scale ;
- draw (-chem_setting_l,0) -- (chem_setting_r,0) withcolor blue ;
- draw (0,-chem_setting_b) -- (0,chem_setting_t) withcolor blue ;
- for i = 0 step stp until chem_setting_r : draw (i,-siz) -- (i,siz) withcolor blue ; endfor ;
- for i = 0 step -stp until -chem_setting_l : draw (i,-siz) -- (i,siz) withcolor blue ; endfor ;
- for i = 0 step stp until chem_setting_t : draw (-siz,i) -- (siz,i) withcolor blue ; endfor ;
- for i = 0 step -stp until -chem_setting_b : draw (-siz,i) -- (siz,i) withcolor blue ; endfor ;
- draw chem_setting_bbox withcolor blue ;
+ save siz ; siz := stp/5 ;
+ draw (-chem_setting_l,0) -- (chem_setting_r,0) withcolor chem_axis_color ;
+ draw (0,-chem_setting_b) -- (0,chem_setting_t) withcolor chem_axis_color ;
+ for i = 0 step stp until chem_setting_r : draw (i,-siz) -- (i,siz) withcolor chem_axis_color ; endfor ;
+ for i = 0 step -stp until -chem_setting_l : draw (i,-siz) -- (i,siz) withcolor chem_axis_color ; endfor ;
+ for i = 0 step stp until chem_setting_t : draw (-siz,i) -- (siz,i) withcolor chem_axis_color ; endfor ;
+ for i = 0 step -stp until -chem_setting_b : draw (-siz,i) -- (siz,i) withcolor chem_axis_color ; endfor ;
+ % frame=on: draw chem_setting_bbox withcolor chem_axis_color ;
+ addto currentpicture also chem_picture ;
fi ;
setbounds currentpicture to chem_setting_bbox ;
enddef ;
@@ -146,8 +154,8 @@ def chem_start_component = enddef ;
def chem_stop_component = enddef ;
def chem_pb =
-% draw boundingbox currentpicture withpen pencircle scaled 1mm withcolor blue ;
-% draw origin withpen pencircle scaled 2mm withcolor blue ;
+% draw boundingbox currentpicture withpen pencircle scaled 1mm withcolor chem_axis_color ;
+% draw origin withpen pencircle scaled 2mm withcolor chem_axis_color ;
chem_doing_pb := true ;
enddef ;
@@ -174,6 +182,7 @@ vardef chem_do (expr p) =
fi
enddef ;
+% f_rom, t_o, r_ule, c_olor
vardef chem_b (expr n, f, t, r, c) =
chem_draw (n, chem_b_path[n], f, t, r, c) ;
enddef ;
@@ -221,13 +230,8 @@ vardef chem_eb (expr n, f, t, r, c) =
enddef ;
vardef chem_db (expr n, f, t, r, c) =
- if n = 1 :
- chem_draw (n, chem_msb_path [n], f, t, r, c) ;
- chem_draw (n, chem_psb_path [n], f, t, r, c) ;
- else :
- chem_draw (n, chem_dbl_path [n], f, t, r, c) ;
- chem_draw (n, chem_dbr_path [n], f, t, r, c) ;
- fi ;
+ chem_draw (n, chem_dbl_path [n], f, t, r, c) ;
+ chem_draw (n, chem_dbr_path [n], f, t, r, c) ;
enddef ;
vardef chem_er (expr n, f, t, r, c) =
@@ -313,7 +317,7 @@ vardef chem_psr (expr n, f, t, r, c) =
enddef ;
vardef chem_c (expr n, f, t, r, c) =
- chem_draw (n, chem_c_path[n], f, t, r, c)
+ chem_draw (n, chem_c_path[n], f, f, r, c)
enddef ;
vardef chem_cc (expr n, f, t, r, c) =
@@ -321,7 +325,7 @@ vardef chem_cc (expr n, f, t, r, c) =
enddef ;
vardef chem_cd (expr n, f, t, r, c) =
- chem_dashed_connected (n, chem_c_path[n], f, t, r, c)
+ chem_dashed_connected (n, chem_c_path[n], f, f, r, c)
enddef ;
vardef chem_ccd (expr n, f, t, r, c) =
@@ -341,13 +345,13 @@ vardef chem_rbn (expr n, i, t) =
enddef ;
vardef chem_tb (expr n, f, t, r, c) = % one
- chem_draw (n, chem_msb_path[n], f, t, r, c) ;
- chem_draw (n, chem_sb_path [n], f, t, r, c) ;
- chem_draw (n, chem_psb_path[n], f, t, r, c) ;
+ chem_draw (n, chem_tbl_path [n], f, t, r, c) ;
+ chem_draw (n, chem_sb_path [n], f, t, r, c) ;
+ chem_draw (n, chem_tbr_path [n], f, t, r, c) ;
enddef ;
vardef chem_ep (expr n, f, t, r, c) = % one
- chem_draw (n, chem_e_path[n], f, t, r, c) ;
+ chem_draw (n, (subpath (.25,.75) of chem_e_path[n]), f, t, r, c) ;
enddef ;
vardef chem_es (expr n, f, t, r, c) = % one
@@ -355,8 +359,8 @@ vardef chem_es (expr n, f, t, r, c) = % one
enddef ;
vardef chem_ed (expr n, f, t, r, c) = % one
- chem_draw_dot (n, point 0 of chem_e_path[n], f, t, r, c) ;
- chem_draw_dot (n, point 1 of chem_e_path[n], f, t, r, c) ;
+ chem_draw_dot (n, point .25 of chem_e_path[n], f, t, r, c) ;
+ chem_draw_dot (n, point .75 of chem_e_path[n], f, t, r, c) ;
enddef ;
vardef chem_et (expr n, f, t, r, c) = % one
@@ -373,11 +377,11 @@ enddef ;
vardef chem_rdd (expr n, f, t, r, c) = % one
chem_draw (n, chem_ldt_path[n], f, t, r, c) ;
chem_draw (n, chem_ldb_path[n], f, t, r, c) ;
- chem_draw (n, chem_psb_path[n], f, t, r, c) ;
+ chem_draw (n, chem_sb_path [n], f, t, r, c) ;
enddef ;
vardef chem_ldd (expr n, f, t, r, c) = % one
- chem_draw (n, chem_msb_path[n], f, t, r, c) ;
+ chem_draw (n, chem_sb_path [n], f, t, r, c) ;
chem_draw (n, chem_rdt_path[n], f, t, r, c) ;
chem_draw (n, chem_rdb_path[n], f, t, r, c) ;
enddef ;
@@ -388,10 +392,11 @@ vardef chem_hb (expr n, f, t, r, c) = % one
chem_draw_dot (n, point 1 of chem_sb_path[n], f, t, r, c) ;
enddef ;
-vardef chem_bb (expr n, f, t, r, c) = % one
+vardef chem_bb (expr n, f, t, r, c) = % one and front
if n < 0 :
- chem_fill (n, chem_bb_path[n], 1, 1, r, c) ;
- chem_b (n, f, t, r, c) ;
+ if ((f = 1) and (t = -n)) : % ignore all but "BB"
+ chem_fill (n, chem_bb_path[n], 1, 1, r, c) ;
+ fi
else :
chem_fill (n, chem_bb_path[n], f, t, r, c) ;
fi ;
@@ -428,7 +433,9 @@ vardef chem_z@#(expr n, p) (text t) =
enddef ;
vardef chem_cz@#(expr n, p) (text t) =
- if n = 1 :
+ if p = 0 :
+ chem_text@#(t, chem_do(origin)) ;
+ elseif n = 1 :
chem_c_text(t, chem_do(chem_crz_zero[n] rotated chem_ang(n,p))) ;
else :
chem_text@#(t, chem_do(chem_b_zero[n] rotated chem_ang(n,p))) ;
@@ -439,12 +446,37 @@ vardef chem_midz@#(expr n, p) (text t) =
chem_text@#(t, chem_do(chem_mid_zero[n] rotated chem_ang(n,p))) ;
enddef ;
+string mfun_auto_align[] ;
+mfun_auto_align[0] := "rt" ;
+mfun_auto_align[1] := "urt" ;
+mfun_auto_align[2] := "top" ;
+mfun_auto_align[3] := "ulft" ;
+mfun_auto_align[4] := "lft" ;
+mfun_auto_align[5] := "llft" ;
+mfun_auto_align[6] := "bot" ;
+mfun_auto_align[7] := "lrt" ;
+mfun_auto_align[8] := "rt" ;
+
+def autoalign(expr n) =
+ scantokens mfun_auto_align[round((n mod 360)/45)]
+enddef ;
+
+% draw textext.autoalign(60) ("\strut oeps 1") ;
+% draw textext.autoalign(160)("\strut oeps 2") ;
+% draw textext.autoalign(260)("\strut oeps 3") ;
+% draw textext.autoalign(360)("\strut oeps 4") ;
+
+
vardef chem_rz@#(expr n, p) (text t) =
if n < 0 :
% quite special
chem_text@#(t, chem_do(chem_r_zero[n] shifted (chem_b_zero[n] rotated chem_ang(n,p)))) ;
else :
- chem_text@#(t, chem_do(chem_r_zero[n] rotated chem_ang(n,p))) ;
+ if (length(str @#)>0) and (str @# = "auto") :
+ chem_text.autoalign(chem_ang(n,p-.5)) (t, chem_do(chem_r_zero[n] rotated chem_ang(n,p))) ;
+ else :
+ chem_text@#(t, chem_do(chem_r_zero[n] rotated chem_ang(n,p))) ;
+ fi
fi ;
enddef ;
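
For illustration, the same eight-sector bucketing expressed as a standalone Lua sketch (not part of the patch), which may make the rounding in autoalign easier to check:

    -- maps an angle (degrees) onto one of the eight label suffixes above
    local suffixes = { [0] = "rt", "urt", "top", "ulft", "lft", "llft", "bot", "lrt", "rt" }

    local function autoalign(angle)
        return suffixes[math.floor((angle % 360) / 45 + 0.5)] -- round to nearest, as MetaPost's round does
    end

    print(autoalign(60), autoalign(160), autoalign(260), autoalign(360)) -- urt  lft  bot  rt
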
@@ -483,35 +515,27 @@ vardef chem_rbt@#(expr n, p) (text t) =
enddef ;
vardef chem_zt@#(expr n, p) (text t) =
- if n = 1 :
- chem_text@#(t, chem_do(chem_rt_zero[n] rotated chem_ang(n,p))) ;
- else :
- chem_text@#(t, chem_do(chem_n_zero[n] rotated chem_ang(n,p))) ;
- fi ;
+ chem_text@#(t, chem_do(chem_n_zero[n] rotated chem_ang(n,p))) ;
enddef ;
vardef chem_zn@#(expr n, p) (text t) =
- if n = 1 :
- chem_text@#(t, chem_do(chem_rt_zero[n] rotated chem_ang(n,p))) ;
- else :
- chem_text@#(t, chem_do(chem_n_zero[n] rotated chem_ang(n,p))) ;
- fi ;
+ chem_zt@#(n, p, t) ;
enddef ;
vardef chem_zbt@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rtt_zero[n] rotated chem_ang(n,p))) ;
+ chem_text@#(t, chem_do(chem_zbt_zero[n] rotated chem_ang(n,p))) ;
enddef ;
vardef chem_zbn@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rtt_zero[n] rotated chem_ang(n,p))) ;
+ chem_zbt@#(n, p, t) ;
enddef ;
vardef chem_ztt@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rbt_zero[n] rotated chem_ang(n,p))) ;
+ chem_text@#(t, chem_do(chem_ztt_zero[n] rotated chem_ang(n,p))) ;
enddef ;
vardef chem_ztn@#(expr n, p) (text t) =
- chem_text@#(t, chem_do(chem_rbt_zero[n] rotated chem_ang(n,p))) ;
+ chem_ztt@#(n, p, t) ;
enddef ;
vardef chem_symbol(expr t) =
@@ -545,11 +569,13 @@ vardef chem_c_text(expr txt, z) = % adapted copy of thelabel@
enddef ;
vardef chem_ang (expr n, d) =
- ((-1 * (d-1) * chem_angle[n]) + (-chem_rotation+1) * 90 + chem_start[n]) % no ;
+ ((1 - d)*chem_angle[n] + if (n<0): -90 else: chem_rotation fi) % no ;
enddef ;
vardef chem_rot (expr n, d) =
- chem_rotation := d ;
+ if (d <> 0) :
+ chem_rotation := chem_rotation + 180/(if (d=1): 8 else: abs(d) fi) ;
+ fi
enddef ;
vardef chem_adj (expr n, d) =
@@ -562,7 +588,7 @@ enddef ;
vardef chem_dir (expr n, d) =
if n = 1 :
- chem_direction_p := (origin - 2*center(chem_b_path[n] rotated chem_ang(n,d+1))) ;
+ chem_direction_p := (origin - 2*center(chem_b_path[n] rotated chem_ang(n,d))/cosd(chem_angle[n])) ;
currentpicture := currentpicture shifted chem_direction_p ;
chem_shift := chem_shift + chem_direction_p ;
fi ;
@@ -573,23 +599,21 @@ vardef chem_mov (expr n, d) =
currentpicture := currentpicture shifted - chem_shift ;
chem_shift := origin ;
else :
- chem_move_p := (origin - 2*center(chem_b_path[n] rotated chem_ang(n,d+chem_initialmov[n]))) ;
+ %chem_move_p := (origin - 2*center(chem_b_path[n] rotated chem_ang(n,d+chem_initialmov[n]))) ;
+ chem_move_p := -chem_b_zero[n] rotated chem_ang(n,d+chem_initialmov[n]) ;
currentpicture := currentpicture shifted chem_move_p ;
chem_shift := chem_shift + chem_move_p ;
+ %if (n = 3) or (n = 5) : chem_rot(n, -n) ; fi
fi ;
enddef ;
vardef chem_off (expr n, d) =
- if (d = 1) or (d = 2) or (d = 8) : % positive
- currentpicture := currentpicture shifted (-chem_setting_offset,0) ;
- chem_shift := chem_shift + (-chem_setting_offset,0)
- elseif (d = 4) or (d = 5) or (d = 6) : % negative
- currentpicture := currentpicture shifted ( chem_setting_offset,0) ;
- chem_shift := chem_shift + ( chem_setting_offset,0)
- fi ;
+ pair o ; o := (-chem_setting_offset,0) rotated chem_ang(1,d+chem_initialmov[1]) ;
+ currentpicture := currentpicture shifted o ;
+ chem_shift := chem_shift + o ;
enddef ;
-vardef chem_set(expr n, m) =
+vardef chem_set (expr n, m) =
if chem_adjacent > 0 :
chem_adjacent_d := xpart chem_b_zero[n] + xpart chem_b_zero[m] ;
if chem_adjacent = 1 : chem_adjacent_p := (-chem_adjacent_d, 0) ;
@@ -618,7 +642,6 @@ vardef chem_set(expr n, m) =
chem_shift := chem_shift + chem_substituent_p ;
chem_substituent := 0 ;
fi ;
- chem_rotation := chem_initialrot[m] ;
enddef ;
vardef chem_draw (expr n, path_fragment, from_point, to_point, linewidth, linecolor) =
@@ -678,7 +701,7 @@ vardef chem_save =
chem_shift := origin ;
% chem_adjacent := 0 ;
% chem_substituent := 0 ;
-% chem_rotation := 1 ;
+% chem_rotation := 0 ;
currentpicture := nullpicture ;
enddef ;
@@ -693,14 +716,16 @@ vardef chem_restore =
fi ;
enddef ;
-def chem_init_some(expr n, ratio, start, initialrot, initialmov) =
- chem_width [n] := ratio * chem_base_width * chem_setting_scale ;
+def chem_init_some(expr n) =
+ boolean front ; front := n < 0 ;
chem_angle [n] := 360/abs(n) ;
- chem_start [n] := start ;
- chem_initialrot[n] := initialrot ;
- chem_initialmov[n] := initialmov ;
- chem_b_zero [n] := (chem_width[n],0) rotated (chem_angle[n]/2) ;
- chem_n_zero [n] := (chem_text_min*chem_width[n],0) rotated (chem_angle[n]/2) ;
+ chem_initialmov[n] := 0 ;
+ chem_width [n] := chem_setting_scale * (chem_base_width/2) / sind(chem_angle[n]/2) ;
+ chem_dbl_offset[n] := chem_setting_scale * chem_base_width / 20 ;
+ chem_b_path [n] := ((chem_width[n],0) rotated -(chem_angle[n]/2))--
+ ((chem_width[n],0) rotated +(chem_angle[n]/2)) ;
+ chem_b_zero [n] := point 1 of chem_b_path [n] ;
+ chem_n_zero [n] := chem_text_min*chem_b_zero[n] ;
chem_r_max [n] := chem_radical_max*chem_b_zero[n] ;
chem_r_path [n] := chem_b_zero[n] -- chem_r_max[n] ;
chem_mr_path [n] := chem_r_path [n] rotatedaround(chem_b_zero[n], (180-chem_angle[n])/2) ;
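
A quick check of the new geometry (not part of the patch): with chem_angle[n] = 360/abs(n), the expression chem_setting_scale * (chem_base_width/2) / sind(chem_angle[n]/2) is the circumradius of a regular n-gon whose side length is chem_base_width, so for n = 6 (sind(30) = 0.5) chem_width[6] equals chem_base_width * chem_setting_scale, while for n = 5 (sind(36) about 0.588) it comes out near 0.85 * chem_base_width * chem_setting_scale. The new chem_dbl_offset[n], one twentieth of the scaled base width, is the parallel offset used further down for double bonds and related paths.
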
@@ -709,119 +734,120 @@ def chem_init_some(expr n, ratio, start, initialrot, initialmov) =
chem_mr_zero [n] := point 1 of chem_mr_path[n] ;
chem_pr_zero [n] := point 1 of chem_pr_path[n] ;
chem_crz_zero [n] := point 1 of (chem_r_path[n] enlonged chem_center_offset) ;
- chem_au_path [n] := subpath (0.2,0.8) of (chem_r_max[n] -- (chem_r_max[n] rotated chem_angle[n])) ;
+ chem_ztt_zero [n] := chem_text_max*(center chem_b_path[n]) ;
+ chem_zbt_zero [n] := chem_text_min*(center chem_b_path[n]) ;
+ chem_au_path [n] := chem_b_path[n] paralleled ((1-chem_text_max)*(abs(center chem_b_path[n]))) ;
chem_ad_path [n] := reverse(chem_au_path[n]) ;
chem_rt_zero [n] := (((chem_radical_max+chem_radical_min)/2)*chem_width[n],0) rotated (chem_angle[n]/2) ;
chem_rtt_zero [n] := chem_rt_zero[n] rotated + 10 ;
chem_rbt_zero [n] := chem_rt_zero[n] rotated - 10 ;
- chem_b_path [n] := reverse(chem_b_zero[n] -- (chem_b_zero[n] rotated -chem_angle[n])) ;
- chem_bx_path [n] := reverse(chem_b_zero[n] -- (chem_b_zero[n] rotated -chem_angle[n])) ; % ?
+ chem_bx_path [n] := reverse(chem_b_zero[n] -- (chem_b_zero[n] rotated -chem_angle[n])) ; % ? NOT USED...
chem_sb_path [n] := subpath (0.25,0.75) of chem_b_path[n] ;
+ chem_msb_path [n] := subpath (0.00,0.75) of chem_b_path[n] ;
+ chem_psb_path [n] := subpath (0.25,1.00) of chem_b_path[n] ;
+ chem_dbl_path [n] := chem_sb_path[n] paralleled - chem_dbl_offset[n] ;
+ chem_dbr_path [n] := chem_sb_path[n] paralleled + chem_dbl_offset[n] ;
+ chem_eb_path [n] := chem_sb_path[n] paralleled +2chem_dbl_offset[n] ;
+ chem_c_path [n] := (fullcircle scaled 2)
+ scaled (abs(center chem_b_path[n]) - 2chem_dbl_offset[n]) ;
+ chem_cc_path [n] := (subpath (0,(length chem_c_path[n])*(1-1/n)) of chem_c_path[n])
+ rotated chem_angle[n] ;
chem_s_path [n] := point 0 of chem_b_path[n] -- point 0 of (chem_b_path[n] rotated (2chem_angle[n])) ;
chem_ss_path [n] := subpath (0.25,0.75) of (chem_s_path[n]) ;
chem_pss_path [n] := subpath (0.00,0.75) of (chem_s_path[n]) ;
chem_mss_path [n] := subpath (0.25,1.00) of (chem_s_path[n]) ;
chem_mid_zero [n] := origin shifted (-.25chem_width[n],0) ;
- chem_midst_path[n] := chem_mid_zero[n] -- (chem_width[n],0) rotated ( chem_angle[n] + chem_angle[n]/2) ;
- chem_midsb_path[n] := chem_mid_zero[n] -- (chem_width[n],0) rotated (-chem_angle[n] - chem_angle[n]/2) ;
- chem_midt_path [n] := subpath (0.25,1.00) of chem_midst_path [n] ;
- chem_midb_path [n] := subpath (0.25,1.00) of chem_midsb_path [n] ;
- chem_msb_path [n] := subpath (0.00,0.75) of chem_b_path[n] ;
- chem_psb_path [n] := subpath (0.25,1.00) of chem_b_path[n] ;
- chem_dbl_path [n] := chem_sb_path[n] shifted - (0.05[origin,center chem_sb_path[n]]) ; % parallel
- chem_dbr_path [n] := chem_sb_path[n] shifted + (0.05[origin,center chem_sb_path[n]]) ;
- chem_eb_path [n] := chem_sb_path[n] shifted - (0.25[origin,center chem_sb_path[n]]) ;
+ chem_midt_path[n] := chem_mid_zero[n] -- chem_b_zero[n] rotated (+chem_angle[n]*floor(n/4)) ;
+ chem_midb_path[n] := chem_mid_zero[n] -- chem_b_zero[n] rotated (-chem_angle[n]*ceiling(n/4)) ;
+ chem_midst_path [n] := subpath (0.25,1.00) of chem_midt_path [n] ;
+ chem_midsb_path [n] := subpath (0.25,1.00) of chem_midb_path [n] ;
chem_sr_path [n] := chem_radical_min*chem_b_zero[n] -- chem_r_max[n] ;
- chem_rl_path [n] := chem_r_path[n] paralleled (chem_base_width/20) ;
- chem_rr_path [n] := chem_r_path[n] paralleled -(chem_base_width/20) ;
- chem_srl_path [n] := chem_sr_path[n] paralleled (chem_base_width/20) ;
- chem_srr_path [n] := chem_sr_path[n] paralleled -(chem_base_width/20) ;
+ chem_rl_path [n] := chem_r_path[n] paralleled +chem_dbl_offset[n] ;
+ chem_rr_path [n] := chem_r_path[n] paralleled -chem_dbl_offset[n] ;
+ chem_srl_path [n] := chem_sr_path[n] paralleled +chem_dbl_offset[n] ;
+ chem_srr_path [n] := chem_sr_path[n] paralleled -chem_dbl_offset[n] ;
chem_br_path [n] := point 1 of chem_sb_path[n] --
point 0 of chem_sb_path[n] rotatedaround(point 1 of chem_sb_path[n], -4) --
point 0 of chem_sb_path[n] rotatedaround(point 1 of chem_sb_path[n], 4) -- cycle ;
chem_rb_path [n] := chem_b_zero[n] -- chem_r_max[n] rotated -2 -- chem_r_max[n] -- chem_r_max[n] rotated 2 -- cycle ;
- chem_mrb_path [n] := chem_rb_path[n] rotatedaround(chem_b_zero[n], (180-chem_angle[n])/2) ;
+ chem_mrb_path [n] := chem_rb_path[n] rotatedaround(chem_b_zero[n],+(180-chem_angle[n])/2) ;
chem_prb_path [n] := chem_rb_path[n] rotatedaround(chem_b_zero[n],-(180-chem_angle[n])/2) ;
- chem_msr_path [n] := chem_sr_path[n] rotatedaround(chem_b_zero[n], (180-chem_angle[n])/2) ;
+ chem_msr_path [n] := chem_sr_path[n] rotatedaround(chem_b_zero[n],+(180-chem_angle[n])/2) ;
chem_psr_path [n] := chem_sr_path[n] rotatedaround(chem_b_zero[n],-(180-chem_angle[n])/2) ;
- % not yet ok:
-% chem_c_path [n] := subpath (30/45, -30/45) of (fullcircle scaled (1.25*chem_circle_radius*chem_width[n]));
-% chem_cc_path [n] := subpath (30/45,8-30/45) of (fullcircle rotated 90 scaled (1.25*chem_circle_radius*chem_width[n]));
- chem_c_path [n] := subpath (30/45, -30/45) of (fullcircle scaled (chem_width[n]));
- chem_cc_path [n] := subpath (30/45,8-30/45) of (fullcircle rotated 90 scaled (chem_width[n]));
+
+ if (front) :
+ chem_bb_path [n] := chem_b_path[n] rotated -chem_angle[n] --
+ chem_b_path[n] --
+ chem_b_path[n] rotated +chem_angle[n] --
+ (reverse(chem_b_path[n])) % shortened (.5chem_dbl_offset[n])))
+ paralleled chem_dbl_offset[n] --
+ cycle ;
+ chem_mr_path [n] := origin -- origin shifted (0,-.25chem_base_width) ;
+ chem_pr_path [n] := origin -- origin shifted (0,+.25chem_base_width) ;
+ chem_mr_zero [n] := point 1 of chem_mr_path[n] ;
+ chem_pr_zero [n] := point 1 of chem_pr_path[n] ;
+ chem_r_path [n] := chem_mr_zero[n] -- chem_pr_zero[n] ;
+ fi
enddef ;
-def chem_init_three = chem_init_some(3,30/52 ,-60,1,2) ; enddef ; % 60
-def chem_init_four = chem_init_some(4,30/42.5, 0,1,0) ; enddef ; % 45
-def chem_init_five = chem_init_some(5,30/35 , 0,1,0) ; enddef ; % 36
-def chem_init_six = chem_init_some(6, 1 , 0,1,0) ; enddef ; % 30
-def chem_init_eight = chem_init_some(8,30/22.5, 0,1,0) ; enddef ; % 22.5
+def chem_init_three = chem_init_some(3) ; enddef ;
+def chem_init_four = chem_init_some(4) ; enddef ;
+def chem_init_five = chem_init_some(5) ; enddef ;
+def chem_init_six = chem_init_some(6) ; enddef ;
+def chem_init_eight = chem_init_some(8) ; enddef ;
+def chem_init_five_front = chem_init_some(-5) ; enddef ;
+def chem_init_six_front = chem_init_some(-6) ; enddef ;
% bb R -R R Z -RZ +RZ
-def chem_init_some_front(expr n, ratio, start, initialrot, initialmov) =
- chem_init_some(n, ratio, start, initialrot, initialmov) ;
- chem_bb_path [n] := chem_b_path[n] rotated -chem_angle[n] -- chem_b_path[n] -- chem_b_path[n] rotated chem_angle[n] --
- (reverse(chem_b_path[n] shortened (chem_base_width/20))) paralleled (chem_base_width/20) --
- cycle ;
- chem_r_max [n] := chem_radical_max*chem_b_zero[n] ;
- chem_mr_path [n] := origin -- origin shifted (0,-.25chem_base_width) ;
- chem_pr_path [n] := origin -- origin shifted (0, .25*chem_base_width) ;
- chem_r_path [n] := point 1 of chem_mr_path[n] -- point 1 of chem_pr_path[n] ;
- chem_mr_zero [n] := point 1 of chem_mr_path[n] ;
- chem_pr_zero [n] := point 1 of chem_pr_path[n] ;
-enddef ;
-
-def chem_init_five_front = chem_init_some_front(-5,30/35,0,2,0) ; enddef ; % 36
-def chem_init_six_front = chem_init_some_front(-6, 1 ,0,2,0) ; enddef ; % 30
-
vardef chem_init_one =
- chem_width [1] := .75 * chem_base_width * chem_setting_scale ;
chem_angle [1] := 360/8 ;
- chem_start [1] := 0 ;
- chem_initialrot[1] := 1 ;
- chem_initialmov[1] := 1 ;
- chem_b_zero [1] := (1.75*chem_width[1],0) ;
- chem_r_min [1] := chem_radical_min*chem_b_zero[1] ;
+ chem_initialmov[1] := 0 ;
+ chem_width [1] := chem_setting_scale * chem_base_width ;
+ chem_dbl_offset[1] := chem_width[1] / 20 ;
+ chem_b_path [1] := origin -- (chem_width[1],0) ;
+ chem_b_zero [1] := point 1 of chem_b_path[1] ;
chem_r_max [1] := chem_radical_max*chem_b_zero[1] ;
- chem_r_path [1] := (.5*chem_width[1],0) -- (1.25*chem_width[1],0) ;
- chem_r_zero [1] := point 1 of chem_r_path [1] ;
- chem_b_path [1] := chem_r_path[1] rotated + (chem_angle[1]) ; % used for move here
- chem_b_zero [1] := chem_r_zero[1] ;
- chem_crz_zero [1] := chem_r_zero[1] enlonged chem_center_offset ;
- chem_e_path [1] := (.5*chem_width[1],-.25*chem_width[1]) -- (.5*chem_width[1],.25*chem_width[1]) ;
- chem_sb_path [1] := chem_r_path [1] ;
- chem_msb_path [1] := chem_r_path [1] shifted (0,-.1chem_width[1]) ;
- chem_psb_path [1] := chem_r_path [1] shifted (0, .1chem_width[1]) ;
- chem_ddt_path [1] := subpath(0,.4) of chem_r_path [1] ;
- chem_ddb_path [1] := subpath(.6,1) of chem_r_path [1] ;
- chem_ldt_path [1] := chem_ddt_path [1] shifted (0,-.1chem_width[1]) ; % parallel
- chem_ldb_path [1] := chem_ddb_path [1] shifted (0,-.1chem_width[1]) ;
- chem_rdt_path [1] := chem_ddt_path [1] shifted (0, .1chem_width[1]) ;
- chem_rdb_path [1] := chem_ddb_path [1] shifted (0, .1chem_width[1]) ;
+ chem_r_path [1] := (center chem_b_path[1]) -- (chem_radical_min*chem_b_zero[1]) ;
+ chem_r_zero [1] := point 1 of chem_r_path[1] ;
+ chem_crz_zero [1] := chem_r_zero[1] enlonged chem_center_offset ; % ???
+ chem_e_path [1] := ((1,-.5) -- (1,+.5)) scaled (.25chem_width[1]) ;
+ chem_sb_path [1] := subpath (0.25,0.75) of chem_b_path[1] ;
+ chem_msb_path [1] := subpath (0, 0.75) of chem_b_path[1] ;
+ chem_psb_path [1] := subpath (0.25,1) of chem_b_path[1] ;
+ chem_ddt_path [1] := subpath (0, 0.4) of chem_sb_path[1] ;
+ chem_ddb_path [1] := subpath (0.6, 1) of chem_sb_path[1] ;
+ chem_dbl_path [1] := chem_sb_path[1] paralleled -1chem_dbl_offset[1] ;
+ chem_dbr_path [1] := chem_sb_path[1] paralleled +1chem_dbl_offset[1] ;
+ chem_tbl_path [1] := chem_sb_path[1] paralleled -2chem_dbl_offset[1] ;
+ chem_tbr_path [1] := chem_sb_path[1] paralleled +2chem_dbl_offset[1] ;
+ chem_ldt_path [1] := chem_ddt_path[1] paralleled -2chem_dbl_offset[1] ;
+ chem_ldb_path [1] := chem_ddb_path[1] paralleled -2chem_dbl_offset[1] ;
+ chem_rdt_path [1] := chem_ddt_path[1] paralleled +2chem_dbl_offset[1] ;
+ chem_rdb_path [1] := chem_ddb_path[1] paralleled +2chem_dbl_offset[1] ;
+ chem_n_zero [1] := center chem_b_path[1] ;
+ chem_ztt_zero [1] := chem_n_zero[1] rotated +.5chem_angle[1] ;
+ chem_zbt_zero [1] := chem_n_zero[1] rotated -.5chem_angle[1] ;
save pr ; pair pr[] ;
- pr0 := point 0 of chem_r_path[1] ;
- pr1 := point 1 of chem_r_path[1] ;
- chem_bb_path [1] := pr0 -- (pr1 rotatedaround(pr0,-chem_bb_angle)) -- (pr1 rotatedaround(pr0,chem_bb_angle)) -- cycle ;
+ pr0 := point 0 of chem_sb_path[1] ;
+ pr1 := point 1 of chem_sb_path[1] ;
+ chem_bb_path [1] := pr0 -- (pr1 rotatedaround(pr0,-chem_bb_angle)) --
+ (pr1 rotatedaround(pr0,+chem_bb_angle)) -- cycle ;
chem_oe_path [1] := ((-20,0)--(10,0){up}..(20,10)..(30,0)..(40,-10)..(50.0,0)..(60,10)..(70,0)..(80,-10)..{up}(90,0)--(120,0))
- xsized (.75*chem_width[1]) shifted pr0 ;
- chem_rt_zero [1] := point .5 of chem_r_path[1] ;
- chem_rtt_zero [1] := chem_rt_zero[1] rotated + (chem_angle[1]/2) ;
- chem_rbt_zero [1] := chem_rt_zero[1] rotated - (chem_angle[1]/2) ;
- % added by Alan Braslau (adapted to use shared variables):
+ xsized (abs(pr1-pr0)) shifted pr0 ;
save p ; pair p[] ;
p0 := pr1 rotatedaround(pr0, -chem_bd_angle) ;
p1 := pr1 rotatedaround(pr0, +chem_bd_angle) ;
- p2 := p0 shifted - pr1 ;
- p3 := p1 shifted - pr1 ;
+ p2 := p0 shifted -pr1 ;
+ p3 := p1 shifted -pr1 ;
chem_bd_path [1] :=
p0 -- p1 for i=chem_bd_n downto 0 :
- -- p2 shifted (i/chem_bd_n)[pr1,pr0]
- -- p3 shifted (i/chem_bd_n)[pr1,pr0]
+ -- p2 shifted (i/chem_bd_n)[pr1,pr0]
+ -- p3 shifted (i/chem_bd_n)[pr1,pr0]
endfor ;
chem_bw_path [1] :=
for i=0 upto chem_bw_n - 1 :
- ((i) /chem_bw_n)[pr0,pr1] .. ((i+.25)/chem_bw_n)[pr0,pr1] shifted p2 ..
+ ((i) /chem_bw_n)[pr0,pr1] .. ((i+.25)/chem_bw_n)[pr0,pr1] shifted +p2 ..
((i+.50)/chem_bw_n)[pr0,pr1] .. ((i+.75)/chem_bw_n)[pr0,pr1] shifted -p2 ..
endfor pr1 ;
enddef ;
diff --git a/metapost/context/base/mp-core.mpiv b/metapost/context/base/mp-core.mpiv
index 1934e3040..3dba4a004 100644
--- a/metapost/context/base/mp-core.mpiv
+++ b/metapost/context/base/mp-core.mpiv
@@ -5,7 +5,7 @@
%D subtitle=background macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
diff --git a/metapost/context/base/mp-crop.mpiv b/metapost/context/base/mp-crop.mpiv
index 6360757e5..00bcdcb44 100644
--- a/metapost/context/base/mp-crop.mpiv
+++ b/metapost/context/base/mp-crop.mpiv
@@ -5,7 +5,7 @@
%D subtitle=Cropmarks,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
@@ -15,20 +15,20 @@ if known context_crop : endinput ; fi ;
boolean context_crop ; context_crop := true ;
-vardef crop_marks_lines (expr box, length, offset, nx, ny) =
- save p ; picture p ; save w, h, x, y ; numeric w, h, x, y ;
+vardef crop_marks_lines (expr box, len, offset, nx, ny) =
+ save p ; picture p ; save w, h, x, y ; numeric w, h, x, y ;
p := image (
x := if nx = 0 : 1 else : nx - 1 fi ;
y := if ny = 0 : 1 else : ny - 1 fi ;
w := bbwidth (box) / x ;
h := bbheight(box) / y ;
for i=0 upto y :
- draw ((llcorner box) -- (llcorner box) shifted (-length,0)) shifted (-offset,i*h) ;
- draw ((lrcorner box) -- (lrcorner box) shifted ( length,0)) shifted ( offset,i*h) ;
+ draw ((llcorner box) -- (llcorner box) shifted (-len,0)) shifted (-offset,i*h) ;
+ draw ((lrcorner box) -- (lrcorner box) shifted ( len,0)) shifted ( offset,i*h) ;
endfor ;
for i=0 upto x :
- draw ((llcorner box) -- (llcorner box) shifted (0,-length)) shifted (i*w,-offset) ;
- draw ((ulcorner box) -- (ulcorner box) shifted (0, length)) shifted (i*w, offset) ;
+ draw ((llcorner box) -- (llcorner box) shifted (0,-len)) shifted (i*w,-offset) ;
+ draw ((ulcorner box) -- (ulcorner box) shifted (0, len)) shifted (i*w, offset) ;
endfor ;
) ;
setbounds p to box ;
diff --git a/metapost/context/base/mp-figs.mpiv b/metapost/context/base/mp-figs.mpiv
index c65808f6d..aac7c5ad2 100644
--- a/metapost/context/base/mp-figs.mpiv
+++ b/metapost/context/base/mp-figs.mpiv
@@ -5,7 +5,7 @@
%D subtitle=figures,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/metapost/context/base/mp-form.mpiv b/metapost/context/base/mp-form.mpiv
index d0519b4f2..b58792e1a 100644
--- a/metapost/context/base/mp-form.mpiv
+++ b/metapost/context/base/mp-form.mpiv
@@ -5,7 +5,7 @@
%D subtitle=form support,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/metapost/context/base/mp-func.mpiv b/metapost/context/base/mp-func.mpiv
index 1d289b606..58df711f2 100644
--- a/metapost/context/base/mp-func.mpiv
+++ b/metapost/context/base/mp-func.mpiv
@@ -5,7 +5,7 @@
%D subtitle=function hacks,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/metapost/context/base/mp-grap.mpiv b/metapost/context/base/mp-grap.mpiv
new file mode 100644
index 000000000..757413a1c
--- /dev/null
+++ b/metapost/context/base/mp-grap.mpiv
@@ -0,0 +1,204 @@
+%D \module
+%D [ file=mp-grap.mpiv,
+%D version=2012.10.16, % 2008.09.08 and earlier,
+%D title=\CONTEXT\ \METAPOST\ graphics,
+%D        subtitle=graph package support,
+%D author=Hans Hagen \& Alan Braslau,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
+%C details.
+
+if known context_grap : endinput ; fi ;
+
+boolean context_grap ; context_grap := true ;
+
+input graph.mp ;
+
+vardef roundd(expr x, d) =
+ if abs d > 4 :
+ if d > 0 :
+ x
+ else :
+ 0
+ fi
+ elseif d > 0 :
+ save i ; i = floor x ;
+ i + round(Ten_to[d]*(x-i))/Ten_to[d]
+ else :
+ round(x/Ten_to[-d])*Ten_to[-d]
+ fi
+enddef ;
+
+Ten_to0 = 1 ;
+Ten_to1 = 10 ;
+Ten_to2 = 100 ;
+Ten_to3 = 1000 ;
+Ten_to4 = 10000 ;
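+
+% Some examples of the intended behaviour of roundd (round to d decimals, a
+% negative d rounds to multiples of powers of ten):
+%
+% roundd(3.14159,2)  ->  3.14
+% roundd(1234,-2)    ->  1200
+% roundd(x,5)        ->  x   (more than 4 decimals: left untouched)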
+
+def sFe_base = enddef ;
+
+if unknown Fe_plus :
+ picture Fe_plus ; Fe_plus := textext("+") ; % btex + etex ;
+fi ;
+
+vardef format (expr f,x) = dofmt_.Feform_(f,x) enddef ;
+vardef Mformat (expr f,x) = dofmt_.Meform (f,x) enddef ;
+vardef formatstr (expr f,x) = dofmt_.Feform_(f,x) enddef ;
+vardef Mformatstr(expr f,x) = dofmt_.Meform(f,x) enddef ;
+
+vardef escaped_format(expr s) =
+ "" for n=1 upto length(s) : &
+ if ASCII substring (n,n+1) of s = 37 :
+ "@"
+ else :
+ substring (n,n+1) of s
+ fi
+ endfor
+enddef ;
+
+vardef dofmt_@#(expr f, x) =
+ textext("\MPgraphformat{" & escaped_format(f) & "}{" & (if string x : x else: decimal x fi) & "}")
+ % textext(mfun_format_number(escaped_format(f),x))
+enddef ;
+
+% We redefine autogrid from graph.mp so that the X and Y labels can be given
+% different formats (see the usage sketch after the definition). Autoform is
+% defined in graph.mp (by default "%g").
+%
+% string Autoform_X ; Autoform_X := "@.0e" ;
+% string Autoform_Y ; Autoform_Y := "@.0e" ;
+
+vardef autogrid(suffix tx, ty) text w =
+ Gneedgr_ := false ;
+ if str tx <> "" :
+ for x=auto.x :
+ tx (
+ if string Autoform_X :
+ if Autoform_X <> "" :
+ Autoform_X
+ else :
+ Autoform
+ fi
+ else :
+ Autoform
+ fi,
+ x) w ;
+ endfor
+ fi ;
+ if str ty <> "" :
+ for y=auto.y :
+ ty (
+ if string Autoform_Y :
+ if Autoform_Y <> "" :
+ Autoform_Y
+ else :
+ Autoform
+ fi
+ else :
+ Autoform
+ fi,
+ y) w ;
+ endfor
+ fi ;
+enddef ;
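+
+% A usage sketch along the lines of the graph.mp manual (the data file name
+% is just an example):
+%
+% string Autoform_X ; Autoform_X := "@.0e" ;
+% draw begingraph(8cm,6cm) ;
+%   gdraw "mydata.dat" ;
+%   autogrid(otick.bot,otick.lft) ;
+% endgraph ;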
+
+% A couple of extensions:
+
+% Define a vector of symbol paths (grap_sym) and a plotsymbol function that
+% returns a picture: 10 different shapes, drawn as an unfilled outline with the
+% interior filled with a shade of the background. Thus, overlapping points on a
+% plot remain clearly distinguishable.
+
+% grap_symsize := fontsize defaultfont ; % can be redefined
+%
+% dynamic version:
+
+vardef grap_symsize =
+ % fontsize defaultfont
+ % .8ExHeight
+ .35BodyFontSize
+enddef ;
+
+path grap_sym[] ; % (internal) symbol path
+
+grap_sym[0] := (0,0) ; % point
+grap_sym[1] := fullcircle ; % circle
+grap_sym[2] := (up -- down) scaled .5 ; % vertical bar
+
+for i = 3 upto 9 : % polygons
+ grap_sym[i] := for j = 0 upto i-1 :
+ (up scaled .5) rotated (j*360/i) --
+ endfor cycle ;
+endfor
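+
+% (so grap_sym[3] is a triangle, grap_sym[4] a diamond, grap_sym[5] a pentagon,
+% and so on up to grap_sym[9])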
+
+grap_sym[12] := grap_sym[2] rotated +90 ; % horizontal line
+grap_sym[22] := grap_sym[2] rotated +45 ; % backslash
+grap_sym[32] := grap_sym[2] rotated -45 ; % slash
+grap_sym[13] := grap_sym[3] rotated 180 ; % down triangle
+grap_sym[23] := grap_sym[3] rotated -90 ; % right triangle
+grap_sym[33] := grap_sym[3] rotated +90 ; % left triangle
+grap_sym[14] := grap_sym[4] rotated +45 ; % square
+grap_sym[15] := grap_sym[5] rotated 180 ; % down pentagon
+grap_sym[16] := grap_sym[6] rotated +90 ; % turned hexagon
+grap_sym[17] := grap_sym[7] rotated 180 ;
+grap_sym[18] := grap_sym[8] rotated +22.5 ;
+
+numeric l ;
+
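+% grap_sym[25] up to grap_sym[29] : star variants of the polygons above; the
+% extra points are the intersections of the vertex-skipping chords, so
+% grap_sym[25] comes out as a pentagram.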
+for j = 5 upto 9 :
+ l := length(grap_sym[j]) ;
+ pair p[] ;
+ for i = 0 upto l :
+ p[i] = whatever [point i of grap_sym[j],
+ point (i+2 mod l) of grap_sym[j]] ;
+ p[i] = whatever [point (i+1 mod l) of grap_sym[j],
+ point (i+l-1 mod l) of grap_sym[j]] ;
+ endfor
+ grap_sym[20+j] := for i = 0 upto l : point i of grap_sym[j]--p[i]--endfor cycle ;
+endfor
+
+path s ; s := grap_sym[4] ;
+path q ; q := s scaled .25 ;
+numeric l ; l := length(s) ;
+
+pair p[] ;
+
+grap_sym[24] := for i = 0 upto l-1 :
+ hide(
+ p[i] = whatever [point i of s, point (i+1 mod l) of s] ;
+ p[i] = whatever [point i of q, point (i-1+l mod l) of q] ;
+ p[i+l] = whatever [point i of s, point (i+1 mod l) of s] ;
+ p[i+l] = whatever [point i+1 of q, point (i+2 mod l) of q] ;
+ )
+ point i of q -- p[i] -- p[i+l] --
+endfor cycle ;
+
+grap_sym[34] := grap_sym[24] rotated 45 ;
+
+ % usage: gdraw p plot plotsymbol(1,red,1) ; % a filled red circle
+ % usage: gdraw p plot plotsymbol(4,blue,0) ; % a blue square
+ % usage: gdraw p plot plotsymbol(14,green,0.5) ; % a 50% filled green diamond
+
+def plotsymbol(expr n,c,f) = % (number,color,color|number)
+ if known grap_sym[n] :
+ image(
+ path p ; p := grap_sym[n] scaled grap_symsize ;
+ undraw p withpen currentpen scaled 2 ;
+ if cycle p : fill p withcolor
+ if color f and known f :
+ f
+ elseif numeric f and known f and color c and known c :
+ f[background,c]
+ elseif numeric f and known f :
+ f[background,black]
+ else :
+ background
+ fi ;
+ fi
+ draw p if color c and known c : withcolor c fi ;
+ )
+ else :
+ nullpicture
+ fi
+enddef ;
diff --git a/metapost/context/base/mp-grid.mpiv b/metapost/context/base/mp-grid.mpiv
index cc5c2b76e..b9243b1b9 100644
--- a/metapost/context/base/mp-grid.mpiv
+++ b/metapost/context/base/mp-grid.mpiv
@@ -5,7 +5,7 @@
%D subtitle=grid support,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/metapost/context/base/mp-grph.mpiv b/metapost/context/base/mp-grph.mpiv
index a8868033b..30c49e6e0 100644
--- a/metapost/context/base/mp-grph.mpiv
+++ b/metapost/context/base/mp-grph.mpiv
@@ -5,7 +5,7 @@
%D subtitle=graphic text support,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/metapost/context/base/mp-mlib.mpiv b/metapost/context/base/mp-mlib.mpiv
index 71985cef8..b8fabbfb9 100644
--- a/metapost/context/base/mp-mlib.mpiv
+++ b/metapost/context/base/mp-mlib.mpiv
@@ -5,7 +5,7 @@
%D subtitle=plugins,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
@@ -255,7 +255,7 @@ vardef thetextext@#(expr p,z) =
fi
enddef ;
-vardef textext@#(expr p) = % no draw hers
+vardef textext@#(expr p) = % no draw here
thetextext@#(p,origin)
enddef ;
@@ -614,3 +614,28 @@ primarydef t asgroup s = % s = isolated|knockout
wrappedpicture
endgroup
enddef ;
+
+% Also experimental
+
+string mfun_auto_align[] ;
+
+mfun_auto_align[0] := "rt" ;
+mfun_auto_align[1] := "urt" ;
+mfun_auto_align[2] := "top" ;
+mfun_auto_align[3] := "ulft" ;
+mfun_auto_align[4] := "lft" ;
+mfun_auto_align[5] := "llft" ;
+mfun_auto_align[6] := "bot" ;
+mfun_auto_align[7] := "lrt" ;
+mfun_auto_align[8] := "rt" ;
+
+def autoalign(expr n) =
+ scantokens mfun_auto_align[round((n mod 360)/45)]
+enddef ;
+
+% draw textext.autoalign(60) ("\strut oeps 1") ;
+% draw textext.autoalign(160)("\strut oeps 2") ;
+% draw textext.autoalign(260)("\strut oeps 3") ;
+% draw textext.autoalign(360)("\strut oeps 4") ;
+
+
diff --git a/metapost/context/base/mp-page.mpiv b/metapost/context/base/mp-page.mpiv
index 96f617257..c8e3c6237 100644
--- a/metapost/context/base/mp-page.mpiv
+++ b/metapost/context/base/mp-page.mpiv
@@ -5,7 +5,7 @@
%D subtitle=page enhancements,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
diff --git a/metapost/context/base/mp-shap.mpiv b/metapost/context/base/mp-shap.mpiv
index b62e636d5..713656510 100644
--- a/metapost/context/base/mp-shap.mpiv
+++ b/metapost/context/base/mp-shap.mpiv
@@ -5,7 +5,7 @@
%D subtitle=shapes,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/metapost/context/base/mp-step.mpiv b/metapost/context/base/mp-step.mpiv
index 654ef443d..f7a7ba5de 100644
--- a/metapost/context/base/mp-step.mpiv
+++ b/metapost/context/base/mp-step.mpiv
@@ -5,7 +5,7 @@
%D subtitle=steps,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/metapost/context/base/mp-text.mpiv b/metapost/context/base/mp-text.mpiv
index d5630f68e..b68e8412a 100644
--- a/metapost/context/base/mp-text.mpiv
+++ b/metapost/context/base/mp-text.mpiv
@@ -5,7 +5,7 @@
%D subtitle=text support,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
diff --git a/metapost/context/base/mp-tool.mpiv b/metapost/context/base/mp-tool.mpiv
index 764863b65..cdf0e02a9 100644
--- a/metapost/context/base/mp-tool.mpiv
+++ b/metapost/context/base/mp-tool.mpiv
@@ -5,12 +5,14 @@
%D subtitle=auxiliary macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% def loadfile(expr name) = scantokens("input " & name & ";") enddef ;
+
if known context_tool : endinput ; fi ;
boolean context_tool ; context_tool := true ;
diff --git a/scripts/context/lua/mtx-context.lua b/scripts/context/lua/mtx-context.lua
index e07ecdfc7..f33363a0d 100644
--- a/scripts/context/lua/mtx-context.lua
+++ b/scripts/context/lua/mtx-context.lua
@@ -6,11 +6,18 @@ if not modules then modules = { } end modules ['mtx-context'] = {
license = "see context related readme files"
}
+-- todo: more local functions
+-- todo: pass jobticket/ctxdata table around
+
local format, gmatch, match, gsub, find = string.format, string.gmatch, string.match, string.gsub, string.find
-local quote = string.quote
+local quote, validstring = string.quote, string.valid
local concat = table.concat
+local settings_to_array = utilities.parsers.settings_to_array
+local appendtable = table.append
+local lpegpatterns, lpegmatch, Cs, P = lpeg.patterns, lpeg.match, lpeg.Cs, lpeg.P
-local getargument = environment.argument
+local getargument = environment.getargument or environment.argument
+local setargument = environment.setargument
local basicinfo = [[
--run process (one or more) files (default action)
@@ -35,8 +42,10 @@ local basicinfo = [[
--noconsole disable logging to the console (logfile only)
--purgeresult purge result file before run
---forcexml force xml stub (optional flag: --mkii)
+--forcexml force xml stub
--forcecld force cld (context lua document) stub
+--forcelua force lua stub (like texlua)
+--forcemp force mp stub
--arrange run extra imposition pass, given that the style sets up imposition
--noarrange ignore imposition specifications in the style
@@ -50,23 +59,16 @@ local basicinfo = [[
--version report installed context version
--global assume given file present elsewhere
+--nofile use dummy file as jobname
--expert expert options
]]
--- filter=list is kind of obsolete
--- color is obsolete for mkiv, always on
--- separation is obsolete for mkiv, no longer available
--- output is currently obsolete for mkiv
--- setuppath=list must check
--- modefile=name must check
--- input=name load the given inputfile (must check)
-
local expertinfo = [[
expert options:
--touch update context version number (remake needed afterwards, also provide --expert)
---nostats omit runtime statistics at the end of the run
+--nostatistics omit runtime statistics at the end of the run
--update update context from website (not to be confused with contextgarden)
--profile profile job (use: mtxrun --script profile --analyze)
--timing generate timing and statistics overview
@@ -80,13 +82,14 @@ special options:
--pdftex process file with texexec using pdftex
--xetex process file with texexec using xetex
+--mkii process file with texexec
--pipe don't check for file and enter scroll mode (--dummyfile=whatever.tmp)
]]
local application = logs.application {
name = "mtx-context",
- banner = "ConTeXt Process Management 0.52",
+ banner = "ConTeXt Process Management 0.60",
helpinfo = {
basic = basicinfo,
extra = extrainfo,
@@ -94,159 +97,105 @@ local application = logs.application {
}
}
+-- local luatexflags = {
+-- ["8bit"] = true, -- ignored, input is assumed to be in UTF-8 encoding
+-- ["default-translate-file"] = true, -- ignored, input is assumed to be in UTF-8 encoding
+-- ["translate-file"] = true, -- ignored, input is assumed to be in UTF-8 encoding
+-- ["etex"] = true, -- ignored, the etex extensions are always active
+--
+-- ["credits"] = true, -- display credits and exit
+-- ["debug-format"] = true, -- enable format debugging
+-- ["disable-write18"] = true, -- disable \write18{SHELL COMMAND}
+-- ["draftmode"] = true, -- switch on draft mode (generates no output PDF)
+-- ["enable-write18"] = true, -- enable \write18{SHELL COMMAND}
+-- ["file-line-error"] = true, -- enable file:line:error style messages
+-- ["file-line-error-style"] = true, -- aliases of --file-line-error
+-- ["no-file-line-error"] = true, -- disable file:line:error style messages
+-- ["no-file-line-error-style"] = true, -- aliases of --no-file-line-error
+-- ["fmt"] = true, -- load the format file FORMAT
+-- ["halt-on-error"] = true, -- stop processing at the first error
+-- ["help"] = true, -- display help and exit
+-- ["ini"] = true, -- be iniluatex, for dumping formats
+-- ["interaction"] = true, -- set interaction mode (STRING=batchmode/nonstopmode/scrollmode/errorstopmode)
+-- ["jobname"] = true, -- set the job name to STRING
+-- ["kpathsea-debug"] = true, -- set path searching debugging flags according to the bits of NUMBER
+-- ["lua"] = true, -- load and execute a lua initialization script
+-- ["mktex"] = true, -- enable mktexFMT generation (FMT=tex/tfm)
+-- ["no-mktex"] = true, -- disable mktexFMT generation (FMT=tex/tfm)
+-- ["nosocket"] = true, -- disable the lua socket library
+-- ["output-comment"] = true, -- use STRING for DVI file comment instead of date (no effect for PDF)
+-- ["output-directory"] = true, -- use existing DIR as the directory to write files in
+-- ["output-format"] = true, -- use FORMAT for job output; FORMAT is 'dvi' or 'pdf'
+-- ["parse-first-line"] = true, -- enable parsing of the first line of the input file
+-- ["no-parse-first-line"] = true, -- disable parsing of the first line of the input file
+-- ["progname"] = true, -- set the program name to STRING
+-- ["recorder"] = true, -- enable filename recorder
+-- ["safer"] = true, -- disable easily exploitable lua commands
+-- ["shell-escape"] = true, -- enable \write18{SHELL COMMAND}
+-- ["no-shell-escape"] = true, -- disable \write18{SHELL COMMAND}
+-- ["shell-restricted"] = true, -- restrict \write18 to a list of commands given in texmf.cnf
+-- ["synctex"] = true, -- enable synctex
+-- ["version"] = true, -- display version and exit
+-- ["luaonly"] = true, -- run a lua file, then exit
+-- ["luaconly"] = true, -- byte-compile a lua file, then exit
+-- }
+
local report = application.report
scripts = scripts or { }
scripts.context = scripts.context or { }
--- a demo cld file:
---
--- context.starttext()
--- context.chapter("Hello There")
--- context.readfile("tufte","","not found")
--- context.stoptext()
-
--- l-file / todo
+-- constants
-function file.needsupdate(oldfile,newfile)
- return true
-end
-function file.syncmtimes(oldfile,newfile)
-end
+local usedfiles = {
+ nop = "cont-nop.mkiv",
+ yes = "cont-yes.mkiv",
+}
--- l-io
+local usedsuffixes = {
+ before = {
+ "tuc"
+ },
+ after = {
+ "pdf", "tuc", "log"
+ },
+ keep = {
+ "log"
+ },
+}
-function io.copydata(fromfile,tofile)
- io.savedata(tofile,io.loaddata(fromfile) or "")
-end
+local formatofinterface = {
+ en = "cont-en",
+ uk = "cont-uk",
+ de = "cont-de",
+ fr = "cont-fr",
+ nl = "cont-nl",
+ cs = "cont-cs",
+ it = "cont-it",
+ ro = "cont-ro",
+ pe = "cont-pe",
+}
--- ctx (will become util-ctx)
-
-local ctxrunner = { }
-
-function ctxrunner.filtered(str,method)
- str = tostring(str)
- if method == 'name' then str = file.removesuffix(file.basename(str))
- elseif method == 'path' then str = file.dirname(str)
- elseif method == 'suffix' then str = file.extname(str)
- elseif method == 'nosuffix' then str = file.removesuffix(str)
- elseif method == 'nopath' then str = file.basename(str)
- elseif method == 'base' then str = file.basename(str)
--- elseif method == 'full' then
--- elseif method == 'complete' then
--- elseif method == 'expand' then -- str = file.expandpath(str)
- end
- return str:gsub("\\","/")
-end
+local defaultformats = {
+ "cont-en",
+ "cont-nl",
+}
-function ctxrunner.substitute(e,str)
- local attributes = e.at
- if str and attributes then
- if attributes['method'] then
- str = ctxrunner.filtered(str,attributes['method'])
- end
- if str == "" and attributes['default'] then
- str = attributes['default']
- end
- end
- return str
-end
+-- process information
-function ctxrunner.reflag(flags)
- local t = { }
- for _, flag in next, flags do
- local key, value = match(flag,"^(.-)=(.+)$")
- if key and value then
- t[key] = value
- else
- t[flag] = true
- end
- end
- return t
-end
+local ctxrunner = { } -- namespace will go
-function ctxrunner.substitute(str)
- return str
-end
-
-function ctxrunner.justtext(str)
- str = xml.unescaped(tostring(str))
- str = xml.cleansed(str)
- str = str:gsub("\\+",'/')
- str = str:gsub("%s+",' ')
- return str
-end
+local ctx_locations = { '..', '../..' }
function ctxrunner.new()
return {
- ctxname = "",
- jobname = "",
- xmldata = nil,
- suffix = "prep",
- locations = { '..', '../..' },
- variables = { },
- messages = { },
- environments = { },
- modules = { },
- filters = { },
- flags = { },
- modes = { },
- prepfiles = { },
- paths = { },
+ ctxname = "",
+ jobname = "",
+ flags = { },
}
end
-function ctxrunner.savelog(ctxdata,ctlname)
- local function yn(b)
- if b then return 'yes' else return 'no' end
- end
- if not ctlname or ctlname == "" or ctlname == ctxdata.jobname then
- if ctxdata.jobname then
- ctlname = file.replacesuffix(ctxdata.jobname,'ctl')
- elseif ctxdata.ctxname then
- ctlname = file.replacesuffix(ctxdata.ctxname,'ctl')
- else
- report("invalid ctl name: %s",ctlname or "?")
- return
- end
- end
- local prepfiles = ctxdata.prepfiles
- if prepfiles and next(prepfiles) then
- report("saving logdata in: %s",ctlname)
- f = io.open(ctlname,'w')
- if f then
- f:write("\n\n")
- f:write(format("\n",yn(ctxdata.runlocal)))
- local sorted = table.sortedkeys(prepfiles)
- for i=1,#sorted do
- local name = sorted[i]
- f:write(format("\t%s\n",yn(prepfiles[name]),name))
- end
- f:write("\n")
- f:close()
- end
- else
- report("nothing prepared, no ctl file saved")
- os.remove(ctlname)
- end
-end
-
-function ctxrunner.register_path(ctxdata,path)
- -- test if exists
- ctxdata.paths[ctxdata.paths+1] = path
-end
-
-function ctxrunner.trace(ctxdata)
- print(table.serialize(ctxdata.messages))
- print(table.serialize(ctxdata.flags))
- print(table.serialize(ctxdata.environments))
- print(table.serialize(ctxdata.modules))
- print(table.serialize(ctxdata.filters))
- print(table.serialize(ctxdata.modes))
- print(xml.tostring(ctxdata.xmldata))
-end
-
-function ctxrunner.manipulate(ctxdata,ctxname,defaultname)
+function ctxrunner.checkfile(ctxdata,ctxname,defaultname)
if not ctxdata.jobname or ctxdata.jobname == "" then
return
@@ -269,13 +218,14 @@ function ctxrunner.manipulate(ctxdata,ctxname,defaultname)
local usedname = ctxdata.ctxname
local found = lfs.isfile(usedname)
- -- no futher test if qualified path
+ -- no further test if qualified path
if not found then
- for _, path in next, ctxdata.locations do
+ for _, path in next, ctx_locations do
local fullname = file.join(path,ctxdata.ctxname)
if lfs.isfile(fullname) then
- usedname, found = fullname, true
+ usedname = fullname
+ found = true
break
end
end
@@ -283,194 +233,70 @@ function ctxrunner.manipulate(ctxdata,ctxname,defaultname)
if not found then
usedname = resolvers.findfile(ctxdata.ctxname,"tex")
- found = usedname ~= ""
+ found = usedname ~= ""
end
if not found and defaultname and defaultname ~= "" and lfs.isfile(defaultname) then
- usedname, found = defaultname, true
+ usedname = defaultname
+ found = true
end
if not found then
return
end
- ctxdata.xmldata = xml.load(usedname)
+ local xmldata = xml.load(usedname)
- if not ctxdata.xmldata then
+ if not xmldata then
return
else
-- test for valid, can be text file
end
- xml.include(ctxdata.xmldata,'ctx:include','name', table.append({'.', file.dirname(ctxdata.ctxname)},ctxdata.locations))
-
- ctxdata.variables['job'] = ctxdata.jobname
-
- ctxdata.flags = xml.collect_texts(ctxdata.xmldata,"/ctx:job/ctx:flags/ctx:flag",true)
- ctxdata.environments = xml.collect_texts(ctxdata.xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:environment",true)
- ctxdata.modules = xml.collect_texts(ctxdata.xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:module",true)
- ctxdata.filters = xml.collect_texts(ctxdata.xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:filter",true)
- ctxdata.modes = xml.collect_texts(ctxdata.xmldata,"/ctx:job/ctx:process/ctx:resources/ctx:mode",true)
- ctxdata.messages = xml.collect_texts(ctxdata.xmldata,"ctx:message",true)
-
- ctxdata.flags = ctxrunner.reflag(ctxdata.flags)
-
- local messages = ctxdata.messages
- for i=1,#messages do
- report("ctx comment: %s", xml.tostring(messages[i]))
- end
-
- for r, d, k in xml.elements(ctxdata.xmldata,"ctx:value[@name='job']") do
- d[k] = ctxdata.variables['job'] or ""
- end
-
- local commands = { }
- for e in xml.collected(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:processors/ctx:processor") do
- commands[e.at and e.at['name'] or "unknown"] = e
- end
-
- local suffix = xml.filter(ctxdata.xmldata,"/ctx:job/ctx:preprocess/attribute('suffix')") or ctxdata.suffix
- local runlocal = xml.filter(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:processors/attribute('local')")
-
- runlocal = toboolean(runlocal)
-
- for files in xml.collected(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:files") do
- for pattern in xml.collected(files,"ctx:file") do
+ local ctxpaths = table.append({'.', file.dirname(ctxdata.ctxname)}, ctx_locations)
- preprocessor = pattern.at['processor'] or ""
+ xml.include(xmldata,'ctx:include','name', ctxpaths)
- if preprocessor ~= "" then
+ local flags = ctxdata.flags
- ctxdata.variables['old'] = ctxdata.jobname
- for r, d, k in xml.elements(ctxdata.xmldata,"ctx:value") do
- local ek = d[k]
- local ekat = ek.at['name']
- if ekat == 'old' then
- d[k] = ctxrunner.substitute(ctxdata.variables[ekat] or "")
- end
- end
-
- pattern = ctxrunner.justtext(xml.tostring(pattern))
-
- local oldfiles = dir.glob(pattern)
-
- local pluspath = false
- if #oldfiles == 0 then
- -- message: no files match pattern
- local paths = ctxdata.paths
- for i=1,#paths do
- local p = paths[i]
- local oldfiles = dir.glob(path.join(p,pattern))
- if #oldfiles > 0 then
- pluspath = true
- break
- end
- end
- end
- if #oldfiles == 0 then
- -- message: no old files
- else
- for i=1,#oldfiles do
- local oldfile = oldfiles[i]
- local newfile = oldfile .. "." .. suffix -- addsuffix will add one only
- if ctxdata.runlocal then
- newfile = file.basename(newfile)
- end
- if oldfile ~= newfile and file.needsupdate(oldfile,newfile) then
- -- message: oldfile needs preprocessing
- -- os.remove(newfile)
- local splitted = preprocessor:split(',')
- for i=1,#splitted do
- local pp = splitted[i]
- local command = commands[pp]
- if command then
- command = xml.copy(command)
- local suf = (command.at and command.at['suffix']) or ctxdata.suffix
- if suf then
- newfile = oldfile .. "." .. suf
- end
- if ctxdata.runlocal then
- newfile = file.basename(newfile)
- end
- for r, d, k in xml.elements(command,"ctx:old") do
- d[k] = ctxrunner.substitute(oldfile)
- end
- for r, d, k in xml.elements(command,"ctx:new") do
- d[k] = ctxrunner.substitute(newfile)
- end
- ctxdata.variables['old'] = oldfile
- ctxdata.variables['new'] = newfile
- for r, d, k in xml.elements(command,"ctx:value") do
- local ek = d[k]
- local ekat = ek.at and ek.at['name']
- if ekat then
- d[k] = ctxrunner.substitute(ctxdata.variables[ekat] or "")
- end
- end
- -- potential optimization: when mtxrun run internal
- command = xml.content(command)
- command = ctxrunner.justtext(command)
- report("command: %s",command)
- local result = os.spawn(command) or 0
- -- somehow we get the wrong return value
- if result > 0 then
- report("error, return code: %s",result)
- end
- if ctxdata.runlocal then
- oldfile = file.basename(oldfile)
- end
- end
- end
- if lfs.isfile(newfile) then
- file.syncmtimes(oldfile,newfile)
- ctxdata.prepfiles[oldfile] = true
- else
- report("error, check target location of new file: %s", newfile)
- ctxdata.prepfiles[oldfile] = false
- end
- else
- report("old file needs no preprocessing")
- ctxdata.prepfiles[oldfile] = lfs.isfile(newfile)
- end
- end
- end
- end
+ for e in xml.collected(xmldata,"/ctx:job/ctx:flags/ctx:flag") do
+ local flag = xml.text(e) or ""
+ local key, value = match(flag,"^(.-)=(.+)$")
+ if key and value then
+ flags[key] = value
+ else
+ flags[flag] = true
end
end
- ctxrunner.savelog(ctxdata)
-
end
-function ctxrunner.preppedfile(ctxdata,filename)
- if ctxdata.prepfiles[file.basename(filename)] then
- return filename .. ".prep"
- else
- return filename
+function ctxrunner.checkflags(ctxdata)
+ if ctxdata then
+ for k,v in next, ctxdata.flags do
+ if getargument(k) == nil then
+ setargument(k,v)
+ end
+ end
end
end
--- rest
+-- multipass control
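+--
+-- The multipass files (nowadays just the tuc file) are fingerprinted with md5
+-- before and after a run; as long as the fingerprint changes another run is
+-- needed, with multipass_nofruns as a safety limit.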
-scripts.context.multipass = {
--- suffixes = { ".tuo", ".tuc" },
- suffixes = { ".tuc" },
- nofruns = 8,
--- nofruns = 7, -- test oscillation
-}
+local multipass_suffixes = { ".tuc" }
+local multipass_nofruns = 8 -- or 7 to test oscillation
-function scripts.context.multipass.hashfiles(jobname)
+local function multipass_hashfiles(jobname)
local hash = { }
- local suffixes = scripts.context.multipass.suffixes
- for i=1,#suffixes do
- local suffix = suffixes[i]
+ for i=1,#multipass_suffixes do
+ local suffix = multipass_suffixes[i]
local full = jobname .. suffix
hash[full] = md5.hex(io.loaddata(full) or "unknown")
end
return hash
end
-function scripts.context.multipass.changed(oldhash, newhash)
+local function multipass_changed(oldhash, newhash)
for k,v in next, oldhash do
if v ~= newhash[k] then
return true
@@ -479,126 +305,7 @@ function scripts.context.multipass.changed(oldhash, newhash)
return false
end
-function scripts.context.multipass.makeoptionfile(jobname,ctxdata,kindofrun,currentrun,finalrun,once)
- -- take jobname from ctx
- jobname = file.removesuffix(jobname)
- local f = io.open(jobname..".top","w")
- if f then
- local function someflag(flag)
- return (ctxdata and ctxdata.flags[flag]) or getargument(flag)
- end
- local function setvalue(flag,template,hash,default)
- local a = someflag(flag) or default
- if a and a ~= "" then
- if hash then
- if hash[a] then
- f:write(format(template,a),"\n")
- end
- else
- f:write(format(template,a),"\n")
- end
- end
- end
- local function setvalues(flag,template,plural)
- if type(flag) == "table" then
- for k, v in next, flag do
- f:write(format(template,v),"\n")
- end
- else
- local a = someflag(flag) or (plural and someflag(flag.."s"))
- if a and a ~= "" then
- for v in gmatch(a,"%s*([^,]+)") do
- f:write(format(template,v),"\n")
- end
- end
- end
- end
- local function setfixed(flag,template,...)
- if someflag(flag) then
- f:write(format(template,...),"\n")
- end
- end
- local function setalways(template,...)
- f:write(format(template,...),"\n")
- end
- --
- -- This might change ... we can just pass the relevant flags directly.
- --
- setalways("%% runtime options files (command line driven)")
- --
- setalways("\\unprotect")
- --
- setalways("%% feedback and basic job control")
- --
- -- Option file, we can pass more on the commandline some day soon. Actually we
- -- should use directives and trackers.
- --
- setfixed ("timing" , "\\usemodule[timing]")
- setfixed ("batchmode" , "\\batchmode")
- setfixed ("batch" , "\\batchmode")
- setfixed ("nonstopmode" , "\\nonstopmode")
- setfixed ("nonstop" , "\\nonstopmode")
- -- setfixed ("tracefiles" , "\\tracefilestrue")
- setfixed ("nostats" , "\\nomkivstatistics")
- setfixed ("paranoid" , "\\def\\maxreadlevel{1}")
- --
- setalways("%% handy for special styles")
- --
- setalways("\\startluacode")
- setalways("document = document or { }")
- setalways(table.serialize(environment.arguments, "document.arguments"))
- setalways(table.serialize(environment.files, "document.files"))
- setalways("\\stopluacode")
- --
- setalways("%% process info")
- --
- setalways( "\\setupsystem[inputfile=%s]",getargument("input") or environment.files[1] or "\\jobname")
- setvalue ("result" , "\\setupsystem[file=%s]")
- setalways( "\\setupsystem[\\c!n=%s,\\c!m=%s]", kindofrun or 0, currentrun or 0)
- setvalues("path" , "\\usepath[%s]")
- setvalue ("setuppath" , "\\setupsystem[\\c!directory={%s}]")
- setvalue ("randomseed" , "\\setupsystem[\\c!random=%s]")
- setvalue ("arguments" , "\\setupenv[%s]")
- if once then
- setalways("\\enabledirectives[system.runonce]")
- end
- setalways("%% modes")
- setvalues("modefile" , "\\readlocfile{%s}{}{}")
- setvalues("mode" , "\\enablemode[%s]", true)
- if ctxdata then
- setvalues(ctxdata.modes, "\\enablemode[%s]")
- end
- --
- setalways("%% options (not that important)")
- --
- setalways("\\startsetups *runtime:options")
- setfixed ("color" , "\\setupcolors[\\c!state=\\v!start]")
- setvalue ("separation" , "\\setupcolors[\\c!split=%s]")
- setfixed ("noarrange" , "\\setuparranging[\\v!disable]")
- if getargument('arrange') and not finalrun then
- setalways( "\\setuparranging[\\v!disable]")
- end
- setalways("\\stopsetups")
- --
- setalways("%% styles and modules")
- --
- setalways("\\startsetups *runtime:modules")
- setvalues("usemodule" , "\\usemodule[%s]", true)
- setvalues("environment" , "\\environment %s ", true)
- if ctxdata then
- setvalues(ctxdata.modules, "\\usemodule[%s]")
- setvalues(ctxdata.environments, "\\environment %s ")
- end
- setalways("\\stopsetups")
- --
- setalways("%% done")
- --
- setalways("\\protect \\endinput")
- f:close()
- end
-end
-
-function scripts.context.multipass.copyluafile(jobname) -- obsolete
+local function multipass_copyluafile(jobname)
local tuaname, tucname = jobname..".tua", jobname..".tuc"
if lfs.isfile(tuaname) then
os.remove(tucname)
@@ -606,120 +313,54 @@ function scripts.context.multipass.copyluafile(jobname) -- obsolete
end
end
-scripts.context.cldsuffixes = table.tohash {
- "cld",
-}
-
-scripts.context.xmlsuffixes = table.tohash {
- "xml",
-}
-
-scripts.context.luasuffixes = table.tohash {
- "lua",
-}
-
-scripts.context.beforesuffixes = {
- "tuo", "tuc"
-}
-scripts.context.aftersuffixes = {
- "pdf", "tuo", "tuc", "log"
-}
-
-scripts.context.errorsuffixes = {
- "log"
-}
-
-scripts.context.interfaces = {
- en = "cont-en",
- uk = "cont-uk",
- de = "cont-de",
- fr = "cont-fr",
- nl = "cont-nl",
- cs = "cont-cs",
- it = "cont-it",
- ro = "cont-ro",
- pe = "cont-pe",
-}
-
-scripts.context.defaultformats = {
- "cont-en",
- "cont-nl",
--- "mptopdf", -- todo: mak emkiv variant
--- "metatex", -- will show up soon
--- "metafun", -- todo: mp formats
--- "plain"
-}
-
-local lpegpatterns, Cs, P = lpeg.patterns, lpeg.Cs, lpeg.P
+--
local pattern = lpegpatterns.utfbom^-1 * (P("%% ") + P("% ")) * Cs((1-lpegpatterns.newline)^1)
-local function analyze(filename) -- only files on current path
- local f = io.open(file.addsuffix(filename,"tex"))
- if f then
- local t = { }
- local line = f:read("*line") or ""
- local preamble = lpeg.match(pattern,line)
+local function preamble_analyze(filename) -- only files on current path
+ local t = { }
+ local line = io.loadlines(file.addsuffix(filename,"tex"))
+ if line then
+ local preamble = lpegmatch(pattern,line)
if preamble then
for key, value in gmatch(preamble,"(%S+)%s*=%s*(%S+)") do
t[key] = value
end
t.type = "tex"
- elseif line:find("^&1', file.replacesuffix(name,"pdf")))
---~ end
---~ function scripts.context.closepdf(name)
---~ os.spawn(format('pdfclose --file "%s" 2>&1', file.replacesuffix(name,"pdf")))
---~ end
+-- automatically opening and closing pdf files
-local pdfview -- delayed loading
+local pdfview -- delayed
-function scripts.context.openpdf(name,method)
+local function pdf_open(name,method)
pdfview = pdfview or dofile(resolvers.findfile("l-pdfview.lua","tex"))
pdfview.setmethod(method)
report(pdfview.status())
pdfview.open(file.replacesuffix(name,"pdf"))
end
-function scripts.context.closepdf(name,method)
+local function pdf_close(name,method)
pdfview = pdfview or dofile(resolvers.findfile("l-pdfview.lua","tex"))
pdfview.setmethod(method)
pdfview.close(file.replacesuffix(name,"pdf"))
end
-local function push_result_purge(oldbase,newbase)
- for _, suffix in next, scripts.context.aftersuffixes do
+-- result file handling
+
+local function result_push_purge(oldbase,newbase)
+ for _, suffix in next, usedsuffixes.after do
local oldname = file.addsuffix(oldbase,suffix)
local newname = file.addsuffix(newbase,suffix)
os.remove(newname)
@@ -727,8 +368,8 @@ local function push_result_purge(oldbase,newbase)
end
end
-local function push_result_keep(oldbase,newbase)
- for _, suffix in next, scripts.context.beforesuffixes do
+local function result_push_keep(oldbase,newbase)
+ for _, suffix in next, usedsuffixes.before do
local oldname = file.addsuffix(oldbase,suffix)
local newname = file.addsuffix(newbase,suffix)
local tmpname = "keep-"..oldname
@@ -739,8 +380,8 @@ local function push_result_keep(oldbase,newbase)
end
end
-local function save_result_error(oldbase,newbase)
- for _, suffix in next, scripts.context.errorsuffixes do
+local function result_save_error(oldbase,newbase)
+ for _, suffix in next, usedsuffixes.keep do
local oldname = file.addsuffix(oldbase,suffix)
local newname = file.addsuffix(newbase,suffix)
os.remove(newname) -- to be sure
@@ -748,8 +389,8 @@ local function save_result_error(oldbase,newbase)
end
end
-local function save_result_purge(oldbase,newbase)
- for _, suffix in next, scripts.context.aftersuffixes do
+local function result_save_purge(oldbase,newbase)
+ for _, suffix in next, usedsuffixes.after do
local oldname = file.addsuffix(oldbase,suffix)
local newname = file.addsuffix(newbase,suffix)
os.remove(newname) -- to be sure
@@ -757,8 +398,8 @@ local function save_result_purge(oldbase,newbase)
end
end
-local function save_result_keep(oldbase,newbase)
- for _, suffix in next, scripts.context.aftersuffixes do
+local function result_save_keep(oldbase,newbase)
+ for _, suffix in next, usedsuffixes.after do
local oldname = file.addsuffix(oldbase,suffix)
local newname = file.addsuffix(newbase,suffix)
local tmpname = "keep-"..oldname
@@ -768,313 +409,339 @@ local function save_result_keep(oldbase,newbase)
end
end
-function scripts.context.run(ctxdata,filename)
- -- filename overloads environment.files
- local files = (filename and { filename }) or environment.files
- if ctxdata then
- -- todo: interface
- for k,v in next, ctxdata.flags do
- environment.setargument(k,v)
+-- executing luatex
+
+local function flags_to_string(flags,prefix) -- context flags get prepended by c:
+ local t = { }
+ for k, v in table.sortedhash(flags) do
+ if prefix then
+ k = format("c:%s",k)
+ end
+ if not v or v == "" or v == '""' then
+ -- no need to flag false
+ elseif v == true then
+ t[#t+1] = format('--%s',k)
+ elseif type(v) == "string" then
+ t[#t+1] = format('--%s=%s',k,quote(v))
+ else
+ t[#t+1] = format('--%s=%s',k,tostring(v))
end
end
- if #files > 0 then
+ return concat(t," ")
+end
+
+local function luatex_command(l_flags,c_flags,filename)
+ return format('luatex %s %s "%s"',
+ flags_to_string(l_flags),
+ flags_to_string(c_flags,true),
+ filename
+ )
+end
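+
+-- For instance (with made up values):
+--
+--   luatex_command(
+--       { fmt = "cont-en.fmt", interaction = "batchmode" },
+--       { result = "demo" },
+--       "cont-yes.mkiv"
+--   )
+--
+-- gives something like:
+--
+--   luatex --fmt="cont-en.fmt" --interaction="batchmode" --c:result="demo" "cont-yes.mkiv"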
+
+local function run_texexec(filename,a_purge,a_purgeall)
+ if false then
+ -- we need to write a top etc too and run mp etc so it's not worth the
+ -- trouble, so it will take a while before the next is finished
--
- local interface = getargument("interface")
- -- todo: getargument("interface","en")
- interface = (type(interface) == "string" and interface) or "en"
+ -- context --extra=texutil --convert myfile
+ else
+ local texexec = resolvers.findfile("texexec.rb") or ""
+ if texexec ~= "" then
+ os.setenv("RUBYOPT","")
+ local options = environment.reconstructcommandline(environment.arguments_after)
+ options = gsub(options,"--purge","")
+ options = gsub(options,"--purgeall","")
+ local command = format("ruby %s %s",texexec,options)
+ if a_purge then
+ os.execute(command)
+ scripts.context.purge_job(filename,false,true)
+ elseif a_purgeall then
+ os.execute(command)
+ scripts.context.purge_job(filename,true,true)
+ else
+ os.exec(command)
+ end
+ end
+ end
+end
+
+--
+
+function scripts.context.run(ctxdata,filename)
+ --
+ local a_nofile = getargument("nofile")
+ --
+ local files = environment.files or { }
+ --
+ local filelist, mainfile
+ --
+ if filename then
+ -- the given forced name is processed, the filelist is passed to context
+ mainfile = filename
+ filelist = { filename }
+ -- files = files
+ elseif a_nofile then
+ -- the list of given files is processed using the dummy file
+ mainfile = usedfiles.nop
+ filelist = { usedfiles.nop }
+ -- files = { }
+ elseif #files > 0 then
+ -- the list of given files is processed using the stub file
+ mainfile = usedfiles.yes
+ filelist = files
+ files = { }
+ else
+ return
+ end
+ --
+ local interface = validstring(getargument("interface")) or "en"
+ local formatname = formatofinterface[interface] or "cont-en"
+ local formatfile, scriptfile = resolvers.locateformat(formatname)
+ if not formatfile or not scriptfile then
+ report("warning: no format found, forcing remake (commandline driven)")
+ scripts.context.make(formatname)
+ formatfile, scriptfile = resolvers.locateformat(formatname)
+ end
+ if formatfile and scriptfile then
+ -- okay
+ elseif formatname then
+ report("error, no format found with name: %s, aborting",formatname)
+ return
+ else
+ report("error, no format found (provide formatname or interface)")
+ return
+ end
+ --
+ local a_mkii = getargument("mkii") or getargument("pdftex") or getargument("xetex")
+ local a_purge = getargument("purge")
+ local a_purgeall = getargument("purgeall")
+ local a_purgeresult = getargument("purgeresult")
+ local a_global = getargument("global")
+ local a_timing = getargument("timing")
+ local a_batchmode = getargument("batchmode")
+ local a_nonstopmode = getargument("nonstopmode")
+ local a_once = getargument("once")
+ local a_synctex = getargument("synctex")
+ local a_backend = getargument("backend")
+ local a_arrange = getargument("arrange")
+ local a_noarrange = getargument("noarrange")
+ --
+ for i=1,#filelist do
--
- local formatname = scripts.context.interfaces[interface] or "cont-en"
- local formatfile, scriptfile = resolvers.locateformat(formatname)
- -- this catches the command line
- if not formatfile or not scriptfile then
- report("warning: no format found, forcing remake (commandline driven)")
- scripts.context.make(formatname)
- formatfile, scriptfile = resolvers.locateformat(formatname)
+ local filename = filelist[i]
+ local basename = file.basename(filename)
+ local pathname = file.dirname(filename)
+ local jobname = file.removesuffix(basename)
+ local ctxname = ctxdata and ctxdata.ctxname
+ --
+ if pathname == "" and not a_global and filename ~= usedfiles.nop then
+ filename = "./" .. filename
end
--
- if formatfile and scriptfile then
- for i=1,#files do
- local filename = files[i]
- local basename, pathname = file.basename(filename), file.dirname(filename)
- local jobname = file.removesuffix(basename)
- if pathname == "" and not getargument("global") then
- filename = "./" .. filename
+ local analysis = preamble_analyze(filename)
+ --
+ if a_mkii or analysis.engine == 'pdftex' or analysis.engine == 'xetex' then
+ run_texexec(filename,a_purge,a_purgeall)
+ else
+ if analysis.interface and analysis.interface ~= interface then
+ formatname = formatofinterface[analysis.interface] or formatname
+ formatfile, scriptfile = resolvers.locateformat(formatname)
+ end
+ if not formatfile or not scriptfile then
+ report("warning: no format found, forcing remake (source driven)")
+ scripts.context.make(formatname)
+ formatfile, scriptfile = resolvers.locateformat(formatname)
+ end
+ if formatfile and scriptfile then
+ --
+ local suffix = validstring(getargument("suffix"))
+ local resultname = validstring(getargument("result"))
+ if suffix then
+ resultname = file.removesuffix(jobname) .. suffix
end
- -- look at the first line
- local a = analyze(filename)
- if a and (a.engine == 'pdftex' or a.engine == 'xetex' or getargument("pdftex") or getargument("xetex")) then
- if false then
- -- we need to write a top etc too and run mp etc so it's not worth the
- -- trouble, so it will take a while before the next is finished
- --
- -- context --extra=texutil --convert myfile
- else
- local texexec = resolvers.findfile("texexec.rb") or ""
- if texexec ~= "" then
- os.setenv("RUBYOPT","")
- local options = environment.reconstructcommandline(environment.arguments_after)
- options = gsub(options,"--purge","")
- options = gsub(options,"--purgeall","")
- local command = format("ruby %s %s",texexec,options)
- if getargument("purge") then
- os.execute(command)
- scripts.context.purge_job(filename,false,true)
- elseif getargument("purgeall") then
- os.execute(command)
- scripts.context.purge_job(filename,true,true)
- else
- os.exec(command)
- end
+ local oldbase = ""
+ local newbase = ""
+ if resultname then
+ oldbase = file.removesuffix(jobname)
+ newbase = file.removesuffix(resultname)
+ if oldbase ~= newbase then
+ if a_purgeresult then
+ result_push_purge(oldbase,newbase)
+ else
+ result_push_keep(oldbase,newbase)
end
+ else
+ resultname = nil
end
- else
- if a and a.interface and a.interface ~= interface then
- formatname = scripts.context.interfaces[a.interface] or formatname
- formatfile, scriptfile = resolvers.locateformat(formatname)
+ end
+ --
+ local pdfview = getargument("autopdf") or getargument("closepdf")
+ if pdfview then
+ pdf_close(filename,pdfview)
+ if resultname then
+ pdf_close(resultname,pdfview)
end
- -- this catches the command line
- if not formatfile or not scriptfile then
- report("warning: no format found, forcing remake (source driven)")
- scripts.context.make(formatname)
- formatfile, scriptfile = resolvers.locateformat(formatname)
+ end
+ --
+ local okay = statistics.checkfmtstatus(formatfile)
+ if okay ~= true then
+ report("warning: %s, forcing remake",tostring(okay))
+ scripts.context.make(formatname)
+ end
+ --
+ local oldhash = multipass_hashfiles(jobname)
+ local newhash = { }
+ local maxnofruns = once and 1 or multipass_nofruns
+ --
+ local c_flags = {
+ directives = validstring(environment.directives), -- gets passed via mtxrun
+ trackers = validstring(environment.trackers), -- gets passed via mtxrun
+ experiments = validstring(environment.experiments), -- gets passed via mtxrun
+ --
+ result = validstring(resultname),
+ input = validstring(getargument("input") or filename), -- alternative input
+ fulljobname = validstring(filename),
+ files = concat(files,","),
+ ctx = validstring(ctxname),
+ }
+ --
+ for k, v in next, environment.arguments do
+ if c_flags[k] == nil then
+ c_flags[k] = v
end
- if formatfile and scriptfile then
- -- we default to mkiv xml !
- -- the --prep argument might become automatic (and noprep)
- local suffix = file.extname(filename) or "?"
- if scripts.context.xmlsuffixes[suffix] or getargument("forcexml") then
- if getargument("mkii") then
- filename = makestub(true,"\\processXMLfilegrouped{%s}",filename)
- else
- filename = makestub(true,"\\xmlprocess{\\xmldocument}{%s}{}",filename)
- end
- elseif scripts.context.cldsuffixes[suffix] or getargument("forcecld") then
- -- self contained cld files need to have a starttext/stoptext (less fontloading)
- filename = makestub(false,"\\ctxlua{context.runfile('%s')}",filename)
- elseif scripts.context.luasuffixes[suffix] or getargument("forcelua") then
- filename = makestub(true,"\\ctxlua{dofile('%s')}",filename)
- elseif getargument("prep") then
- -- we need to keep the original jobname
- filename = makestub(true,"\\readfile{%s}{}{}",filename,ctxrunner.preppedfile(ctxdata,filename))
- end
- --
- -- todo: also other stubs
- --
- local suffix, resultname = getargument("suffix"), getargument("result")
- if type(suffix) == "string" then
- resultname = file.removesuffix(jobname) .. suffix
- end
- local oldbase, newbase = "", ""
- if type(resultname) == "string" then
- oldbase = file.removesuffix(jobname)
- newbase = file.removesuffix(resultname)
- if oldbase ~= newbase then
- if getargument("purgeresult") then
- push_result_purge(oldbase,newbase)
- else
- push_result_keep(oldbase,newbase)
- end
- else
- resultname = nil
- end
- else
- resultname = nil
- end
- --
- local pdfview = getargument("autopdf") or getargument("closepdf")
- if pdfview then
- scripts.context.closepdf(filename,pdfview)
- if resultname then
- scripts.context.closepdf(resultname,pdfview)
- end
- end
- --
- local okay = statistics.checkfmtstatus(formatfile)
- if okay ~= true then
- report("warning: %s, forcing remake",tostring(okay))
- scripts.context.make(formatname)
- end
- --
- local flags = { }
- if getargument("batchmode") or getargument("batch") then
- flags[#flags+1] = "--interaction=batchmode"
- end
- if getargument("synctex") then
- -- this should become a directive
- report("warning: synctex is enabled") -- can add upto 5% runtime
- flags[#flags+1] = "--synctex=1"
- end
- flags[#flags+1] = "--fmt=" .. quote(formatfile)
- flags[#flags+1] = "--lua=" .. quote(scriptfile)
- --
- -- We pass these directly.
- --
-
---~ local silent = getargument("silent")
---~ local noconsole = getargument("noconsole")
---~ local directives = getargument("directives")
---~ local trackers = getargument("trackers")
---~ if silent == true then
---~ silent = "*"
---~ end
---~ if type(silent) == "string" then
---~ if type(directives) == "string" then
---~ directives = format("%s,logs.blocked={%s}",directives,silent)
---~ else
---~ directives = format("logs.blocked={%s}",silent)
---~ end
---~ end
---~ if noconsole then
---~ if type(directives) == "string" then
---~ directives = format("%s,logs.target=file",directives)
---~ else
---~ directives = format("logs.target=file")
---~ end
---~ end
-
- local directives = environment.directives
- local trackers = environment.trackers
- local experiments = environment.experiments
-
- --
- if type(directives) == "string" then
- flags[#flags+1] = format('--directives="%s"',directives)
- end
- if type(trackers) == "string" then
- flags[#flags+1] = format('--trackers="%s"',trackers)
- end
- --
- local backend = getargument("backend")
- if type(backend) ~= "string" then
- backend = "pdf"
- end
- flags[#flags+1] = format('--backend="%s"',backend)
- --
- local command = format("luatex %s %s \\stoptext", concat(flags," "), quote(filename))
- local oldhash, newhash = scripts.context.multipass.hashfiles(jobname), { }
- local once = getargument("once")
- local maxnofruns = (once and 1) or scripts.context.multipass.nofruns
- local arrange = getargument("arrange")
- for i=1,maxnofruns do
- -- 1:first run, 2:successive run, 3:once, 4:last of maxruns
- local kindofrun = (once and 3) or (i==1 and 1) or (i==maxnofruns and 4) or 2
- scripts.context.multipass.makeoptionfile(jobname,ctxdata,kindofrun,i,false,once) -- kindofrun, currentrun, final
- report("run %s: %s",i,command)
---~ print("\n") -- cleaner, else continuation on same line
- print("") -- cleaner, else continuation on same line
- local returncode, errorstring = os.spawn(command)
- --~ if returncode == 3 then
- --~ scripts.context.make(formatname)
- --~ returncode, errorstring = os.spawn(command)
- --~ if returncode == 3 then
- --~ report("ks: return code 3, message: %s",errorstring or "?")
- --~ os.exit(1)
- --~ end
- --~ end
- if not returncode then
- report("fatal error: no return code, message: %s",errorstring or "?")
- if resultname then
- save_result_error(oldbase,newbase)
- end
- os.exit(1)
- break
- elseif returncode > 0 then
- report("fatal error: return code: %s",returncode or "?")
- if resultname then
- save_result_error(oldbase,newbase)
- end
- os.exit(returncode)
- break
- else
- scripts.context.multipass.copyluafile(jobname)
- -- scripts.context.multipass.copytuifile(jobname)
- newhash = scripts.context.multipass.hashfiles(jobname)
- if scripts.context.multipass.changed(oldhash,newhash) then
- oldhash = newhash
- else
- break
- end
- end
- end
- --
- if arrange then
- local kindofrun = 3
- scripts.context.multipass.makeoptionfile(jobname,ctxdata,kindofrun,i,true) -- kindofrun, currentrun, final
- report("arrange run: %s",command)
- local returncode, errorstring = os.spawn(command)
- if not returncode then
- report("fatal error: no return code, message: %s",errorstring or "?")
- os.exit(1)
- elseif returncode > 0 then
- report("fatal error: return code: %s",returncode or "?")
- os.exit(returncode)
- end
- end
- --
- if getargument("purge") then
- scripts.context.purge_job(jobname)
- elseif getargument("purgeall") then
- scripts.context.purge_job(jobname,true)
- end
- --
- os.remove(jobname..".top")
- --
+ end
+ --
+ local l_flags = {
+ ["interaction"] = (a_batchmode and "batchmode") or (a_nonstopmode and "nonstopmode") or nil,
+ ["synctex"] = a_synctex and 1 or nil,
+ ["no-parse-first-line"] = true,
+ -- ["no-mktex"] = true,
+ -- ["file-line-error-style"] = true,
+ ["fmt"] = formatfile,
+ ["lua"] = scriptfile,
+ ["jobname"] = jobname,
+ }
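
The luatex_command helper that consumes l_flags and c_flags is defined earlier in mtx-context.lua and is not part of this hunk. A minimal sketch of such a serializer; the flagstostring helper and the "c:" prefix for ConTeXt-level flags are assumptions, not taken from the patch:

    local format, concat = string.format, table.concat

    local function flagstostring(flags,prefix) -- hypothetical helper
        local t = { }
        for key, value in next, flags do
            if value == true then
                t[#t+1] = format("--%s%s",prefix or "",key)            -- boolean switch
            elseif value ~= nil and value ~= false then
                t[#t+1] = format('--%s%s="%s"',prefix or "",key,value) -- key/value switch
            end
        end
        return concat(t," ")
    end

    local function luatex_command(l_flags,c_flags,mainfile)
        -- l_flags go to the luatex binary, c_flags are handed over to the ConTeXt run
        return format('luatex %s %s "%s"',flagstostring(l_flags),flagstostring(c_flags,"c:"),mainfile)
    end
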
+ --
+ if a_synctex then
+ report("warning: synctex is enabled") -- can add upto 5% runtime
+ end
+ --
+ -- kindofrun: 1:first run, 2:successive run, 3:once, 4:last of maxruns
+ --
+ for currentrun=1,maxnofruns do
+ --
+ c_flags.final = false
+ c_flags.kindofrun = (a_once and 3) or (currentrun==1 and 1) or (currentrun==maxnofruns and 4) or 2
+ c_flags.currentrun = currentrun
+ c_flags.noarrange = a_noarrange or a_arrange or nil
+ --
+ local command = luatex_command(l_flags,c_flags,mainfile)
+ --
+ report("run %s: %s",i,command)
+ print("") -- cleaner, else continuation on same line
+ local returncode, errorstring = os.spawn(command)
+ if not returncode then
+ report("fatal error: no return code, message: %s",errorstring or "?")
if resultname then
- if getargument("purgeresult") then
- -- so, if there is no result then we don't get the old one, but
- -- related files (log etc) are still there for tracing purposes
- save_result_purge(oldbase,newbase)
- else
- save_result_keep(oldbase,newbase)
- end
- report("result renamed to: %s",newbase)
- end
- --
- if getargument("purge") then
- scripts.context.purge_job(resultname)
- elseif getargument("purgeall") then
- scripts.context.purge_job(resultname,true)
- end
- --
- local pdfview = getargument("autopdf")
- if pdfview then
- scripts.context.openpdf(resultname or filename,pdfview)
+ result_save_error(oldbase,newbase)
end
- --
- if getargument("timing") then
- report()
- report("you can process (timing) statistics with:",jobname)
- report()
- report("context --extra=timing '%s'",jobname)
- report("mtxrun --script timing --xhtml [--launch --remove] '%s'",jobname)
- report()
+ os.exit(1)
+ break
+ elseif returncode == 0 then
+ multipass_copyluafile(jobname)
+ newhash = multipass_hashfiles(jobname)
+ if multipass_changed(oldhash,newhash) then
+ oldhash = newhash
+ else
+ break
end
else
- if formatname then
- report("error, no format found with name: %s, skipping",formatname)
- else
- report("error, no format found (provide formatname or interface)")
+ report("fatal error: return code: %s",returncode or "?")
+ if resultname then
+ result_save_error(oldbase,newbase)
end
+ os.exit(1) -- (returncode)
break
end
+ --
+ end
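
The loop above reruns luatex until the multipass data stops changing or maxnofruns is reached. The real multipass_hashfiles and multipass_changed helpers live earlier in mtx-context.lua; a minimal sketch of the idea, with hypothetical helpers and a cheap stand-in checksum:

    local function filehash(name) -- stand-in checksum, not the real implementation
        local f = io.open(name,"rb")
        if not f then
            return ""
        end
        local data = f:read("*a")
        f:close()
        local n = 0
        for i=1,#data,127 do -- sample every 127th byte
            n = (n + data:byte(i)) % 1000003
        end
        return #data .. ":" .. n
    end

    local function multipass_hashfiles(jobname) -- sketch: assume the "tuc" file drives reruns
        return { tuc = filehash(jobname .. ".tuc") }
    end

    local function multipass_changed(oldhash,newhash)
        for k, v in pairs(newhash) do
            if oldhash[k] ~= v then
                return true
            end
        end
        return false
    end
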
+ --
+ if a_arrange then
+ --
+ c_flags.final = true
+ c_flags.kindofrun = 3
+ c_flags.currentrun = c_flags.currentrun + 1
+ c_flags.noarrange = nil
+ --
+ local command = luatex_command(l_flags,c_flags,mainfile)
+ --
+ report("arrange run: %s",command)
+ local returncode, errorstring = os.spawn(command)
+ if not returncode then
+ report("fatal error: no return code, message: %s",errorstring or "?")
+ os.exit(1)
+ elseif returncode > 0 then
+ report("fatal error: return code: %s",returncode or "?")
+ os.exit(returncode)
+ end
+ --
+ end
+ --
+ if a_purge then
+ scripts.context.purge_job(jobname)
+ elseif a_purgeall then
+ scripts.context.purge_job(jobname,true)
+ end
+ --
+ if resultname then
+ if a_purgeresult then
+ -- so, if there is no result then we don't get the old one, but
+ -- related files (log etc) are still there for tracing purposes
+ result_save_purge(oldbase,newbase)
+ else
+ result_save_keep(oldbase,newbase)
+ end
+ report("result renamed to: %s",newbase)
+ end
+ --
+ if a_purge then
+ scripts.context.purge_job(resultname)
+ elseif a_purgeall then
+ scripts.context.purge_job(resultname,true)
+ end
+ --
+ local pdfview = getargument("autopdf")
+ if pdfview then
+ pdf_open(resultname or jobname,pdfview)
+ end
+ --
+ if a_timing then
+ report()
+ report("you can process (timing) statistics with:",jobname)
+ report()
+ report("context --extra=timing '%s'",jobname)
+ report("mtxrun --script timing --xhtml [--launch --remove] '%s'",jobname)
+ report()
end
- end
- else
- if formatname then
- report("error, no format found with name: %s, aborting",formatname)
else
- report("error, no format found (provide formatname or interface)")
+ if formatname then
+ report("error, no format found with name: %s, skipping",formatname)
+ else
+ report("error, no format found (provide formatname or interface)")
+ end
+ break
end
end
end
+ --
end
-function scripts.context.pipe()
+function scripts.context.pipe() -- still used?
-- context --pipe
-- context --pipe --purge --dummyfile=whatever.tmp
local interface = getargument("interface")
interface = (type(interface) == "string" and interface) or "en"
- local formatname = scripts.context.interfaces[interface] or "cont-en"
+ local formatname = formatofinterface[interface] or "cont-en"
local formatfile, scriptfile = resolvers.locateformat(formatname)
if not formatfile or not scriptfile then
report("warning: no format found, forcing remake (commandline driven)")
@@ -1087,11 +754,16 @@ function scripts.context.pipe()
report("warning: %s, forcing remake",tostring(okay))
scripts.context.make(formatname)
end
- local flags = {
- "--interaction=scrollmode",
- "--fmt=" .. quote(formatfile),
- "--lua=" .. quote(scriptfile),
- "--backend=pdf",
+ local l_flags = {
+ interaction = "scrollmode",
+ fmt = formatfile,
+ lua = scriptfile,
+ }
+ local c_flags = {
+ backend = "pdf",
+ final = false,
+ kindofrun = 3,
+ currentrun = 1,
}
local filename = getargument("dummyfile") or ""
if filename == "" then
@@ -1100,10 +772,9 @@ function scripts.context.pipe()
else
filename = file.addsuffix(filename,"tmp")
io.savedata(filename,"\\relax")
- scripts.context.multipass.makeoptionfile(filename,{ flags = flags },3,1,false) -- kindofrun, currentrun, final
report("entering scrollmode using '%s' with optionfile, end job with \\end",filename)
end
- local command = format("luatex %s %s", concat(flags," "), quote(filename))
+ local command = luatex_command(l_flags,c_flags,filename)
os.spawn(command)
if getargument("purge") then
scripts.context.purge_job(filename)
@@ -1123,11 +794,9 @@ end
local make_mkiv_format = environment.make_format
local function make_mkii_format(name,engine)
- if getargument(engine) then
- local command = format("mtxrun texexec.rb --make --%s %s",name,engine)
- report("running command: %s",command)
- os.spawn(command)
- end
+ local command = format("mtxrun texexec.rb --make --%s %s",name,engine)
+ report("running command: %s",command)
+ os.spawn(command)
end
function scripts.context.generate()
@@ -1140,14 +809,17 @@ function scripts.context.make(name)
if not getargument("fast") then -- as in texexec
scripts.context.generate()
end
- local list = (name and { name }) or (environment.files[1] and environment.files) or scripts.context.defaultformats
+ local list = (name and { name }) or (environment.files[1] and environment.files) or defaultformats
+ local engine = getargument("engine") or "luatex"
for i=1,#list do
local name = list[i]
- name = scripts.context.interfaces[name] or name or ""
- if name ~= "" then
+ name = formatofinterface[name] or name or ""
+ if name == "" then
+ -- nothing
+ elseif engine == "luatex" then
make_mkiv_format(name)
- make_mkii_format(name,"pdftex")
- make_mkii_format(name,"xetex")
+ elseif engine == "pdftex" or engine == "xetex" then
+ make_mkii_format(name,engine)
end
end
end
@@ -1155,68 +827,77 @@ end
function scripts.context.ctx()
local ctxdata = ctxrunner.new()
ctxdata.jobname = environment.files[1]
- ctxrunner.manipulate(ctxdata,getargument("ctx"))
+ ctxrunner.checkfile(ctxdata,getargument("ctx"))
+ ctxrunner.checkflags(ctxdata)
scripts.context.run(ctxdata)
end
function scripts.context.autoctx()
local ctxdata = nil
- local files = (filename and { filename }) or environment.files
+ local files = environment.files
local firstfile = #files > 0 and files[1]
- if firstfile and file.extname(firstfile) == "xml" then
- local f = io.open(firstfile)
- if f then
- local chunk = f:read(512) or ""
- f:close()
- local ctxname = match(chunk,"<%?context%-directive%s+job%s+ctxfile%s+([^ ]-)%s*?>")
- if ctxname then
- ctxdata = ctxrunner.new()
- ctxdata.jobname = firstfile
- ctxrunner.manipulate(ctxdata,ctxname)
+ if firstfile then
+ local suffix = file.suffix(firstfile)
+ if suffix == "xml" then
+ local chunk = io.loadchunk(firstfile) -- 1024
+ if chunk then
+ local ctxname = match(chunk,"<%?context%-directive%s+job%s+ctxfile%s+([^ ]-)%s*?>")
+ if ctxname then
+ ctxdata = ctxrunner.new()
+ ctxdata.jobname = firstfile
+ ctxrunner.checkfile(ctxdata,ctxname)
+ ctxrunner.checkflags(ctxdata)
+ end
end
+ elseif suffix == "tex" then
+ -- maybe but we scan the preamble later too
end
end
scripts.context.run(ctxdata)
end
-local template = [[
-\starttext
- \directMPgraphic{%s}{input "%s"}
-\stoptext
-]]
-
-local loaded = false
+-- no longer ok as mlib-run misses something:
-function scripts.context.metapost()
- local filename = environment.files[1] or ""
- if not loaded then
- dofile(resolvers.findfile("mlib-run.lua"))
- loaded = true
- commands = commands or { }
- commands.writestatus = report -- no longer needed
- end
- local formatname = getargument("format") or "metafun"
- if formatname == "" or type(formatname) == "boolean" then
- formatname = "metafun"
- end
- if getargument("pdf") then
- local basename = file.removesuffix(filename)
- local resultname = getargument("result") or basename
- local jobname = "mtx-context-metapost"
- local tempname = file.addsuffix(jobname,"tex")
- io.savedata(tempname,format(template,"metafun",filename))
- environment.files[1] = tempname
- environment.setargument("result",resultname)
- environment.setargument("once",true)
- scripts.context.run()
- scripts.context.purge_job(jobname,true)
- scripts.context.purge_job(resultname,true)
- elseif getargument("svg") then
- metapost.directrun(formatname,filename,"svg")
- else
- metapost.directrun(formatname,filename,"mps")
- end
-end
+-- local template = [[
+-- \starttext
+-- \directMPgraphic{%s}{input "%s"}
+-- \stoptext
+-- ]]
+--
+-- local loaded = false
+--
+-- function scripts.context.metapost()
+-- local filename = environment.files[1] or ""
+-- if not loaded then
+-- dofile(resolvers.findfile("mlib-run.lua"))
+-- loaded = true
+-- commands = commands or { }
+-- commands.writestatus = report -- no longer needed
+-- end
+-- local formatname = getargument("format") or "metafun"
+-- if formatname == "" or type(formatname) == "boolean" then
+-- formatname = "metafun"
+-- end
+-- if getargument("pdf") then
+-- local basename = file.removesuffix(filename)
+-- local resultname = getargument("result") or basename
+-- local jobname = "mtx-context-metapost"
+-- local tempname = file.addsuffix(jobname,"tex")
+-- io.savedata(tempname,format(template,"metafun",filename))
+-- environment.files[1] = tempname
+-- setargument("result",resultname)
+-- setargument("once",true)
+-- scripts.context.run()
+-- scripts.context.purge_job(jobname,true)
+-- scripts.context.purge_job(resultname,true)
+-- elseif getargument("svg") then
+-- metapost.directrun(formatname,filename,"svg")
+-- else
+-- metapost.directrun(formatname,filename,"mps")
+-- end
+-- end
+
+-- --
function scripts.context.version()
local name = resolvers.findfile("context.mkiv")
@@ -1238,6 +919,8 @@ function scripts.context.version()
end
end
+-- purging files
+
local generic_files = {
"texexec.tex", "texexec.tui", "texexec.tuo",
"texexec.tuc", "texexec.tua",
@@ -1262,7 +945,6 @@ local persistent_runfiles = {
}
local special_runfiles = {
---~ "-mpgraph*", "-mprun*", "-temp-*" -- hm, wasn't this escaped?
"-mpgraph", "-mprun", "-temp-"
}
@@ -1278,9 +960,6 @@ local function purge_file(dfile,cfile)
end
end
-local function remove_special_files(pattern)
-end
-
function scripts.context.purge_job(jobname,all,mkiitoo)
if jobname and jobname ~= "" then
jobname = file.basename(jobname)
@@ -1318,7 +997,7 @@ function scripts.context.purge(all,pattern,mkiitoo)
local deleted = { }
for i=1,#files do
local name = files[i]
- local suffix = file.extname(name)
+ local suffix = file.suffix(name)
local basename = file.basename(name)
if obsolete[suffix] or temporary[suffix] or persistent[suffix] or generic[basename] then
deleted[#deleted+1] = purge_file(name)
@@ -1335,12 +1014,14 @@ function scripts.context.purge(all,pattern,mkiitoo)
end
end
+-- touching files (signals regeneration of formats)
+
local function touch(name,pattern)
local name = resolvers.findfile(name)
local olddata = io.loaddata(name)
if olddata then
local oldversion, newversion = "", os.date("%Y.%m.%d %H:%M")
- local newdata, ok = olddata:gsub(pattern,function(pre,mid,post)
+ local newdata, ok = gsub(olddata,pattern,function(pre,mid,post)
oldversion = mid
return pre .. newversion .. post
end)
@@ -1374,25 +1055,29 @@ function scripts.context.touch()
touchfiles("mkii")
touchfiles("mkiv")
touchfiles("mkvi")
+ touchfiles("mkix")
+ touchfiles("mkxi")
+ else
+ report("touching needs --expert")
end
end
-- modules
local labels = { "title", "comment", "status" }
-local cards = { "*.mkvi", "*.mkiv", "*.tex" }
+local cards = { "*.mkvi", "*.mkiv", "*.mkxi", "*.mkix", "*.tex" }
function scripts.context.modules(pattern)
local list = { }
local found = resolvers.findfile("context.mkiv")
if not pattern or pattern == "" then
-- official files in the tree
- for _, card in ipairs(cards) do
- resolvers.findwildcardfiles(card,list)
+ for i=1,#cards do
+ resolvers.findwildcardfiles(cards[i],list)
end
-- my dev path
- for _, card in ipairs(cards) do
- dir.glob(file.join(file.dirname(found),card),list)
+ for i=1,#cards do
+ dir.glob(file.join(file.dirname(found),cards[i]),list)
end
else
resolvers.findwildcardfiles(pattern,list)
@@ -1405,7 +1090,7 @@ function scripts.context.modules(pattern)
if not done[base] then
done[base] = true
local suffix = file.suffix(base)
- if suffix == "tex" or suffix == "mkiv" or suffix == "mkvi" then
+ if suffix == "tex" or suffix == "mkiv" or suffix == "mkvi" or suffix == "mkix" or suffix == "mkxi" then
local prefix = match(base,"^([xmst])%-")
if prefix then
v = resolvers.findfile(base) -- so that files on my dev path are seen
@@ -1462,30 +1147,28 @@ end
function scripts.context.extra()
local extra = getargument("extra")
- if type(extra) == "string" then
- if getargument("help") then
- scripts.context.extras(extra)
+ if type(extra) ~= "string" then
+ scripts.context.extras()
+ elseif getargument("help") then
+ scripts.context.extras(extra)
+ else
+ local fullextra = extra
+ if not find(fullextra,"mtx%-context%-") then
+ fullextra = "mtx-context-" .. extra
+ end
+ local foundextra = resolvers.findfile(fullextra)
+ if foundextra == "" then
+ scripts.context.extras()
+ return
else
- local fullextra = extra
- if not find(fullextra,"mtx%-context%-") then
- fullextra = "mtx-context-" .. extra
- end
- local foundextra = resolvers.findfile(fullextra)
- if foundextra == "" then
- scripts.context.extras()
- return
- else
- report("processing extra: %s", foundextra)
- end
- environment.setargument("purgeall",true)
- local result = environment.setargument("result") or ""
- if result == "" then
- environment.setargument("result","context-extra")
- end
- scripts.context.run(nil,foundextra)
+ report("processing extra: %s", foundextra)
end
- else
- scripts.context.extras()
+ setargument("purgeall",true)
+ local result = getargument("result") or ""
+ if result == "" then
+ setargument("result","context-extra")
+ end
+ scripts.context.run(nil,foundextra)
end
end
@@ -1493,25 +1176,27 @@ end
function scripts.context.trackers()
environment.files = { resolvers.findfile("m-trackers.mkiv") }
- scripts.context.multipass.nofruns = 1
- environment.setargument("purgeall",true)
+ multipass_nofruns = 1
+ setargument("purgeall",true)
scripts.context.run()
end
function scripts.context.directives()
environment.files = { resolvers.findfile("m-directives.mkiv") }
- scripts.context.multipass.nofruns = 1
- environment.setargument("purgeall",true)
+ multipass_nofruns = 1
+ setargument("purgeall",true)
scripts.context.run()
end
function scripts.context.logcategories()
environment.files = { resolvers.findfile("m-logcategories.mkiv") }
- scripts.context.multipass.nofruns = 1
- environment.setargument("purgeall",true)
+ multipass_nofruns = 1
+ setargument("purgeall",true)
scripts.context.run()
end
+-- updating (often one will use mtx-update instead)
+
function scripts.context.timed(action)
statistics.timed(action)
end
@@ -1548,7 +1233,7 @@ function scripts.context.update()
local function is_okay(basetree)
for _, tree in next, validtrees do
local pattern = gsub(tree,"%-","%%-")
- if basetree:find(pattern) then
+ if find(basetree,pattern) then
return tree
end
end
@@ -1614,7 +1299,7 @@ function scripts.context.update()
end
for k in zipfile:files() do
local filename = k.filename
- if filename:find("/$") then
+ if find(filename,"/$") then
lfs.mkdir(filename)
else
local data = zip.loaddata(zipfile,filename)
@@ -1652,6 +1337,23 @@ function scripts.context.update()
end
end
+-- getting it done
+
+if getargument("nostats") then
+ setargument("nostatistics",true)
+ setargument("nostat",nil)
+end
+
+if getargument("batch") then
+ setargument("batchmode",true)
+ setargument("batch",nil)
+end
+
+if getargument("nonstop") then
+ setargument("nonstopmode",true)
+ setargument("nonstop",nil)
+end
+
do
local silent = getargument("silent")
@@ -1664,9 +1366,9 @@ do
end
if getargument("once") then
- scripts.context.multipass.nofruns = 1
+ multipass_nofruns = 1
elseif getargument("runs") then
- scripts.context.multipass.nofruns = tonumber(getargument("runs")) or nil
+ multipass_nofruns = tonumber(getargument("runs")) or nil
end
if getargument("profile") then
@@ -1674,7 +1376,6 @@ if getargument("profile") then
end
if getargument("run") then
--- scripts.context.timed(scripts.context.run)
scripts.context.timed(scripts.context.autoctx)
elseif getargument("make") then
scripts.context.timed(function() scripts.context.make() end)
@@ -1682,8 +1383,8 @@ elseif getargument("generate") then
scripts.context.timed(function() scripts.context.generate() end)
elseif getargument("ctx") then
scripts.context.timed(scripts.context.ctx)
-elseif getargument("mp") or getargument("metapost") then
- scripts.context.timed(scripts.context.metapost)
+-- elseif getargument("mp") or getargument("metapost") then
+-- scripts.context.timed(scripts.context.metapost)
elseif getargument("version") then
application.identify()
scripts.context.version()
@@ -1711,10 +1412,7 @@ elseif getargument("showdirectives") or getargument("directives") == true then
scripts.context.directives()
elseif getargument("showlogcategories") then
scripts.context.logcategories()
-elseif getargument("track") and type(getargument("track")) == "boolean" then -- for old times sake, will go
- scripts.context.trackers()
-elseif environment.files[1] then
--- scripts.context.timed(scripts.context.run)
+elseif environment.files[1] or getargument("nofile") then
scripts.context.timed(scripts.context.autoctx)
elseif getargument("pipe") then
scripts.context.timed(scripts.context.pipe)
diff --git a/scripts/context/lua/mtx-convert.lua b/scripts/context/lua/mtx-convert.lua
index b4e6e010b..04ff38aad 100644
--- a/scripts/context/lua/mtx-convert.lua
+++ b/scripts/context/lua/mtx-convert.lua
@@ -83,7 +83,7 @@ function converters.convertpath(inputpath,outputpath)
inputpath = inputpath or "."
outputpath = outputpath or "."
for name in lfs.dir(inputpath) do
- local suffix = file.extname(name)
+ local suffix = file.suffix(name)
if find(name,"%.$") then
-- skip . and ..
elseif converters[suffix] then
@@ -102,7 +102,7 @@ function converters.convertpath(inputpath,outputpath)
end
function converters.convertfile(oldname)
- local suffix = file.extname(oldname)
+ local suffix = file.suffix(oldname)
if converters[suffix] then
local newname = file.replacesuffix(oldname,"pdf")
if oldname == newname then
diff --git a/scripts/context/lua/mtx-epub.lua b/scripts/context/lua/mtx-epub.lua
index 7d1c15774..28a37fec2 100644
--- a/scripts/context/lua/mtx-epub.lua
+++ b/scripts/context/lua/mtx-epub.lua
@@ -11,8 +11,8 @@ if not modules then modules = { } end modules ['mtx-epub'] = {
-- really an id but has some special property). Then there is this ncx suffix
-- thing. Somehow it give the impression of a reversed engineered application
-- format so it will probably take a few cycles to let it become a real
--- clean standard. Thanks to Adam Reviczky for helping to figure out all these
--- puzzling details.
+-- clean standard. Thanks to Adam Reviczky, Luigi Scarso and Andy Thomas for
+-- helping to figure out all the puzzling details.
-- This is preliminary code. At some point we will deal with images as well but
-- first we need a decent strategy to export them. More information will be
@@ -31,7 +31,7 @@ mtxrun --script epub --make mydocument
local application = logs.application {
name = "mtx-epub",
- banner = "ConTeXt EPUB Helpers 0.11",
+ banner = "ConTeXt EPUB Helpers 0.12",
helpinfo = helpinfo,
}
@@ -43,26 +43,27 @@ scripts.epub = scripts.epub or { }
local mimetype = "application/epub+zip"
local container = [[
-
+
-
+
]]
local package = [[
-
+
- My Title
- en
- urn:uuid:%s
- MySelf
+ %s
+ %s
+ urn:uuid:%s
+ %s%s
+
@@ -70,13 +71,14 @@ local package = [[
+
]]
-local item = [[ ]]
+local item = [[ ]]
local toc = [[
@@ -108,6 +110,23 @@ local toc = [[
]]
+local coverxhtml = [[
+
+
+
+
+
+
+ cover.xhtml
+
+
+
+
+
+
+
+]]
+
-- We need to figure out what is permitted. Numbers only seem to give
-- problems in some applications as do names with dashes. Also the
-- optional toc is supposed to be there and although id's are by
@@ -117,7 +136,7 @@ local toc = [[
local function dumbid(filename)
-- return (string.gsub(os.uuid(),"%-%","")) -- to be tested
- return file.nameonly(filename) .. "-" .. file.extname(filename)
+ return file.nameonly(filename) .. "-" .. file.suffix(filename)
end
local mimetypes = {
@@ -128,6 +147,7 @@ local mimetypes = {
png = "image/png",
jpg = "image/jpeg",
ncx = "application/x-dtbncx+xml",
+ gif = "image/gif",
-- default = "text/plain",
}
@@ -194,9 +214,21 @@ function scripts.epub.make()
local files = specification.files or { file.addsuffix(filename,"xhtml") }
local images = specification.images or { }
local root = specification.root or files[1]
+ local language = specification.language or "en"
+ local creator = specification.author or "My Self"
+ local title = specification.title or "My Title"
+ local firstpage = specification.firstpage or ""
+ local lastpage = specification.lastpage or ""
-- identifier = gsub(identifier,"[^a-zA-z0-9]","")
+ if firstpage ~= "" then
+ images[firstpage] = firstpage
+ end
+ if lastpage ~= "" then
+ images[lastpage] = lastpage
+ end
+
identifier = "BookId" -- weird requirement
local epubname = name
@@ -204,11 +236,12 @@ function scripts.epub.make()
local epubfile = file.replacesuffix(name,"epub")
local epubroot = file.replacesuffix(name,"opf")
local epubtoc = "toc.ncx"
+ local epubcover = "cover.xhtml"
application.report("creating paths in tree %s",epubpath)
lfs.mkdir(epubpath)
lfs.mkdir(file.join(epubpath,"META-INF"))
- lfs.mkdir(file.join(epubpath,"OPS"))
+ lfs.mkdir(file.join(epubpath,"OEBPS"))
local used = { }
@@ -217,13 +250,14 @@ function scripts.epub.make()
local mime = mimetypes[suffix]
if mime then
local idmaker = idmakers[suffix] or idmakers.default
- local target = file.join(epubpath,"OPS",filename)
+ local target = file.join(epubpath,"OEBPS",filename)
file.copy(filename,target)
application.report("copying %s to %s",filename,target)
used[#used+1] = format(item,idmaker(filename),filename,mime)
end
end
+ copyone("cover.xhtml")
copyone("toc.ncx")
local function copythem(files)
@@ -241,7 +275,7 @@ function scripts.epub.make()
for k, v in table.sortedpairs(images) do
theimages[#theimages+1] = k
- if not lfs.isfile(k) and file.extname(k) == "svg" and file.extname(v) == "pdf" then
+ if not lfs.isfile(k) and file.suffix(k) == "svg" and file.suffix(v) == "pdf" then
local command = format("inkscape --export-plain-svg=%s %s",k,v)
application.report("running command '%s'\n\n",command)
os.execute(command)
@@ -250,33 +284,52 @@ function scripts.epub.make()
copythem(theimages)
- local idmaker = idmakers[file.extname(root)] or idmakers.default
-
- container = format(container,epubroot)
- package = format(package,identifier,identifier,os.uuid(),os.date("!%Y-%m-%dT%H:%M:%SZ"),concat(used,"\n"),idmaker(root))
- toc = format(toc,identifier,"title",root)
+ local idmaker = idmakers[file.suffix(root)] or idmakers.default
+
+ container = format(container,
+ epubroot
+ )
+ package = format(package,
+ identifier,
+ title,
+ language,
+ identifier,
+ os.uuid(),
+ creator,
+ os.date("!%Y-%m-%dT%H:%M:%SZ"),
+ idmaker(firstpage),
+ concat(used,"\n"),
+ idmaker(root)
+ )
+ toc = format(toc,
+ identifier,
+ title,
+ root
+ )
+ coverxhtml = format(coverxhtml,
+ firstpage
+ )
io.savedata(file.join(epubpath,"mimetype"),mimetype)
io.savedata(file.join(epubpath,"META-INF","container.xml"),container)
- io.savedata(file.join(epubpath,"OPS",epubroot),package)
- io.savedata(file.join(epubpath,"OPS",epubtoc),toc)
+ io.savedata(file.join(epubpath,"OEBPS",epubroot),package)
+ io.savedata(file.join(epubpath,"OEBPS",epubtoc),toc)
+ io.savedata(file.join(epubpath,"OEBPS",epubcover),coverxhtml)
application.report("creating archive\n\n")
- local done = false
- local list = { }
-
lfs.chdir(epubpath)
os.remove(epubfile)
+ local done = false
+
for i=1,#zippers do
local zipper = zippers[i]
if os.execute(format(zipper.uncompressed,epubfile,"mimetype")) then
os.execute(format(zipper.compressed,epubfile,"META-INF"))
- os.execute(format(zipper.compressed,epubfile,"OPS"))
+ os.execute(format(zipper.compressed,epubfile,"OEBPS"))
done = zipper.name
- else
- list[#list+1] = zipper.name
+ break
end
end
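
The archive loop relies on a zippers table that is defined earlier in mtx-epub.lua and does not appear in this hunk. A hedged sketch of the entry shape the loop assumes; the zip invocations shown are illustrative, not taken from the patch:

    local zippers = {
        {
            name         = "zip",
            uncompressed = [[zip -X -0 %s %s]],    -- the mimetype entry must be stored uncompressed
            compressed   = [[zip -X -9 -r %s %s]], -- the rest of the tree may be compressed
        },
    }

    -- as used above: the first %s is the epub file, the second the member to add
    -- os.execute(string.format(zippers[1].uncompressed,"test.epub","mimetype"))
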
@@ -285,6 +338,10 @@ function scripts.epub.make()
if done then
application.report("epub archive made using %s: %s",done,file.join(epubpath,epubfile))
else
+ local list = { }
+ for i=1,#zippers do
+ list[#list+1] = zippers[i].name
+ end
application.report("no epub archive made, install one of: %s",concat(list," "))
end
diff --git a/scripts/context/lua/mtx-fcd.lua b/scripts/context/lua/mtx-fcd.lua
new file mode 100644
index 000000000..d7e1d17a7
--- /dev/null
+++ b/scripts/context/lua/mtx-fcd.lua
@@ -0,0 +1,366 @@
+if not modules then modules = { } end modules ['mtx-fcd'] = {
+ version = 1.002,
+ comment = "companion to mtxrun.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ comment = "based on the ruby version from 2005",
+}
+
+-- This is a kind of variant of the good old ncd (norton change directory) program. This
+-- script uses the same indirect cmd trick as Erwin Waterlander's wcd program.
+--
+-- The program is called via the stubs fcd.cmd or fcd.sh. On unix one should probably source
+-- the file: ". fcd args" in order to make the chdir persistent.
+--
+-- You need to create a stub with:
+--
+-- mtxrun --script fcd --stub > fcd.cmd
+-- mtxrun --script fcd --stub > fcd.sh
+--
+-- The stub starts this script and afterwards runs the created directory change script as
+-- part of the same run, so that the change indeed takes effect.
+
+local helpinfo = [[
+--clear                    clear the cache
+--clear --history [entry]  clear the history
+--scan                     clear the cache and add given path(s)
+--add                      add given path(s)
+--find                     find given path (can be substring)
+--find --nohistory         find given path (can be substring) but don't use history
+--stub                     print platform stub file
+--list                     show roots of cached dirs
+--list --history           show history of chosen dirs
+--help                     show this help
+
+usage:
+
+ fcd --scan t:\
+ fcd --add f:\project
+ fcd [--find] whatever
+ fcd --list
+]]
+
+local application = logs.application {
+ name = "mtx-fcd",
+ banner = "Fast Directory Change",
+ helpinfo = helpinfo,
+}
+
+local report = application.report
+local writeln = print -- texio.write_nl
+
+local find, char, byte, lower, gsub, format = string.find, string.char, string.byte, string.lower, string.gsub, string.format
+
+local mswinstub = [[@echo off
+
+rem this is: fcd.cmd
+
+@echo off
+
+if not exist "%HOME%" goto homepath
+
+:home
+
+mtxrun --script mtx-fcd.lua %1 %2 %3 %4 %5 %6 %7 %8 %9
+
+if exist "%HOME%\mtx-fcd-goto.cmd" call "%HOME%\mtx-fcd-goto.cmd"
+
+goto end
+
+:homepath
+
+if not exist "%HOMEDRIVE%\%HOMEPATH%" goto end
+
+mtxrun --script mtx-fcd.lua %1 %2 %3 %4 %5 %6 %7 %8 %9
+
+if exist "%HOMEDRIVE%\%HOMEPATH%\mtx-fcd-goto.cmd" call "%HOMEDRIVE%\%HOMEPATH%\mtx-fcd-goto.cmd"
+
+goto end
+
+:end
+]]
+
+local unixstub = [[#!/usr/bin/env sh
+
+# this is: fcd.sh
+
+# mv fcd.sh fcd
+# chmod 755 fcd
+# . fcd [args]
+
+mtxrun --script mtx-fcd.lua $1 $2 $3 $4 $5 $6 $7 $8 $9
+
+if test -f "$HOME/mtx-fcd-goto.sh" ; then
+ . "$HOME/mtx-fcd-goto.sh" ;
+fi;
+
+]]
+
+local gotofile
+local datafile
+local stubfile
+local stubdata
+local stubdummy
+local stubchdir
+
+if os.platform == 'mswin' then
+ gotofile = 'mtx-fcd-goto.cmd'
+ datafile = 'mtx-fcd-data.lua'
+ stubfile = 'fcd.cmd'
+ stubdata = mswinstub
+ stubdummy = 'rem no dir to change to'
+ stubchdir = 'cd /d "%s"'
+else
+ gotofile = 'mtx-fcd-goto.sh'
+ datafile = 'mtx-fcd-data.lua'
+ stubfile = 'fcd.sh'
+ stubdata = unixstub
+ stubdummy = '# no dir to change to'
+ stubchdir = 'cd "%s"'
+end
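
Whichever stub is selected, it ends up executing (or sourcing) the small goto file that fcd_changeto writes further down. A sketch of the generated one-liner, with an example path:

    local format = string.format
    -- on windows, fcd.cmd calls a file containing a single line like:
    print(format('cd /d "%s"',"e:/data/project")) --> cd /d "e:/data/project"
    -- the unix stub sources the corresponding plain 'cd "..."' line
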
+
+local homedir = os.env["HOME"] or "" -- no longer TMP etc
+
+if homedir == "" then
+ homedir = format("%s/%s",os.env["HOMEDRIVE"] or "",os.env["HOMEPATH"] or "")
+end
+
+if homedir == "/" or not lfs.isdir(homedir) then
+ os.exit()
+end
+
+local datafile = file.join(homedir,datafile)
+local gotofile = file.join(homedir,gotofile)
+local hash = nil
+local found = { }
+local pattern = ""
+local version = modules['mtx-fcd'].version
+
+io.savedata(gotofile,stubdummy)
+
+if not lfs.isfile(gotofile) then
+ -- write error
+ os.exit()
+end
+
+local function fcd_clear(onlyhistory,what)
+ if onlyhistory and hash and hash.history then
+ if what and what ~= "" then
+ hash.history[what] = nil
+ else
+ hash.history = { }
+ end
+ else
+ hash = {
+ name = "fcd cache",
+ comment = "generated by mtx-fcd.lua",
+ created = os.date(),
+ version = version,
+ paths = { },
+ history = { },
+ }
+ end
+end
+
+local function fcd_changeto(dir)
+ if dir and dir ~= "" then
+ io.savedata(gotofile,format(stubchdir,dir))
+ end
+end
+
+local function fcd_load(forcecreate)
+ if lfs.isfile(datafile) then
+ hash = dofile(datafile)
+ end
+ if not hash or hash.version ~= version then
+ if forcecreate then
+ fcd_clear()
+ else
+ writeln("empty dir cache")
+ fcd_clear()
+ os.exit()
+ end
+ end
+end
+
+local function fcd_save()
+ if hash then
+ io.savedata(datafile,table.serialize(hash,true))
+ end
+end
+
+local function fcd_list(onlyhistory)
+ if hash then
+ writeln("")
+ if onlyhistory then
+ if next(hash.history) then
+ for k, v in table.sortedhash(hash.history) do
+ writeln(format("%s => %s",k,v))
+ end
+ else
+ writeln("no history")
+ end
+ else
+ local paths = hash.paths
+ if #paths > 0 then
+ for i=1,#paths do
+ local path = paths[i]
+ writeln(format("%4i %s",#path[2],path[1]))
+ end
+ else
+ writeln("empty cache")
+ end
+ end
+ end
+end
+
+local function fcd_find()
+ found = { }
+ pattern = environment.files[1] or ""
+ if pattern ~= "" then
+ pattern = string.escapedpattern(pattern)
+ local paths = hash.paths
+ for i=1,#paths do
+ local paths = paths[i][2]
+ for i=1,#paths do
+ local path = paths[i]
+ if find(path,pattern) then
+ found[#found+1] = path
+ end
+ end
+ end
+ end
+end
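
fcd_find escapes the search string so that Lua pattern magic characters are matched literally (an unescaped "project-x" would treat "-" as a quantifier and fail to match itself). string.escapedpattern is a helper from the ConTeXt Lua libraries; a plain-Lua sketch of equivalent escaping, not the actual implementation:

    local function escapedpattern(str) -- hypothetical equivalent
        return (str:gsub("([%.%+%-%*%?%[%]%^%$%(%)%%])","%%%1"))
    end

    print(escapedpattern("project-x"))                             --> project%-x
    print(("c:/data/project-x"):find(escapedpattern("project-x"))) --> 9   17
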
+
+local function fcd_choose(new)
+ if pattern == "" then
+ writeln(format("staying in dir %q",(gsub(lfs.currentdir(),"\\","/"))))
+ return
+ end
+ if #found == 0 then
+ writeln(format("dir %q not found",pattern))
+ return
+ end
+ local okay = #found == 1 and found[1] or (not new and hash.history[pattern])
+ if okay then
+ writeln(format("changing to %q",okay))
+ fcd_changeto(okay)
+ return
+ end
+ local offset = 0
+ while true do
+ if not found[offset] then
+ offset = 0
+ end
+ io.write("\n")
+ for i=1,26 do
+ local v = found[i+offset]
+ if v then
+ writeln(format("%s %3i %s",char(i+96),offset+i,v))
+ else
+ break
+ end
+ end
+ offset = offset + 26
+ if found[offset+1] then
+ io.write("\n[press enter for more or select letter]\n\n>> ")
+ else
+ io.write("\n[select letter]\n\n>> ")
+ end
+ local answer = lower(io.read() or "")
+ if not answer or answer == 'quit' then
+ break
+ elseif #answer > 0 then
+ local choice = tonumber(answer)
+ if not choice then
+ if answer >= "a" and answer <= "z" then
+ choice = byte(answer) - 96 + offset - 26
+ end
+ end
+ local newdir = found[choice]
+ if newdir then
+ hash.history[pattern] = newdir
+ writeln(format("changing to %q",newdir))
+ fcd_changeto(newdir)
+ fcd_save()
+ return
+ end
+ else
+ -- try again
+ end
+ end
+end
+
+local function globdirs(path,dirs)
+ local dirs = dirs or { }
+ for name in lfs.dir(path) do
+ if not find(name,"%.$") then
+ local fullname = path .. "/" .. name
+ if lfs.isdir(fullname) and not find(fullname,"/%.") then
+ dirs[#dirs+1] = fullname
+ globdirs(fullname,dirs)
+ end
+ end
+ end
+ return dirs
+end
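
A small usage sketch for globdirs; the root path is just an example:

    -- collect every subdirectory below a root; paths containing "/." (such as
    -- ".git" trees) are skipped by the find(fullname,"/%.") test above
    local dirs = globdirs("t:/sources")
    for i=1,#dirs do
        print(dirs[i])
    end
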
+
+local function fcd_scan()
+ if hash then
+ local paths = hash.paths
+ for i=1,#environment.files do
+ local name = environment.files[i]
+ local name = gsub(name,"\\","/")
+ local name = gsub(name,"/$","")
+ local list = globdirs(name)
+ local done = false
+ for i=1,#paths do
+ if paths[i][1] == name then
+ paths[i][2] = list
+ done = true
+ break
+ end
+ end
+ if not done then
+ paths[#paths+1] = { name, list }
+ end
+ end
+ end
+end
+
+local argument = environment.argument
+
+if argument("clear") then
+ if argument("history") then
+ fcd_load()
+ fcd_clear(true)
+ else
+ fcd_clear()
+ end
+ fcd_save()
+elseif argument("scan") then
+ fcd_clear()
+ fcd_scan()
+ fcd_save()
+elseif argument("add") then
+ fcd_load(true)
+ fcd_scan()
+ fcd_save()
+elseif argument("stub") then
+ writeln(stubdata)
+elseif argument("list") then
+ fcd_load()
+ if argument("history") then
+ fcd_list(true)
+ else
+ fcd_list()
+ end
+elseif argument("help") then
+ application.help()
+else -- also argument("find")
+ fcd_load()
+ fcd_find()
+ fcd_choose(argument("nohistory"))
+end
+
diff --git a/scripts/context/lua/mtx-flac.lua b/scripts/context/lua/mtx-flac.lua
index 37f985654..cb73a6592 100644
--- a/scripts/context/lua/mtx-flac.lua
+++ b/scripts/context/lua/mtx-flac.lua
@@ -6,12 +6,6 @@ if not modules then modules = { } end modules ['mtx-flac'] = {
license = "see context related readme files"
}
--- Written with Within Temptation's "The Unforgiven" in loopmode on
--- the speakers. The following code is also used for my occasional music
--- repository cleanup session using the code below.
-
--- this can become l-flac.lua
-
local sub, match, byte, lower = string.sub, string.match, string.byte, string.lower
local readstring, readnumber = io.readstring, io.readnumber
local concat = table.concat
diff --git a/scripts/context/lua/mtx-fonts.lua b/scripts/context/lua/mtx-fonts.lua
index c5b458c14..31ee18ce9 100644
--- a/scripts/context/lua/mtx-fonts.lua
+++ b/scripts/context/lua/mtx-fonts.lua
@@ -126,7 +126,7 @@ function fonts.names.simple()
end
report("saving names in '%s'",name)
io.savedata(name,table.serialize(simplified,true))
- local data = io.loaddata(resolvers.findfile("font-dum.lua","tex"))
+ local data = io.loaddata(resolvers.findfile("luatex-fonts-syn.lua","tex")) or ""
local dummy = string.match(data,"fonts%.names%.version%s*=%s*([%d%.]+)")
if tonumber(dummy) ~= simpleversion then
report("warning: version number %s in 'font-dum' does not match database version number %s",dummy or "?",simpleversion)
@@ -367,7 +367,7 @@ function scripts.fonts.save()
if name and name ~= "" then
local filename = resolvers.findfile(name) -- maybe also search for opentype
if filename and filename ~= "" then
- local suffix = string.lower(file.extname(filename))
+ local suffix = string.lower(file.suffix(filename))
if suffix == 'ttf' or suffix == 'otf' or suffix == 'ttc' or suffix == "dfont" then
local fontinfo = fontloader.info(filename)
if fontinfo then
diff --git a/scripts/context/lua/mtx-grep.lua b/scripts/context/lua/mtx-grep.lua
index 3cbc1421a..98a97279d 100644
--- a/scripts/context/lua/mtx-grep.lua
+++ b/scripts/context/lua/mtx-grep.lua
@@ -60,7 +60,7 @@ function scripts.grep.find(pattern, files, offset)
if m > 0 then
nofmatches = nofmatches + m
nofmatchedfiles = nofmatchedfiles + 1
- write_nl(format("%s: %s",name,m))
+ write_nl(format("%5i %s",m,name))
io.flush()
end
else
@@ -127,7 +127,7 @@ function scripts.grep.find(pattern, files, offset)
if count and m > 0 then
nofmatches = nofmatches + m
nofmatchedfiles = nofmatchedfiles + 1
- write_nl(format("%s: %s",name,m))
+ write_nl(format("%5i %s",m,name))
io.flush()
end
end
diff --git a/scripts/context/lua/mtx-metapost.lua b/scripts/context/lua/mtx-metapost.lua
index 3b9ed6ff1..44cf8205d 100644
--- a/scripts/context/lua/mtx-metapost.lua
+++ b/scripts/context/lua/mtx-metapost.lua
@@ -49,7 +49,7 @@ local tempname = "mptopdf-temp.tex"
local function do_convert(filename)
if find(filename,".%d+$") or find(filename,"%.mps$") then
io.savedata(tempname,format(template,filename))
- local resultname = format("%s-%s.pdf",file.nameonly(filename),file.extname(filename))
+ local resultname = format("%s-%s.pdf",file.nameonly(filename),file.suffix(filename))
local result = os.execute(format([[context --once --batch --purge --result=%s "%s"]],resultname,tempname))
return lfs.isfile(resultname) and resultname
end
diff --git a/scripts/context/lua/mtx-pdf.lua b/scripts/context/lua/mtx-pdf.lua
index 5654b8bc4..f37ee006a 100644
--- a/scripts/context/lua/mtx-pdf.lua
+++ b/scripts/context/lua/mtx-pdf.lua
@@ -6,14 +6,21 @@ if not modules then modules = { } end modules ['mtx-pdf'] = {
license = "see context related readme files"
}
+local tonumber = tonumber
+local format, gmatch = string.format, string.gmatch
+local utfchar = utf.char
+local concat = table.concat
+local setmetatableindex, sortedhash, sortedkeys = table.setmetatableindex, table.sortedhash, table.sortedkeys
+
local helpinfo = [[
--info show some info about the given file
--metadata show metadata xml blob
+--fonts show used fonts (--detail)
]]
local application = logs.application {
name = "mtx-pdf",
- banner = "ConTeXt PDF Helpers 0.01",
+ banner = "ConTeXt PDF Helpers 0.10",
helpinfo = helpinfo,
}
@@ -39,9 +46,8 @@ local function loadpdffile(filename)
end
end
-function scripts.pdf.info()
- local filename = environment.files[1]
- local pdffile = loadpdffile(filename)
+function scripts.pdf.info(filename)
+ local pdffile = loadpdffile(filename)
if pdffile then
local catalog = pdffile.Catalog
local info = pdffile.Info
@@ -73,9 +79,8 @@ function scripts.pdf.info()
end
end
-function scripts.pdf.metadata()
- local filename = environment.files[1]
- local pdffile = loadpdffile(filename)
+function scripts.pdf.metadata(filename)
+ local pdffile = loadpdffile(filename)
if pdffile then
local catalog = pdffile.Catalog
local metadata = catalog.Metadata
@@ -87,10 +92,127 @@ function scripts.pdf.metadata()
end
end
-if environment.argument("info") then
- scripts.pdf.info()
+local function getfonts(pdffile)
+ local usedfonts = { }
+ for i=1,pdffile.pages.n do
+ local page = pdffile.pages[i]
+ local fontlist = page.Resources.Font
+ for k, v in next, lpdf.epdf.expand(fontlist) do
+ usedfonts[k] = lpdf.epdf.expand(v)
+ end
+ end
+ return usedfonts
+end
+
+local function getunicodes(font)
+ local cid = font.ToUnicode
+ if cid then
+ cid = cid()
+ local counts = { }
+ -- for s in gmatch(cid,"begincodespacerange%s*(.-)%s*endcodespacerange") do
+ -- for a, b in gmatch(s,"<([^>]+)>%s+<([^>]+)>") do
+ -- print(a,b)
+ -- end
+ -- end
+ setmetatableindex(counts, function(t,k) t[k] = 0 return 0 end)
+ for s in gmatch(cid,"beginbfrange%s*(.-)%s*endbfrange") do
+ for first, last, offset in gmatch(s,"<([^>]+)>%s+<([^>]+)>%s+<([^>]+)>") do
+ first = tonumber(first,16)
+ last = tonumber(last,16)
+ offset = tonumber(offset,16)
+ offset = offset - first
+ for i=first,last do
+ local c = i + offset
+ counts[c] = counts[c] + 1
+ end
+ end
+ end
+ for s in gmatch(cid,"beginbfchar%s*(.-)%s*endbfchar") do
+ for old, new in gmatch(s,"<([^>]+)>%s+<([^>]+)>") do
+ for n in gmatch(new,"....") do
+ local c = tonumber(n,16)
+ counts[c] = counts[c] + 1
+ end
+ end
+ end
+ return counts
+ end
+end
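
The bfrange branch above turns each <first> <last> <offset> triple into per-codepoint counts. A small worked example of that arithmetic:

    -- the range <0041> <0043> mapping to <0061> sends glyphs 0x41..0x43 to U+0061..U+0063
    local first  = tonumber("0041",16)         -- 65
    local last   = tonumber("0043",16)         -- 67
    local offset = tonumber("0061",16) - first -- 0x61 - 0x41 = 32
    for i=first,last do
        print(string.format("0x%04X -> U+%04X",i,i+offset))
    end
    -- prints: 0x0041 -> U+0061, 0x0042 -> U+0062, 0x0043 -> U+0063
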
+
+function scripts.pdf.fonts(filename)
+ local pdffile = loadpdffile(filename)
+ if pdffile then
+ local usedfonts = getfonts(pdffile)
+ local found = { }
+ for k, v in table.sortedhash(usedfonts) do
+ local counts = getunicodes(v)
+ local codes = { }
+ local chars = { }
+ local freqs = { }
+ if counts then
+ codes = sortedkeys(counts)
+ for i=1,#codes do
+ local k = codes[i]
+ local c = utfchar(k)
+ chars[i] = c
+ freqs[i] = format("U+%05X %s %s",k,counts[k] > 1 and "+" or " ", c)
+ end
+ for i=1,#codes do
+ codes[i] = format("U+%05X",codes[i])
+ end
+ end
+ found[k] = {
+ basefont = v.BaseFont or "no basefont",
+ encoding = v.Encoding or "no encoding",
+ subtype = v.Subtype or "no subtype",
+ unicode = v.ToUnicode and "unicode" or "no unicode",
+ chars = chars,
+ codes = codes,
+ freqs = freqs,
+ }
+ end
+
+ if environment.argument("detail") then
+ for k, v in sortedhash(found) do
+ report("id : %s",k)
+ report("basefont : %s",v.basefont)
+ report("encoding : %s",v.encoding)
+ report("subtype : %s",v.subtype)
+ report("unicode : %s",v.unicode)
+ report("characters : %s", concat(v.chars," "))
+ report("codepoints : %s", concat(v.codes," "))
+ report("")
+ end
+ else
+ local results = { { "id", "basefont", "encoding", "subtype", "unicode", "characters" } }
+ for k, v in sortedhash(found) do
+ results[#results+1] = { k, v.basefont, v.encoding, v.subtype, v.unicode, concat(v.chars," ") }
+ end
+ utilities.formatters.formatcolumns(results)
+ report(results[1])
+ report("")
+ for i=2,#results do
+ report(results[i])
+ end
+ report("")
+ end
+ end
+end
+
+-- scripts.pdf.info("e:/tmp/oeps.pdf")
+-- scripts.pdf.metadata("e:/tmp/oeps.pdf")
+-- scripts.pdf.fonts("e:/tmp/oeps.pdf")
+
+local filename = environment.files[1] or ""
+
+if filename == "" then
+ application.help()
+elseif environment.argument("info") then
+ scripts.pdf.info(filename)
elseif environment.argument("metadata") then
- scripts.pdf.metadata()
+ scripts.pdf.metadata(filename)
+elseif environment.argument("fonts") then
+ scripts.pdf.fonts(filename)
else
application.help()
end
diff --git a/scripts/context/lua/mtx-scite.lua b/scripts/context/lua/mtx-scite.lua
index 116555e79..3369c5f3b 100644
--- a/scripts/context/lua/mtx-scite.lua
+++ b/scripts/context/lua/mtx-scite.lua
@@ -199,14 +199,31 @@ function scripts.scite.words()
if lfs.isfile(txtname) then
report("loading %s",txtname)
local olddata = io.loaddata(txtname) or ""
+ local words = splitwords(olddata)
+ local min, max, n = 100, 1, 0
+ for k, v in next, words do
+ local l = #k
+ if l < min then
+ min = l
+ end
+ if l > max then
+ max = l
+ end
+ n = n + 1
+ end
+ if min > max then
+ min = max
+ end
local newdata = {
- words = splitwords(olddata),
- -- words = olddata,
+ words = words,
source = oldname,
+ min = min,
+ max = max,
+ n = n,
}
- report("saving %s",luaname)
+ report("saving %q, %s words, %s shortest, %s longest",luaname,n,min,max)
io.savedata(luaname,table.serialize(newdata,true))
- report("compiling %s",lucname)
+ report("compiling %q",lucname)
os.execute(format("luac -s -o %s %s",lucname,luaname))
else
report("no data file %s",txtname)
diff --git a/scripts/context/lua/mtx-server-ctx-help.lua b/scripts/context/lua/mtx-server-ctx-help.lua
index a212e1369..39a73dc4e 100644
--- a/scripts/context/lua/mtx-server-ctx-help.lua
+++ b/scripts/context/lua/mtx-server-ctx-help.lua
@@ -15,7 +15,7 @@ dofile(resolvers.findfile("trac-lmx.lua","tex"))
-- problem ... serialize parent stack
-local format = string.format
+local format, match, gsub, find = string.format, string.match, string.gsub, string.find
local concat = table.concat
local report = logs.reporter("ctx-help")
@@ -282,18 +282,48 @@ document.setups.translations = document.setups.translations or {
}
document.setups.formats = {
- open_command = { [[\%s]], [[context.%s (]] },
- close_command = { [[]], [[ )]] },
- connector = { [[]], [[, ]] },
- href_in_list = { [[%s]], [[%s]] },
- href_as_command = { [[\%s]], [[context.%s]] },
+ open_command = {
+ tex = [[\%s]],
+ lua = [[context.%s (]],
+ },
+ close_command = {
+ tex = [[]],
+ lua = [[ )]],
+ },
+ connector = {
+ tex = [[]],
+ lua = [[, ]],
+ },
+ href_in_list = {
+ tex = [[%s]],
+ lua = [[%s]],
+ },
+ href_as_command = {
+ tex = [[\%s]],
+ lua = [[context.%s]],
+ },
+ modes = {
+ tex = [[lua mode]],
+ lua = [[tex mode]],
+ },
+ optional_single = {
+ tex = "[optional string %s]",
+ lua = "{optional string %s}",
+ },
+ optional_list = {
+ tex = "[optional list %s]",
+ lua = "{optional table %s}" ,
+ } ,
+ mandate_single = {
+ tex = "[mandate string %s]",
+ lua = "{mandate string %s}",
+ },
+ mandate_list = {
+ tex = "[mandate list %s]",
+ lua = "{mandate list %s}",
+ },
interface = [[%s]],
source = [[%s]],
- modes = { [[lua mode]], [[tex mode]] },
- optional_single = { "[optional string %s]", "{optional string %s}" },
- optional_list = { "[optional list %s]", "{optional table %s}" } ,
- mandate_single = { "[mandate string %s]", "{mandate string %s}" },
- mandate_list = { "[mandate list %s]", "{mandate list %s}" },
parameter = [[
%s
%s
%s
]],
parameters = [[
%s
]],
listing = [[
%s]],
@@ -315,7 +345,7 @@ end
local function translated(e,int)
local attributes = e.at
local s = attributes.type or "?"
- local tag = s:match("^cd:(.*)$")
+ local tag = match(s,"^cd:(.*)$")
if attributes.default == "yes" then
return format(document.setups.formats.default,tag or "?")
elseif tag then
@@ -329,7 +359,7 @@ document.setups.loaded = document.setups.loaded or { }
document.setups.current = { }
document.setups.showsources = true
-document.setups.mode = 1
+document.setups.mode = "tex"
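
The setups mode switches from a numeric index (1/2) to the string keys "tex" and "lua" used by the reworked formats table above. A small sketch of the new lookup, with the formats table shortened to one entry copied from the hunk above:

    local formats = {
        open_command = { tex = [[\%s]], lua = [[context.%s (]] },
    }

    local lastmode = "tex"
    print(string.format(formats.open_command[lastmode],"framed")) --> \framed
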
function document.setups.load(filename)
filename = resolvers.findfile(filename) or ""
@@ -402,7 +432,7 @@ end
function document.setups.show(name)
local current = document.setups.current
if current.root then
- local name = name:gsub("[<>]","")
+ local name = gsub(name,"[<>]","")
local setup = xml.first(current.root,"cd:command[@name='" .. name .. "']")
current.used[#current.used+1] = setup
xml.sprint(setup)
@@ -452,12 +482,12 @@ function document.setups.collect(name,int,lastmode)
category = attributes.category or "",
}
if document.setups.showsources then
- data.source = (attributes.file and formats.source:format(attributes.file,lastmode,attributes.file)) or ""
+ data.source = (attributes.file and format(formats.source,attributes.file,lastmode,attributes.file)) or ""
else
data.source = attributes.file or ""
end
local n, sequence, tags = 0, { }, { }
- sequence[#sequence+1] = formats.open_command[lastmode]:format(document.setups.csname(command,int))
+ sequence[#sequence+1] = format(formats.open_command[lastmode],document.setups.csname(command,int))
local arguments, tag = { }, ""
for r, d, k in xml.elements(command,"(cd:keywords|cd:assignments)") do
n = n + 1
@@ -470,15 +500,15 @@ function document.setups.collect(name,int,lastmode)
end
if attributes.optional == 'yes' then
if attributes.list == 'yes' then
- tag = formats.optional_list[lastmode]:format(n)
+ tag = format(formats.optional_list[lastmode],n)
else
- tag = formats.optional_single[lastmode]:format(n)
+ tag = format(formats.optional_single[lastmode],n)
end
else
if attributes.list == 'yes' then
- tag = formats.mandate_list[lastmode]:format(n)
+ tag = format(formats.mandate_list[lastmode],n)
else
- tag = formats.mandate_single[lastmode]:format(n)
+ tag = format(formats.mandate_single[lastmode],n)
end
end
sequence[#sequence+1] = tag
@@ -506,7 +536,7 @@ function document.setups.collect(name,int,lastmode)
right[#right+1] = translated(d[k],int)
end
end
- parameters[#parameters+1] = formats.parameter:format(left,"",concat(right, ", "))
+ parameters[#parameters+1] = format(formats.parameter,left,"",concat(right, ", "))
else
local what = tags[n]
for r, d, k in xml.elements(d[k],"(cd:parameter|cd:inherit)") do
@@ -514,11 +544,11 @@ function document.setups.collect(name,int,lastmode)
local left, right = d[k].at.name or "?", { }
if tag == "inherit" then
local name = d[k].at.name or "?"
- local goto = document.setups.formats.href_as_command[lastmode]:format(name,lastmode,name)
- if #parameters > 0 and not parameters[#parameters]:find(" ") then
- parameters[#parameters+1] = formats.parameter:format(" ","","")
+ local goto = format(document.setups.formats.href_as_command[lastmode],name,lastmode,name)
+ if #parameters > 0 and not find(parameters[#parameters]," ") then
+ parameters[#parameters+1] = format(formats.parameter," ","","")
end
- parameters[#parameters+1] = formats.parameter:format(what,formats.special:format(translate("inherits",int)),goto)
+ parameters[#parameters+1] = format(formats.parameter,what,format(formats.special,translate("inherits",int)),goto)
else
for r, d, k in xml.elements(d[k],"(cd:constant|cd:resolve)") do
local tag = d[k].tg
@@ -534,15 +564,15 @@ function document.setups.collect(name,int,lastmode)
right[#right+1] = translated(d[k],int)
end
end
- parameters[#parameters+1] = formats.parameter:format(what,left,concat(right, ", "))
+ parameters[#parameters+1] = format(formats.parameter,what,left,concat(right, ", "))
end
what = ""
end
end
- parameters[#parameters+1] = formats.parameter:format(" ","","")
+ parameters[#parameters+1] = format(formats.parameter," ","","")
end
data.parameters = parameters or { }
- data.mode = formats.modes[lastmode or 1]
+ data.mode = formats.modes[lastmode or "tex"]
return data
else
return nil
@@ -566,7 +596,7 @@ local interfaces = {
romanian = 'ro',
}
-local lastinterface, lastcommand, lastsource, lastmode = "en", "", "", 1
+local lastinterface, lastcommand, lastsource, lastmode = "en", "", "", "tex"
local variables = {
['color-background-main-left'] = '#3F3F3F',
@@ -584,78 +614,87 @@ local function doit(configuration,filename,hashed)
local formats = document.setups.formats
- local start = os.clock()
+ local start = os.clock()
+ local detail = hashed.queries
- local detail = url.query(hashed.query or "")
+ if detail then
- lastinterface = detail.interface or lastinterface
- lastcommand = detail.command or lastcommand
- lastsource = detail.source or lastsource
- lastmode = tonumber(detail.mode or lastmode) or 1
+ lastinterface = detail.interface or lastinterface
+ lastcommand = detail.command or lastcommand
+ lastsource = detail.source or lastsource
+ lastmode = detail.mode or lastmode or "tex"
- if lastinterface then
- report("checking interface: %s",lastinterface)
- document.setups.load(format("cont-%s.xml",lastinterface))
- end
+ lastcommand = gsub(lastcommand,"%s*^\\*(.+)%s*","%1")
- local div = document.setups.div[lastinterface]
- local span = document.setups.span[lastinterface]
+ if lastinterface then
+ report("checking interface: %s",lastinterface)
+ document.setups.load(format("cont-%s.xml",lastinterface))
+ end
- local result = { content = "error" }
+ local div = document.setups.div [lastinterface]
+ local span = document.setups.span[lastinterface]
- local names, refs, ints = document.setups.names(lastinterface), { }, { }
- for k=1,#names do
- local v = names[k]
- refs[k] = formats.href_in_list[lastmode]:format(v[1],lastmode,v[2])
- end
- if lastmode ~= 2 then
- local sorted = table.sortedkeys(interfaces)
- for k=1,#sorted do
- local v = sorted[k]
- ints[k] = formats.interface:format(interfaces[v],lastmode,v)
+ local names, refs, ints = document.setups.names(lastinterface), { }, { }
+ for k=1,#names do
+ local v = names[k]
+ refs[k] = format(formats.href_in_list[lastmode],v[1],lastmode,v[2])
+ end
+ if lastmode ~= "lua" then
+ local sorted = table.sortedkeys(interfaces)
+ for k=1,#sorted do
+ local v = sorted[k]
+ ints[k] = format(formats.interface,interfaces[v],lastmode,v)
+ end
end
- end
- local n = concat(refs," ")
- local i = concat(ints,"
")
+ local n = concat(refs," ")
+ local i = concat(ints,"
")
- if div then
- variables.names = div:format(n)
- variables.interfaces = div:format(i)
- else
- variables.names = n
- variables.interfaces = i
- end
+ if div then
+ variables.names = format(div,n)
+ variables.interfaces = format(div,i)
+ else
+ variables.names = n
+ variables.interfaces = i
+ end
- -- first we need to add information about mkii/mkiv
-
- variables.maintitle = "no definition"
- variables.maintext = ""
- variables.extra = ""
-
- if document.setups.showsources and lastsource and lastsource ~= "" then
- -- todo: mkii, mkiv, tex (can be different)
- local data = io.loaddata(resolvers.findfile(lastsource))
- variables.maintitle = lastsource
- variables.maintext = formats.listing:format(data)
- lastsource = ""
- elseif lastcommand and lastcommand ~= "" then
- local data = document.setups.collect(lastcommand,lastinterface,lastmode)
- if data then
- local what, extra = { "environment", "category", "source", "mode" }, { }
- for k=1,#what do
- local v = what[k]
- if data[v] and data[v] ~= "" then
- lmx.set(v, data[v])
- extra[#extra+1] = v .. ": " .. data[v]
+ -- first we need to add information about mkii/mkiv
+
+ variables.maintitle = "no definition"
+ variables.maintext = ""
+ variables.extra = ""
+
+ if document.setups.showsources and lastsource and lastsource ~= "" then
+ -- todo: mkii, mkiv, tex (can be different)
+ local data = io.loaddata(resolvers.findfile(lastsource))
+ variables.maintitle = lastsource
+ variables.maintext = format(formats.listing,data)
+ lastsource = ""
+ elseif lastcommand and lastcommand ~= "" then
+ local data = document.setups.collect(lastcommand,lastinterface,lastmode)
+ if data then
+ local what, extra = { "environment", "category", "source", "mode" }, { }
+ for k=1,#what do
+ local v = what[k]
+ if data[v] and data[v] ~= "" then
+ lmx.set(v, data[v])
+ extra[#extra+1] = v .. ": " .. data[v]
+ end
end
+ variables.maintitle = data.sequence
+ variables.maintext = format(formats.parameters,concat(data.parameters))
+ variables.extra = concat(extra," ")
+ else
+ variables.maintext = "select command"
end
- variables.maintitle = data.sequence
- variables.maintext = formats.parameters:format(concat(data.parameters))
- variables.extra = concat(extra," ")
- else
- variables.maintext = "select command"
end
+
+ else
+
+ variables.maintitle = "no definition"
+ variables.maintext = "some error"
+ variables.extra = ""
+
end
local content = lmx.convert('context-help.lmx',false,variables)
diff --git a/scripts/context/lua/mtx-server.lua b/scripts/context/lua/mtx-server.lua
index 068d51111..d6e8ac902 100644
--- a/scripts/context/lua/mtx-server.lua
+++ b/scripts/context/lua/mtx-server.lua
@@ -30,7 +30,7 @@ dofile(resolvers.findfile("l-url.lua","tex"))
dofile(resolvers.findfile("luat-soc.lua","tex"))
local socket = socket or require("socket")
-local http = socket or require("socket.http")
+local http = http or require("socket.http") -- not needed
local format = string.format
-- The following two lists are taken from webrick (ruby) and
@@ -231,6 +231,7 @@ function handlers.lua(client,configuration,filename,suffix,iscontent,hashed) --
end
if result then
if type(result) == "function" then
+ report("running script: %s",filename)
result = result(configuration,filename,hashed) -- second argument will become query
end
if result and type(result) == "string" then
@@ -242,7 +243,7 @@ function handlers.lua(client,configuration,filename,suffix,iscontent,hashed) --
local action = handlers[suffix] or handlers.generic
action(client,configuration,result.content,suffix,true) -- content
elseif result.filename then
- local suffix = file.extname(result.filename) or "text/html"
+ local suffix = file.suffix(result.filename) or "text/html"
local action = handlers[suffix] or handlers.generic
action(client,configuration,result.filename,suffix,false) -- filename
else
@@ -301,40 +302,50 @@ function scripts.webserver.run(configuration)
report("scripts subpath: %s",configuration.scripts)
report("context services: http://localhost:%s/mtx-server-ctx-startup.lua",configuration.port)
local server = assert(socket.bind("*", configuration.port))
--- local reading = { server }
- while true do -- no multiple clients
+ local script = configuration.script
+ while true do -- blocking
local start = os.clock()
--- local input = socket.select(reading)
--- local client = input:accept()
local client = server:accept()
client:settimeout(configuration.timeout or 60)
local request, e = client:receive()
--- local request, e = client:receive("*a") -- doesn't work well (so no post)
if e then
errormessage(client,configuration,404)
else
local from = client:getpeername()
report("request from: %s",tostring(from))
- local fullurl = request:match("GET (.+) HTTP/.*$") or "" -- todo: more clever / post
+ report("request data: %s",tostring(request))
+ local fullurl = string.match(request,"GET (.+) HTTP/.*$") or "" -- todo: more clever / post
if fullurl == "" then
+ report("no url")
errormessage(client,configuration,404)
else
- fullurl = socket.url.unescape(fullurl)
+ report("requested url: %s",fullurl)
+ fullurl = socket.url.unescape(fullurl) -- still needed?
local hashed = url.hashed(fullurl)
local query = url.query(hashed.query)
- local filename = hashed.path
--- table.print(hashed)
- if filename then
+ local filename = hashed.path -- hm, not query?
+ if script then
+ filename = script
+ report("forced script: %s",filename)
+ local suffix = file.suffix(filename)
+ local action = handlers[suffix] or handlers.generic
+ if action then
+ report("performing action: %s",filename)
+ action(client,configuration,filename,suffix,false,hashed) -- filename and no content
+ else
+ errormessage(client,configuration,404)
+ end
+ elseif filename then
filename = socket.url.unescape(filename)
report("requested action: %s",filename)
- if filename:find("%.%.") then
+ if string.find(filename,"%.%.") then
filename = nil -- invalid path
end
if filename == nil or filename == "" or filename == "/" then
filename = configuration.index
report("invalid filename, forcing: %s",filename)
end
- local suffix = file.extname(filename)
+ local suffix = file.suffix(filename)
local action = handlers[suffix] or handlers.generic
if action then
report("performing action: %s",filename)
@@ -358,6 +369,7 @@ if environment.argument("auto") then
port = environment.argument("port"),
root = environment.argument("root") or file.dirname(path) or ".",
scripts = environment.argument("scripts") or file.dirname(path) or ".",
+ script = environment.argument("script"),
}
elseif environment.argument("start") then
scripts.webserver.run {
@@ -365,6 +377,7 @@ elseif environment.argument("start") then
root = environment.argument("root") or ".", -- "e:/websites/www.pragma-ade.com",
index = environment.argument("index"),
scripts = environment.argument("scripts"),
+ script = environment.argument("script"),
}
else
application.help()
diff --git a/scripts/context/lua/mtx-tools.lua b/scripts/context/lua/mtx-tools.lua
index 45961a639..c1aaf9e5d 100644
--- a/scripts/context/lua/mtx-tools.lua
+++ b/scripts/context/lua/mtx-tools.lua
@@ -102,7 +102,7 @@ end
function scripts.tools.dirtoxml()
- local join, removesuffix, extname, date = file.join, file.removesuffix, file.extname, os.date
+ local join, removesuffix, suffixonly, date = file.join, file.removesuffix, file.suffixonly, os.date
local xmlns = "http://www.pragma-ade.com/rlg/xmldir.rng"
local timestamp = "%Y-%m-%d %H:%M"
diff --git a/scripts/context/lua/mtx-update.lua b/scripts/context/lua/mtx-update.lua
index 037de8650..b5f34d615 100644
--- a/scripts/context/lua/mtx-update.lua
+++ b/scripts/context/lua/mtx-update.lua
@@ -421,9 +421,9 @@ function scripts.update.synchronize()
if platform == 'mswin' then
bin = gsub(bin,"([a-zA-Z]):/", "/cygdrive/%1/")
texroot = gsub(texroot,"([a-zA-Z]):/", "/cygdrive/%1/")
- command = format("%s -t %s/texmf-context/scripts/context/lua/%s.lua %s/texmf-mswin/bin/", bin, texroot, script, texroot)
+ command = format([[%s -t "%s/texmf-context/scripts/context/lua/%s.lua" "%s/texmf-mswin/bin/"]], bin, texroot, script, texroot)
else
- command = format("%s -tgo --chmod=a+x %s/texmf-context/scripts/context/lua/%s.lua %s/texmf-%s/bin/%s", bin, texroot, script, texroot, platform, script)
+ command = format([[%s -tgo --chmod=a+x '%s/texmf-context/scripts/context/lua/%s.lua' '%s/texmf-%s/bin/%s']], bin, texroot, script, texroot, platform, script)
end
report("updating %s for %s: %s", script, platform, command)
scripts.update.run(command)
diff --git a/scripts/context/lua/mtx-watch.lua b/scripts/context/lua/mtx-watch.lua
index 36a3176c4..31ed95f7b 100644
--- a/scripts/context/lua/mtx-watch.lua
+++ b/scripts/context/lua/mtx-watch.lua
@@ -227,17 +227,6 @@ function scripts.watch.watch()
end
end
local n, start = 0, time()
---~ local function wait()
---~ io.flush()
---~ if not done then
---~ n = n + 1
---~ if n >= 10 then
---~ report("run time: %i seconds, memory usage: %0.3g MB", difftime(time(),start), (status.luastate_bytes/1024)/1000)
---~ n = 0
---~ end
---~ os.sleep(delay)
---~ end
---~ end
local wtime = 0
local function wait()
io.flush()
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 108f2a8a1..e6bbbe2b5 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -8,6 +8,11 @@ if not modules then modules = { } end modules ['mtxrun'] = {
license = "see context related readme files"
}
+-- if not lpeg then require("lpeg") end
+-- if not md5 then require("md5") end
+-- if not lfs then require("lfs") end
+-- if not texconfig then texconfig = { } end
+
-- one can make a stub:
--
-- #!/bin/sh
@@ -150,11 +155,28 @@ function string.topattern(str,lowercase,strict)
end
end
+
+function string.valid(str,default)
+ return (type(str) == "string" and str ~= "" and str) or default or nil
+end
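+
+-- a small usage sketch (values are illustrative):
+--
+-- print(string.valid(""))         -- nil
+-- print(string.valid("","x"))     -- x
+-- print(string.valid("okay","x")) -- okay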
+
-- obsolete names:
string.quote = string.quoted
string.unquote = string.unquoted
+-- handy fallback
+
+string.itself = function(s) return s end
+
+-- also handy (see utf variant)
+
+local pattern = Ct(C(1)^0)
+
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
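+
+-- note: this variant splits into single bytes (see the utf variant for characters); for instance:
+--
+-- inspect(string.totable("abc")) -- { "a", "b", "c" }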
+
end -- of closure
@@ -168,7 +190,8 @@ if not modules then modules = { } end modules ['l-table'] = {
license = "see context related readme files"
}
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
+local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+local table, string = table, string
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
@@ -179,6 +202,8 @@ local getinfo = debug.getinfo
-- impact on ConTeXt was not that large; the remaining ipairs already
-- have been replaced. In a similar fashion we also hardly used pairs.
--
+-- Hm, actually ipairs was retained, but we no longer use it anyway.
+--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -238,12 +263,16 @@ function table.strip(tab)
end
function table.keys(t)
- local keys, k = { }, 0
- for key, _ in next, t do
- k = k + 1
- keys[k] = key
+ if t then
+ local keys, k = { }, 0
+ for key, _ in next, t do
+ k = k + 1
+ keys[k] = key
+ end
+ return keys
+ else
+ return { }
end
- return keys
end
local function compare(a,b)
@@ -256,41 +285,49 @@ local function compare(a,b)
end
local function sortedkeys(tab)
- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- s = s + 1
- srt[s] = key
- if category == 3 then
- -- no further check
- else
- local tkey = type(key)
- if tkey == "string" then
- category = (category == 2 and 3) or 1
- elseif tkey == "number" then
- category = (category == 1 and 3) or 2
+ if tab then
+ local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
+ for key,_ in next, tab do
+ s = s + 1
+ srt[s] = key
+ if category == 3 then
+ -- no further check
else
- category = 3
+ local tkey = type(key)
+ if tkey == "string" then
+ category = (category == 2 and 3) or 1
+ elseif tkey == "number" then
+ category = (category == 1 and 3) or 2
+ else
+ category = 3
+ end
end
end
- end
- if category == 0 or category == 3 then
- sort(srt,compare)
+ if category == 0 or category == 3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
else
- sort(srt)
+ return { }
end
- return srt
end
local function sortedhashkeys(tab) -- fast one
- local srt, s = { }, 0
- for key,_ in next, tab do
- if key then
- s= s + 1
- srt[s] = key
+ if tab then
+ local srt, s = { }, 0
+ for key,_ in next, tab do
+ if key then
+ s= s + 1
+ srt[s] = key
+ end
end
+ sort(srt)
+ return srt
+ else
+ return { }
end
- sort(srt)
- return srt
end
table.sortedkeys = sortedkeys
@@ -315,7 +352,7 @@ end
table.sortedhash = sortedhash
table.sortedpairs = sortedhash
-function table.append(t, list)
+function table.append(t,list)
local n = #t
for i=1,#list do
n = n + 1
@@ -550,12 +587,26 @@ local function do_serialize(root,name,depth,level,indexed)
end
-- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- if compact then
+ -- -- NOT: for k=1,#root do (we need to quit at nil)
+ -- for k,v in ipairs(root) do -- can we use next?
+ -- if not first then first = k end
+ -- last = last + 1
+ -- end
+ -- end
+ local first, last = nil, 0
if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
+ last = #root
+ for k=1,last do
+-- if not root[k] then
+ if root[k] == nil then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
end
end
local sk = sortedkeys(root)
@@ -1027,23 +1078,27 @@ function table.reversed(t)
end
end
-function table.sequenced(t,sep,simple) -- hash only
- local s, n = { }, 0
- for k, v in sortedhash(t) do
- if simple then
- if v == true then
- n = n + 1
- s[n] = k
- elseif v and v~= "" then
+function table.sequenced(t,sep,simple) -- hash only
+ if t then
+ local s, n = { }, 0
+ for k, v in sortedhash(t) do
+ if simple then
+ if v == true then
+ n = n + 1
+ s[n] = k
+ elseif v and v~= "" then
+ n = n + 1
+ s[n] = k .. "=" .. tostring(v)
+ end
+ else
n = n + 1
s[n] = k .. "=" .. tostring(v)
end
- else
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
end
+ return concat(s, sep or " | ")
+ else
+ return ""
end
- return concat(s, sep or " | ")
end
function table.print(t,...)
@@ -1124,6 +1179,8 @@ local lpeg = require("lpeg")
-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+-- some code will move to unicode and string
+
local report = texio and texio.write_nl or print
-- local lpmatch = lpeg.match
@@ -1160,8 +1217,8 @@ local report = texio and texio.write_nl or print
-- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end
-- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end
-local type = type
-local byte, char, gmatch = string.byte, string.char, string.gmatch
+local type, next = type, next
+local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format
-- Beware, we predefine a bunch of patterns here and one reason for doing so
-- is that we get consistent behaviour in some of the visualizers.
@@ -1169,9 +1226,8 @@ local byte, char, gmatch = string.byte, string.char, string.gmatch
lpeg.patterns = lpeg.patterns or { } -- so that we can share
local patterns = lpeg.patterns
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
+local P, R, S, V, Ct, C, Cs, Cc, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp
+local lpegtype, lpegmatch = lpeg.type, lpeg.match
local utfcharacters = string.utfcharacters
local utfgmatch = unicode and unicode.utf8.gmatch
@@ -1222,6 +1278,10 @@ patterns.utf8char = utf8char
patterns.validutf8 = validutf8char
patterns.validutf8char = validutf8char
+local eol = S("\n\r")
+local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+local whitespace = eol + spacer
+
patterns.digit = digit
patterns.sign = sign
patterns.cardinal = sign^0 * digit^1
@@ -1241,16 +1301,16 @@ patterns.letter = patterns.lowercase + patterns.uppercase
patterns.space = space
patterns.tab = P("\t")
patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.eol = eol
+patterns.spacer = spacer
+patterns.whitespace = whitespace
patterns.newline = newline
patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.nonspacer = 1 - spacer
+patterns.nonwhitespace = 1 - whitespace
patterns.equal = P("=")
patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.commaspacer = P(",") * spacer^0
patterns.period = P(".")
patterns.colon = P(":")
patterns.semicolon = P(";")
@@ -1265,6 +1325,10 @@ patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
patterns.unspacer = ((patterns.spacer^1)/"")^0
+patterns.singlequoted = squote * patterns.nosquote * squote
+patterns.doublequoted = dquote * patterns.nodquote * dquote
+patterns.quoted = patterns.doublequoted + patterns.singlequoted
+
patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
patterns.beginline = #(1-newline)
@@ -1275,8 +1339,17 @@ patterns.beginline = #(1-newline)
-- print(string.unquoted('"test"'))
-- print(string.unquoted('"test"'))
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
+local function anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) }
+end
+
+lpeg.anywhere = anywhere
+
+function lpeg.instringchecker(p)
+ p = anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
end
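+
+-- a quick sketch of the checker in use (pattern and strings are just examples):
+--
+-- local hasdigit = lpeg.instringchecker(lpeg.patterns.digit)
+-- print(hasdigit("abc1"), hasdigit("abc")) -- true   false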
function lpeg.splitter(pattern, action)
@@ -1325,7 +1398,7 @@ function string.splitup(str,separator)
if not separator then
separator = ","
end
- return match(splitters_m[separator] or splitat(separator),str)
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
end
@@ -1337,16 +1410,20 @@ function lpeg.split(separator,str)
c = tsplitat(separator)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
+ if separator then
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
end
- return match(c,str)
end
local spacing = patterns.spacer^0 * newline -- sort of strip
@@ -1362,7 +1439,7 @@ local linesplitter = tsplitat(newline)
patterns.linesplitter = linesplitter
function string.splitlines(str)
- return match(linesplitter,str)
+ return lpegmatch(linesplitter,str)
end
local utflinesplitter = utfbom^-1 * tsplitat(newline)
@@ -1370,7 +1447,58 @@ local utflinesplitter = utfbom^-1 * tsplitat(newline)
patterns.utflinesplitter = utflinesplitter
function string.utfsplitlines(str)
- return match(utflinesplitter,str or "")
+ return lpegmatch(utflinesplitter,str or "")
+end
+
+local utfcharsplitter_ows = utfbom^-1 * Ct(C(utf8char)^0)
+local utfcharsplitter_iws = utfbom^-1 * Ct((whitespace^1 + C(utf8char))^0)
+
+function string.utfsplit(str,ignorewhitespace) -- new
+ if ignorewhitespace then
+ return lpegmatch(utfcharsplitter_iws,str or "")
+ else
+ return lpegmatch(utfcharsplitter_ows,str or "")
+ end
+end
+
+-- inspect(string.utfsplit("a b c d"))
+-- inspect(string.utfsplit("a b c d",true))
+
+-- -- alternative 1: 0.77
+--
+-- local utfcharcounter = utfbom^-1 * Cs((utf8char/'!')^0)
+--
+-- function string.utflength(str)
+-- return #lpegmatch(utfcharcounter,str or "")
+-- end
+--
+-- -- alternative 2: 1.70
+--
+-- local n = 0
+--
+-- local utfcharcounter = utfbom^-1 * (utf8char/function() n = n + 1 end)^0 -- slow
+--
+-- function string.utflength(str)
+-- n = 0
+-- lpegmatch(utfcharcounter,str or "")
+-- return n
+-- end
+--
+-- -- alternative 3: 0.24 (native unicode.utf8.len: 0.047)
+
+local n = 0
+
+local utfcharcounter = utfbom^-1 * Cs ( (
+ Cp() * (lpeg.patterns.utf8one )^1 * Cp() / function(f,t) n = n + t - f end
+ + Cp() * (lpeg.patterns.utf8two )^1 * Cp() / function(f,t) n = n + (t - f)/2 end
+ + Cp() * (lpeg.patterns.utf8three)^1 * Cp() / function(f,t) n = n + (t - f)/3 end
+ + Cp() * (lpeg.patterns.utf8four )^1 * Cp() / function(f,t) n = n + (t - f)/4 end
+)^0 )
+
+function string.utflength(str)
+ n = 0
+ lpegmatch(utfcharcounter,str or "")
+ return n
end
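+
+-- a small check of the counter (input is illustrative):
+--
+-- print(string.utflength("äbc")) -- 3 (characters, not bytes)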
@@ -1384,7 +1512,7 @@ function lpeg.checkedsplit(separator,str)
c = Ct(separator^0 * other * (separator^1 * other)^0)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
function string.checkedsplit(str,separator)
@@ -1395,7 +1523,7 @@ function string.checkedsplit(str,separator)
c = Ct(separator^0 * other * (separator^1 * other)^0)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
@@ -1440,11 +1568,11 @@ function lpeg.keeper(str)
end
function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
+ return (P(str) + P(true)) * Cs(anything^0)
end
function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
+ return Cs((1 - P(str) * endofstring)^0)
end
-- Just for fun I looked at the used bytecode and
@@ -1453,8 +1581,22 @@ end
function lpeg.replacer(one,two)
if type(one) == "table" then
local no = #one
- if no > 0 then
- local p
+ local p
+ if no == 0 then
+ for k, v in next, one do
+ local pp = P(k) / v
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ elseif no == 1 then
+ local o = one[1]
+ one, two = P(o[1]), o[2]
+ return Cs(((1-one)^1 + one/two)^0)
+ else
for i=1,no do
local o = one[i]
local pp = P(o[1]) / o[2]
@@ -1467,11 +1609,16 @@ function lpeg.replacer(one,two)
return Cs((p + 1)^0)
end
else
+ one = P(one)
two = two or ""
- return Cs((P(one)/two + 1)^0)
+ return Cs(((1-one)^1 + one/two)^0)
end
end
+-- print(lpeg.match(lpeg.replacer("e","a"),"test test"))
+-- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test"))
+-- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test"))
+
local splitters_f, splitters_s = { }, { }
function lpeg.firstofsplit(separator) -- always return value
@@ -1506,7 +1653,7 @@ local nany = utf8char/""
function lpeg.counter(pattern)
pattern = Cs((P(pattern)/" " + nany)^0)
return function(str)
- return #match(pattern,str)
+ return #lpegmatch(pattern,str)
end
end
@@ -1520,7 +1667,7 @@ if utfgmatch then
end
return n
else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
+ return #lpegmatch(Cs((P(what)/" " + nany)^0),str)
end
end
@@ -1535,9 +1682,9 @@ else
p = Cs((P(what)/" " + nany)^0)
cache[p] = p
end
- return #match(p,str)
+ return #lpegmatch(p,str)
else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
+ return #lpegmatch(Cs((P(what)/" " + nany)^0),str)
end
end
@@ -1564,7 +1711,7 @@ local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
+ return lpegmatch(simple and s or p,str)
end
 -- utf extensions
@@ -1611,7 +1758,7 @@ else
p = P(uc)
end
end
- match((utf8char/f)^0,str)
+ lpegmatch((utf8char/f)^0,str)
return p
end
@@ -1627,7 +1774,7 @@ function lpeg.UR(str,more)
first = str
last = more or first
else
- first, last = match(range,str)
+ first, last = lpegmatch(range,str)
if not last then
return P(str)
end
@@ -1654,11 +1801,15 @@ end
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order
if type(list) ~= "table" then
list = { list, ... }
end
- -- sort(list) -- longest match first
+ -- table.sort(list) -- longest match first
local p = P(list[1])
for l=2,#list do
p = p + P(list[l])
@@ -1666,10 +1817,6 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
return p
end
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-- For the moment here, but it might move to utilities. Beware, we need to
 -- have the longest keyword first, so 'aaa' comes before 'aa' which is why we
-- loop back from the end cq. prepend.
@@ -1827,6 +1974,24 @@ end
-- utfchar(0x205F), -- math thinspace
-- } )
+-- handy from within tex:
+
+local lpegmatch = lpeg.match
+
+local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg!
+
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+
+-- strips leading and trailing spaces and collapses all other spaces
+
+local pattern = Cs(whitespace^0/"" * ((whitespace^1 * P(-1) / "") + (whitespace^1/" ") + P(1))^0)
+
+function string.collapsespaces(str)
+ return lpegmatch(pattern,str)
+end
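+
+-- for instance (input is illustrative):
+--
+-- print(string.collapsespaces("  too   many\tspaces  ")) -- "too many spaces"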
+
end -- of closure
@@ -1851,14 +2016,14 @@ else
io.fileseparator, io.pathseparator = "/" , ":"
end
-function io.loaddata(filename,textmode)
+function io.loaddata(filename,textmode) -- return nil if empty
local f = io.open(filename,(textmode and 'r') or 'rb')
if f then
local data = f:read('*all')
f:close()
- return data
- else
- return nil
+ if #data > 0 then
+ return data
+ end
end
end
@@ -1880,6 +2045,45 @@ function io.savedata(filename,data,joiner)
end
end
+function io.loadlines(filename,n) -- return nil if empty
+ local f = io.open(filename,'r')
+ if f then
+ if n then
+ local lines = { }
+ for i=1,n do
+ local line = f:read("*lines")
+ if line then
+ lines[#lines+1] = line
+ else
+ break
+ end
+ end
+ f:close()
+ lines = concat(lines,"\n")
+ if #lines > 0 then
+ return lines
+ end
+ else
+ local line = f:read("*line") or ""
+ assert(f:close())
+ if #line > 0 then
+ return line
+ end
+ end
+ end
+end
+
+function io.loadchunk(filename,n)
+ local f = io.open(filename,'rb')
+ if f then
+ local data = f:read(n or 1024)
+ f:close()
+        if data and #data > 0 then
+ return data
+ end
+ end
+end
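+
+-- usage sketch (file names are hypothetical):
+--
+-- local firstline = io.loadlines("somefile.txt")     -- first line, or nil when empty
+-- local header    = io.loadchunk("somefile.bin",512) -- first 512 bytes, or nil when empty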
+
function io.exists(filename)
local f = io.open(filename)
if f == nil then
@@ -2107,7 +2311,7 @@ if not modules then modules = { } end modules ['l-number'] = {
-- this module will be replaced when we have the bit library
-local tostring = tostring
+local tostring, tonumber = tostring, tonumber
local format, floor, match, rep = string.format, math.floor, string.match, string.rep
local concat, insert = table.concat, table.insert
local lpegmatch = lpeg.match
@@ -2170,11 +2374,11 @@ function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
end
function number.setbit(x, p)
- return hasbit(x, p) and x or x + p
+ return (x % (p + p) >= p) and x or x + p
end
function number.clearbit(x, p)
- return hasbit(x, p) and x - p or x
+ return (x % (p + p) >= p) and x - p or x
end
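+
+-- the inlined test behaves like hasbit; for instance:
+--
+-- print(number.setbit(0,4), number.clearbit(5,4)) -- 4   1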
@@ -2208,6 +2412,10 @@ function number.tobitstring(n,m)
end
+function number.valid(str,default)
+ return tonumber(str) or default or nil
+end
+
end -- of closure
@@ -2319,17 +2527,28 @@ if not modules then modules = { } end modules ['l-os'] = {
-- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
-- os.platform : extended os.name with architecture
+-- os.sleep() => socket.sleep()
+-- math.randomseed(tonumber(string.sub(string.reverse(tostring(math.floor(socket.gettime()*10000))),1,6)))
+
-- maybe build io.flush in os.execute
local os = os
+local date, time = os.date, os.time
local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
-local random, ceil = math.random, math.ceil
-local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+local random, ceil, randomseed = math.random, math.ceil, math.randomseed
+local rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring = rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring
-- The following code permits traversing the environment table, at least
-- in luatex. Internally all environment names are uppercase.
+-- The randomseed in Lua is not that random, although this depends on the operating system as well
+-- as the binary (Luatex is normally okay). But to be sure we set the seed anyway.
+
+math.initialseed = tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
+
+randomseed(math.initialseed)
+
if not os.__getenv__ then
os.__getenv__ = os.getenv
@@ -2433,12 +2652,14 @@ else
os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
+local launchers = {
+ windows = "start %s",
+ macosx = "open %s",
+ unix = "$BROWSER %s &> /dev/null &",
+}
+
function os.launch(str)
- if os.type == "windows" then
- os.execute("start " .. str) -- os.spawn ?
- else
- os.execute(str .. " &") -- os.spawn ?
- end
+ os.execute(format(launchers[os.name] or launchers.unix,str))
end
if not os.times then
@@ -2649,7 +2870,7 @@ end
local d
function os.timezone(delta)
- d = d or tonumber(tonumber(os.date("%H")-os.date("!%H")))
+ d = d or tonumber(tonumber(date("%H")-date("!%H")))
if delta then
if d > 0 then
return format("+%02i:00",d)
@@ -2661,6 +2882,44 @@ function os.timezone(delta)
end
end
+local timeformat = format("%%s%s",os.timezone(true))
+local dateformat = "!%Y-%m-%d %H:%M:%S"
+
+function os.fulltime(t,default)
+ t = tonumber(t) or 0
+ if t > 0 then
+ -- valid time
+ elseif default then
+ return default
+ else
+ t = nil
+ end
+ return format(timeformat,date(dateformat,t))
+end
+
+local dateformat = "%Y-%m-%d %H:%M:%S"
+
+function os.localtime(t,default)
+ t = tonumber(t) or 0
+ if t > 0 then
+ -- valid time
+ elseif default then
+ return default
+ else
+ t = nil
+ end
+ return date(dateformat,t)
+end
+
+function os.converttime(t,default)
+ local t = tonumber(t)
+ if t and t > 0 then
+ return date(dateformat,t)
+ else
+ return default or "-"
+ end
+end
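+
+-- usage sketch (actual output depends on the clock and timezone):
+--
+-- print(os.localtime())         -- e.g. 2012-10-19 00:06:00 (local time)
+-- print(os.fulltime(os.time())) -- the same moment as utc, with the zone offset appended
+-- print(os.converttime(nil))    -- "-" (invalid input falls back to the default)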
+
local memory = { }
local function which(filename)
@@ -2735,7 +2994,7 @@ local function nameonly(name)
return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$",""))
end
-local function extname(name,default)
+local function suffixonly(name,default)
return match(name,"^.+%.([^/\\]-)$") or default or ""
end
@@ -2744,11 +3003,16 @@ local function splitname(name)
return n or name, s or ""
end
-file.basename = basename
-file.dirname = dirname
-file.nameonly = nameonly
-file.extname = extname
-file.suffix = extname
+file.basename = basename
+
+file.pathpart = dirname
+file.dirname = dirname
+
+file.nameonly = nameonly
+
+file.suffixonly = suffixonly
+file.extname = suffixonly -- obsolete
+file.suffix = suffixonly
function file.removesuffix(filename)
return (gsub(filename,"%.[%a%d]+$",""))
@@ -2864,6 +3128,11 @@ end
 file.isreadable = file.is_readable -- deprecated
 file.iswritable = file.is_writable -- deprecated
+function file.size(name)
+ local a = attributes(name)
+ return a and a.size or 0
+end
+
-- todo: lpeg \\ / .. does not save much
local checkedsplit = string.checkedsplit
@@ -3001,6 +3270,7 @@ local drive = C(R("az","AZ")) * P(":")
local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
+local rest = C(P(1)^0)
drive = drive + Cc("")
path = path + Cc("")
@@ -3009,7 +3279,8 @@ suffix = suffix + Cc("")
local pattern_a = drive * path * base * suffix
local pattern_b = path * base * suffix
-local pattern_c = C(drive * path) * C(base * suffix)
+local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures
+local pattern_d = path * rest
function file.splitname(str,splitdrive)
if splitdrive then
@@ -3019,6 +3290,10 @@ function file.splitname(str,splitdrive)
end
end
+function file.splitbase(str)
+ return lpegmatch(pattern_d,str) -- returns path, base+suffix
+end
+
function file.nametotable(str,splitdrive) -- returns table
local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
if splitdrive then
@@ -3040,6 +3315,8 @@ function file.nametotable(str,splitdrive) -- returns table
end
end
+-- print(file.splitbase("a/b/c.txt"))
+
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
--
-- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
@@ -3081,15 +3358,30 @@ if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
-function file.needs_updating(oldname,newname,threshold) -- size modification access change
- local oldtime = lfs.attributes(oldname, modification)
- local newtime = lfs.attributes(newname, modification)
- if newtime >= oldtime then
- return false
- elseif oldtime - newtime < (threshold or 1) then
- return false
+function file.needsupdating(oldname,newname,threshold) -- size modification access change
+ local oldtime = lfs.attributes(oldname,"modification")
+ if oldtime then
+ local newtime = lfs.attributes(newname,"modification")
+ if not newtime then
+            return true -- no new file yet, so updating is needed
+        elseif newtime >= oldtime then
+            return false -- new file is already up to date, so no updating needed
+ elseif oldtime - newtime < (threshold or 1) then
+ return false -- new file is probably still okay
+ else
+ return true -- new file has to be updated
+ end
else
- return true
+ return false -- no old file, so no updating needed
+ end
+end
+
+file.needs_updating = file.needsupdating
+
+function file.syncmtimes(oldname,newname)
+ local oldtime = lfs.attributes(oldname,"modification")
+ if oldtime and lfs.isfile(newname) then
+ lfs.touch(newname,oldtime,oldtime)
end
end
@@ -3111,7 +3403,7 @@ function file.loadchecksum(name)
return nil
end
-function file.savechecksum(name, checksum)
+function file.savechecksum(name,checksum)
if not checksum then checksum = file.checksum(name) end
if checksum then
io.savedata(name .. ".md5",checksum)
@@ -3136,7 +3428,7 @@ if not modules then modules = { } end modules ['l-url'] = {
local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
local concat = table.concat
local tonumber, type = tonumber, type
-local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
+local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V
local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
-- from wikipedia:
@@ -3169,15 +3461,19 @@ local endofstring = P(-1)
local hexdigit = R("09","AF","af")
local plus = P("+")
local nothing = Cc("")
-local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
+local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
+local escaped = (plus / " ") + escapedchar
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-- we also assume that when we have a scheme, we also have an authority
+--
+-- maybe we should already split the query (better for unescaping, as = and & can be part of a value)
local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
local pathstr = Cs((escaped+(1- qmark-hash))^0)
-local querystr = Cs((escaped+(1- hash))^0)
+----- querystr = Cs((escaped+(1- hash))^0)
+local querystr = Cs(( (1- hash))^0)
local fragmentstr = Cs((escaped+(1- endofstring))^0)
local scheme = schemestr * colon + nothing
@@ -3192,11 +3488,20 @@ local parser = Ct(validurl)
lpegpatterns.url = validurl
lpegpatterns.urlsplitter = parser
-local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
+local escapes = { }
-local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
+setmetatable(escapes, { __index = function(t,k)
+ local v = format("%%%02X",byte(k))
+ t[k] = v
+ return v
+end })
-lpegpatterns.urlescaper = escaper
+local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most
+local unescaper = Cs((escapedchar + 1)^0)
+
+lpegpatterns.urlunescaped = escapedchar
+lpegpatterns.urlescaper = escaper
+lpegpatterns.urlunescaper = unescaper
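+
+-- for instance (strings are illustrative):
+--
+-- print(lpegmatch(escaper,"a b&c"))   -- a%20b%26c
+-- print(lpegmatch(unescaper,"a%20b")) -- a b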
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
@@ -3208,8 +3513,12 @@ end
local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
local function hasscheme(str)
- local scheme = lpegmatch(isscheme,str) -- at least one character
- return scheme ~= "" and scheme or false
+ if str then
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
+ else
+ return false
+ end
end
@@ -3228,10 +3537,32 @@ local rootbased = P("/")
local barswapper = replacer("|",":")
local backslashswapper = replacer("\\","/")
+-- queries:
+
+local equal = P("=")
+local amp = P("&")
+local key = Cs(((escapedchar+1)-equal )^0)
+local value = Cs(((escapedchar+1)-amp -endofstring)^0)
+
+local splitquery = Cf ( Ct("") * P { "sequence",
+ sequence = V("pair") * (amp * V("pair"))^0,
+ pair = Cg(key * equal * value),
+}, rawset)
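+
+-- the fold collects the key/value pairs into a table (query string is illustrative):
+--
+-- inspect(lpegmatch(splitquery,"interface=en&mode=tex")) -- { interface = "en", mode = "tex" }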
+
+-- hasher
+
local function hashed(str) -- not yet ok (/test?test)
+ if str == "" then
+ return {
+ scheme = "invalid",
+ original = str,
+ }
+ end
local s = split(str)
- local somescheme = s[1] ~= ""
- local somequery = s[4] ~= ""
+ local rawscheme = s[1]
+ local rawquery = s[4]
+ local somescheme = rawscheme ~= ""
+ local somequery = rawquery ~= ""
if not somescheme and not somequery then
s = {
scheme = "file",
@@ -3247,14 +3578,17 @@ local function hashed(str) -- not yet ok (/test?test)
local authority, path, filename = s[2], s[3]
if authority == "" then
filename = path
+ elseif path == "" then
+ filename = ""
else
filename = authority .. "/" .. path
end
s = {
- scheme = s[1],
+ scheme = rawscheme,
authority = authority,
path = path,
- query = s[4],
+ query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and =
+ queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped
fragment = s[5],
original = str,
noscheme = false,
@@ -3264,6 +3598,8 @@ local function hashed(str) -- not yet ok (/test?test)
return s
end
+-- inspect(hashed("template://test"))
+
-- Here we assume:
--
-- files: /// = relative
@@ -3306,23 +3642,65 @@ function url.construct(hash) -- dodo: we need to escape !
return lpegmatch(escaper,concat(fullurl))
end
-function url.filename(filename)
+function url.filename(filename) -- why no lpeg here ?
local t = hashed(filename)
return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
end
+local function escapestring(str)
+ return lpegmatch(escaper,str)
+end
+
+url.escape = escapestring
+
+-- function url.query(str) -- separator could be an option
+-- if type(str) == "string" then
+-- local t = { }
+-- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
+-- t[k] = v
+-- end
+-- return t
+-- else
+-- return str
+-- end
+-- end
+
function url.query(str)
if type(str) == "string" then
- local t = { }
- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
- t[k] = v
- end
- return t
+ return lpegmatch(splitquery,str) or ""
else
return str
end
end
+function url.toquery(data)
+ local td = type(data)
+ if td == "string" then
+        return #data > 0 and escapestring(data) or nil -- beware of double escaping
+ elseif td == "table" then
+ if next(data) then
+ local t = { }
+ for k, v in next, data do
+ t[#t+1] = format("%s=%s",k,escapestring(v))
+ end
+ return concat(t,"&")
+ end
+ else
+        -- nil is a signal that there is no query
+ end
+end
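+
+-- for instance (table is illustrative, key order is not guaranteed):
+--
+-- print(url.toquery { mode = "tex", command = "framed" }) -- mode=tex&command=framed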
+
+-- /test/ | /test | test/ | test => test
+
+function url.barepath(path)
+ if not path or path == "" then
+ return ""
+ else
+ return (gsub(path,"^/?(.-)/?$","%1"))
+ end
+end
+
+
@@ -3363,6 +3741,24 @@ local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
+
-- handy
function dir.current()
@@ -3738,28 +4134,49 @@ function boolean.tonumber(b)
end
function toboolean(str,tolerant)
- if tolerant then
- local tstr = type(str)
- if tstr == "string" then
- return str == "true" or str == "yes" or str == "on" or str == "1" or str == "t"
- elseif tstr == "number" then
- return tonumber(str) ~= 0
- elseif tstr == "nil" then
- return false
- else
- return str
- end
+ if str == nil then
+ return false
+ elseif str == false then
+ return false
+ elseif str == true then
+ return true
elseif str == "true" then
return true
elseif str == "false" then
return false
+ elseif not tolerant then
+ return false
+ elseif str == 0 then
+ return false
+ elseif (tonumber(str) or 0) > 0 then
+ return true
else
- return str
+ return str == "yes" or str == "on" or str == "t"
end
end
string.toboolean = toboolean
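+
+-- the tolerant variant also accepts numbers and the usual yes/on/t strings; for instance:
+--
+-- print(toboolean("yes"), toboolean("yes",true)) -- false   true
+-- print(toboolean(1,true), toboolean("true"))    -- true    true
+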
+function string.booleanstring(str)
+ if str == nil then
+ return false
+ elseif str == false then
+ return false
+ elseif str == true then
+ return true
+ elseif str == "true" then
+ return true
+ elseif str == "false" then
+ return false
+ elseif str == 0 then
+ return false
+ elseif (tonumber(str) or 0) > 0 then
+ return true
+ else
+ return str == "yes" or str == "on" or str == "t"
+ end
+end
+
function string.is_boolean(str,default)
if type(str) == "string" then
if str == "true" or str == "yes" or str == "on" or str == "t" then
@@ -3784,57 +4201,229 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
+-- this module will be reorganized
+
+-- todo: utf.sub replacement (used in syst-aux)
+
+local concat = table.concat
+local type = type
+local P, C, R, Cs, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local utftype = patterns.utftype
+local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
+local utfsplitlines = string.utfsplitlines
+
if not unicode then
- unicode = { utf8 = { } }
+ unicode = { }
+
+end
+
+local unicode = unicode
+
+utf = utf or unicode.utf8
+
+if not utf then
+
+ utf8 = { }
+ unicode.utf8 = utf8
+ utf = utf8
+
+end
+
+if not utf.char then
local floor, char = math.floor, string.char
- function unicode.utf8.utfchar(n)
+ function utf.char(n)
if n < 0x80 then
+ -- 0aaaaaaa : 0x80
return char(n)
elseif n < 0x800 then
+ -- 110bbbaa : 0xC0 : n >> 6
+ -- 10aaaaaa : 0x80 : n & 0x3F
return char(
0xC0 + floor(n/0x40),
0x80 + (n % 0x40)
)
elseif n < 0x10000 then
+ -- 1110bbbb : 0xE0 : n >> 12
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
return char(
0xE0 + floor(n/0x1000),
0x80 + (floor(n/0x40) % 0x40),
0x80 + (n % 0x40)
)
- elseif n < 0x40000 then
+ elseif n < 0x200000 then
+ -- 11110ccc : 0xF0 : n >> 18
+ -- 10ccbbbb : 0x80 : (n >> 12) & 0x3F
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
+ -- dddd : ccccc - 1
return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
+ 0xF0 + floor(n/0x40000),
+ 0x80 + (floor(n/0x1000) % 0x40),
0x80 + (floor(n/0x40) % 0x40),
0x80 + (n % 0x40)
)
else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
+ return ""
end
end
end
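+
+-- a quick check of the fallback encoder (codepoints are illustrative):
+--
+-- print(utf.char(0x41), utf.char(0xE9), utf.char(0x20AC)) -- A é €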
-local unicode = unicode
+if not utf.byte then
-utf = utf or unicode.utf8
+ local utf8byte = patterns.utf8byte
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
-local utfsplitlines = string.utfsplitlines
+end
+
+local utfchar, utfbyte = utf.char, utf.byte
+
+-- As we want to get rid of the (unmaintained) utf library we implement our own
+-- variants (in due time an independent module):
+
+function unicode.filetype(data)
+ return data and lpegmatch(utftype,data) or "unknown"
+end
+
+local toentities = Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+        ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
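+
+-- for instance: ascii passes through, other characters become numeric entities:
+--
+-- print(utf.toentities("aä"))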
+
+
+
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+-- actually one of them is already utf ... sort of useless this one
+
+-- function utf.char(n)
+-- if n < 0x80 then
+-- return char(n)
+-- elseif n < 0x800 then
+-- return char(
+-- 0xC0 + floor(n/0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x10000 then
+-- return char(
+-- 0xE0 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x40000 then
+-- return char(
+-- 0xF0 + floor(n/0x40000),
+-- 0x80 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- else
+-- -- return char(
+-- -- 0xF1 + floor(n/0x1000000),
+-- -- 0x80 + floor(n/0x40000),
+-- -- 0x80 + floor(n/0x1000),
+-- -- 0x80 + (floor(n/0x40) % 0x40),
+-- -- 0x80 + (n % 0x40)
+-- -- )
+-- return "?"
+-- end
+-- end
+--
+-- merge into:
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
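+
+-- a minimal check: utf-16 (with bom) becomes utf-8 (bytes are illustrative):
+--
+-- print(string.toutf("\254\255\0\65\0\98")) -- "Ab"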
+
+local validatedutf = Cs (
+ (
+ patterns.utf8one
+ + patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ + P(1) / "�"
+ )^0
+)
+
+patterns.validatedutf = validatedutf
+
+function string.validutf(str)
+ return lpegmatch(validatedutf,str)
+end
+
+
+utf.length = string.utflength
+utf.split = string.utfsplit
+utf.splitines = string.utfsplitlines
+utf.valid = string.validutf
+
+if not utf.len then
+ utf.len = utf.length
+end
+
+-- a replacement for simple gsubs:
+
+local utf8char = patterns.utf8char
+
+function utf.remapper(mapping)
+ local pattern = Cs((utf8char/mapping)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
+end
+
+-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
+-- print(remap("abcd 1234 abcd"))
-- 0 EF BB BF UTF-8
-- 1 FF FE UTF-16-little-endian
@@ -4027,11 +4616,22 @@ local function big(c)
end
end
+-- function unicode.utf8_to_utf16(str,littleendian)
+-- if littleendian then
+-- return char(255,254) .. utfgsub(str,".",little)
+-- else
+-- return char(254,255) .. utfgsub(str,".",big)
+-- end
+-- end
+
+local _, l_remap = utf.remapper(little)
+local _, b_remap = utf.remapper(big)
+
function unicode.utf8_to_utf16(str,littleendian)
if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+ return char(255,254) .. lpegmatch(l_remap,str)
else
- return char(254,255) .. utfgsub(str,".",big)
+ return char(254,255) .. lpegmatch(b_remap,str)
end
end
@@ -4052,84 +4652,12 @@ function unicode.xstring(s)
return format("0x%05X",type(s) == "number" and s or utfbyte(s))
end
+--
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
-
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
-end
-
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
-            ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
-end
-
-
-
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
-end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
+local pattern = Ct(C(patterns.utf8char)^0)
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
+function utf.totable(str)
+ return lpegmatch(pattern,str)
end
@@ -4189,10 +4717,11 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch, rep = string.format, string.gmatch, string.rep
+local format, gmatch, rep, gsub = string.format, string.gmatch, string.rep, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
+local type, next, rawset, tonumber, loadstring = type, next, rawset, tonumber, loadstring
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -4346,6 +4875,121 @@ function tables.encapsulate(core,capsule,protect)
end
end
+local function serialize(t,r,outer) -- no mixes
+ r[#r+1] = "{"
+ local n = #t
+ if n > 0 then
+ for i=1,n do
+ local v = t[i]
+ local tv = type(v)
+ if tv == "string" then
+ r[#r+1] = format("%q,",v)
+ elseif tv == "number" then
+ r[#r+1] = format("%s,",v)
+ elseif tv == "table" then
+ serialize(v,r)
+ elseif tv == "boolean" then
+ r[#r+1] = format("%s,",tostring(v))
+ end
+ end
+ else
+ for k, v in next, t do
+ local tv = type(v)
+ if tv == "string" then
+ r[#r+1] = format("[%q]=%q,",k,v)
+ elseif tv == "number" then
+ r[#r+1] = format("[%q]=%s,",k,v)
+ elseif tv == "table" then
+ r[#r+1] = format("[%q]=",k)
+ serialize(v,r)
+ elseif tv == "boolean" then
+ r[#r+1] = format("[%q]=%s,",k,tostring(v))
+ end
+ end
+ end
+ if outer then
+ r[#r+1] = "}"
+ else
+ r[#r+1] = "},"
+ end
+ return r
+end
+
+function table.fastserialize(t,prefix)
+ return concat(serialize(t,{ prefix or "return" },true))
+end
+
+function table.deserialize(str)
+ if not str or str == "" then
+ return
+ end
+ local code = loadstring(str)
+ if not code then
+ return
+ end
+ code = code()
+ if not code then
+ return
+ end
+ return code
+end
+
+-- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } })
+
+function table.load(filename)
+ if filename then
+ local t = io.loaddata(filename)
+ if t and t ~= "" then
+ t = loadstring(t)
+ if type(t) == "function" then
+ t = t()
+ if type(t) == "table" then
+ return t
+ end
+ end
+ end
+ end
+end
+
+local function slowdrop(t)
+ local r = { }
+ local l = { }
+ for i=1,#t do
+ local ti = t[i]
+ local j = 0
+ for k, v in next, ti do
+ j = j + 1
+ l[j] = format("%s=%q",k,v)
+ end
+ r[i] = format(" {%s},\n",concat(l))
+ end
+ return format("return {\n%s}",concat(r))
+end
+
+local function fastdrop(t)
+ local r = { "return {\n" }
+ for i=1,#t do
+ local ti = t[i]
+ r[#r+1] = " {"
+ for k, v in next, ti do
+ r[#r+1] = format("%s=%q",k,v)
+ end
+ r[#r+1] = "},\n"
+ end
+ r[#r+1] = "}"
+ return concat(r)
+end
+
+function table.drop(t,slow)
+ if #t == 0 then
+ return "return { }"
+ elseif slow == true then
+ return slowdrop(t) -- less memory
+ else
+ return fastdrop(t) -- some 15% faster
+ end
+end
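+
+-- for instance (data is illustrative):
+--
+-- print(table.drop { { a = 1 }, { a = 2 } }) -- a loadable "return { ... }" string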
+
end -- of closure
@@ -4520,11 +5164,10 @@ local concat = table.concat
local type, next = type, next
utilities = utilities or {}
-utilities.merger = utilities.merger or { } -- maybe mergers
+local merger = utilities.merger or { }
+utilities.merger = merger
utilities.report = logs and logs.reporter("system") or print
-local merger = utilities.merger
-
merger.strip_comment = true
local m_begin_merge = "begin library merge"
@@ -4570,9 +5213,11 @@ end
local function self_save(name, data)
if data ~= "" then
if merger.strip_comment then
- -- saves some 20K
local n = #data
+ -- saves some 20K .. scite comments
data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
+ -- saves some 20K .. ldx comments
+ data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-ldx%]%]%-%-","")
utilities.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
end
io.savedata(name,data)
@@ -4653,36 +5298,208 @@ if not modules then modules = { } end modules ['util-lua'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ comment = "the strip code is written by Peter Cawley",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-utilities = utilities or {}
-utilities.lua = utilities.lua or { }
-utilities.report = logs and logs.reporter("system") or print
+local rep, sub, byte, dump, format = string.rep, string.sub, string.byte, string.dump, string.format
+local loadstring, loadfile, type = loadstring, loadfile, type
+
+utilities = utilities or {}
+utilities.lua = utilities.lua or { }
+local luautilities = utilities.lua
+
+utilities.report = logs and logs.reporter("system") or print -- can be overloaded later
+
+local tracestripping = false
+local forcestupidcompile = true -- use internal bytecode compiler
+luautilities.stripcode = true -- support stripping when asked for
+luautilities.alwaysstripcode = false -- saves 1 meg on 7 meg compressed format file (2012.08.12)
+luautilities.nofstrippedchunks = 0
+luautilities.nofstrippedbytes = 0
+
+-- The next function was posted by Peter Cawley on the lua list and strips line
+-- number information etc. from the bytecode data blob. We only apply this trick
+-- when we store data tables. Stripping makes the compressed format file about
+-- 1MB smaller (and uncompressed we save at least 6MB).
+--
+-- You can consider this feature an experiment, so it might disappear. There is
+-- no noticeable gain in runtime although the memory footprint should be somewhat
+-- smaller (and the file system has a bit less to deal with).
+--
+-- Begin of borrowed code ... works for Lua 5.1 which LuaTeX currently uses ...
+
+local function strip_code_pc(dump,name)
+ local before = #dump
+ local version, format, endian, int, size, ins, num = byte(dump,5,11)
+ local subint
+ if endian == 1 then
+ subint = function(dump, i, l)
+ local val = 0
+ for n = l, 1, -1 do
+ val = val * 256 + byte(dump,i + n - 1)
+ end
+ return val, i + l
+ end
+ else
+ subint = function(dump, i, l)
+ local val = 0
+ for n = 1, l, 1 do
+ val = val * 256 + byte(dump,i + n - 1)
+ end
+ return val, i + l
+ end
+ end
+ local strip_function
+ strip_function = function(dump)
+ local count, offset = subint(dump, 1, size)
+ local stripped, dirty = rep("\0", size), offset + count
+ offset = offset + count + int * 2 + 4
+ offset = offset + int + subint(dump, offset, int) * ins
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ local t
+ t, offset = subint(dump, offset, 1)
+ if t == 1 then
+ offset = offset + 1
+ elseif t == 4 then
+ offset = offset + size + subint(dump, offset, size)
+ elseif t == 3 then
+ offset = offset + num
+ end
+ end
+ count, offset = subint(dump, offset, int)
+ stripped = stripped .. sub(dump,dirty, offset - 1)
+ for n = 1, count do
+ local proto, off = strip_function(sub(dump,offset, -1))
+ stripped, offset = stripped .. proto, offset + off - 1
+ end
+ offset = offset + subint(dump, offset, int) * int + int
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ offset = offset + subint(dump, offset, size) + size + int * 2
+ end
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ offset = offset + subint(dump, offset, size) + size
+ end
+ stripped = stripped .. rep("\0", int * 3)
+ return stripped, offset
+ end
+ dump = sub(dump,1,12) .. strip_function(sub(dump,13,-1))
+ local after = #dump
+ local delta = before-after
+ if tracestripping then
+ utilities.report("stripped bytecode: %s, before %s, after %s, delta %s",name or "unknown",before,after,delta)
+ end
+ luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1
+ luautilities.nofstrippedbytes = luautilities.nofstrippedbytes + delta
+ return dump, delta
+end
+
+-- ... end of borrowed code.
+
+local function strippedbytecode(code,forcestrip,name)
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ return strip_code_pc(code,name)
+ else
+ return code, 0
+ end
+end
+
+luautilities.stripbytecode = strip_code_pc
+luautilities.strippedbytecode = strippedbytecode
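+
+-- Illustration (added, not part of the original): strip a freshly dumped chunk and
+-- report how many bytes were saved.
+--
+-- local blob = string.dump(loadstring("return { 1, 2, 3 }"))
+-- local stripped, saved = luautilities.strippedbytecode(blob,true,"demo")
+-- print(#blob,#stripped,saved)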
+
+local function fatalerror(name)
+ utilities.report(format("fatal error in %q",name or "unknown"))
+end
+
+function luautilities.loadedluacode(fullname,forcestrip,name)
+ -- quite subtle ... doing this wrong incidentally can give more bytes
+ name = name or fullname
+ local code = loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip) == "function" then
+ forcestrip = forcestrip(fullname)
+ end
+ if forcestrip then
+ local code, n = strip_code_pc(dump(code),name)
+ return loadstring(code), n
+ elseif luautilities.alwaysstripcode then
+ return loadstring(strip_code_pc(dump(code),name))
+ else
+ return code, 0
+ end
+ elseif luautilities.alwaysstripcode then
+ return loadstring(strip_code_pc(dump(code),name))
+ else
+ return code, 0
+ end
+end
+
+function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
+ local n = 0
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ code = loadstring(code)
+ if not code then
+ fatalerror(name)
+ end
+ code, n = strip_code_pc(dump(code),name)
+ end
+ return loadstring(code), n
+end
-local function stupidcompile(luafile,lucfile)
- local data = io.loaddata(luafile)
- if data and data ~= "" then
- data = string.dump(data)
- if data and data ~= "" then
- io.savedata(lucfile,data)
+local function stupidcompile(luafile,lucfile,strip)
+ local code = io.loaddata(luafile)
+ local n = 0
+ if code and code ~= "" then
+ code = loadstring(code)
+ if not code then
+ fatalerror()
+ end
+ code = dump(code)
+ if strip then
+ code, n = strippedbytecode(code,true,luafile) -- last one is reported
+ end
+ if code and code ~= "" then
+ io.savedata(lucfile,code)
end
end
+ return n
end
-function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
+local luac_normal = "texluac -o %q %q"
+local luac_strip = "texluac -s -o %q %q"
+
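+-- The next function compiles a lua file into bytecode; a usage sketch (added, the
+-- filenames are made up):
+--
+-- luautilities.compile("foo.lua","foo.luc",false,true,true)
+--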
+function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
- local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
+ local done = false
if strip ~= false then
- command = "-s " .. command
+ strip = true
+ end
+ if forcestupidcompile then
+ fallback = true
+ elseif strip then
+ done = os.spawn(format(luac_strip, lucfile,luafile)) == 0
+ else
+ done = os.spawn(format(luac_normal,lucfile,luafile)) == 0
end
- local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
if not done and fallback then
- utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
- stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
- cleanup = false -- better see how worse it is
+ local n = stupidcompile(luafile,lucfile,strip)
+ if n > 0 then
+ utilities.report("lua: %s dumped into %s (%i bytes stripped)",luafile,lucfile,n)
+ else
+ utilities.report("lua: %s dumped into %s (unstripped)",luafile,lucfile)
+ end
+ cleanup = false -- better see how bad it is
end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
@@ -4697,7 +5514,6 @@ end
-
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -4710,8 +5526,10 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
-local lpegmatch = lpeg.match
+local lpeg, table, string = lpeg, table, string
+
+local P, R, V, S, C, Ct, Cs, Carg, Cc = lpeg.P, lpeg.R, lpeg.V, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg, lpeg.Cc
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
@@ -4723,29 +5541,39 @@ parsers.patterns = parsers.patterns or { }
local setmetatableindex = table.setmetatableindex
local sortedhash = table.sortedhash
+-- we share some patterns
+
+local space = P(' ')
+local equal = P("=")
+local comma = P(",")
+local lbrace = P("{")
+local rbrace = P("}")
+local period = S(".")
+local punctuation = S(".,:;")
+local spacer = patterns.spacer
+local whitespace = patterns.whitespace
+local newline = patterns.newline
+local anything = patterns.anything
+local endofstring = patterns.endofstring
+
-- we could use a Cf Cg construct
local escape, left, right = P("\\"), P('{'), P('}')
-lpeg.patterns.balanced = P {
+patterns.balanced = P {
[1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
[2] = left * V(1) * right
}
-local space = P(' ')
-local equal = P("=")
-local comma = P(",")
-local lbrace = P("{")
-local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
-local content = (1-P(-1))^0
+local content = (1-endofstring)^0
-lpeg.patterns.nested = nested -- no capture
-lpeg.patterns.argument = argument -- argument after e.g. =
-lpeg.patterns.content = content -- rest after e.g =
+patterns.nested = nested -- no capture
+patterns.argument = argument -- argument after e.g. =
+patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4764,10 +5592,6 @@ local function set(key,value)
hash[key] = value
end
-local function set(key,value)
- hash[key] = value
-end
-
local pattern_a_s = (pattern_a/set)^1
local pattern_b_s = (pattern_b/set)^1
local pattern_c_s = (pattern_c/set)^1
@@ -4818,7 +5642,7 @@ end
local separator = comma * space^0
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
-local pattern = Ct(value*(separator*value)^0)
+local pattern = spaces * Ct(value*(separator*value)^0)
-- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
@@ -4942,6 +5766,37 @@ function parsers.listitem(str)
return gmatch(str,"[^, ]+")
end
+--
+local digit = R("09")
+
+local pattern = Cs { "start",
+ start = V("one") + V("two") + V("three"),
+ rest = (Cc(",") * V("thousand"))^0 * (P(".") + endofstring) * anything^0,
+ thousand = digit * digit * digit,
+ one = digit * V("rest"),
+ two = digit * digit * V("rest"),
+ three = V("thousand") * V("rest"),
+}
+
+patterns.splitthousands = pattern -- maybe better in the parsers namespace ?
+
+function parsers.splitthousands(str)
+ return lpegmatch(pattern,str) or str
+end
+
+-- print(parsers.splitthousands("11111111111.11"))
+
+local optionalwhitespace = whitespace^0
+
+patterns.words = Ct((Cs((1-punctuation-whitespace)^1) + anything)^1)
+patterns.sentences = Ct((optionalwhitespace * Cs((1-period)^0 * period))^1)
+patterns.paragraphs = Ct((optionalwhitespace * Cs((whitespace^1*endofstring/"" + 1 - (spacer^0*newline*newline))^1))^1)
+
+-- local str = " Word1 word2. \n Word3 word4. \n\n Word5 word6.\n "
+-- inspect(lpegmatch(patterns.paragraphs,str))
+-- inspect(lpegmatch(patterns.sentences,str))
+-- inspect(lpegmatch(patterns.words,str))
+
end -- of closure
@@ -5043,7 +5898,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util.deb'] = {
+if not modules then modules = { } end modules ['util-deb'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -5155,6 +6010,7 @@ function inspect(i) -- global function
else
print(tostring(i))
end
+ return i -- so that we can inline the inspect
end
-- from the lua book:
@@ -5194,7 +6050,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
-local write_nl = texio.write_nl
+local write_nl = texio and texio.write_nl or print
statistics = statistics or { }
local statistics = statistics
@@ -5277,7 +6133,7 @@ statistics.elapsedtime = elapsedtime
statistics.elapsedindeed = elapsedindeed
statistics.elapsedseconds = elapsedseconds
--- general function
+-- general function .. we might split this module
function statistics.register(tag,fnc)
if statistics.enable and type(fnc) == "function" then
@@ -5387,6 +6243,8 @@ if not modules then modules = { } end modules ['trac-set'] = { -- might become u
license = "see context related readme files"
}
+-- maybe this should be util-set.lua
+
local type, next, tostring = type, next, tostring
local concat = table.concat
local format, find, lower, gsub, escapedpattern = string.format, string.find, string.lower, string.gsub, string.escapedpattern
@@ -5586,7 +6444,7 @@ function setters.show(t)
local value, default, modules = functions.value, functions.default, #functions
value = value == nil and "unset" or tostring(value)
default = default == nil and "unset" or tostring(default)
- t.report("%-30s modules: %2i default: %6s value: %6s",name,modules,default,value)
+ t.report("%-50s modules: %2i default: %6s value: %6s",name,modules,default,value)
end
end
t.report()
@@ -5678,17 +6536,31 @@ end)
-- experiment
-local flags = environment and environment.engineflags
+if environment then
-if flags then
- if trackers and flags.trackers then
- setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
- -- t_enable(flags.trackers)
- end
- if directives and flags.directives then
- setters.initialize("flags","directives", settings_to_hash(flags.directives))
- -- d_enable(flags.directives)
+ -- The engineflags are known earlier than environment.arguments but maybe we
+ -- need to handle them both as the later are parsed differently. The c: prefix
+ -- is used by mtx-context to isolate the flags from those that concern luatex.
+
+ local engineflags = environment.engineflags
+
+ if engineflags then
+ if trackers then
+ local list = engineflags["c:trackers"] or engineflags["trackers"]
+ if type(list) == "string" then
+ setters.initialize("flags","trackers",settings_to_hash(list))
+ -- t_enable(list)
+ end
+ end
+ if directives then
+ local list = engineflags["c:directives"] or engineflags["directives"]
+ if type(list) == "string" then
+ setters.initialize("flags","directives", settings_to_hash(list))
+ -- d_enable(list)
+ end
+ end
end
+
end
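+
+-- Illustration (added, names are placeholders): an engine flag such as
+-- --c:directives=some.directive (the c: prefix is added by mtx-context) arrives
+-- here via environment.engineflags and is handed to setters.initialize.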
-- here
@@ -5741,10 +6613,7 @@ local next, type = next, type
local setmetatableindex = table.setmetatableindex
---[[ldx--
-This is a prelude to a more extensive logging module. We no longer
-provide xml based logging as parsing is relatively easy anyway.
---ldx]]--
+
logs = logs or { }
local logs = logs
@@ -6560,7 +7429,8 @@ local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
local unquoted, quoted = string.unquoted, string.quoted
-local concat = table.concat
+local concat, insert, remove = table.concat, table.insert, table.remove
+local loadedluacode = utilities.lua.loadedluacode
-- precautions
@@ -6578,8 +7448,28 @@ if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaon
for k=3,#arg do
arg[k-2] = arg[k]
end
- arg[#arg] = nil -- last
- arg[#arg] = nil -- pre-last
+ remove(arg) -- last
+ remove(arg) -- pre-last
+end
+
+-- This is an ugly hack but it permits symlinking a script (say 'context') to 'mtxrun' as in:
+--
+-- ln -s /opt/minimals/tex/texmf-linux-64/bin/mtxrun context
+--
+-- The special mapping hack is needed because 'luatools' boils down to 'mtxrun --script base'
+-- but it's unlikely that there will be more of this
+
+do
+
+ local originalzero = file.basename(arg[0])
+ local specialmapping = { luatools = "base" }
+
+ if originalzero ~= "mtxrun" and originalzero ~= "mtxrun.lua" then
+ arg[0] = specialmapping[originalzero] or originalzero
+ insert(arg,0,"--script")
+ insert(arg,0,"mtxrun")
+ end
+
end
-- environment
@@ -6619,6 +7509,8 @@ local mt = {
setmetatable(environment,mt)
+-- context specific arguments (in order not to confuse the engine)
+
function environment.initializearguments(arg)
local arguments, files = { }, { }
environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
@@ -6627,10 +7519,12 @@ function environment.initializearguments(arg)
if index > 0 then
local flag, value = match(argument,"^%-+(.-)=(.-)$")
if flag then
+ flag = gsub(flag,"^c:","")
arguments[flag] = unquoted(value or "")
else
flag = match(argument,"^%-+(.+)")
if flag then
+ flag = gsub(flag,"^c:","")
arguments[flag] = true
else
files[#files+1] = argument
@@ -6650,7 +7544,7 @@ end
-- tricky: too many hits when we support partials unless we add
-- a registration of arguments so from now on we have 'partial'
-function environment.argument(name,partial)
+function environment.getargument(name,partial)
local arguments, sortedflags = environment.arguments, environment.sortedflags
if arguments[name] then
return arguments[name]
@@ -6673,6 +7567,8 @@ function environment.argument(name,partial)
return nil
end
+environment.argument = environment.getargument
+
function environment.splitarguments(separator) -- rather special, cut-off before separator
local done, before, after = false, { }, { }
local originalarguments = environment.originalarguments
@@ -6758,7 +7654,7 @@ function environment.texfile(filename)
return resolvers.findfile(filename,'tex')
end
-function environment.luafile(filename)
+function environment.luafile(filename) -- needs checking
local resolved = resolvers.findfile(filename,'tex') or ""
if resolved ~= "" then
return resolved
@@ -6770,13 +7666,16 @@ function environment.luafile(filename)
return resolvers.findfile(filename,'luatexlibs') or ""
end
-environment.loadedluacode = loadfile -- can be overloaded
+local function checkstrip(filename)
+ local modu = modules[file.nameonly(filename)]
+ return modu and modu.dataonly
+end
function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
filename = file.replacesuffix(filename, "lua")
local fullname = environment.luafile(filename)
if fullname and fullname ~= "" then
- local data = environment.loadedluacode(fullname)
+ local data = loadedluacode(fullname,checkstrip,filename)
if trace_locating then
report_lua("loading file %s%s", fullname, not data and " failed" or "")
elseif not silent then
@@ -6874,21 +7773,7 @@ local trace_entities = false trackers.register("xml.entities", function(v) trac
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
---[[ldx--
-The parser used here is inspired by the variant discussed in the lua book, but
-handles comment and processing instructions, has a different structure, provides
-parent access; a first version used different trickery but was less optimized so we
-went this route. First we had a find based parser, now we have an lpeg based one.
-The find based parser can be found in l-xml-edu.lua along with other older code.
-
-Beware, the interface may change. For instance at, ns, tg, dt may get more
-verbose names. Once the code is stable we will also remove some tracing and
-optimize the code.
-
-I might even decide to reimplement the parser using the latest lpeg trickery
-as the current variant was written when lpeg showed up and it's easier now to
-build tables in one go.
---ldx]]--
xml = xml or { }
local xml = xml
@@ -6898,46 +7783,25 @@ local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
+local utfchar = utf.char
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
---[[ldx--
-First a hack to enable namespace resolving. A namespace is characterized by
-a URL. The following function associates a namespace prefix with a
-pattern. We use lpeg, which in this case is more than twice as fast as a
-find based solution where we loop over an array of patterns. Less code and
-much cleaner.
---ldx]]--
+
xml.xmlns = xml.xmlns or { }
local check = P(false)
local parse = check
---[[ldx--
-The next function associates a namespace prefix with an URL. This
-normally happens independent of parsing.
-
-xml.registerns("mml","mathml")
---ldx]]--
function xml.registerns(namespace, pattern) -- pattern can be an lpeg
check = check + C(P(lower(pattern))) / namespace
parse = P { P(check) + 1 * V(1) }
end
---[[ldx--
-The next function also registers a namespace, but this time we map a
-given namespace prefix onto a registered one, using the given
-URL. This is used for attributes like xmlns:m.
-
-xml.checkns("m","http://www.w3.org/mathml")
---ldx]]--
function xml.checkns(namespace,url)
local ns = lpegmatch(parse,lower(url))
@@ -6946,66 +7810,15 @@ function xml.checkns(namespace,url)
end
end
---[[ldx--
-Next we provide a way to turn an URL into a registered
-namespace. This is used for the xmlns attribute.
-
-resolvedns = xml.resolvens("http://www.w3.org/mathml")
-
-This returns mml.
---ldx]]--
function xml.resolvens(url)
return lpegmatch(parse,lower(url)) or ""
end
---[[ldx--
-A namespace in an element can be remapped onto the registered
-one efficiently by using the xml.xmlns table.
---ldx]]--
-
---[[ldx--
-This version uses lpeg. We follow the same approach as before, stack and top and
-such. This version is about twice as fast which is mostly due to the fact that
-we don't have to prepare the stream for cdata, doctype etc etc. This variant is
-dedicated to Luigi Scarso, who challenged me with 40 megabyte files that
-took 12.5 seconds to load (1.5 for file io and the rest for tree building). With
-the lpeg implementation we got that down to less than 7.3 seconds. Loading the 14
-ConTeXt interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.
-
-Next comes the parser. The rather messy doctype definition comes in many
-disguises so it is no surprise that later on we have to dedicate quite some
-code to it.
-
-The code may look a bit complex but this is mostly due to the fact that we
-resolve namespaces and attach metatables. There is only one public function:
-
-local x = xml.convert(somestring)
-
-An optional second boolean argument tells this function not to create a root
-element.
-
-Valid entities are:
---ldx]]--
+
+
+
-- not just one big nested table capture (lpeg overflow)
@@ -7220,15 +8033,7 @@ local privates_n = {
-- keeps track of defined ones
}
-local function escaped(s)
- if s == "" then
- return ""
- else -- if utffind(s,privates_u) then
- return (utfgsub(s,".",privates_u))
- -- else
- -- return s
- end
-end
+local escaped = utf.remapper(privates_u)
local function unescaped(s)
local p = privates_n[s]
@@ -7243,13 +8048,7 @@ local function unescaped(s)
return p
end
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_p))
- end
-end
+local unprivatized = utf.remapper(privates_p)
xml.privatetoken = unescaped
xml.unprivatized = unprivatized
@@ -7589,7 +8388,12 @@ local function _xmlconvert_(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler(format("load error: %s",errorstr))
+ local currentresource = settings.currentresource
+ if currentresource and currentresource ~= "" then
+ xml.errorhandler(format("load error in [%s]: %s",currentresource,errorstr))
+ else
+ xml.errorhandler(format("load error: %s",errorstr))
+ end
end
end
else
@@ -7634,7 +8438,7 @@ function xmlconvert(data,settings)
if ok then
return result
else
- return _xmlconvert_("")
+ return _xmlconvert_("",settings)
end
end
@@ -7655,10 +8459,7 @@ function xml.inheritedconvert(data,xmldata) -- xmldata is parent
return xc
end
---[[ldx--
-Packaging data in an xml like table is done with the following
-function. Maybe it will go away (when not used).
---ldx]]--
+
function xml.is_valid(root)
return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
@@ -7677,11 +8478,7 @@ end
xml.errorhandler = report_xml
---[[ldx--
-We cannot load an xml file from a filehandle so we need to load
-the whole file first. The function accepts a string representing
-a filename or a file handle.
---ldx]]--
+
function xml.load(filename,settings)
local data = ""
@@ -7695,13 +8492,17 @@ function xml.load(filename,settings)
elseif filename then -- filehandle
data = filename:read("*all")
end
- return xmlconvert(data,settings)
+ if settings then
+ settings.currentresource = filename
+ local result = xmlconvert(data,settings)
+ settings.currentresource = nil
+ return result
+ else
+ return xmlconvert(data,{ currentresource = filename })
+ end
end
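+
+-- Sketch (added; the filename is hypothetical): because the settings now carry the
+-- resource name, a parse error is reported as "load error in [somefile.xml]: ...".
+--
+-- local root = xml.load("somefile.xml")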
---[[ldx--
-When we inject new elements, we need to convert strings to
-valid trees, which is what the next function does.
---ldx]]--
+
local no_root = { no_root = true }
@@ -7714,11 +8515,7 @@ function xml.toxml(data)
end
end
---[[ldx--
-For copying a tree we use a dedicated function instead of the
-generic table copier. Since we know what we're dealing with we
-can speed up things a bit. The second argument is not to be used!
---ldx]]--
+
local function copy(old,tables)
if old then
@@ -7742,13 +8539,7 @@ end
xml.copy = copy
---[[ldx--
-In ConTeXt serializing the tree or parts of the tree is a major
-activity which is why the following function is pretty optimized resulting
-in a few more lines of code than needed. The variant that uses the formatting
-function for all components is about 15% slower than the concatenating
-alternative.
---ldx]]--
+
-- todo: add when not present
@@ -7761,15 +8552,12 @@ function xml.checkbom(root) -- can be made faster
return
end
end
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 1, { special = true, ns = "", tg = "@pi@", dt = { "xml version='1.0' standalone='yes'" } } )
insert(dt, 2, "\n" )
end
end
---[[ldx--
-At the cost of some 25% runtime overhead you can first convert the tree to a string
-and then handle the lot.
-
-How you deal with saving data depends on your preferences. For a 40 MB database
-file the timings on a 2.3 Core Duo are as follows (time in seconds):
-
-1.3 : load data from file to string
-6.1 : convert string into tree
-5.3 : saving in file using xmlsave
-6.8 : converting to string using xml.tostring
-3.6 : saving converted string in file
-
-Beware, these were timings with the old routine but measurements will not be that
-much different I guess.
---ldx]]--
-- maybe this will move to lxml-xml
@@ -8054,10 +8828,7 @@ xml.newhandlers = newhandlers
xml.serialize = serialize
xml.tostring = xmltostring
---[[ldx--
-The next function operates on the content only and needs a handle function
-that accepts a string.
---ldx]]--
+
local function xmlstring(e,handle)
if not handle or (e.special and e.tg ~= "@rt@") then
@@ -8076,9 +8847,7 @@ end
xml.string = xmlstring
---[[ldx--
-A few helpers:
---ldx]]--
+
function xml.settings(e)
@@ -8122,11 +8891,7 @@ function xml.name(root)
end
end
---[[ldx--
-The next helper erases an element but keeps the table as it is,
-and since empty strings are not serialized (effectively) it does
-not harm. Copying the table would take more time. Usage:
---ldx]]--
+
function xml.erase(dt,k)
if dt then
@@ -8138,13 +8903,7 @@ function xml.erase(dt,k)
end
end
---[[ldx--
-The next helper assigns a tree (or string). Usage:
-
-dt[k] = xml.assign(root) or xml.assign(dt,k,root)
---ldx]]--
function xml.assign(dt,k,root)
if dt and k then
@@ -8157,20 +8916,14 @@ end
-- the following helpers may move
---[[ldx--
-The next helper assigns a tree (or string). Usage:
-
-xml.tocdata(e)
-xml.tocdata(e,"error")
---ldx]]--
+
function xml.tocdata(e,wrapper) -- a few more in the aux module
local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s%s>",wrapper,whatever,wrapper)
end
- local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e }
+ local t = { special = true, ns = "", tg = "@cd@", at = { }, rn = "", dt = { whatever }, __p__ = e }
setmetatable(t,getmetatable(e))
e.dt = { t }
end
@@ -8225,7 +8978,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-pth'] = {
+if not modules then modules = { } end modules ['lxml-lpt'] = {
version = 1.001,
comment = "this module is the basis for the lxml-* ones",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -8246,28 +8999,9 @@ local setmetatableindex = table.setmetatableindex
-- beware, this is not xpath ... e.g. position is different (currently) and
-- we have reverse-sibling as reversed preceding sibling
---[[ldx--
-This module can be used stand alone but also inside MkIV in
-which case it hooks into the tracker code. Therefore we provide a few
-functions that set the tracers. Here we overload a previously defined
-function.
-
-If I can get in the mood I will make a variant that is XSLT compliant
-but I wonder if it makes sense.
---ldx]]--
-
---[[ldx--
-Especially the lpath code is experimental, we will support some of xpath, but
-only things that make sense for us; as compensation it is possible to hook in your
-own functions. Apart from preprocessing content for ConTeXt we also need
-this module for process management, like handling ctx and rlx
-files.
-
-a/b/c /*/c
-a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
-a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
---ldx]]--
+
+
+
local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end
local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
@@ -8275,11 +9009,7 @@ local trace_lprofile = false if trackers then trackers.register("xml.profile",
local report_lpath = logs.reporter("xml","lpath")
---[[ldx--
-We've now arrived at an interesting part: accessing the tree using a subset
-of xpath and since we're not compatible we call it lpath. We
-will explain more about its usage in other documents.
---ldx]]--
+
local xml = xml
@@ -8731,14 +9461,23 @@ local lp_builtin = P (
-- for the moment we keep namespaces with attributes
local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
-local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end)
-local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end)
+
+-- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end
+-- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end
+
+lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0"
+lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))"
+
local lp_fastpos = lp_fastpos_n + lp_fastpos_p
+
local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
-local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
- return t .. "("
-end
+-- local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
+-- return t .. "("
+-- end
+
+-- local lp_lua_function = (R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / "%0("
+local lp_lua_function = Cs((R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(")) / "%0"
local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling
if expressions[t] then
@@ -9254,9 +9993,7 @@ end
xml.applylpath = applylpath -- takes a table as first argment, which is what xml.filter will do
---[[ldx--
-This is the main filter function. It returns whatever is asked for.
---ldx]]--
+
function xml.filter(root,pattern) -- no longer funny attribute handling here
return applylpath(root,pattern)
@@ -9354,12 +10091,12 @@ xml.selection = selection -- new method, simple handle
-- generic function finalizer (independant namespace)
-local function dofunction(collected,fnc)
+local function dofunction(collected,fnc,...)
if collected then
local f = functions[fnc]
if f then
for c=1,#collected do
- f(collected[c])
+ f(collected[c],...)
end
else
report_lpath("unknown function '%s'",fnc)
@@ -9460,21 +10197,7 @@ expressions.tag = function(e,n) -- only tg
end
end
---[[ldx--
-Often using an iterator looks nicer in the code than passing handler
-functions. The lua book describes how to use coroutines for that
-purpose. This permits code like:
-
-for r, d, k in xml.elements(xml.load('text.xml'),"title") do
-    print(d[k]) -- old method
-end
-for e in xml.collected(xml.load('text.xml'),"title") do
-    print(e) -- new one
-end
---ldx]]--
local wrap, yield = coroutine.wrap, coroutine.yield
@@ -9515,6 +10238,32 @@ function xml.inspect(collection,pattern)
end
end
+-- texy (see xfdf):
+
+local function split(e)
+ local dt = e.dt
+ if dt then
+ for i=1,#dt do
+ local dti = dt[i]
+ if type(dti) == "string" then
+ dti = gsub(dti,"^[\n\r]*(.-)[\n\r]*$","%1")
+ dti = gsub(dti,"[\n\r]+","\n\n")
+ dt[i] = dti
+ else
+ split(dti)
+ end
+ end
+ end
+ return e
+end
+
+function xml.finalizers.paragraphs(c)
+ for i=1,#c do
+ split(c[i])
+ end
+ return c
+end
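+
+-- Usage sketch (added; the path expression is only illustrative): finalizers are
+-- triggered at the end of an lpath expression, so something like
+--
+-- xml.filter(root,"xfdf/fields/field/value/paragraphs()")
+--
+-- would normalize the text of the matched elements into paragraph chunks.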
+
end -- of closure
@@ -9539,13 +10288,7 @@ local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, l
lpegpatterns.xml = lpegpatterns.xml or { }
local xmlpatterns = lpegpatterns.xml
---[[ldx--
-The following helper functions best belong to the lxml-ini
-module. Some are here because we need them in the mk
-document and other manuals, others came up when playing with
-this module. Since this module is also used in mtxrun we've
-put them here instead of loading more modules than needed.
---ldx]]--
+
local function xmlgsub(t,old,new) -- will be replaced
local dt = t.dt
@@ -9731,9 +10474,7 @@ function xml.processattributes(root,pattern,handle)
return collected
end
---[[ldx--
-The following functions collect elements and texts.
---ldx]]--
+
-- are these still needed -> lxml-cmp.lua
@@ -9772,9 +10513,7 @@ function xml.collect_tags(root, pattern, nonamespace)
end
end
---[[ldx--
-We've now arrived at the functions that manipulate the tree.
---ldx]]--
+
local no_root = { no_root = true }
@@ -10160,9 +10899,7 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
end
---[[ldx--
-Helper (for q2p).
---ldx]]--
+
function xml.cdatatotext(e)
local dt = e.dt
@@ -10259,9 +10996,7 @@ end
-- xml.addentitiesdoctype(x,"hexadecimal")
-- print(x)
---[[ldx--
-Here are a few synonyms.
---ldx]]--
+
xml.all = xml.each
xml.insert = xml.insertafter
@@ -10852,7 +11587,7 @@ local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string
local concat = table.concat
local next, type = next, type
-local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
@@ -11202,12 +11937,14 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
for s in gmatch(str,"[^,]+") do
s = validate(s)
if s then
- n = n + 1 ; t[n] = s
+ n = n + 1
+ t[n] = s
end
end
else
for s in gmatch(str,"[^,]+") do
- n = n + 1 ; t[n] = s
+ n = n + 1
+ t[n] = s
end
end
if trace_expansions then
@@ -11221,7 +11958,7 @@ end
-- We could make the previous one public.
local function validate(s)
- s = collapsepath(s) -- already keeps the //
+ s = collapsepath(s) -- already keeps the trailing / and //
return s ~= "" and not find(s,"^!*unset/*$") and s
end
@@ -11559,7 +12296,7 @@ local resolvers = resolvers
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
-local fileextname = file.extname
+local suffixonly = file.suffixonly
local formats = allocate()
local suffixes = allocate()
@@ -11814,7 +12551,7 @@ function resolvers.formatofvariable(str)
end
function resolvers.formatofsuffix(str) -- of file
- return suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
+ return suffixmap[suffixonly(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
end
function resolvers.variableofformat(str)
@@ -11826,7 +12563,7 @@ function resolvers.variableofformatorsuffix(str)
if v then
return v
end
- v = suffixmap[fileextname(str)]
+ v = suffixmap[suffixonly(str)]
if v then
return formats[v]
end
@@ -11847,21 +12584,7 @@ if not modules then modules = { } end modules ['data-tmp'] = {
license = "see context related readme files"
}
---[[ldx--
-This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. Best is to set the
-following variable. When not set, the usual paths will be
-checked. Personally I prefer the (users) temporary path.
-
-Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.
---ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
local serialize, serializetofile = table.serialize, table.tofile
@@ -12396,11 +13119,12 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local filedirname = file.dirname
local filebasename = file.basename
-local fileextname = file.extname
+local suffixonly = file.suffixonly
local filejoin = file.join
local collapsepath = file.collapsepath
local joinpath = file.joinpath
local allocate = utilities.storage.allocate
+local settings_to_array = utilities.parsers.settings_to_array
local setmetatableindex = table.setmetatableindex
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -12424,7 +13148,7 @@ resolvers.cacheversion = '1.0.1'
resolvers.configbanner = ''
resolvers.homedir = environment.homedir
resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
-resolvers.luacnfname = 'texmfcnf.lua'
+resolvers.luacnfname = "texmfcnf.lua"
resolvers.luacnfstate = "unknown"
-- The web2c tex binaries as well as kpse have built in paths for the configuration
@@ -12696,7 +13420,7 @@ end
local function identify_configuration_files()
local specification = instance.specification
if #specification == 0 then
- local cnfspec = getenv('TEXMFCNF')
+ local cnfspec = getenv("TEXMFCNF")
if cnfspec == "" then
cnfspec = resolvers.luacnfspec
resolvers.luacnfstate = "default"
@@ -12784,7 +13508,7 @@ local function load_configuration_files()
-- we push the value into the main environment (osenv) so
-- that it takes precedence over the default one and therefore
-- also over following definitions
- resolvers.setenv('TEXMFCNF',cnfspec) -- resolves prefixes
+ resolvers.setenv("TEXMFCNF",cnfspec) -- resolves prefixes
-- we now identify and load the specified configuration files
instance.specification = { }
identify_configuration_files()
@@ -12832,10 +13556,11 @@ end
local function locate_file_databases()
-- todo: cache:// and tree:// (runtime)
- local texmfpaths = resolvers.expandedpathlist('TEXMF')
+ local texmfpaths = resolvers.expandedpathlist("TEXMF")
if #texmfpaths > 0 then
for i=1,#texmfpaths do
local path = collapsepath(texmfpaths[i])
+ path = gsub(path,"/+$","") -- in case $HOME expands to something with a trailing /
local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
if stripped ~= "" then
local runtime = stripped == path
@@ -12964,9 +13689,9 @@ function resolvers.prependhash(type,name,cache)
end
function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
- local t = resolvers.splitpath(getenv('TEXMF'))
+ local t = resolvers.splitpath(getenv("TEXMF")) -- okay?
insert(t,1,specification)
- local newspec = concat(t,";")
+ local newspec = concat(t,",") -- not ;
if instance.environment["TEXMF"] then
instance.environment["TEXMF"] = newspec
elseif instance.variables["TEXMF"] then
@@ -13041,14 +13766,19 @@ function resolvers.resetextrapath()
end
function resolvers.registerextrapath(paths,subpaths)
+ paths = settings_to_array(paths)
+ subpaths = settings_to_array(subpaths)
local ep = instance.extra_paths or { }
local oldn = #ep
local newn = oldn
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
+ local nofpaths = #paths
+ local nofsubpaths = #subpaths
+ if nofpaths > 0 then
+ if nofsubpaths > 0 then
+ for i=1,nofpaths do
+ local p = paths[i]
+ for j=1,nofsubpaths do
+ local s = subpaths[j]
local ps = p .. "/" .. s
if not done[ps] then
newn = newn + 1
@@ -13058,7 +13788,8 @@ function resolvers.registerextrapath(paths,subpaths)
end
end
else
- for p in gmatch(paths,"[^,]+") do
+ for i=1,nofpaths do
+ local p = paths[i]
if not done[p] then
newn = newn + 1
ep[newn] = resolvers.cleanpath(p)
@@ -13066,10 +13797,10 @@ function resolvers.registerextrapath(paths,subpaths)
end
end
end
- elseif subpaths and subpaths ~= "" then
+ elseif nofsubpaths > 0 then
for i=1,oldn do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
+ for j=1,nofsubpaths do
+ local s = subpaths[j]
local ps = ep[i] .. "/" .. s
if not done[ps] then
newn = newn + 1
@@ -13147,18 +13878,21 @@ function resolvers.expandedpathlist(str)
return { }
elseif instance.savelists then
str = lpegmatch(dollarstripper,str)
- if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
- instance.lists[str] = expandedpathfromlist(lst)
- end
- return instance.lists[str]
+ local lists = instance.lists
+ local lst = lists[str]
+ if not lst then
+ local l = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ lst = expandedpathfromlist(l)
+ lists[str] = lst
+ end
+ return lst
else
local lst = resolvers.splitpath(resolvers.expansion(str))
return made_list(instance,expandedpathfromlist(lst))
end
end
-function resolvers.expandedpathlistfromvariable(str) -- brrr
+function resolvers.expandedpathlistfromvariable(str) -- brrr / could also have cleaner ^!! /$ //
str = lpegmatch(dollarstripper,str)
local tmp = resolvers.variableofformatorsuffix(str)
return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
@@ -13315,7 +14049,7 @@ local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
local collect_instance_files
local function find_analyze(filename,askedformat,allresults)
- local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ local filetype, wantedfiles, ext = '', { }, suffixonly(filename)
-- too tricky as filename can be bla.1.2.3:
--
-- if not suffixmap[ext] then
@@ -13393,7 +14127,7 @@ local function find_qualified(filename,allresults) -- this one will be split too
if trace_detail then
report_resolving("locating qualified file '%s'", filename)
end
- local forcedname, suffix = "", fileextname(filename)
+ local forcedname, suffix = "", suffixonly(filename)
if suffix == "" then -- why
local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
if format_suffixes then
@@ -14063,6 +14797,8 @@ local gsub = string.gsub
local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+local joinpath, basename, dirname = file.join, file.basename, file.dirname
+local getmetatable, rawset, type = getmetatable, rawset, type
-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
@@ -14104,28 +14840,43 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
+ return cleanpath(basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.dirname((fullname ~= "" and fullname) or str))
+ return cleanpath(dirname((fullname ~= "" and fullname) or str))
end
prefixes.selfautoloc = function(str)
- return cleanpath(file.join(getenv('SELFAUTOLOC'),str))
+ return cleanpath(joinpath(getenv('SELFAUTOLOC'),str))
end
prefixes.selfautoparent = function(str)
- return cleanpath(file.join(getenv('SELFAUTOPARENT'),str))
+ return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str))
end
prefixes.selfautodir = function(str)
- return cleanpath(file.join(getenv('SELFAUTODIR'),str))
+ return cleanpath(joinpath(getenv('SELFAUTODIR'),str))
end
prefixes.home = function(str)
- return cleanpath(file.join(getenv('HOME'),str))
+ return cleanpath(joinpath(getenv('HOME'),str))
+end
+
+local function toppath()
+ local pathname = dirname(inputstack[#inputstack] or "")
+ if pathname == "" then
+ return "."
+ else
+ return pathname
+ end
+end
+
+resolvers.toppath = toppath
+
+prefixes.toppath = function(str)
+ return cleanpath(joinpath(toppath(),str))
end
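+
+-- Sketch (added; the filename is made up): with this prefix a reference like
+-- "toppath:images/logo.pdf" resolves relative to the directory of the file on top
+-- of the input stack, falling back to "." when that stack is empty.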
prefixes.env = prefixes.environment
@@ -14161,6 +14912,8 @@ function resolvers.resetresolve(str)
resolved, abstract = { }, { }
end
+-- todo: use an lpeg (see data-lua for !! / stripper)
+
local function resolve(str) -- use schemes, this one is then for the commandline only
if type(str) == "table" then
local t = { }
@@ -14186,7 +14939,7 @@ end
resolvers.resolve = resolve
resolvers.unresolve = unresolve
-if os.uname then
+if type(os.uname) == "function" then
for k, v in next, os.uname() do
if not prefixes[k] then
@@ -14198,11 +14951,17 @@ end
if os.type == "unix" then
+ -- We need to distinguish between a prefix and something else: so we
+ -- have a special repath variant for linux. Also, when a new prefix is
+ -- defined, we need to remake the matcher.
+
local pattern
local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
local colon = P(":")
- local p
for k, v in table.sortedpairs(prefixes) do
if p then
p = P(k) + p
@@ -14211,9 +14970,6 @@ if os.type == "unix" then
end
end
pattern = Cs((p * colon + colon/";" + P(1))^0)
- if t then
- t[k] = v
- end
end
makepattern()
@@ -14424,18 +15180,7 @@ local trace_cache = false trackers.register("resolvers.cache", functi
local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
---[[ldx--
-Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).
-
-Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.
-
-Examples of usage can be found in the font related code.
---ldx]]--
containers = containers or { }
local containers = containers
@@ -14670,11 +15415,7 @@ local trace_locating = false trackers.register("resolvers.locating", function(v
local report_zip = logs.reporter("resolvers","zip")
--- zip:///oeps.zip?name=bla/bla.tex
--- zip:///oeps.zip?tree=tex/texmf-local
--- zip:///texmf.zip?tree=/tex/texmf
--- zip:///texmf.zip?tree=/tex/texmf-local
--- zip:///texmf-mine.zip?tree=/tex/texmf-projects
+
local resolvers = resolvers
@@ -14999,7 +15740,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-crl'] = {
+if not modules then modules = { } end modules ['data-sch'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -15007,60 +15748,199 @@ if not modules then modules = { } end modules ['data-crl'] = {
license = "see context related readme files"
}
--- this one is replaced by data-sch.lua --
+local loadstring = loadstring
+local gsub, concat, format = string.gsub, table.concat, string.format
+local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-local gsub = string.gsub
+local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end)
+local report_schemes = logs.reporter("resolvers","schemes")
-local resolvers = resolvers
+local http = require("socket.http")
+local ltn12 = require("ltn12")
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
+local resolvers = resolvers
+local schemes = resolvers.schemes or { }
+resolvers.schemes = schemes
+
+local cleaners = { }
+schemes.cleaners = cleaners
+
+local threshold = 24 * 60 * 60
+
+directives.register("schemes.threshold", function(v) threshold = tonumber(v) or threshold end)
+
+function cleaners.none(specification)
+ return specification.original
+end
+
+function cleaners.strip(specification)
+ return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods
+end
+
+function cleaners.md5(specification)
+ return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
+end
+
+local cleaner = cleaners.strip
+
+directives.register("schemes.cleanmethod", function(v) cleaner = cleaners[v] or cleaners.strip end)
+
+function resolvers.schemes.cleanname(specification)
+ local hash = cleaner(specification)
+ if trace_schemes then
+ report_schemes("hashing %s to %s",specification.original,hash)
+ end
+ return hash
+end
-resolvers.curl = resolvers.curl or { }
-local curl = resolvers.curl
+local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { }
-local cached = { }
+local function runcurl(name,cachename) -- we use sockets instead of the curl library when possible
+ local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name
+ os.spawn(command)
+end
-local function runcurl(specification)
+local function fetch(specification)
local original = specification.original
- -- local scheme = specification.scheme
- local cleanname = gsub(original,"[^%a%d%.]+","-")
- local cachename = caches.setfirstwritablefile(cleanname,"curl")
+ local scheme = specification.scheme
+ local cleanname = schemes.cleanname(specification)
+ local cachename = caches.setfirstwritablefile(cleanname,"schemes")
if not cached[original] then
- if not io.exists(cachename) then
+ statistics.starttiming(schemes)
+ if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > (thresholds[scheme] or threshold)) then
cached[original] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
- os.spawn(command)
+ local handler = handlers[scheme]
+ if handler then
+ if trace_schemes then
+ report_schemes("fetching '%s', protocol '%s', method 'built-in'",original,scheme)
+ end
+ logs.flush()
+ handler(specification,cachename)
+ else
+ if trace_schemes then
+ report_schemes("fetching '%s', protocol '%s', method 'curl'",original,scheme)
+ end
+ logs.flush()
+ runcurl(original,cachename)
+ end
end
if io.exists(cachename) then
cached[original] = cachename
+ if trace_schemes then
+ report_schemes("using cached '%s', protocol '%s', cachename '%s'",original,scheme,cachename)
+ end
else
cached[original] = ""
+ if trace_schemes then
+ report_schemes("using missing '%s', protocol '%s'",original,scheme)
+ end
end
+ loaded[scheme] = loaded[scheme] + 1
+ statistics.stoptiming(schemes)
+ else
+ if trace_schemes then
+ report_schemes("reusing '%s', protocol '%s'",original,scheme)
+ end
+ reused[scheme] = reused[scheme] + 1
end
return cached[original]
end
--- old code: we could be cleaner using specification (see schemes)
-
local function finder(specification,filetype)
- return resolvers.methodhandler("finders",runcurl(specification),filetype)
+ return resolvers.methodhandler("finders",fetch(specification),filetype)
end
local opener = openers.file
local loader = loaders.file
-local function install(scheme)
- finders[scheme] = finder
- openers[scheme] = opener
- loaders[scheme] = loader
+local function install(scheme,handler,newthreshold)
+ handlers [scheme] = handler
+ loaded [scheme] = 0
+ reused [scheme] = 0
+ finders [scheme] = finder
+ openers [scheme] = opener
+ loaders [scheme] = loader
+ thresholds[scheme] = newthreshold or threshold
end
-resolvers.curl.install = install
+schemes.install = install
+
+local function http_handler(specification,cachename)
+ local tempname = cachename .. ".tmp"
+ local f = io.open(tempname,"wb")
+ local status, message = http.request {
+ url = specification.original,
+ sink = ltn12.sink.file(f)
+ }
+ if not status then
+ os.remove(tempname)
+ else
+ os.remove(cachename)
+ os.rename(tempname,cachename)
+ end
+ return cachename
+end
-install('http')
-install('https')
+install('http',http_handler)
+install('https') -- see pod
install('ftp')
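+
+-- A hedged sketch of installing an extra scheme with its own fetcher and a one
+-- hour threshold (the 'example' scheme and its handler are hypothetical):
+--
+-- local function example_handler(specification,cachename)
+--     io.savedata(cachename,"fetched: " .. specification.original)
+-- end
+--
+-- install('example',example_handler,60*60)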
+statistics.register("scheme handling time", function()
+ local l, r, nl, nr = { }, { }, 0, 0
+ for k, v in table.sortedhash(loaded) do
+ if v > 0 then
+ nl = nl + 1
+ l[nl] = k .. ":" .. v
+ end
+ end
+ for k, v in table.sortedhash(reused) do
+ if v > 0 then
+ nr = nr + 1
+ r[nr] = k .. ":" .. v
+ end
+ end
+ local n = nl + nr
+ if n > 0 then
+ l = nl > 0 and concat(l) or "none"
+ r = nr > 0 and concat(r) or "none"
+ return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s",
+ statistics.elapsedtime(schemes), n, threshold, l, r)
+ else
+ return nil
+ end
+end)
+
+-- We provide a few more helpers:
+
+----- http = require("socket.http")
+local httprequest = http.request
+local toquery = url.toquery
+
+-- local function httprequest(url)
+-- return os.resultof(format("curl --silent %q", url))
+-- end
+
+local function fetchstring(url,data)
+ local q = data and toquery(data)
+ if q then
+ url = url .. "?" .. q
+ end
+ local reply = httprequest(url)
+ return reply -- just one argument
+end
+
+schemes.fetchstring = fetchstring
+
+function schemes.fetchtable(url,data)
+ local reply = fetchstring(url,data)
+ if reply then
+ local s = loadstring("return " .. reply)
+ if s then
+ return s()
+ end
+ end
+end
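+
+-- Usage sketch (added; url and query field are made up). The reply of fetchtable
+-- is expected to be a serialized table body such as "{ status = 'ok' }".
+--
+-- local page = schemes.fetchstring("http://localhost:8080/status")
+-- local data = schemes.fetchtable("http://localhost:8080/status", { detail = "all" })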
+
end -- of closure
@@ -15074,170 +15954,199 @@ if not modules then modules = { } end modules ['data-lua'] = {
license = "see context related readme files"
}
--- some loading stuff ... we might move this one to slot 2 depending
--- on the developments (the loaders must not trigger kpse); we could
--- of course use a more extensive lib path spec
+-- We overload the regular loader. We do so because we operate mostly in
+-- tds and use our own loader code. Alternatively we could use a more
+-- extensive definition of package.path and package.cpath but even then
+-- we're not done. Also, we now have better tracing.
+--
+-- -- local mylib = require("libtest")
+-- -- local mysql = require("luasql.mysql")
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local concat = table.concat
+
+local trace_libraries = false
+
+trackers.register("resolvers.libraries", function(v) trace_libraries = v end)
+trackers.register("resolvers.locating", function(v) trace_libraries = v end)
local report_libraries = logs.reporter("resolvers","libraries")
local gsub, insert = string.gsub, table.insert
+local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
local unpack = unpack or table.unpack
+local is_readable = file.is_readable
local resolvers, package = resolvers, package
-local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs'
-local clibformats = { 'lib' }
-
-local _path_, libpaths, _cpath_, clibpaths
-
-function package.libpaths()
- if not _path_ or package.path ~= _path_ then
- _path_ = package.path
- libpaths = file.splitpath(_path_,";")
+local libsuffixes = { 'tex', 'lua' }
+local clibsuffixes = { 'lib' }
+local libformats = { 'TEXINPUTS', 'LUAINPUTS' }
+local clibformats = { 'CLUAINPUTS' }
+
+local libpaths = nil
+local clibpaths = nil
+local libhash = { }
+local clibhash = { }
+local libextras = { }
+local clibextras = { }
+
+local pattern = Cs(P("!")^0 / "" * (P("/") * P(-1) / "/" + P("/")^1 / "/" + 1)^0)
+
+local function cleanpath(path) --hm, don't we have a helper for this?
+ return resolvers.resolve(lpegmatch(pattern,path))
+end
+
+local function getlibpaths()
+ if not libpaths then
+ libpaths = { }
+ for i=1,#libformats do
+ local paths = resolvers.expandedpathlistfromvariable(libformats[i])
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not libhash[path] then
+ libpaths[#libpaths+1] = path
+ libhash[path] = true
+ end
+ end
+ end
end
return libpaths
end
-function package.clibpaths()
- if not _cpath_ or package.cpath ~= _cpath_ then
- _cpath_ = package.cpath
- clibpaths = file.splitpath(_cpath_,";")
+local function getclibpaths()
+ if not clibpaths then
+ clibpaths = { }
+ for i=1,#clibformats do
+ local paths = resolvers.expandedpathlistfromvariable(clibformats[i])
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not clibhash[path] then
+ clibpaths[#clibpaths+1] = path
+ clibhash[path] = true
+ end
+ end
+ end
end
return clibpaths
end
-local function thepath(...)
- local t = { ... } t[#t+1] = "?.lua"
- local path = file.join(unpack(t))
- if trace_locating then
- report_libraries("! appending '%s' to 'package.path'",path)
+package.libpaths = getlibpaths
+package.clibpaths = getclibpaths
+
+function package.extralibpath(...)
+ local paths = { ... }
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not libhash[path] then
+ if trace_libraries then
+ report_libraries("! extra lua path '%s'",path)
+ end
+ libextras[#libextras+1] = path
+ libpaths[#libpaths +1] = path
+ end
end
- return path
end
-local p_libpaths, a_libpaths = { }, { }
-
-function package.appendtolibpath(...)
- insert(a_libpath,thepath(...))
+function package.extraclibpath(...)
+ local paths = { ... }
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not clibhash[path] then
+ if trace_libraries then
+ report_libraries("! extra lib path '%s'",path)
+ end
+ clibextras[#clibextras+1] = path
+ clibpaths[#clibpaths +1] = path
+ end
+ end
end
-function package.prependtolibpath(...)
- insert(p_libpaths,1,thepath(...))
+if not package.loaders[-2] then
+ -- use package-path and package-cpath
+ package.loaders[-2] = package.loaders[2]
end
--- beware, we need to return a loadfile result !
+local function loadedaslib(resolved,rawname)
+ return package.loadlib(resolved,"luaopen_" .. gsub(rawname,"%.","_"))
+end
-local function loaded(libpaths,name,simple)
- for i=1,#libpaths do -- package.path, might become option
- local libpath = libpaths[i]
- local resolved = gsub(libpath,"%?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.path': '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
+local function loadedbylua(name)
+ if trace_libraries then
+ report_libraries("! locating %q using normal loader",name)
end
+ local resolved = package.loaders[-2](name)
end
-package.loaders[2] = function(name) -- was [#package.loaders+1]
- if file.suffix(name) == "" then
- name = file.addsuffix(name,"lua") -- maybe a list
- if trace_locating then -- mode detail
- report_libraries("! locating '%s' with forced suffix",name)
- end
- else
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
- end
+local function loadedbyformat(name,rawname,suffixes,islib)
+ if trace_libraries then
+ report_libraries("! locating %q as %q using formats %q",rawname,name,concat(suffixes))
end
- for i=1,#libformats do
- local format = libformats[i]
+ for i=1,#suffixes do -- so we use findfile and not a lookup loop
+ local format = suffixes[i]
local resolved = resolvers.findfile(name,format) or ""
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'libformat path': '%s'",name,format)
+ if trace_libraries then
+ report_libraries("! checking for %q' using format %q",name,format)
end
if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located via environment: '%s'",name,resolved)
+ if trace_libraries then
+ report_libraries("! lib %q located on %q",name,resolved)
end
- return loadfile(resolved)
- end
- end
- -- libpaths
- local libpaths, clibpaths = package.libpaths(), package.clibpaths()
- local simple = gsub(name,"%.lua$","")
- local simple = gsub(simple,"%.","/")
- local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple)
- if resolved then
- return resolved
- end
- --
- local libname = file.addsuffix(simple,os.libsuffix)
- for i=1,#clibformats do
- -- better have a dedicated loop
- local format = clibformats[i]
- local paths = resolvers.expandedpathlistfromvariable(format)
- for p=1,#paths do
- local path = paths[p]
- local resolved = file.join(path,libname)
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'clibformat path': '%s'",libname,path)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
- end
- return package.loadlib(resolved,name)
+ if islib then
+ return loadedaslib(resolved,rawname)
+ else
+ return loadfile(resolved)
end
end
end
- for i=1,#clibpaths do -- package.path, might become option
- local libpath = clibpaths[i]
- local resolved = gsub(libpath,"?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
+end
+
+local function loadedbypath(name,rawname,paths,islib,what)
+ if trace_libraries then
+ report_libraries("! locating %q as %q on %q paths",rawname,name,what)
+ end
+ for p=1,#paths do
+ local path = paths[p]
+ local resolved = file.join(path,name)
+ if trace_libraries then -- more detail
+ report_libraries("! checking for %q using %q path %q",name,what,path)
end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
+ if is_readable(resolved) then
+ if trace_libraries then
+ report_libraries("! lib %q located on %q",name,resolved)
+ end
+ if islib then
+ return loadedaslib(resolved,rawname)
+ else
+ return loadfile(resolved)
end
- return package.loadlib(resolved,name)
- end
- end
- -- just in case the distribution is messed up
- if trace_loading then -- more detail
- report_libraries("! checking for '%s' using 'luatexlibs': '%s'",name)
- end
- local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or ""
- if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located by basename via environment: '%s'",name,resolved)
end
- return loadfile(resolved)
end
- if trace_locating then
- report_libraries('? unable to locate lib: %s',name)
- end
--- return "unable to locate " .. name
end
-resolvers.loadlualib = require
-
--- -- -- --
+local function notloaded(name)
+ if trace_libraries then
+ report_libraries("? unable to locate library %q",name)
+ end
+end
-package.obsolete = package.obsolete or { }
+package.loaders[2] = function(name)
+ local thename = gsub(name,"%.","/")
+ local luaname = file.addsuffix(thename,"lua")
+ local libname = file.addsuffix(thename,os.libsuffix)
+ return
+ loadedbyformat(luaname,name,libsuffixes, false)
+ or loadedbyformat(libname,name,clibsuffixes, true)
+ or loadedbypath (luaname,name,getlibpaths (),false,"lua")
+ or loadedbypath (luaname,name,getclibpaths(),false,"lua")
+ or loadedbypath (libname,name,getclibpaths(),true, "lib")
+ or loadedbylua (name)
+ or notloaded (name)
+end
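+
+-- Illustration only (the module name is made up):
+--
+-- local bar = require("foo.bar")
+--
+-- tries foo/bar.lua via the known tex/lua formats and lib paths first, then
+-- foo/bar.<os.libsuffix> as a compiled library, and finally the stock loader.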
-package.append_libpath = appendtolibpath -- will become obsolete
-package.prepend_libpath = prependtolibpath -- will become obsolete
+-- package.loaders[3] = nil
+-- package.loaders[4] = nil
-package.obsolete.append_libpath = appendtolibpath -- will become obsolete
-package.obsolete.prepend_libpath = prependtolibpath -- will become obsolete
+resolvers.loadlualib = require
end -- of closure
@@ -15707,7 +16616,6 @@ function environment.make_format(name)
end
function environment.run_format(name,data,more)
- -- hm, rather old code here; we can now use the file.whatever functions
if name and name ~= "" then
local barename = file.removesuffix(name)
local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
@@ -15736,6 +16644,129 @@ function environment.run_format(name,data,more)
end
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['util-tpl'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code. Coming from dos and windows, I've always used %whatever%
+-- as template variables so let's stick to it. After all, it's easy to parse and stands
+-- out well. A double %% is turned into a regular %.
+
+utilities.templates = utilities.templates or { }
+local templates = utilities.templates
+
+local trace_template = false trackers.register("templates.trace",function(v) trace_template = v end)
+local report_template = logs.reporter("template")
+
+local format = string.format
+local P, C, Cs, Carg, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.Carg, lpeg.match
+
+-- todo: make installable template.new
+
+local replacer
+
+local function replacekey(k,t,how,recursive)
+ local v = t[k]
+ if not v then
+ if trace_template then
+ report_template("unknown key %q",k)
+ end
+ return ""
+ else
+ if trace_template then
+ report_template("setting key %q to value %q",k,v)
+ end
+ if recursive then
+ return lpegmatch(replacer,v,1,t,how,recursive)
+ else
+ return v
+ end
+ end
+end
+
+local sqlescape = lpeg.replacer {
+ { "'", "''" },
+ { "\\", "\\\\" },
+ { "\r\n", "\\n" },
+ { "\r", "\\n" },
+ -- { "\t", "\\t" },
+}
+
+local escapers = {
+ lua = function(s)
+ return format("%q",s)
+ end,
+ sql = function(s)
+ return lpegmatch(sqlescape,s)
+ end,
+}
+
+local function replacekeyunquoted(s,t,how,recurse) -- ".. \" "
+ local escaper = how and escapers[how] or escapers.lua
+ return escaper(replacekey(s,t,how,recurse))
+end
+
+local single = P("%") -- test %test% test : resolves test
+local double = P("%%") -- test 10%% test : %% becomes %
+local lquoted = P("%[") -- test %[test]" test : resolves test with escaped "'s
+local rquoted = P("]%") --
+
+local escape = double / '%%'
+local nosingle = single / ''
+local nodouble = double / ''
+local nolquoted = lquoted / ''
+local norquoted = rquoted / ''
+
+local key = nosingle * (C((1-nosingle)^1 * Carg(1) * Carg(2) * Carg(3))/replacekey) * nosingle
+local unquoted = nolquoted * ((C((1 - norquoted)^1) * Carg(1) * Carg(2) * Carg(3))/replacekeyunquoted) * norquoted
+local any = P(1)
+
+ replacer = Cs((unquoted + escape + key + any)^0)
+
+local function replace(str,mapping,how,recurse)
+ if mapping then
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ else
+ return str
+ end
+end
+
+-- print(replace("test '%[x]%' test",{ x = [[a 'x' a]] }))
+-- print(replace("test '%[x]%' test",{ x = [[a 'x' a]] },'sql'))
+
+templates.replace = replace
+
+function templates.load(filename,mapping,how,recurse)
+ local data = io.loaddata(filename) or ""
+ if mapping and next(mapping) then
+ return replace(data,mapping,how,recurse)
+ else
+ return data
+ end
+end
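+
+-- Illustration only (the filename and keys are made up):
+--
+-- local letter = templates.load("letter.tpl", { name = "foo", date = "today" })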
+
+function templates.resolve(t,mapping,how,recurse)
+ if not mapping then
+ mapping = t
+ end
+ for k, v in next, t do
+ t[k] = replace(v,mapping,how,recurse)
+ end
+ return t
+end
+
+-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
+-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
+
+
end -- of closure
-- end library merge
@@ -15796,7 +16827,7 @@ own.libs = { -- order can be made better
-- 'data-bin.lua',
'data-zip.lua',
'data-tre.lua',
- 'data-crl.lua',
+ 'data-sch.lua',
'data-lua.lua',
'data-aux.lua', -- updater
'data-tmf.lua',
@@ -15804,6 +16835,8 @@ own.libs = { -- order can be made better
'luat-sta.lua',
'luat-fmt.lua',
+
+ 'util-tpl.lua',
}
-- We need this hack till luatex is fixed.
@@ -15824,7 +16857,7 @@ own.path = gsub(match(own.name,"^(.+)[\\/].-$") or ".","\\","/")
local ownpath, owntree = own.path, environment and environment.ownpath or own.path
-own.list = {
+own.list = { -- predictable paths
'.',
ownpath ,
ownpath .. "/../sources", -- HH's development path
@@ -15848,7 +16881,7 @@ local function locate_libs()
local filename = pth .. "/" .. lib
local found = lfs.isfile(filename)
if found then
- package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require
+ package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require (probably obsolete)
return pth
end
end
@@ -15980,6 +17013,7 @@ local helpinfo = [[
--var-value report value of variable
--find-file report file location
--find-path report path of file
+--show-package-path report package paths
--pattern=str filter variables
]]
@@ -16093,7 +17127,8 @@ function runners.execute_script(fullname,internal,nosplit)
elseif state == 'skip' then
return true
elseif state == "run" then
- local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), ""
+ local path, name, suffix = file.splitname(fullname)
+ local result = ""
if path ~= "" then
result = fullname
elseif name then
@@ -16104,7 +17139,7 @@ function runners.execute_script(fullname,internal,nosplit)
name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
- suffix = file.extname(name)
+ suffix = file.suffix(name)
end
if suffix == "" then
-- loop over known suffixes
@@ -16131,7 +17166,7 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local binary = runners.applications[file.extname(result)]
+ local binary = runners.applications[file.suffix(result)]
result = string.quoted(string.unquoted(result))
-- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
-- result = '"' .. result .. '"'
@@ -16324,7 +17359,7 @@ function resolvers.launch(str)
-- maybe we also need to test on mtxrun.launcher.suffix environment
-- variable or on windows consult the assoc and ftype vars and such
local launchers = runners.launchers[os.platform] if launchers then
- local suffix = file.extname(str) if suffix then
+ local suffix = file.suffix(str) if suffix then
local runner = launchers[suffix] if runner then
str = runner .. " " .. str
end
@@ -16383,7 +17418,7 @@ function runners.find_mtx_script(filename)
end
filename = file.addsuffix(filename,"lua")
local basename = file.removesuffix(file.basename(filename))
- local suffix = file.extname(filename)
+ local suffix = file.suffix(filename)
-- qualified path, raw name
local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename
if fullname and fullname ~= "" then
@@ -16438,7 +17473,7 @@ function runners.execute_ctx_script(filename,...)
runners.register_arguments(...)
local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
- if file.extname(fullname) == "cld" then
+ if file.suffix(fullname) == "cld" then
-- handy in editors where we force --autopdf
report("running cld script: %s",filename)
table.insert(arguments,1,fullname)
@@ -16546,6 +17581,21 @@ function runners.timed(action)
statistics.timed(action)
end
+function runners.associate(filename)
+ os.launch(filename)
+end
+
+function runners.gethelp(filename)
+ local url = environment.argument("url")
+ if url and url ~= "" then
+ local command = string.gsub(environment.argument("command") or "unknown","^%s*\\*(.-)%s*$","%1")
+ url = utilities.templates.replace(url,{ command = command })
+ os.launch(url)
+ else
+ report("no --url given")
+ end
+end
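+
+-- Illustration only (the url is made up):
+--
+--   mtxrun --gethelp --url="http://example.com/help/%command%" --command=framed
+--
+-- replaces the %command% template key and launches http://example.com/help/framed
+-- in the associated viewer.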
+
-- this is a bit dirty ... first we store the first filename and next we
-- split the arguments so that we only see the ones meant for this script
-- ... later we will use the second half
@@ -16648,7 +17698,18 @@ else
end
-if e_argument("selfmerge") then
+if e_argument("script") or e_argument("scripts") then
+
+ -- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
+ if is_mkii_stub then
+ ok = runners.execute_script(filename,false,true)
+ else
+ ok = runners.execute_ctx_script(filename)
+ end
+
+elseif e_argument("selfmerge") then
-- embed used libraries
@@ -16671,23 +17732,25 @@ elseif e_argument("selfupdate") then
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif e_argument("ctxlua") or e_argument("internal") then
+elseif e_argument("show-package-path") or e_argument("show-package-paths") then
- -- run a script by loading it (using libs)
+ local l = package.libpaths()
+ local c = package.clibpaths()
- runners.loadbase()
- ok = runners.execute_script(filename,true)
+ for i=1,#l do
+ report("package lib path %s: %s",i,l[i])
+ end
-elseif e_argument("script") or e_argument("scripts") then
+ for i=1,#c do
+ report("package clib path %s: %s",i,c[i])
+ end
- -- run a script by loading it (using libs), pass args
+elseif e_argument("ctxlua") or e_argument("internal") then
+
+ -- run a script by loading it (using libs)
runners.loadbase()
- if is_mkii_stub then
- ok = runners.execute_script(filename,false,true)
- else
- ok = runners.execute_ctx_script(filename)
- end
+ ok = runners.execute_script(filename,true)
elseif e_argument("execute") then
@@ -16715,6 +17778,14 @@ elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
+elseif e_argument("associate") then
+
+ runners.associate(filename)
+
+elseif e_argument("gethelp") then
+
+ runners.gethelp()
+
elseif e_argument("makestubs") then
-- make stubs (deprecated)
@@ -16806,7 +17877,7 @@ elseif e_argument("find-path") then
elseif e_argument("expand-braces") then
- -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
resolvers.load("nofiles")
runners.register_arguments(filename)
diff --git a/scripts/context/ruby/fcd_start.rb b/scripts/context/ruby/fcd_start.rb
deleted file mode 100644
index b1fa42a2a..000000000
--- a/scripts/context/ruby/fcd_start.rb
+++ /dev/null
@@ -1,472 +0,0 @@
-# Hans Hagen / PRAGMA ADE / 2005 / www.pragma-ade.com
-#
-# Fast Change Dir
-#
-# This is a kind of variant of the good old ncd
-# program. This script uses the same indirect cmd
-# trick as Erwin Waterlander's wcd program.
-#
-# === windows: fcd.cmd ===
-#
-# @echo off
-# ruby -S fcd_start.rb %1 %2 %3 %4 %5 %6 %7 %8 %9
-# if exist "%HOME%/fcd_stage.cmd" call %HOME%/fcd_stage.cmd
-#
-# === linux: fcd (fcd.sh) ===
-#
-# !/usr/bin/env sh
-# ruby -S fcd_start.rb $1 $2 $3 $4 $5 $6 $7 $8 $9
-# if test -f "$HOME/fcd_stage.sh" ; then
-# . $HOME/fcd_stage.sh ;
-# fi;
-#
-# ===
-#
-# On linux, one should source the file: ". fcd args" in order
-# to make the chdir persistent.
-#
-# You can create a stub with:
-#
-# ruby fcd_start.rb --stub --verbose
-#
-# usage:
-#
-# fcd --make t:\
-# fcd --add f:\project
-# fcd [--find] whatever
-# fcd [--find] whatever c (c being a list entry)
-# fcd [--find] whatever . (last choice with this pattern)
-# fcd --list
-
-# todo: HOMEDRIVE\HOMEPATH
-
-require 'rbconfig'
-
-class FastCD
-
- @@rootpath = nil
-
- ['HOME','TEMP','TMP','TMPDIR'].each do |key|
- if ENV[key] then
- if FileTest.directory?(ENV[key]) then
- @@rootpath = ENV[key]
- break
- end
- end
- end
-
- exit unless @@rootpath
-
- @@mswindows = Config::CONFIG['host_os'] =~ /mswin/
- @@maxlength = 26
-
- require 'Win32API' if @@mswindows
-
- if @@mswindows then
- @@stubcode = [
- '@echo off',
- '',
- 'if not exist "%HOME%" goto temp',
- '',
- ':home',
- '',
- 'ruby -S fcd_start.rb %1 %2 %3 %4 %5 %6 %7 %8 %9',
- '',
- 'if exist "%HOME%\fcd_stage.cmd" call %HOME%\fcd_stage.cmd',
- 'goto end',
- '',
- ':temp',
- '',
- 'ruby -S fcd_start.rb %1 %2 %3 %4 %5 %6 %7 %8 %9',
- '',
- 'if exist "%TEMP%\fcd_stage.cmd" call %TEMP%\fcd_stage.cmd',
- 'goto end',
- '',
- ':end'
- ].join("\n")
- else
- @@stubcode = [
- '#!/usr/bin/env sh',
- '',
- 'ruby -S fcd_start.rb $1 $2 $3 $4 $5 $6 $7 $8 $9',
- '',
- 'if test -f "$HOME/fcd_stage.sh" ; then',
- ' . $HOME/fcd_stage.sh ;',
- 'fi;'
- ].join("\n")
- end
-
- @@selfpath = File.dirname($0)
- @@datafile = File.join(@@rootpath,'fcd_state.dat')
- @@histfile = File.join(@@rootpath,'fcd_state.his')
- @@cdirfile = File.join(@@rootpath,if @@mswindows then 'fcd_stage.cmd' else 'fcd_stage.sh' end)
- @@stubfile = File.join(@@selfpath,if @@mswindows then 'fcd.cmd' else 'fcd' end)
-
- def initialize(verbose=false)
- @list = Array.new
- @hist = Hash.new
- @result = Array.new
- @pattern = ''
- @result = ''
- @verbose = verbose
- if f = File.open(@@cdirfile,'w') then
- f << "#{if @@mswindows then 'rem' else '#' end} no dir to change to"
- f.close
- else
- report("unable to create stub #{@@cdirfile}")
- end
- end
-
- def filename(name)
- File.join(@@root,name)
- end
-
- def report(str,verbose=@verbose)
- puts(">> #{str}") if verbose
- end
-
- def flush(str,verbose=@verbose)
- print(str) if verbose
- end
-
- def clear
- if FileTest.file?(@@histfile)
- begin
- File.delete(@@histfile)
- rescue
- report("error in deleting history file '#{@histfile}'")
- else
- report("history file '#{@histfile}' is deleted")
- end
- else
- report("no history file '#{@histfile}'")
- end
- end
-
- def scan(dir='.')
- begin
- [dir].flatten.sort.uniq.each do |dir|
- begin
- Dir.chdir(dir)
- report("scanning '#{dir}'")
- # flush(">> ")
- Dir.glob("**/*").each do |d|
- if FileTest.directory?(d) then
- @list << File.expand_path(d)
- # flush(".")
- end
- end
- # flush("\n")
- @list = @list.sort.uniq
- report("#{@list.size} entries found")
- rescue
- report("unknown directory '#{dir}'")
- end
- end
- rescue
- report("invalid dir specification ")
- end
- end
-
- def save
- begin
- if f = File.open(@@datafile,'w') then
- @list.each do |l|
- f.puts(l)
- end
- f.close
- report("#{@list.size} status bytes saved in #{@@datafile}")
- else
- report("unable to save status in #{@@datafile}")
- end
- rescue
- report("error in saving status in #{@@datafile}")
- end
- end
-
- def remember
- if @hist[@pattern] == @result then
- # no need to save result
- else
- begin
- if f = File.open(@@histfile,'w') then
- @hist[@pattern] = @result
- @hist.keys.each do |k|
- f.puts("#{k} #{@hist[k]}")
- end
- f.close
- report("#{@hist.size} history entries saved in #{@@histfile}")
- else
- report("unable to save history in #{@@histfile}")
- end
- rescue
- report("error in saving history in #{@@histfile}")
- end
- end
- end
-
- def load
- begin
- @list = IO.read(@@datafile).split("\n")
- report("#{@list.length} status bytes loaded from #{@@datafile}")
- rescue
- report("error in loading status from #{@@datafile}")
- end
- begin
- IO.readlines(@@histfile).each do |line|
- if line =~ /^(.*?)\s+(.*)$/i then
- @hist[$1] = $2
- end
- end
- report("#{@hist.length} history entries loaded from #{@@histfile}")
- rescue
- report("error in loading history from #{@@histfile}")
- end
- end
-
- def show
- begin
- puts("directories:")
- puts("\n")
- if @list.length > 0 then
- @list.each do |l|
- puts(l)
- end
- else
- puts("no entries")
- end
- puts("\n")
- puts("history:")
- puts("\n")
- if @hist.length > 0 then
- @hist.keys.sort.each do |h|
- puts("#{h} >> #{@hist[h]}")
- end
- else
- puts("no entries")
- end
- rescue
- end
- end
-
- def find(pattern=nil)
- begin
- if pattern = [pattern].flatten.first then
- if pattern.length > 0 and @pattern = pattern then
- @result = @list.grep(/\/#{@pattern}$/i)
- if @result.length == 0 then
- @result = @list.grep(/\/#{@pattern}[^\/]*$/i)
- end
- end
- else
- puts(Dir.pwd.gsub(/\\/o, '/'))
- end
- rescue
- puts("some error")
- end
- end
-
- def chdir(dir)
- begin
- if dir then
- if f = File.open(@@cdirfile,'w') then
- if @@mswindows then
- f.puts("cd /d #{dir.gsub('/','\\')}")
- else
- f.puts("cd #{dir.gsub("\\",'/')}")
- end
- f.close
- end
- @result = dir
- report("changing to #{dir}",true)
- else
- report("not changing dir")
- end
- rescue
- end
- end
-
- def choose(args=[])
- offset = 97
- unless @pattern.empty? then
- begin
- case @result.size
- when 0 then
- report("dir '#{@pattern}' not found",true)
- when 1 then
- chdir(@result[0])
- else
- list = @result.dup
- begin
- if answer = args[1] then # assignment & test
- if answer == '.' and @hist.key?(@pattern) then
- if FileTest.directory?(@hist[@pattern]) then
- print("last choice ")
- chdir(@hist[@pattern])
- return
- end
- else
- index = answer[0] - offset
- if dir = list[index] then
- chdir(dir)
- return
- end
- end
- end
- rescue
- puts("some error")
- end
- loop do
- print("\n")
- list.each_index do |i|
-begin
- if i < @@maxlength then
- # puts("#{(i+?a).chr} #{list[i]}")
- puts("#{(i+offset).chr} #{list[i]}")
- else
- puts("\n there are #{list.length-@@maxlength} entries more")
- break
- end
-rescue
- puts("some error")
-end
- end
- print("\n>> ")
- if answer = wait then
- if answer >= offset and answer <= offset+25 then
- index = answer - offset
- if dir = list[index] then
- print("#{answer.chr} ")
- chdir(dir)
- elsif @hist.key?(@pattern) and FileTest.directory?(@hist[@pattern]) then
- print("last choice ")
- chdir(@hist[@pattern])
- else
- print("quit\n")
- end
- break
- elsif list.length >= @@maxlength then
- @@maxlength.times do |i| list.shift end
- print("next set")
- print("\n")
- elsif @hist.key?(@pattern) and FileTest.directory?(@hist[@pattern]) then
- print("last choice ")
- chdir(@hist[@pattern])
- break
- else
- print("quit\n")
- break
- end
- end
- end
- end
- rescue
- report($!)
- end
- end
- end
-
- def wait
- begin
- $stdout.flush
- return getc
- rescue
- return nil
- end
- end
-
- def getc
- begin
- if @@mswindows then
- ch = Win32API.new('crtdll','_getch',[],'L').call
- else
- system('stty raw -echo')
- ch = $stdin.getc
- system('stty -raw echo')
- end
- rescue
- ch = nil
- end
- return ch
- end
-
- def check
- unless FileTest.file?(@@stubfile) then
- report("creating stub #{@@stubfile}")
- begin
- if f = File.open(@@stubfile,'w') then
- f.puts(@@stubcode)
- f.close
- end
- rescue
- report("unable to create stub #{@@stubfile}")
- else
- unless @mswindows then
- begin
- File.chmod(0755,@@stubfile)
- rescue
- report("unable to change protections on #{@@stubfile}")
- end
- end
- end
- else
- report("stub #{@@stubfile} already present")
- end
- end
-
-end
-
-$stdout.sync = true
-
-verbose, action, args = false, :find, Array.new
-
-usage = "fcd [--add|clear|find|list|make|show|stub] [--verbose] [pattern]"
-version = "1.0.2"
-
-def quit(message)
- puts(message)
- exit
-end
-
-ARGV.each do |a|
- case a
- when '-a', '--add' then action = :add
- when '-c', '--clear' then action = :clear
- when '-f', '--find' then action = :find
- when '-l', '--list' then action = :show
- when '-m', '--make' then action = :make
- when '-s', '--show' then action = :show
- when '--stub' then action = :stub
- when '-v', '--verbose' then verbose = true
- when '--version' then quit("version: #{version}")
- when '-h', '--help' then quit("usage: #{usage}")
- when /^\-\-.*/ then quit("error: unknown switch #{a}, try --help")
- else args << a
- end
-end
-
-fcd = FastCD.new(verbose)
-fcd.report("Fast Change Dir / version #{version}")
-
-case action
- when :make then
- fcd.clear
- fcd.scan(args)
- fcd.save
- when :clear then
- fcd.clear
- when :add then
- fcd.load
- fcd.scan(args)
- fcd.save
- when :show then
- fcd.load
- fcd.show
- when :find then
- fcd.load
- fcd.find(args)
- fcd.choose(args)
- fcd.remember
- when :stub
- fcd.check
-end
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 108f2a8a1..e6bbbe2b5 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -8,6 +8,11 @@ if not modules then modules = { } end modules ['mtxrun'] = {
license = "see context related readme files"
}
+-- if not lpeg then require("lpeg") end
+-- if not md5 then require("md5") end
+-- if not lfs then require("lfs") end
+-- if not texconfig then texconfig = { } end
+
-- one can make a stub:
--
-- #!/bin/sh
@@ -150,11 +155,28 @@ function string.topattern(str,lowercase,strict)
end
end
+
+function string.valid(str,default)
+ return (type(str) == "string" and str ~= "" and str) or default or nil
+end
+
-- obsolete names:
string.quote = string.quoted
string.unquote = string.unquoted
+-- handy fallback
+
+string.itself = function(s) return s end
+
+-- also handy (see utf variant)
+
+local pattern = Ct(C(1)^0)
+
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+
end -- of closure
@@ -168,7 +190,8 @@ if not modules then modules = { } end modules ['l-table'] = {
license = "see context related readme files"
}
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
+local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+local table, string = table, string
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
@@ -179,6 +202,8 @@ local getinfo = debug.getinfo
-- impact on ConTeXt was not that large; the remaining ipairs already
-- have been replaced. In a similar fashion we also hardly used pairs.
--
+-- Hm, actually ipairs was retained, but we no longer use it anyway.
+--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -238,12 +263,16 @@ function table.strip(tab)
end
function table.keys(t)
- local keys, k = { }, 0
- for key, _ in next, t do
- k = k + 1
- keys[k] = key
+ if t then
+ local keys, k = { }, 0
+ for key, _ in next, t do
+ k = k + 1
+ keys[k] = key
+ end
+ return keys
+ else
+ return { }
end
- return keys
end
local function compare(a,b)
@@ -256,41 +285,49 @@ local function compare(a,b)
end
local function sortedkeys(tab)
- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- s = s + 1
- srt[s] = key
- if category == 3 then
- -- no further check
- else
- local tkey = type(key)
- if tkey == "string" then
- category = (category == 2 and 3) or 1
- elseif tkey == "number" then
- category = (category == 1 and 3) or 2
+ if tab then
+ local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
+ for key,_ in next, tab do
+ s = s + 1
+ srt[s] = key
+ if category == 3 then
+ -- no further check
else
- category = 3
+ local tkey = type(key)
+ if tkey == "string" then
+ category = (category == 2 and 3) or 1
+ elseif tkey == "number" then
+ category = (category == 1 and 3) or 2
+ else
+ category = 3
+ end
end
end
- end
- if category == 0 or category == 3 then
- sort(srt,compare)
+ if category == 0 or category == 3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
else
- sort(srt)
+ return { }
end
- return srt
end
local function sortedhashkeys(tab) -- fast one
- local srt, s = { }, 0
- for key,_ in next, tab do
- if key then
- s= s + 1
- srt[s] = key
+ if tab then
+ local srt, s = { }, 0
+ for key,_ in next, tab do
+ if key then
+ s= s + 1
+ srt[s] = key
+ end
end
+ sort(srt)
+ return srt
+ else
+ return { }
end
- sort(srt)
- return srt
end
table.sortedkeys = sortedkeys
@@ -315,7 +352,7 @@ end
table.sortedhash = sortedhash
table.sortedpairs = sortedhash
-function table.append(t, list)
+function table.append(t,list)
local n = #t
for i=1,#list do
n = n + 1
@@ -550,12 +587,26 @@ local function do_serialize(root,name,depth,level,indexed)
end
-- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- if compact then
+ -- -- NOT: for k=1,#root do (we need to quit at nil)
+ -- for k,v in ipairs(root) do -- can we use next?
+ -- if not first then first = k end
+ -- last = last + 1
+ -- end
+ -- end
+ local first, last = nil, 0
if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
+ last = #root
+ for k=1,last do
+-- if not root[k] then
+ if root[k] == nil then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
end
end
local sk = sortedkeys(root)
@@ -1027,23 +1078,27 @@ function table.reversed(t)
end
end
-function table.sequenced(t,sep,simple) -- hash only
- local s, n = { }, 0
- for k, v in sortedhash(t) do
- if simple then
- if v == true then
- n = n + 1
- s[n] = k
- elseif v and v~= "" then
+function table.sequenced(t,sep,simple) -- hash only
+ if t then
+ local s, n = { }, 0
+ for k, v in sortedhash(t) do
+ if simple then
+ if v == true then
+ n = n + 1
+ s[n] = k
+ elseif v and v~= "" then
+ n = n + 1
+ s[n] = k .. "=" .. tostring(v)
+ end
+ else
n = n + 1
s[n] = k .. "=" .. tostring(v)
end
- else
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
end
+ return concat(s, sep or " | ")
+ else
+ return ""
end
- return concat(s, sep or " | ")
end
function table.print(t,...)
@@ -1124,6 +1179,8 @@ local lpeg = require("lpeg")
-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+-- some code will move to unicode and string
+
local report = texio and texio.write_nl or print
-- local lpmatch = lpeg.match
@@ -1160,8 +1217,8 @@ local report = texio and texio.write_nl or print
-- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end
-- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end
-local type = type
-local byte, char, gmatch = string.byte, string.char, string.gmatch
+local type, next = type, next
+local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format
-- Beware, we predefine a bunch of patterns here and one reason for doing so
-- is that we get consistent behaviour in some of the visualizers.
@@ -1169,9 +1226,8 @@ local byte, char, gmatch = string.byte, string.char, string.gmatch
lpeg.patterns = lpeg.patterns or { } -- so that we can share
local patterns = lpeg.patterns
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
+local P, R, S, V, Ct, C, Cs, Cc, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp
+local lpegtype, lpegmatch = lpeg.type, lpeg.match
local utfcharacters = string.utfcharacters
local utfgmatch = unicode and unicode.utf8.gmatch
@@ -1222,6 +1278,10 @@ patterns.utf8char = utf8char
patterns.validutf8 = validutf8char
patterns.validutf8char = validutf8char
+local eol = S("\n\r")
+local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+local whitespace = eol + spacer
+
patterns.digit = digit
patterns.sign = sign
patterns.cardinal = sign^0 * digit^1
@@ -1241,16 +1301,16 @@ patterns.letter = patterns.lowercase + patterns.uppercase
patterns.space = space
patterns.tab = P("\t")
patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.eol = eol
+patterns.spacer = spacer
+patterns.whitespace = whitespace
patterns.newline = newline
patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.nonspacer = 1 - spacer
+patterns.nonwhitespace = 1 - whitespace
patterns.equal = P("=")
patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.commaspacer = P(",") * spacer^0
patterns.period = P(".")
patterns.colon = P(":")
patterns.semicolon = P(";")
@@ -1265,6 +1325,10 @@ patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
patterns.unspacer = ((patterns.spacer^1)/"")^0
+patterns.singlequoted = squote * patterns.nosquote * squote
+patterns.doublequoted = dquote * patterns.nodquote * dquote
+patterns.quoted = patterns.doublequoted + patterns.singlequoted
+
patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
patterns.beginline = #(1-newline)
@@ -1275,8 +1339,17 @@ patterns.beginline = #(1-newline)
-- print(string.unquoted('"test"'))
-- print(string.unquoted('"test"'))
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
+local function anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) }
+end
+
+lpeg.anywhere = anywhere
+
+function lpeg.instringchecker(p)
+ p = anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
end
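+
+-- Illustration only:
+--
+-- local hasspace = lpeg.instringchecker(patterns.spacer)
+-- hasspace("abc")  -- false
+-- hasspace("a bc") -- true
+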
function lpeg.splitter(pattern, action)
@@ -1325,7 +1398,7 @@ function string.splitup(str,separator)
if not separator then
separator = ","
end
- return match(splitters_m[separator] or splitat(separator),str)
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
end
@@ -1337,16 +1410,20 @@ function lpeg.split(separator,str)
c = tsplitat(separator)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
+ if separator then
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
end
- return match(c,str)
end
local spacing = patterns.spacer^0 * newline -- sort of strip
@@ -1362,7 +1439,7 @@ local linesplitter = tsplitat(newline)
patterns.linesplitter = linesplitter
function string.splitlines(str)
- return match(linesplitter,str)
+ return lpegmatch(linesplitter,str)
end
local utflinesplitter = utfbom^-1 * tsplitat(newline)
@@ -1370,7 +1447,58 @@ local utflinesplitter = utfbom^-1 * tsplitat(newline)
patterns.utflinesplitter = utflinesplitter
function string.utfsplitlines(str)
- return match(utflinesplitter,str or "")
+ return lpegmatch(utflinesplitter,str or "")
+end
+
+local utfcharsplitter_ows = utfbom^-1 * Ct(C(utf8char)^0)
+local utfcharsplitter_iws = utfbom^-1 * Ct((whitespace^1 + C(utf8char))^0)
+
+function string.utfsplit(str,ignorewhitespace) -- new
+ if ignorewhitespace then
+ return lpegmatch(utfcharsplitter_iws,str or "")
+ else
+ return lpegmatch(utfcharsplitter_ows,str or "")
+ end
+end
+
+-- inspect(string.utfsplit("a b c d"))
+-- inspect(string.utfsplit("a b c d",true))
+
+-- -- alternative 1: 0.77
+--
+-- local utfcharcounter = utfbom^-1 * Cs((utf8char/'!')^0)
+--
+-- function string.utflength(str)
+-- return #lpegmatch(utfcharcounter,str or "")
+-- end
+--
+-- -- alternative 2: 1.70
+--
+-- local n = 0
+--
+-- local utfcharcounter = utfbom^-1 * (utf8char/function() n = n + 1 end)^0 -- slow
+--
+-- function string.utflength(str)
+-- n = 0
+-- lpegmatch(utfcharcounter,str or "")
+-- return n
+-- end
+--
+-- -- alternative 3: 0.24 (native unicode.utf8.len: 0.047)
+
+local n = 0
+
+local utfcharcounter = utfbom^-1 * Cs ( (
+ Cp() * (lpeg.patterns.utf8one )^1 * Cp() / function(f,t) n = n + t - f end
+ + Cp() * (lpeg.patterns.utf8two )^1 * Cp() / function(f,t) n = n + (t - f)/2 end
+ + Cp() * (lpeg.patterns.utf8three)^1 * Cp() / function(f,t) n = n + (t - f)/3 end
+ + Cp() * (lpeg.patterns.utf8four )^1 * Cp() / function(f,t) n = n + (t - f)/4 end
+)^0 )
+
+function string.utflength(str)
+ n = 0
+ lpegmatch(utfcharcounter,str or "")
+ return n
end
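+
+-- Illustration only: string.utflength("abc") -- 3 (multibyte characters also count as one)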
@@ -1384,7 +1512,7 @@ function lpeg.checkedsplit(separator,str)
c = Ct(separator^0 * other * (separator^1 * other)^0)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
function string.checkedsplit(str,separator)
@@ -1395,7 +1523,7 @@ function string.checkedsplit(str,separator)
c = Ct(separator^0 * other * (separator^1 * other)^0)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
@@ -1440,11 +1568,11 @@ function lpeg.keeper(str)
end
function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
+ return (P(str) + P(true)) * Cs(anything^0)
end
function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
+ return Cs((1 - P(str) * endofstring)^0)
end
-- Just for fun I looked at the used bytecode and
@@ -1453,8 +1581,22 @@ end
function lpeg.replacer(one,two)
if type(one) == "table" then
local no = #one
- if no > 0 then
- local p
+ local p
+ if no == 0 then
+ for k, v in next, one do
+ local pp = P(k) / v
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ elseif no == 1 then
+ local o = one[1]
+ one, two = P(o[1]), o[2]
+ return Cs(((1-one)^1 + one/two)^0)
+ else
for i=1,no do
local o = one[i]
local pp = P(o[1]) / o[2]
@@ -1467,11 +1609,16 @@ function lpeg.replacer(one,two)
return Cs((p + 1)^0)
end
else
+ one = P(one)
two = two or ""
- return Cs((P(one)/two + 1)^0)
+ return Cs(((1-one)^1 + one/two)^0)
end
end
+-- print(lpeg.match(lpeg.replacer("e","a"),"test test"))
+-- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test"))
+-- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test"))
+
local splitters_f, splitters_s = { }, { }
function lpeg.firstofsplit(separator) -- always return value
@@ -1506,7 +1653,7 @@ local nany = utf8char/""
function lpeg.counter(pattern)
pattern = Cs((P(pattern)/" " + nany)^0)
return function(str)
- return #match(pattern,str)
+ return #lpegmatch(pattern,str)
end
end
@@ -1520,7 +1667,7 @@ if utfgmatch then
end
return n
else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
+ return #lpegmatch(Cs((P(what)/" " + nany)^0),str)
end
end
@@ -1535,9 +1682,9 @@ else
p = Cs((P(what)/" " + nany)^0)
cache[p] = p
end
- return #match(p,str)
+ return #lpegmatch(p,str)
else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
+ return #lpegmatch(Cs((P(what)/" " + nany)^0),str)
end
end
@@ -1564,7 +1711,7 @@ local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
+ return lpegmatch(simple and s or p,str)
end
-- utf extensies
@@ -1611,7 +1758,7 @@ else
p = P(uc)
end
end
- match((utf8char/f)^0,str)
+ lpegmatch((utf8char/f)^0,str)
return p
end
@@ -1627,7 +1774,7 @@ function lpeg.UR(str,more)
first = str
last = more or first
else
- first, last = match(range,str)
+ first, last = lpegmatch(range,str)
if not last then
return P(str)
end
@@ -1654,11 +1801,15 @@ end
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order
if type(list) ~= "table" then
list = { list, ... }
end
- -- sort(list) -- longest match first
+ -- table.sort(list) -- longest match first
local p = P(list[1])
for l=2,#list do
p = p + P(list[l])
@@ -1666,10 +1817,6 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
return p
end
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-- For the moment here, but it might move to utilities. Beware, we need to
-- have the longest keyword first, so 'aaa' comes before 'aa' which is why we
-- loop back from the end cq. prepend.
@@ -1827,6 +1974,24 @@ end
-- utfchar(0x205F), -- math thinspace
-- } )
+-- handy from within tex:
+
+local lpegmatch = lpeg.match
+
+local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg!
+
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
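+
+-- Illustration only: string.tformat("@0.3f of @s",1/3,"x") -- same as format("%0.3f of %s",1/3,"x")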
+
+-- strips leading and trailing spaces and collapses all other spaces
+
+local pattern = Cs(whitespace^0/"" * ((whitespace^1 * P(-1) / "") + (whitespace^1/" ") + P(1))^0)
+
+function string.collapsespaces(str)
+ return lpegmatch(pattern,str)
+end
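+
+-- Illustration only: string.collapsespaces("  some   spaced  text ") -- "some spaced text"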
+
end -- of closure
@@ -1851,14 +2016,14 @@ else
io.fileseparator, io.pathseparator = "/" , ":"
end
-function io.loaddata(filename,textmode)
+function io.loaddata(filename,textmode) -- return nil if empty
local f = io.open(filename,(textmode and 'r') or 'rb')
if f then
local data = f:read('*all')
f:close()
- return data
- else
- return nil
+ if #data > 0 then
+ return data
+ end
end
end
@@ -1880,6 +2045,45 @@ function io.savedata(filename,data,joiner)
end
end
+function io.loadlines(filename,n) -- return nil if empty
+ local f = io.open(filename,'r')
+ if f then
+ if n then
+ local lines = { }
+ for i=1,n do
+ local line = f:read("*lines")
+ if line then
+ lines[#lines+1] = line
+ else
+ break
+ end
+ end
+ f:close()
+ lines = concat(lines,"\n")
+ if #lines > 0 then
+ return lines
+ end
+ else
+ local line = f:read("*line") or ""
+ assert(f:close())
+ if #line > 0 then
+ return line
+ end
+ end
+ end
+end
+
+function io.loadchunk(filename,n)
+ local f = io.open(filename,'rb')
+ if f then
+ local data = f:read(n or 1024)
+ f:close()
+ if data and #data > 0 then
+ return data
+ end
+ end
+end
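+-- Illustration only (filenames are made up):
+--
+-- io.loadlines("whatever.txt")    -- the first line (nil when empty)
+-- io.loadlines("whatever.txt",10) -- the first 10 lines as one string
+-- io.loadchunk("whatever.pdf",4)  -- the first 4 bytes, e.g. "%PDF"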
+
function io.exists(filename)
local f = io.open(filename)
if f == nil then
@@ -2107,7 +2311,7 @@ if not modules then modules = { } end modules ['l-number'] = {
-- this module will be replaced when we have the bit library
-local tostring = tostring
+local tostring, tonumber = tostring, tonumber
local format, floor, match, rep = string.format, math.floor, string.match, string.rep
local concat, insert = table.concat, table.insert
local lpegmatch = lpeg.match
@@ -2170,11 +2374,11 @@ function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
end
function number.setbit(x, p)
- return hasbit(x, p) and x or x + p
+ return (x % (p + p) >= p) and x or x + p
end
function number.clearbit(x, p)
- return hasbit(x, p) and x - p or x
+ return (x % (p + p) >= p) and x - p or x
end
@@ -2208,6 +2412,10 @@ function number.tobitstring(n,m)
end
+function number.valid(str,default)
+ return tonumber(str) or default or nil
+end
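+
+-- Illustration only: number.valid("123") -- 123 ; number.valid("oeps",0) -- 0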
+
end -- of closure
@@ -2319,17 +2527,28 @@ if not modules then modules = { } end modules ['l-os'] = {
-- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
-- os.platform : extended os.name with architecture
+-- os.sleep() => socket.sleep()
+-- math.randomseed(tonumber(string.sub(string.reverse(tostring(math.floor(socket.gettime()*10000))),1,6)))
+
-- maybe build io.flush in os.execute
local os = os
+local date, time = os.date, os.time
local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
-local random, ceil = math.random, math.ceil
-local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+local random, ceil, randomseed = math.random, math.ceil, math.randomseed
+local rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring = rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring
-- The following code permits traversing the environment table, at least
-- in luatex. Internally all environment names are uppercase.
+-- The randomseed in Lua is not that random, although this depends on the operating system as well
+-- as the binary (Luatex is normally okay). But to be sure we set the seed anyway.
+
+math.initialseed = tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
+
+randomseed(math.initialseed)
+
if not os.__getenv__ then
os.__getenv__ = os.getenv
@@ -2433,12 +2652,14 @@ else
os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
+local launchers = {
+ windows = "start %s",
+ macosx = "open %s",
+ unix = "$BROWSER %s &> /dev/null &",
+}
+
function os.launch(str)
- if os.type == "windows" then
- os.execute("start " .. str) -- os.spawn ?
- else
- os.execute(str .. " &") -- os.spawn ?
- end
+ os.execute(format(launchers[os.name] or launchers.unix,str))
end
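+
+-- Illustration only (platform dependent): os.launch("foo.pdf") now runs
+-- "start foo.pdf" on windows and "open foo.pdf" on macosx.
+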
if not os.times then
@@ -2649,7 +2870,7 @@ end
local d
function os.timezone(delta)
- d = d or tonumber(tonumber(os.date("%H")-os.date("!%H")))
+ d = d or tonumber(tonumber(date("%H")-date("!%H")))
if delta then
if d > 0 then
return format("+%02i:00",d)
@@ -2661,6 +2882,44 @@ function os.timezone(delta)
end
end
+local timeformat = format("%%s%s",os.timezone(true))
+local dateformat = "!%Y-%m-%d %H:%M:%S"
+
+function os.fulltime(t,default)
+ t = tonumber(t) or 0
+ if t > 0 then
+ -- valid time
+ elseif default then
+ return default
+ else
+ t = nil
+ end
+ return format(timeformat,date(dateformat,t))
+end
+
+local dateformat = "%Y-%m-%d %H:%M:%S"
+
+function os.localtime(t,default)
+ t = tonumber(t) or 0
+ if t > 0 then
+ -- valid time
+ elseif default then
+ return default
+ else
+ t = nil
+ end
+ return date(dateformat,t)
+end
+
+function os.converttime(t,default)
+ local t = tonumber(t)
+ if t and t > 0 then
+ return date(dateformat,t)
+ else
+ return default or "-"
+ end
+end
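+
+-- Illustration only (the offset depends on the local timezone):
+--
+-- os.fulltime (1349005020) -- e.g. "2012-09-30 11:37:00+02:00"
+-- os.localtime(1349005020) -- e.g. "2012-09-30 13:37:00"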
+
local memory = { }
local function which(filename)
@@ -2735,7 +2994,7 @@ local function nameonly(name)
return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$",""))
end
-local function extname(name,default)
+local function suffixonly(name,default)
return match(name,"^.+%.([^/\\]-)$") or default or ""
end
@@ -2744,11 +3003,16 @@ local function splitname(name)
return n or name, s or ""
end
-file.basename = basename
-file.dirname = dirname
-file.nameonly = nameonly
-file.extname = extname
-file.suffix = extname
+file.basename = basename
+
+file.pathpart = dirname
+file.dirname = dirname
+
+file.nameonly = nameonly
+
+file.suffixonly = suffixonly
+file.extname = suffixonly -- obsolete
+file.suffix = suffixonly
function file.removesuffix(filename)
return (gsub(filename,"%.[%a%d]+$",""))
@@ -2864,6 +3128,11 @@ end
file.isreadable = file.is_readable -- deprecated
file.iswritable = file.is_writable -- deprecated
+function file.size(name)
+ local a = attributes(name)
+ return a and a.size or 0
+end
+
-- todo: lpeg \\ / .. does not save much
local checkedsplit = string.checkedsplit
@@ -3001,6 +3270,7 @@ local drive = C(R("az","AZ")) * P(":")
local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
+local rest = C(P(1)^0)
drive = drive + Cc("")
path = path + Cc("")
@@ -3009,7 +3279,8 @@ suffix = suffix + Cc("")
local pattern_a = drive * path * base * suffix
local pattern_b = path * base * suffix
-local pattern_c = C(drive * path) * C(base * suffix)
+local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures
+local pattern_d = path * rest
function file.splitname(str,splitdrive)
if splitdrive then
@@ -3019,6 +3290,10 @@ function file.splitname(str,splitdrive)
end
end
+function file.splitbase(str)
+ return lpegmatch(pattern_d,str) -- returns path, base+suffix
+end
+
function file.nametotable(str,splitdrive) -- returns table
local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
if splitdrive then
@@ -3040,6 +3315,8 @@ function file.nametotable(str,splitdrive) -- returns table
end
end
+-- print(file.splitbase("a/b/c.txt"))
+
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
--
-- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
@@ -3081,15 +3358,30 @@ if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
-function file.needs_updating(oldname,newname,threshold) -- size modification access change
- local oldtime = lfs.attributes(oldname, modification)
- local newtime = lfs.attributes(newname, modification)
- if newtime >= oldtime then
- return false
- elseif oldtime - newtime < (threshold or 1) then
- return false
+function file.needsupdating(oldname,newname,threshold) -- size modification access change
+ local oldtime = lfs.attributes(oldname,"modification")
+ if oldtime then
+ local newtime = lfs.attributes(newname,"modification")
+ if not newtime then
+ return true -- no new file, so updating is needed
+ elseif newtime >= oldtime then
+ return false -- new file is newer (or as new), so no updating needed
+ elseif oldtime - newtime < (threshold or 1) then
+ return false -- new file is probably still okay
+ else
+ return true -- new file has to be updated
+ end
else
- return true
+ return false -- no old file, so no updating needed
+ end
+end
+
+file.needs_updating = file.needsupdating
+
+function file.syncmtimes(oldname,newname)
+ local oldtime = lfs.attributes(oldname,"modification")
+ if oldtime and lfs.isfile(newname) then
+ lfs.touch(newname,oldtime,oldtime)
end
end
@@ -3111,7 +3403,7 @@ function file.loadchecksum(name)
return nil
end
-function file.savechecksum(name, checksum)
+function file.savechecksum(name,checksum)
if not checksum then checksum = file.checksum(name) end
if checksum then
io.savedata(name .. ".md5",checksum)
@@ -3136,7 +3428,7 @@ if not modules then modules = { } end modules ['l-url'] = {
local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
local concat = table.concat
local tonumber, type = tonumber, type
-local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
+local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V
local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
-- from wikipedia:
@@ -3169,15 +3461,19 @@ local endofstring = P(-1)
local hexdigit = R("09","AF","af")
local plus = P("+")
local nothing = Cc("")
-local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
+local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
+local escaped = (plus / " ") + escapedchar
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-- we also assume that when we have a scheme, we also have an authority
+--
+-- maybe we should already split the query (better for unescaping as = & can be part of a value)
local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
local pathstr = Cs((escaped+(1- qmark-hash))^0)
-local querystr = Cs((escaped+(1- hash))^0)
+----- querystr = Cs((escaped+(1- hash))^0)
+local querystr = Cs(( (1- hash))^0)
local fragmentstr = Cs((escaped+(1- endofstring))^0)
local scheme = schemestr * colon + nothing
@@ -3192,11 +3488,20 @@ local parser = Ct(validurl)
lpegpatterns.url = validurl
lpegpatterns.urlsplitter = parser
-local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
+local escapes = { }
-local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
+setmetatable(escapes, { __index = function(t,k)
+ local v = format("%%%02X",byte(k))
+ t[k] = v
+ return v
+end })
-lpegpatterns.urlescaper = escaper
+local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most
+local unescaper = Cs((escapedchar + 1)^0)
+
+lpegpatterns.urlunescaped = escapedchar
+lpegpatterns.urlescaper = escaper
+lpegpatterns.urlunescaper = unescaper
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
@@ -3208,8 +3513,12 @@ end
local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
local function hasscheme(str)
- local scheme = lpegmatch(isscheme,str) -- at least one character
- return scheme ~= "" and scheme or false
+ if str then
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
+ else
+ return false
+ end
end
@@ -3228,10 +3537,32 @@ local rootbased = P("/")
local barswapper = replacer("|",":")
local backslashswapper = replacer("\\","/")
+-- queries:
+
+local equal = P("=")
+local amp = P("&")
+local key = Cs(((escapedchar+1)-equal )^0)
+local value = Cs(((escapedchar+1)-amp -endofstring)^0)
+
+local splitquery = Cf ( Ct("") * P { "sequence",
+ sequence = V("pair") * (amp * V("pair"))^0,
+ pair = Cg(key * equal * value),
+}, rawset)
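+
+-- Illustration only: lpegmatch(splitquery,"mode=draft&page=12") -- { mode = "draft", page = "12" }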
+
+-- hasher
+
local function hashed(str) -- not yet ok (/test?test)
+ if str == "" then
+ return {
+ scheme = "invalid",
+ original = str,
+ }
+ end
local s = split(str)
- local somescheme = s[1] ~= ""
- local somequery = s[4] ~= ""
+ local rawscheme = s[1]
+ local rawquery = s[4]
+ local somescheme = rawscheme ~= ""
+ local somequery = rawquery ~= ""
if not somescheme and not somequery then
s = {
scheme = "file",
@@ -3247,14 +3578,17 @@ local function hashed(str) -- not yet ok (/test?test)
local authority, path, filename = s[2], s[3]
if authority == "" then
filename = path
+ elseif path == "" then
+ filename = ""
else
filename = authority .. "/" .. path
end
s = {
- scheme = s[1],
+ scheme = rawscheme,
authority = authority,
path = path,
- query = s[4],
+ query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and =
+ queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped
fragment = s[5],
original = str,
noscheme = false,
@@ -3264,6 +3598,8 @@ local function hashed(str) -- not yet ok (/test?test)
return s
end
+-- inspect(hashed("template://test"))
+
-- Here we assume:
--
-- files: /// = relative
@@ -3306,23 +3642,65 @@ function url.construct(hash) -- dodo: we need to escape !
return lpegmatch(escaper,concat(fullurl))
end
-function url.filename(filename)
+function url.filename(filename) -- why no lpeg here ?
local t = hashed(filename)
return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
end
+local function escapestring(str)
+ return lpegmatch(escaper,str)
+end
+
+url.escape = escapestring
+
+-- function url.query(str) -- separator could be an option
+-- if type(str) == "string" then
+-- local t = { }
+-- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
+-- t[k] = v
+-- end
+-- return t
+-- else
+-- return str
+-- end
+-- end
+
function url.query(str)
if type(str) == "string" then
- local t = { }
- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
- t[k] = v
- end
- return t
+ return lpegmatch(splitquery,str) or ""
else
return str
end
end
+function url.toquery(data)
+ local td = type(data)
+ if td == "string" then
+ return #data > 0 and escapestring(data) or nil -- beware of double escaping
+ elseif td == "table" then
+ if next(data) then
+ local t = { }
+ for k, v in next, data do
+ t[#t+1] = format("%s=%s",k,escapestring(v))
+ end
+ return concat(t,"&")
+ end
+ else
+ -- nil is a signal that no query
+ end
+end
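+
+-- A hedged usage sketch (the table is made up); values are escaped, keys are
+-- not, and the pair order follows next():
+--
+-- print(url.toquery { mode = "draft", title = "my file" })
+-- -- e.g. title=my%20file&mode=draft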
+
+-- /test/ | /test | test/ | test => test
+
+function url.barepath(path)
+ if not path or path == "" then
+ return ""
+ else
+ return (gsub(path,"^/?(.-)/?$","%1"))
+ end
+end
+
+
@@ -3363,6 +3741,24 @@ local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
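+
+-- A hedged check (only relevant when running outside luatex, file name made up):
+--
+-- print(lfs.isdir("."), lfs.isfile("some-file.lua"))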
+
-- handy
function dir.current()
@@ -3738,28 +4134,49 @@ function boolean.tonumber(b)
end
function toboolean(str,tolerant)
- if tolerant then
- local tstr = type(str)
- if tstr == "string" then
- return str == "true" or str == "yes" or str == "on" or str == "1" or str == "t"
- elseif tstr == "number" then
- return tonumber(str) ~= 0
- elseif tstr == "nil" then
- return false
- else
- return str
- end
+ if str == nil then
+ return false
+ elseif str == false then
+ return false
+ elseif str == true then
+ return true
elseif str == "true" then
return true
elseif str == "false" then
return false
+ elseif not tolerant then
+ return false
+ elseif str == 0 then
+ return false
+ elseif (tonumber(str) or 0) > 0 then
+ return true
else
- return str
+ return str == "yes" or str == "on" or str == "t"
end
end
string.toboolean = toboolean
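+
+-- A few hedged checks of the strict versus tolerant behaviour:
+--
+-- toboolean("yes")      -- false ("yes" only counts when tolerant)
+-- toboolean("yes",true) -- true
+-- toboolean("1",true)   -- true (tonumber("1") > 0)
+-- toboolean(0,true)     -- false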
+function string.booleanstring(str)
+ if str == nil then
+ return false
+ elseif str == false then
+ return false
+ elseif str == true then
+ return true
+ elseif str == "true" then
+ return true
+ elseif str == "false" then
+ return false
+ elseif str == 0 then
+ return false
+ elseif (tonumber(str) or 0) > 0 then
+ return true
+ else
+ return str == "yes" or str == "on" or str == "t"
+ end
+end
+
function string.is_boolean(str,default)
if type(str) == "string" then
if str == "true" or str == "yes" or str == "on" or str == "t" then
@@ -3784,57 +4201,229 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
+-- this module will be reorganized
+
+-- todo: utf.sub replacement (used in syst-aux)
+
+local concat = table.concat
+local type = type
+local P, C, R, Cs, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local utftype = patterns.utftype
+local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
+local utfsplitlines = string.utfsplitlines
+
if not unicode then
- unicode = { utf8 = { } }
+ unicode = { }
+
+end
+
+local unicode = unicode
+
+utf = utf or unicode.utf8
+
+if not utf then
+
+ utf8 = { }
+ unicode.utf8 = utf8
+ utf = utf8
+
+end
+
+if not utf.char then
local floor, char = math.floor, string.char
- function unicode.utf8.utfchar(n)
+ function utf.char(n)
if n < 0x80 then
+ -- 0aaaaaaa : 0x80
return char(n)
elseif n < 0x800 then
+ -- 110bbbaa : 0xC0 : n >> 6
+ -- 10aaaaaa : 0x80 : n & 0x3F
return char(
0xC0 + floor(n/0x40),
0x80 + (n % 0x40)
)
elseif n < 0x10000 then
+ -- 1110bbbb : 0xE0 : n >> 12
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
return char(
0xE0 + floor(n/0x1000),
0x80 + (floor(n/0x40) % 0x40),
0x80 + (n % 0x40)
)
- elseif n < 0x40000 then
+ elseif n < 0x200000 then
+ -- 11110ccc : 0xF0 : n >> 18
+ -- 10ccbbbb : 0x80 : (n >> 12) & 0x3F
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
+ -- dddd : ccccc - 1
return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
+ 0xF0 + floor(n/0x40000),
+ 0x80 + (floor(n/0x1000) % 0x40),
0x80 + (floor(n/0x40) % 0x40),
0x80 + (n % 0x40)
)
else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
+ return ""
end
end
end
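+
+-- A hedged sanity check of the fallback encoder (only used when the engine does
+-- not provide utf.char itself):
+--
+-- print(utf.char(0x48), utf.char(0x20AC)) -- H €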
-local unicode = unicode
+if not utf.byte then
-utf = utf or unicode.utf8
+ local utf8byte = patterns.utf8byte
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
-local utfsplitlines = string.utfsplitlines
+end
+
+local utfchar, utfbyte = utf.char, utf.byte
+
+-- As we want to get rid of the (unmaintained) utf library we implement our own
+-- variants (in due time an independent module):
+
+function unicode.filetype(data)
+ return data and lpegmatch(utftype,data) or "unknown"
+end
+
+local toentities = Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
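+
+-- A hedged example: characters above ascii are turned into numeric entities:
+--
+-- print(utf.toentities("café")) -- caf&#E9;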
+
+
+
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+-- actually one of them is already utf ... sort of useless this one
+
+-- function utf.char(n)
+-- if n < 0x80 then
+-- return char(n)
+-- elseif n < 0x800 then
+-- return char(
+-- 0xC0 + floor(n/0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x10000 then
+-- return char(
+-- 0xE0 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x40000 then
+-- return char(
+-- 0xF0 + floor(n/0x40000),
+-- 0x80 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- else
+-- -- return char(
+-- -- 0xF1 + floor(n/0x1000000),
+-- -- 0x80 + floor(n/0x40000),
+-- -- 0x80 + floor(n/0x1000),
+-- -- 0x80 + (floor(n/0x40) % 0x40),
+-- -- 0x80 + (n % 0x40)
+-- -- )
+-- return "?"
+-- end
+-- end
+--
+-- merge into:
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
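+
+-- A hedged sanity check: a UTF-16 string with a BOM is converted to UTF-8,
+-- anything without a BOM is returned untouched (sample bytes are made up):
+--
+-- print(string.toutf("\254\255\0\72\0\105")) -- Hi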
+
+local validatedutf = Cs (
+ (
+ patterns.utf8one
+ + patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ + P(1) / "�"
+ )^0
+)
+
+patterns.validatedutf = validatedutf
+
+function string.validutf(str)
+ return lpegmatch(validatedutf,str)
+end
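+
+-- A hedged example: invalid bytes are replaced by the replacement character:
+--
+-- print(string.validutf("abc\200def")) -- abc�def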
+
+
+utf.length = string.utflength
+utf.split = string.utfsplit
+utf.splitlines = string.utfsplitlines
+utf.valid = string.validutf
+
+if not utf.len then
+ utf.len = utf.length
+end
+
+-- a replacement for simple gsubs:
+
+local utf8char = patterns.utf8char
+
+function utf.remapper(mapping)
+ local pattern = Cs((utf8char/mapping)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
+end
+
+-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
+-- print(remap("abcd 1234 abcd"))
-- 0 EF BB BF UTF-8
-- 1 FF FE UTF-16-little-endian
@@ -4027,11 +4616,22 @@ local function big(c)
end
end
+-- function unicode.utf8_to_utf16(str,littleendian)
+-- if littleendian then
+-- return char(255,254) .. utfgsub(str,".",little)
+-- else
+-- return char(254,255) .. utfgsub(str,".",big)
+-- end
+-- end
+
+local _, l_remap = utf.remapper(little)
+local _, b_remap = utf.remapper(big)
+
function unicode.utf8_to_utf16(str,littleendian)
if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+ return char(255,254) .. lpegmatch(l_remap,str)
else
- return char(254,255) .. utfgsub(str,".",big)
+ return char(254,255) .. lpegmatch(b_remap,str)
end
end
@@ -4052,84 +4652,12 @@ function unicode.xstring(s)
return format("0x%05X",type(s) == "number" and s or utfbyte(s))
end
+--
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
-
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
-end
-
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
-end
-
-
-
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
-end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
+local pattern = Ct(C(patterns.utf8char)^0)
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
+function utf.totable(str)
+ return lpegmatch(pattern,str)
end
@@ -4189,10 +4717,11 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch, rep = string.format, string.gmatch, string.rep
+local format, gmatch, rep, gsub = string.format, string.gmatch, string.rep, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
+local type, next, rawset, tonumber, loadstring = type, next, rawset, tonumber, loadstring
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -4346,6 +4875,121 @@ function tables.encapsulate(core,capsule,protect)
end
end
+local function serialize(t,r,outer) -- no mixes
+ r[#r+1] = "{"
+ local n = #t
+ if n > 0 then
+ for i=1,n do
+ local v = t[i]
+ local tv = type(v)
+ if tv == "string" then
+ r[#r+1] = format("%q,",v)
+ elseif tv == "number" then
+ r[#r+1] = format("%s,",v)
+ elseif tv == "table" then
+ serialize(v,r)
+ elseif tv == "boolean" then
+ r[#r+1] = format("%s,",tostring(v))
+ end
+ end
+ else
+ for k, v in next, t do
+ local tv = type(v)
+ if tv == "string" then
+ r[#r+1] = format("[%q]=%q,",k,v)
+ elseif tv == "number" then
+ r[#r+1] = format("[%q]=%s,",k,v)
+ elseif tv == "table" then
+ r[#r+1] = format("[%q]=",k)
+ serialize(v,r)
+ elseif tv == "boolean" then
+ r[#r+1] = format("[%q]=%s,",k,tostring(v))
+ end
+ end
+ end
+ if outer then
+ r[#r+1] = "}"
+ else
+ r[#r+1] = "},"
+ end
+ return r
+end
+
+function table.fastserialize(t,prefix)
+ return concat(serialize(t,{ prefix or "return" },true))
+end
+
+function table.deserialize(str)
+ if not str or str == "" then
+ return
+ end
+ local code = loadstring(str)
+ if not code then
+ return
+ end
+ code = code()
+ if not code then
+ return
+ end
+ return code
+end
+
+-- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } })
+
+function table.load(filename)
+ if filename then
+ local t = io.loaddata(filename)
+ if t and t ~= "" then
+ t = loadstring(t)
+ if type(t) == "function" then
+ t = t()
+ if type(t) == "table" then
+ return t
+ end
+ end
+ end
+ end
+end
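+
+-- A hedged usage sketch (the file name is made up); the file is expected to
+-- contain a chunk like "return { ... }":
+--
+-- local settings = table.load("my-settings.lua")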
+
+local function slowdrop(t)
+ local r = { }
+ local l = { }
+ for i=1,#t do
+ local ti = t[i]
+ local j = 0
+ for k, v in next, ti do
+ j = j + 1
+ l[j] = format("%s=%q",k,v)
+ end
+ r[i] = format(" {%s},\n",concat(l))
+ end
+ return format("return {\n%s}",concat(r))
+end
+
+local function fastdrop(t)
+ local r = { "return {\n" }
+ for i=1,#t do
+ local ti = t[i]
+ r[#r+1] = " {"
+ for k, v in next, ti do
+ r[#r+1] = format("%s=%q",k,v)
+ end
+ r[#r+1] = "},\n"
+ end
+ r[#r+1] = "}"
+ return concat(r)
+end
+
+function table.drop(t,slow)
+ if #t == 0 then
+ return "return { }"
+ elseif slow == true then
+ return slowdrop(t) -- less memory
+ else
+ return fastdrop(t) -- some 15% faster
+ end
+end
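+
+-- A hedged example (made-up data); both variants return a "return { ... }"
+-- string with all values quoted:
+--
+-- print(table.drop { { a = 1 }, { b = 2 } })
+-- print(table.drop({ { a = 1 }, { b = 2 } },true)) -- slower but uses less memory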
+
end -- of closure
@@ -4520,11 +5164,10 @@ local concat = table.concat
local type, next = type, next
utilities = utilities or {}
-utilities.merger = utilities.merger or { } -- maybe mergers
+local merger = utilities.merger or { }
+utilities.merger = merger
utilities.report = logs and logs.reporter("system") or print
-local merger = utilities.merger
-
merger.strip_comment = true
local m_begin_merge = "begin library merge"
@@ -4570,9 +5213,11 @@ end
local function self_save(name, data)
if data ~= "" then
if merger.strip_comment then
- -- saves some 20K
local n = #data
+ -- saves some 20K .. scite comments
data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
+ -- saves some 20K .. ldx comments
+ data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-ldx%]%]%-%-","")
utilities.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
end
io.savedata(name,data)
@@ -4653,36 +5298,208 @@ if not modules then modules = { } end modules ['util-lua'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ comment = "the strip code is written by Peter Cawley",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-utilities = utilities or {}
-utilities.lua = utilities.lua or { }
-utilities.report = logs and logs.reporter("system") or print
+local rep, sub, byte, dump, format = string.rep, string.sub, string.byte, string.dump, string.format
+local loadstring, loadfile, type = loadstring, loadfile, type
+
+utilities = utilities or {}
+utilities.lua = utilities.lua or { }
+local luautilities = utilities.lua
+
+utilities.report = logs and logs.reporter("system") or print -- can be overloaded later
+
+local tracestripping = false
+local forcestupidcompile = true -- use internal bytecode compiler
+luautilities.stripcode = true -- support stripping when asked for
+luautilities.alwaysstripcode = false -- saves 1 meg on 7 meg compressed format file (2012.08.12)
+luautilities.nofstrippedchunks = 0
+luautilities.nofstrippedbytes = 0
+
+-- The next function was posted by Peter Cawley on the lua list and strips line
+-- number information etc. from the bytecode data blob. We only apply this trick
+-- when we store data tables. Stripping makes the compressed format file about
+-- 1MB smaller (and uncompressed we save at least 6MB).
+--
+-- You can consider this feature an experiment, so it might disappear. There is
+-- no noticeable gain in runtime although the memory footprint should be somewhat
+-- smaller (and the file system has a bit less to deal with).
+--
+-- Begin of borrowed code ... works for Lua 5.1 which LuaTeX currently uses ...
+
+local function strip_code_pc(dump,name)
+ local before = #dump
+ local version, format, endian, int, size, ins, num = byte(dump,5,11)
+ local subint
+ if endian == 1 then
+ subint = function(dump, i, l)
+ local val = 0
+ for n = l, 1, -1 do
+ val = val * 256 + byte(dump,i + n - 1)
+ end
+ return val, i + l
+ end
+ else
+ subint = function(dump, i, l)
+ local val = 0
+ for n = 1, l, 1 do
+ val = val * 256 + byte(dump,i + n - 1)
+ end
+ return val, i + l
+ end
+ end
+ local strip_function
+ strip_function = function(dump)
+ local count, offset = subint(dump, 1, size)
+ local stripped, dirty = rep("\0", size), offset + count
+ offset = offset + count + int * 2 + 4
+ offset = offset + int + subint(dump, offset, int) * ins
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ local t
+ t, offset = subint(dump, offset, 1)
+ if t == 1 then
+ offset = offset + 1
+ elseif t == 4 then
+ offset = offset + size + subint(dump, offset, size)
+ elseif t == 3 then
+ offset = offset + num
+ end
+ end
+ count, offset = subint(dump, offset, int)
+ stripped = stripped .. sub(dump,dirty, offset - 1)
+ for n = 1, count do
+ local proto, off = strip_function(sub(dump,offset, -1))
+ stripped, offset = stripped .. proto, offset + off - 1
+ end
+ offset = offset + subint(dump, offset, int) * int + int
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ offset = offset + subint(dump, offset, size) + size + int * 2
+ end
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ offset = offset + subint(dump, offset, size) + size
+ end
+ stripped = stripped .. rep("\0", int * 3)
+ return stripped, offset
+ end
+ dump = sub(dump,1,12) .. strip_function(sub(dump,13,-1))
+ local after = #dump
+ local delta = before-after
+ if tracestripping then
+ utilities.report("stripped bytecode: %s, before %s, after %s, delta %s",name or "unknown",before,after,delta)
+ end
+ luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1
+ luautilities.nofstrippedbytes = luautilities.nofstrippedbytes + delta
+ return dump, delta
+end
+
+-- ... end of borrowed code.
+
+local function strippedbytecode(code,forcestrip,name)
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ return strip_code_pc(code,name)
+ else
+ return code, 0
+ end
+end
+
+luautilities.stripbytecode = strip_code_pc
+luautilities.strippedbytecode = strippedbytecode
+
+local function fatalerror(name)
+ utilities.report(format("fatal error in %q",name or "unknown"))
+end
+
+-- quite subtle ... doing this wrong incidentally can give more bytes
+
+
+function luautilities.loadedluacode(fullname,forcestrip,name)
+ -- quite subtle ... doing this wrong incidentally can give more bytes
+ name = name or fullname
+ local code = loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip) == "function" then
+ forcestrip = forcestrip(fullname)
+ end
+ if forcestrip then
+ local code, n = strip_code_pc(dump(code,name))
+ return loadstring(code), n
+ elseif luautilities.alwaysstripcode then
+ return loadstring(strip_code_pc(dump(code),name))
+ else
+ return code, 0
+ end
+ elseif luautilities.alwaysstripcode then
+ return loadstring(strip_code_pc(dump(code),name))
+ else
+ return code, 0
+ end
+end
+
+function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
+ local n = 0
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ code = loadstring(code)
+ if not code then
+ fatalerror(name)
+ end
+ code, n = strip_code_pc(dump(code),name)
+ end
+ return loadstring(code), n
+end
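+
+-- A hedged example (Lua 5.1 / LuaTeX bytecode assumed, the chunk is made up):
+--
+-- local chunk, n = luautilities.strippedloadstring("return { 'data' }",true,"demo")
+-- -- chunk() returns the table, n is the number of stripped bytecode bytes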
-local function stupidcompile(luafile,lucfile)
- local data = io.loaddata(luafile)
- if data and data ~= "" then
- data = string.dump(data)
- if data and data ~= "" then
- io.savedata(lucfile,data)
+local function stupidcompile(luafile,lucfile,strip)
+ local code = io.loaddata(luafile)
+ local n = 0
+ if code and code ~= "" then
+ code = loadstring(code)
+ if not code then
+ fatalerror()
+ end
+ code = dump(code)
+ if strip then
+ code, n = strippedbytecode(code,true,luafile) -- last one is reported
+ end
+ if code and code ~= "" then
+ io.savedata(lucfile,code)
end
end
+ return n
end
-function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
+local luac_normal = "texluac -o %q %q"
+local luac_strip = "texluac -s -o %q %q"
+
+function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
- local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
+ local done = false
if strip ~= false then
- command = "-s " .. command
+ strip = true
+ end
+ if forcestupidcompile then
+ fallback = true
+ elseif strip then
+ done = os.spawn(format(luac_strip, lucfile,luafile)) == 0
+ else
+ done = os.spawn(format(luac_normal,lucfile,luafile)) == 0
end
- local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
if not done and fallback then
- utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
- stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
- cleanup = false -- better see how worse it is
+ local n = stupidcompile(luafile,lucfile,strip)
+ if n > 0 then
+ utilities.report("lua: %s dumped into %s (%i bytes stripped)",luafile,lucfile,n)
+ else
+ utilities.report("lua: %s dumped into %s (unstripped)",luafile,lucfile)
+ end
+ cleanup = false -- better see how bad it is
end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
@@ -4697,7 +5514,6 @@ end
-
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -4710,8 +5526,10 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
-local lpegmatch = lpeg.match
+local lpeg, table, string = lpeg, table, string
+
+local P, R, V, S, C, Ct, Cs, Carg, Cc = lpeg.P, lpeg.R, lpeg.V, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg, lpeg.Cc
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
@@ -4723,29 +5541,39 @@ parsers.patterns = parsers.patterns or { }
local setmetatableindex = table.setmetatableindex
local sortedhash = table.sortedhash
+-- we share some patterns
+
+local space = P(' ')
+local equal = P("=")
+local comma = P(",")
+local lbrace = P("{")
+local rbrace = P("}")
+local period = S(".")
+local punctuation = S(".,:;")
+local spacer = patterns.spacer
+local whitespace = patterns.whitespace
+local newline = patterns.newline
+local anything = patterns.anything
+local endofstring = patterns.endofstring
+
-- we could use a Cf Cg construct
local escape, left, right = P("\\"), P('{'), P('}')
-lpeg.patterns.balanced = P {
+patterns.balanced = P {
[1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
[2] = left * V(1) * right
}
-local space = P(' ')
-local equal = P("=")
-local comma = P(",")
-local lbrace = P("{")
-local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
-local content = (1-P(-1))^0
+local content = (1-endofstring)^0
-lpeg.patterns.nested = nested -- no capture
-lpeg.patterns.argument = argument -- argument after e.g. =
-lpeg.patterns.content = content -- rest after e.g =
+patterns.nested = nested -- no capture
+patterns.argument = argument -- argument after e.g. =
+patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4764,10 +5592,6 @@ local function set(key,value)
hash[key] = value
end
-local function set(key,value)
- hash[key] = value
-end
-
local pattern_a_s = (pattern_a/set)^1
local pattern_b_s = (pattern_b/set)^1
local pattern_c_s = (pattern_c/set)^1
@@ -4818,7 +5642,7 @@ end
local separator = comma * space^0
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
-local pattern = Ct(value*(separator*value)^0)
+local pattern = spaces * Ct(value*(separator*value)^0)
-- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
@@ -4942,6 +5766,37 @@ function parsers.listitem(str)
return gmatch(str,"[^, ]+")
end
+--
+local digit = R("09")
+
+local pattern = Cs { "start",
+ start = V("one") + V("two") + V("three"),
+ rest = (Cc(",") * V("thousand"))^0 * (P(".") + endofstring) * anything^0,
+ thousand = digit * digit * digit,
+ one = digit * V("rest"),
+ two = digit * digit * V("rest"),
+ three = V("thousand") * V("rest"),
+}
+
+patterns.splitthousands = pattern -- maybe better in the parsers namespace ?
+
+function parsers.splitthousands(str)
+ return lpegmatch(pattern,str) or str
+end
+
+-- print(parsers.splitthousands("11111111111.11"))
+
+local optionalwhitespace = whitespace^0
+
+patterns.words = Ct((Cs((1-punctuation-whitespace)^1) + anything)^1)
+patterns.sentences = Ct((optionalwhitespace * Cs((1-period)^0 * period))^1)
+patterns.paragraphs = Ct((optionalwhitespace * Cs((whitespace^1*endofstring/"" + 1 - (spacer^0*newline*newline))^1))^1)
+
+-- local str = " Word1 word2. \n Word3 word4. \n\n Word5 word6.\n "
+-- inspect(lpegmatch(patterns.paragraphs,str))
+-- inspect(lpegmatch(patterns.sentences,str))
+-- inspect(lpegmatch(patterns.words,str))
+
end -- of closure
@@ -5043,7 +5898,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util.deb'] = {
+if not modules then modules = { } end modules ['util-deb'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -5155,6 +6010,7 @@ function inspect(i) -- global function
else
print(tostring(i))
end
+ return i -- so that we can inline the inspect
end
-- from the lua book:
@@ -5194,7 +6050,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
-local write_nl = texio.write_nl
+local write_nl = texio and texio.write_nl or print
statistics = statistics or { }
local statistics = statistics
@@ -5277,7 +6133,7 @@ statistics.elapsedtime = elapsedtime
statistics.elapsedindeed = elapsedindeed
statistics.elapsedseconds = elapsedseconds
--- general function
+-- general function .. we might split this module
function statistics.register(tag,fnc)
if statistics.enable and type(fnc) == "function" then
@@ -5387,6 +6243,8 @@ if not modules then modules = { } end modules ['trac-set'] = { -- might become u
license = "see context related readme files"
}
+-- maybe this should be util-set.lua
+
local type, next, tostring = type, next, tostring
local concat = table.concat
local format, find, lower, gsub, escapedpattern = string.format, string.find, string.lower, string.gsub, string.escapedpattern
@@ -5586,7 +6444,7 @@ function setters.show(t)
local value, default, modules = functions.value, functions.default, #functions
value = value == nil and "unset" or tostring(value)
default = default == nil and "unset" or tostring(default)
- t.report("%-30s modules: %2i default: %6s value: %6s",name,modules,default,value)
+ t.report("%-50s modules: %2i default: %6s value: %6s",name,modules,default,value)
end
end
t.report()
@@ -5678,17 +6536,31 @@ end)
-- experiment
-local flags = environment and environment.engineflags
+if environment then
-if flags then
- if trackers and flags.trackers then
- setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
- -- t_enable(flags.trackers)
- end
- if directives and flags.directives then
- setters.initialize("flags","directives", settings_to_hash(flags.directives))
- -- d_enable(flags.directives)
+ -- The engineflags are known earlier than environment.arguments but maybe we
+ -- need to handle them both as the latter are parsed differently. The c: prefix
+ -- is used by mtx-context to isolate the flags from those that concern luatex.
+
+ local engineflags = environment.engineflags
+
+ if engineflags then
+ if trackers then
+ local list = engineflags["c:trackers"] or engineflags["trackers"]
+ if type(list) == "string" then
+ setters.initialize("flags","trackers",settings_to_hash(list))
+ -- t_enable(list)
+ end
+ end
+ if directives then
+ local list = engineflags["c:directives"] or engineflags["directives"]
+ if type(list) == "string" then
+ setters.initialize("flags","directives", settings_to_hash(list))
+ -- d_enable(list)
+ end
+ end
end
+
end
-- here
@@ -5741,10 +6613,7 @@ local next, type = next, type
local setmetatableindex = table.setmetatableindex
---[[ldx--
-This is a prelude to a more extensive logging module. We no longer
-provide xml based logging as parsing is relatively easy anyway.
---ldx]]--
+
logs = logs or { }
local logs = logs
@@ -6560,7 +7429,8 @@ local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
local unquoted, quoted = string.unquoted, string.quoted
-local concat = table.concat
+local concat, insert, remove = table.concat, table.insert, table.remove
+local loadedluacode = utilities.lua.loadedluacode
-- precautions
@@ -6578,8 +7448,28 @@ if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaon
for k=3,#arg do
arg[k-2] = arg[k]
end
- arg[#arg] = nil -- last
- arg[#arg] = nil -- pre-last
+ remove(arg) -- last
+ remove(arg) -- pre-last
+end
+
+-- This is an ugly hack but it permits symlinking a script (say 'context') to 'mtxrun' as in:
+--
+-- ln -s /opt/minimals/tex/texmf-linux-64/bin/mtxrun context
+--
+-- The special mapping hack is needed because 'luatools' boils down to 'mtxrun --script base'
+-- but it's unlikely that there will be more of this
+
+do
+
+ local originalzero = file.basename(arg[0])
+ local specialmapping = { luatools = "base" }
+
+ if originalzero ~= "mtxrun" and originalzero ~= "mtxrun.lua" then
+ arg[0] = specialmapping[originalzero] or originalzero
+ insert(arg,0,"--script")
+ insert(arg,0,"mtxrun")
+ end
+
end
-- environment
@@ -6619,6 +7509,8 @@ local mt = {
setmetatable(environment,mt)
+-- context specific arguments (in order not to confuse the engine)
+
function environment.initializearguments(arg)
local arguments, files = { }, { }
environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
@@ -6627,10 +7519,12 @@ function environment.initializearguments(arg)
if index > 0 then
local flag, value = match(argument,"^%-+(.-)=(.-)$")
if flag then
+ flag = gsub(flag,"^c:","")
arguments[flag] = unquoted(value or "")
else
flag = match(argument,"^%-+(.+)")
if flag then
+ flag = gsub(flag,"^c:","")
arguments[flag] = true
else
files[#files+1] = argument
@@ -6650,7 +7544,7 @@ end
-- tricky: too many hits when we support partials unless we add
-- a registration of arguments so from now on we have 'partial'
-function environment.argument(name,partial)
+function environment.getargument(name,partial)
local arguments, sortedflags = environment.arguments, environment.sortedflags
if arguments[name] then
return arguments[name]
@@ -6673,6 +7567,8 @@ function environment.argument(name,partial)
return nil
end
+environment.argument = environment.getargument
+
function environment.splitarguments(separator) -- rather special, cut-off before separator
local done, before, after = false, { }, { }
local originalarguments = environment.originalarguments
@@ -6758,7 +7654,7 @@ function environment.texfile(filename)
return resolvers.findfile(filename,'tex')
end
-function environment.luafile(filename)
+function environment.luafile(filename) -- needs checking
local resolved = resolvers.findfile(filename,'tex') or ""
if resolved ~= "" then
return resolved
@@ -6770,13 +7666,16 @@ function environment.luafile(filename)
return resolvers.findfile(filename,'luatexlibs') or ""
end
-environment.loadedluacode = loadfile -- can be overloaded
+local function checkstrip(filename)
+ local modu = modules[file.nameonly(filename)]
+ return modu and modu.dataonly
+end
function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
filename = file.replacesuffix(filename, "lua")
local fullname = environment.luafile(filename)
if fullname and fullname ~= "" then
- local data = environment.loadedluacode(fullname)
+ local data = loadedluacode(fullname,checkstrip,filename)
if trace_locating then
report_lua("loading file %s%s", fullname, not data and " failed" or "")
elseif not silent then
@@ -6874,21 +7773,7 @@ local trace_entities = false trackers.register("xml.entities", function(v) trac
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
---[[ldx--
-The parser used here is inspired by the variant discussed in the lua book, but
-handles comment and processing instructions, has a different structure, provides
-parent access; a first version used different trickery but was less optimized so we
-went this route. First we had a find based parser, now we have an lpeg based one.
-The find based parser can be found in l-xml-edu.lua along with other older code.
-
-Beware, the interface may change. For instance at, ns, tg, dt may get more
-verbose names. Once the code is stable we will also remove some tracing and
-optimize the code.
-
-I might even decide to reimplement the parser using the latest lpeg trickery
-as the current variant was written when lpeg showed up and it's easier now to
-build tables in one go.
---ldx]]--
xml = xml or { }
local xml = xml
@@ -6898,46 +7783,25 @@ local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
+local utfchar = utf.char
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
---[[ldx--
-First a hack to enable namespace resolving. A namespace is characterized by
-a url. The following function associates a namespace prefix with a
-pattern. We use lpeg, which in this case is more than twice as fast as a
-find based solution where we loop over an array of patterns. Less code and
-much cleaner.
---ldx]]--
+
xml.xmlns = xml.xmlns or { }
local check = P(false)
local parse = check
---[[ldx--
-The next function associates a namespace prefix with an url. This
-normally happens independent of parsing.
-
-xml.registerns("mml","mathml")
-
---ldx]]--
function xml.registerns(namespace, pattern) -- pattern can be an lpeg
check = check + C(P(lower(pattern))) / namespace
parse = P { P(check) + 1 * V(1) }
end
---[[ldx--
-The next function also registers a namespace, but this time we map a
-given namespace prefix onto a registered one, using the given
-url. This is used for attributes like xmlns:m.
-
-xml.checkns("m","http://www.w3.org/mathml")
-
---ldx]]--
function xml.checkns(namespace,url)
local ns = lpegmatch(parse,lower(url))
@@ -6946,66 +7810,15 @@ function xml.checkns(namespace,url)
end
end
---[[ldx--
-Next we provide a way to turn an url into a registered
-namespace. This is used for the xmlns attribute.
-
-resolvedns = xml.resolvens("http://www.w3.org/mathml")
-
-This returns mml.
---ldx]]--
function xml.resolvens(url)
return lpegmatch(parse,lower(url)) or ""
end
---[[ldx--
-A namespace in an element can be remapped onto the registered
-one efficiently by using the xml.xmlns table.
---ldx]]--
-
---[[ldx--
-This version uses lpeg. We follow the same approach as before, stack and top and
-such. This version is about twice as fast which is mostly due to the fact that
-we don't have to prepare the stream for cdata, doctype etc etc. This variant
-is dedicated to Luigi Scarso, who challenged me with 40 megabyte files that
-took 12.5 seconds to load (1.5 for file io and the rest for tree building). With
-the lpeg implementation we got that down to less than 7.3 seconds. Loading the 14
-ConTeXt interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.
-
-Next comes the parser. The rather messy doctype definition comes in many
-disguises so it is no surprise that later on we have to dedicate quite some
-lpeg code to it.
-
-
-The code may look a bit complex but this is mostly due to the fact that we
-resolve namespaces and attach metatables. There is only one public function:
-
-local x = xml.convert(somestring)
-
-An optional second boolean argument tells this function not to create a root
-element.
-
-Valid entities are:
-
-
---ldx]]--
+
+
+
-- not just one big nested table capture (lpeg overflow)
@@ -7220,15 +8033,7 @@ local privates_n = {
-- keeps track of defined ones
}
-local function escaped(s)
- if s == "" then
- return ""
- else -- if utffind(s,privates_u) then
- return (utfgsub(s,".",privates_u))
- -- else
- -- return s
- end
-end
+local escaped = utf.remapper(privates_u)
local function unescaped(s)
local p = privates_n[s]
@@ -7243,13 +8048,7 @@ local function unescaped(s)
return p
end
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_p))
- end
-end
+local unprivatized = utf.remapper(privates_p)
xml.privatetoken = unescaped
xml.unprivatized = unprivatized
@@ -7589,7 +8388,12 @@ local function _xmlconvert_(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler(format("load error: %s",errorstr))
+ local currentresource = settings.currentresource
+ if currentresource and currentresource ~= "" then
+ xml.errorhandler(format("load error in [%s]: %s",currentresource,errorstr))
+ else
+ xml.errorhandler(format("load error: %s",errorstr))
+ end
end
end
else
@@ -7634,7 +8438,7 @@ function xmlconvert(data,settings)
if ok then
return result
else
- return _xmlconvert_("")
+ return _xmlconvert_("",settings)
end
end
@@ -7655,10 +8459,7 @@ function xml.inheritedconvert(data,xmldata) -- xmldata is parent
return xc
end
---[[ldx--
-
Packaging data in an xml like table is done with the following
-function. Maybe it will go away (when not used).
---ldx]]--
+
function xml.is_valid(root)
return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
@@ -7677,11 +8478,7 @@ end
xml.errorhandler = report_xml
---[[ldx--
-We cannot load an lpeg from a filehandle so we need to load
-the whole file first. The function accepts a string representing
-a filename or a file handle.
---ldx]]--
+
function xml.load(filename,settings)
local data = ""
@@ -7695,13 +8492,17 @@ function xml.load(filename,settings)
elseif filename then -- filehandle
data = filename:read("*all")
end
- return xmlconvert(data,settings)
+ if settings then
+ settings.currentresource = filename
+ local result = xmlconvert(data,settings)
+ settings.currentresource = nil
+ return result
+ else
+ return xmlconvert(data,{ currentresource = filename })
+ end
end
---[[ldx--
-
When we inject new elements, we need to convert strings to
-valid trees, which is what the next function does.
---ldx]]--
+
local no_root = { no_root = true }
@@ -7714,11 +8515,7 @@ function xml.toxml(data)
end
end
---[[ldx--
-
For copying a tree we use a dedicated function instead of the
-generic table copier. Since we know what we're dealing with we
-can speed up things a bit. The second argument is not to be used!
---ldx]]--
+
local function copy(old,tables)
if old then
@@ -7742,13 +8539,7 @@ end
xml.copy = copy
---[[ldx--
-In ConTeXt serializing the tree or parts of the tree is a major
-activity which is why the following function is pretty optimized, resulting
-in a few more lines of code than needed. The variant that uses the formatting
-function for all components is about 15% slower than the concatenating
-alternative.
---ldx]]--
+
-- todo: add when not present
@@ -7761,15 +8552,12 @@ function xml.checkbom(root) -- can be made faster
return
end
end
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 1, { special = true, ns = "", tg = "@pi@", dt = { "xml version='1.0' standalone='yes'" } } )
insert(dt, 2, "\n" )
end
end
---[[ldx--
-
At the cost of some 25% runtime overhead you can first convert the tree to a string
-and then handle the lot.
How you deal with saving data depends on your preferences. For a 40 MB database
-file the timing on a 2.3 Core Duo are as follows (time in seconds):
-
-
-1.3 : load data from file to string
-6.1 : convert string into tree
-5.3 : saving in file using xmlsave
-6.8 : converting to string using xml.tostring
-3.6 : saving converted string in file
-
-
Beware, these were timing with the old routine but measurements will not be that
-much different I guess.
---ldx]]--
-- maybe this will move to lxml-xml
@@ -8054,10 +8828,7 @@ xml.newhandlers = newhandlers
xml.serialize = serialize
xml.tostring = xmltostring
---[[ldx--
-The next function operates on the content only and needs a handle function
-that accepts a string.
---ldx]]--
+
local function xmlstring(e,handle)
if not handle or (e.special and e.tg ~= "@rt@") then
@@ -8076,9 +8847,7 @@ end
xml.string = xmlstring
---[[ldx--
-
A few helpers:
---ldx]]--
+
function xml.settings(e)
@@ -8122,11 +8891,7 @@ function xml.name(root)
end
end
---[[ldx--
-
The next helper erases an element but keeps the table as it is,
-and since empty strings are not serialized (effectively) it does
-not harm. Copying the table would take more time. Usage:
---ldx]]--
+
function xml.erase(dt,k)
if dt then
@@ -8138,13 +8903,7 @@ function xml.erase(dt,k)
end
end
---[[ldx--
-
The next helper assigns a tree (or string). Usage:
-
-dt[k] = xml.assign(root) or xml.assign(dt,k,root)
-
---ldx]]--
function xml.assign(dt,k,root)
if dt and k then
@@ -8157,20 +8916,14 @@ end
-- the following helpers may move
---[[ldx--
-
The next helper assigns a tree (or string). Usage:
-
-xml.tocdata(e)
-xml.tocdata(e,"error")
-
---ldx]]--
+
function xml.tocdata(e,wrapper) -- a few more in the aux module
local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
 whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
- local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e }
+ local t = { special = true, ns = "", tg = "@cd@", at = { }, rn = "", dt = { whatever }, __p__ = e }
setmetatable(t,getmetatable(e))
e.dt = { t }
end
@@ -8225,7 +8978,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-pth'] = {
+if not modules then modules = { } end modules ['lxml-lpt'] = {
version = 1.001,
comment = "this module is the basis for the lxml-* ones",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -8246,28 +8999,9 @@ local setmetatableindex = table.setmetatableindex
-- beware, this is not xpath ... e.g. position is different (currently) and
-- we have reverse-sibling as reversed preceding sibling
---[[ldx--
-This module can be used stand alone but also inside MkIV in
-which case it hooks into the tracker code. Therefore we provide a few
-functions that set the tracers. Here we overload a previously defined
-function.
-
-If I can get in the mood I will make a variant that is XSLT compliant
-but I wonder if it makes sense.
---ldx]]--
-
---[[ldx--
-Especially the lpath code is experimental, we will support some of xpath, but
-only things that make sense for us; as compensation it is possible to hook in your
-own functions. Apart from preprocessing content for ConTeXt we also need
-this module for process management, like handling ctx and rlx
-files.
-
-a/b/c /*/c
-a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
-a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
---ldx]]--
+
+
+
local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end
local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
@@ -8275,11 +9009,7 @@ local trace_lprofile = false if trackers then trackers.register("xml.profile",
local report_lpath = logs.reporter("xml","lpath")
---[[ldx--
-We've now arrived at an interesting part: accessing the tree using a subset
-of xpath and since we're not compatible we call it lpath. We
-will explain more about its usage in other documents.
---ldx]]--
+
local xml = xml
@@ -8731,14 +9461,23 @@ local lp_builtin = P (
-- for the moment we keep namespaces with attributes
local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
-local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end)
-local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end)
+
+-- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end
+-- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end
+
+local lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0"
+local lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))"
+
local lp_fastpos = lp_fastpos_n + lp_fastpos_p
+
local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
-local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
- return t .. "("
-end
+-- local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
+-- return t .. "("
+-- end
+
+-- local lp_lua_function = (R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / "%0("
+local lp_lua_function = Cs((R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(")) / "%0"
local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling
if expressions[t] then
@@ -9254,9 +9993,7 @@ end
xml.applylpath = applylpath -- takes a table as first argment, which is what xml.filter will do
---[[ldx--
-
This is the main filter function. It returns whatever is asked for.
---ldx]]--
+
function xml.filter(root,pattern) -- no longer funny attribute handling here
return applylpath(root,pattern)
@@ -9354,12 +10091,12 @@ xml.selection = selection -- new method, simple handle
-- generic function finalizer (independant namespace)
-local function dofunction(collected,fnc)
+local function dofunction(collected,fnc,...)
if collected then
local f = functions[fnc]
if f then
for c=1,#collected do
- f(collected[c])
+ f(collected[c],...)
end
else
report_lpath("unknown function '%s'",fnc)
@@ -9460,21 +10197,7 @@ expressions.tag = function(e,n) -- only tg
end
end
---[[ldx--
-Often using an iterator looks nicer in the code than passing handler
-functions. The Lua book describes how to use coroutines for that
-purpose. This permits code like:
-
-for r, d, k in xml.elements(xml.load('text.xml'),"title") do
- print(d[k]) -- old method
-end
-for e in xml.collected(xml.load('text.xml'),"title") do
- print(e) -- new one
-end
-
---ldx]]--
local wrap, yield = coroutine.wrap, coroutine.yield
@@ -9515,6 +10238,32 @@ function xml.inspect(collection,pattern)
end
end
+-- texy (see xfdf):
+
+local function split(e)
+ local dt = e.dt
+ if dt then
+ for i=1,#dt do
+ local dti = dt[i]
+ if type(dti) == "string" then
+ dti = gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1")
+ dti = gsub(dti,"[\n\r]+","\n\n")
+ dt[i] = dti
+ else
+ split(dti)
+ end
+ end
+ end
+ return e
+end
+
+function xml.finalizers.paragraphs(c)
+ for i=1,#c do
+ split(c[i])
+ end
+ return c
+end
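+
+-- A hedged usage sketch (file name and path are made up): collect some elements
+-- and let the finalizer collapse their line breaks into paragraph breaks:
+--
+-- local root = xml.load("form.xfdf")
+-- local values = xml.applylpath(root,"/xfdf/fields/field/value")
+-- if values then
+--     xml.finalizers.paragraphs(values)
+-- end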
+
end -- of closure
@@ -9539,13 +10288,7 @@ local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, l
lpegpatterns.xml = lpegpatterns.xml or { }
local xmlpatterns = lpegpatterns.xml
---[[ldx--
-The following helper functions best belong to the lxml-ini
-module. Some are here because we need them in the mk
-document and other manuals, others came up when playing with
-this module. Since this module is also used in mtxrun we've
-put them here instead of loading more modules there than needed.
---ldx]]--
+
local function xmlgsub(t,old,new) -- will be replaced
local dt = t.dt
@@ -9731,9 +10474,7 @@ function xml.processattributes(root,pattern,handle)
return collected
end
---[[ldx--
-
The following functions collect elements and texts.
---ldx]]--
+
-- are these still needed -> lxml-cmp.lua
@@ -9772,9 +10513,7 @@ function xml.collect_tags(root, pattern, nonamespace)
end
end
---[[ldx--
-
We've now arrived at the functions that manipulate the tree.
---ldx]]--
+
local no_root = { no_root = true }
@@ -10160,9 +10899,7 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
end
---[[ldx--
-
Helper (for q2p).
---ldx]]--
+
function xml.cdatatotext(e)
local dt = e.dt
@@ -10259,9 +10996,7 @@ end
-- xml.addentitiesdoctype(x,"hexadecimal")
-- print(x)
---[[ldx--
-
Here are a few synonyms.
---ldx]]--
+
xml.all = xml.each
xml.insert = xml.insertafter
@@ -10852,7 +11587,7 @@ local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string
local concat = table.concat
local next, type = next, type
-local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
@@ -11202,12 +11937,14 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
for s in gmatch(str,"[^,]+") do
s = validate(s)
if s then
- n = n + 1 ; t[n] = s
+ n = n + 1
+ t[n] = s
end
end
else
for s in gmatch(str,"[^,]+") do
- n = n + 1 ; t[n] = s
+ n = n + 1
+ t[n] = s
end
end
if trace_expansions then
@@ -11221,7 +11958,7 @@ end
-- We could make the previous one public.
local function validate(s)
- s = collapsepath(s) -- already keeps the //
+ s = collapsepath(s) -- already keeps the trailing / and //
return s ~= "" and not find(s,"^!*unset/*$") and s
end
@@ -11559,7 +12296,7 @@ local resolvers = resolvers
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
-local fileextname = file.extname
+local suffixonly = file.suffixonly
local formats = allocate()
local suffixes = allocate()
@@ -11814,7 +12551,7 @@ function resolvers.formatofvariable(str)
end
function resolvers.formatofsuffix(str) -- of file
- return suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
+ return suffixmap[suffixonly(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
end
function resolvers.variableofformat(str)
@@ -11826,7 +12563,7 @@ function resolvers.variableofformatorsuffix(str)
if v then
return v
end
- v = suffixmap[fileextname(str)]
+ v = suffixmap[suffixonly(str)]
if v then
return formats[v]
end
@@ -11847,21 +12584,7 @@ if not modules then modules = { } end modules ['data-tmp'] = {
license = "see context related readme files"
}
---[[ldx--
-This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. Best is to set the
-following variable. When not set, the usual paths will be
-checked. Personally I prefer the (users) temporary path.
-
-Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.
---ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
local serialize, serializetofile = table.serialize, table.tofile
@@ -12396,11 +13119,12 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local filedirname = file.dirname
local filebasename = file.basename
-local fileextname = file.extname
+local suffixonly = file.suffixonly
local filejoin = file.join
local collapsepath = file.collapsepath
local joinpath = file.joinpath
local allocate = utilities.storage.allocate
+local settings_to_array = utilities.parsers.settings_to_array
local setmetatableindex = table.setmetatableindex
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -12424,7 +13148,7 @@ resolvers.cacheversion = '1.0.1'
resolvers.configbanner = ''
resolvers.homedir = environment.homedir
resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
-resolvers.luacnfname = 'texmfcnf.lua'
+resolvers.luacnfname = "texmfcnf.lua"
resolvers.luacnfstate = "unknown"
-- The web2c tex binaries as well as kpse have built in paths for the configuration
@@ -12696,7 +13420,7 @@ end
local function identify_configuration_files()
local specification = instance.specification
if #specification == 0 then
- local cnfspec = getenv('TEXMFCNF')
+ local cnfspec = getenv("TEXMFCNF")
if cnfspec == "" then
cnfspec = resolvers.luacnfspec
resolvers.luacnfstate = "default"
@@ -12784,7 +13508,7 @@ local function load_configuration_files()
-- we push the value into the main environment (osenv) so
-- that it takes precedence over the default one and therefore
-- also over following definitions
- resolvers.setenv('TEXMFCNF',cnfspec) -- resolves prefixes
+ resolvers.setenv("TEXMFCNF",cnfspec) -- resolves prefixes
-- we now identify and load the specified configuration files
instance.specification = { }
identify_configuration_files()
@@ -12832,10 +13556,11 @@ end
local function locate_file_databases()
-- todo: cache:// and tree:// (runtime)
- local texmfpaths = resolvers.expandedpathlist('TEXMF')
+ local texmfpaths = resolvers.expandedpathlist("TEXMF")
if #texmfpaths > 0 then
for i=1,#texmfpaths do
local path = collapsepath(texmfpaths[i])
+ path = gsub(path,"/+$","") -- in case $HOME expands to something with a trailing /
local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
if stripped ~= "" then
local runtime = stripped == path
@@ -12964,9 +13689,9 @@ function resolvers.prependhash(type,name,cache)
end
function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
- local t = resolvers.splitpath(getenv('TEXMF'))
+ local t = resolvers.splitpath(getenv("TEXMF")) -- okay?
insert(t,1,specification)
- local newspec = concat(t,";")
+ local newspec = concat(t,",") -- not ;
if instance.environment["TEXMF"] then
instance.environment["TEXMF"] = newspec
elseif instance.variables["TEXMF"] then
@@ -13041,14 +13766,19 @@ function resolvers.resetextrapath()
end
function resolvers.registerextrapath(paths,subpaths)
+ paths = settings_to_array(paths)
+ subpaths = settings_to_array(subpaths)
local ep = instance.extra_paths or { }
local oldn = #ep
local newn = oldn
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
+ local nofpaths = #paths
+ local nofsubpaths = #subpaths
+ if nofpaths > 0 then
+ if nofsubpaths > 0 then
+ for i=1,nofpaths do
+ local p = paths[i]
+ for j=1,nofsubpaths do
+ local s = subpaths[j]
local ps = p .. "/" .. s
if not done[ps] then
newn = newn + 1
@@ -13058,7 +13788,8 @@ function resolvers.registerextrapath(paths,subpaths)
end
end
else
- for p in gmatch(paths,"[^,]+") do
+ for i=1,nofpaths do
+ local p = paths[i]
if not done[p] then
newn = newn + 1
ep[newn] = resolvers.cleanpath(p)
@@ -13066,10 +13797,10 @@ function resolvers.registerextrapath(paths,subpaths)
end
end
end
- elseif subpaths and subpaths ~= "" then
+ elseif nofsubpaths > 0 then
for i=1,oldn do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
+ for j=1,nofsubpaths do
+ local s = subpaths[j]
local ps = ep[i] .. "/" .. s
if not done[ps] then
newn = newn + 1
@@ -13147,18 +13878,21 @@ function resolvers.expandedpathlist(str)
return { }
elseif instance.savelists then
str = lpegmatch(dollarstripper,str)
- if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
- instance.lists[str] = expandedpathfromlist(lst)
- end
- return instance.lists[str]
+ local lists = instance.lists
+ local lst = lists[str]
+ if not lst then
+ local l = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ lst = expandedpathfromlist(l)
+ lists[str] = lst
+ end
+ return lst
else
local lst = resolvers.splitpath(resolvers.expansion(str))
return made_list(instance,expandedpathfromlist(lst))
end
end
-function resolvers.expandedpathlistfromvariable(str) -- brrr
+function resolvers.expandedpathlistfromvariable(str) -- brrr / could also have cleaner ^!! /$ //
str = lpegmatch(dollarstripper,str)
local tmp = resolvers.variableofformatorsuffix(str)
return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
@@ -13315,7 +14049,7 @@ local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
local collect_instance_files
local function find_analyze(filename,askedformat,allresults)
- local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ local filetype, wantedfiles, ext = '', { }, suffixonly(filename)
-- too tricky as filename can be bla.1.2.3:
--
-- if not suffixmap[ext] then
@@ -13393,7 +14127,7 @@ local function find_qualified(filename,allresults) -- this one will be split too
if trace_detail then
report_resolving("locating qualified file '%s'", filename)
end
- local forcedname, suffix = "", fileextname(filename)
+ local forcedname, suffix = "", suffixonly(filename)
if suffix == "" then -- why
local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
if format_suffixes then
@@ -14063,6 +14797,8 @@ local gsub = string.gsub
local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+local joinpath, basename, dirname = file.join, file.basename, file.dirname
+local getmetatable, rawset, type = getmetatable, rawset, type
-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
@@ -14104,28 +14840,43 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
+ return cleanpath(basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.dirname((fullname ~= "" and fullname) or str))
+ return cleanpath(dirname((fullname ~= "" and fullname) or str))
end
prefixes.selfautoloc = function(str)
- return cleanpath(file.join(getenv('SELFAUTOLOC'),str))
+ return cleanpath(joinpath(getenv('SELFAUTOLOC'),str))
end
prefixes.selfautoparent = function(str)
- return cleanpath(file.join(getenv('SELFAUTOPARENT'),str))
+ return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str))
end
prefixes.selfautodir = function(str)
- return cleanpath(file.join(getenv('SELFAUTODIR'),str))
+ return cleanpath(joinpath(getenv('SELFAUTODIR'),str))
end
prefixes.home = function(str)
- return cleanpath(file.join(getenv('HOME'),str))
+ return cleanpath(joinpath(getenv('HOME'),str))
+end
+
+local function toppath()
+ local pathname = dirname(inputstack[#inputstack] or "")
+ if pathname == "" then
+ return "."
+ else
+ return pathname
+ end
+end
+
+resolvers.toppath = toppath
+
+prefixes.toppath = function(str)
+ return cleanpath(joinpath(toppath(),str))
end
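+
+-- A rough illustration (file names are made up): while /project/chapters/one.tex
+-- is being read, "toppath:figs/logo.pdf" resolves to /project/chapters/figs/logo.pdf;
+-- with an empty input stack it falls back to the current directory.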
prefixes.env = prefixes.environment
@@ -14161,6 +14912,8 @@ function resolvers.resetresolve(str)
resolved, abstract = { }, { }
end
+-- todo: use an lpeg (see data-lua for !! / stripper)
+
local function resolve(str) -- use schemes, this one is then for the commandline only
if type(str) == "table" then
local t = { }
@@ -14186,7 +14939,7 @@ end
resolvers.resolve = resolve
resolvers.unresolve = unresolve
-if os.uname then
+if type(os.uname) == "function" then
for k, v in next, os.uname() do
if not prefixes[k] then
@@ -14198,11 +14951,17 @@ end
if os.type == "unix" then
+ -- We need to distinguish between a prefix and something else: so we
+ -- have a special repath variant for linux. Also, when a new prefix is
+ -- defined, we need to remake the matcher.
+
local pattern
local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
local colon = P(":")
- local p
for k, v in table.sortedpairs(prefixes) do
if p then
p = P(k) + p
@@ -14211,9 +14970,6 @@ if os.type == "unix" then
end
end
pattern = Cs((p * colon + colon/";" + P(1))^0)
- if t then
- t[k] = v
- end
end
makepattern()
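+
+ -- For example, "home:bin:/usr/local/bin" keeps the known "home:" prefix but the
+ -- bare colon becomes a path separator: "home:bin;/usr/local/bin".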
@@ -14424,18 +15180,7 @@ local trace_cache = false trackers.register("resolvers.cache", functi
local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
---[[ldx--
-<p>Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).</p>
-
-<p>Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.</p>
-
-<p>Examples of usage can be found in the font related code.</p>
---ldx]]--
containers = containers or { }
local containers = containers
@@ -14670,11 +15415,7 @@ local trace_locating = false trackers.register("resolvers.locating", function(v
local report_zip = logs.reporter("resolvers","zip")
--- zip:///oeps.zip?name=bla/bla.tex
--- zip:///oeps.zip?tree=tex/texmf-local
--- zip:///texmf.zip?tree=/tex/texmf
--- zip:///texmf.zip?tree=/tex/texmf-local
--- zip:///texmf-mine.zip?tree=/tex/texmf-projects
+
local resolvers = resolvers
@@ -14999,7 +15740,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-crl'] = {
+if not modules then modules = { } end modules ['data-sch'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -15007,60 +15748,199 @@ if not modules then modules = { } end modules ['data-crl'] = {
license = "see context related readme files"
}
--- this one is replaced by data-sch.lua --
+local loadstring = loadstring
+local gsub, concat, format = string.gsub, table.concat, string.format
+local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-local gsub = string.gsub
+local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end)
+local report_schemes = logs.reporter("resolvers","schemes")
-local resolvers = resolvers
+local http = require("socket.http")
+local ltn12 = require("ltn12")
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
+local resolvers = resolvers
+local schemes = resolvers.schemes or { }
+resolvers.schemes = schemes
+
+local cleaners = { }
+schemes.cleaners = cleaners
+
+local threshold = 24 * 60 * 60
+
+directives.register("schemes.threshold", function(v) threshold = tonumber(v) or threshold end)
+
+function cleaners.none(specification)
+ return specification.original
+end
+
+function cleaners.strip(specification)
+ return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods
+end
+
+function cleaners.md5(specification)
+ return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
+end
+
+local cleaner = cleaners.strip
+
+directives.register("schemes.cleanmethod", function(v) cleaner = cleaners[v] or cleaners.strip end)
+
+function resolvers.schemes.cleanname(specification)
+ local hash = cleaner(specification)
+ if trace_schemes then
+ report_schemes("hashing %s to %s",specification.original,hash)
+ end
+ return hash
+end
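+
+-- For instance (the url is made up): with the default 'strip' cleaner a specification
+-- whose original is "http://example.com/foo/bar.tex" is cached under the name
+-- "http-example.com-foo-bar.tex"; the 'md5' cleaner gives a hex hash plus the suffix.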
-resolvers.curl = resolvers.curl or { }
-local curl = resolvers.curl
+local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { }
-local cached = { }
+local function runcurl(name,cachename) -- we use sockets instead of the curl library when possible
+ local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name
+ os.spawn(command)
+end
-local function runcurl(specification)
+local function fetch(specification)
local original = specification.original
- -- local scheme = specification.scheme
- local cleanname = gsub(original,"[^%a%d%.]+","-")
- local cachename = caches.setfirstwritablefile(cleanname,"curl")
+ local scheme = specification.scheme
+ local cleanname = schemes.cleanname(specification)
+ local cachename = caches.setfirstwritablefile(cleanname,"schemes")
if not cached[original] then
- if not io.exists(cachename) then
+ statistics.starttiming(schemes)
+ if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > (thresholds[scheme] or threshold)) then
cached[original] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
- os.spawn(command)
+ local handler = handlers[scheme]
+ if handler then
+ if trace_schemes then
+ report_schemes("fetching '%s', protocol '%s', method 'built-in'",original,scheme)
+ end
+ logs.flush()
+ handler(specification,cachename)
+ else
+ if trace_schemes then
+ report_schemes("fetching '%s', protocol '%s', method 'curl'",original,scheme)
+ end
+ logs.flush()
+ runcurl(original,cachename)
+ end
end
if io.exists(cachename) then
cached[original] = cachename
+ if trace_schemes then
+ report_schemes("using cached '%s', protocol '%s', cachename '%s'",original,scheme,cachename)
+ end
else
cached[original] = ""
+ if trace_schemes then
+ report_schemes("using missing '%s', protocol '%s'",original,scheme)
+ end
end
+ loaded[scheme] = loaded[scheme] + 1
+ statistics.stoptiming(schemes)
+ else
+ if trace_schemes then
+ report_schemes("reusing '%s', protocol '%s'",original,scheme)
+ end
+ reused[scheme] = reused[scheme] + 1
end
return cached[original]
end
--- old code: we could be cleaner using specification (see schemes)
-
local function finder(specification,filetype)
- return resolvers.methodhandler("finders",runcurl(specification),filetype)
+ return resolvers.methodhandler("finders",fetch(specification),filetype)
end
local opener = openers.file
local loader = loaders.file
-local function install(scheme)
- finders[scheme] = finder
- openers[scheme] = opener
- loaders[scheme] = loader
+local function install(scheme,handler,newthreshold)
+ handlers [scheme] = handler
+ loaded [scheme] = 0
+ reused [scheme] = 0
+ finders [scheme] = finder
+ openers [scheme] = opener
+ loaders [scheme] = loader
+ thresholds[scheme] = newthreshold or threshold
end
-resolvers.curl.install = install
+schemes.install = install
+
+local function http_handler(specification,cachename)
+ local tempname = cachename .. ".tmp"
+ local f = io.open(tempname,"wb")
+ local status, message = http.request {
+ url = specification.original,
+ sink = ltn12.sink.file(f)
+ }
+ if not status then
+ os.remove(tempname)
+ else
+ os.remove(cachename)
+ os.rename(tempname,cachename)
+ end
+ return cachename
+end
-install('http')
-install('https')
+install('http',http_handler)
+install('https') -- see pod
install('ftp')
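+
+-- A sketch of registering an extra scheme with a built-in fetcher; the 'dict'
+-- scheme and its handler are hypothetical and the last argument is a per-scheme
+-- threshold in seconds:
+--
+-- install('dict', function(specification,cachename)
+--     io.savedata(cachename,"looked up: " .. specification.original)
+-- end, 60)
+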
+statistics.register("scheme handling time", function()
+ local l, r, nl, nr = { }, { }, 0, 0
+ for k, v in table.sortedhash(loaded) do
+ if v > 0 then
+ nl = nl + 1
+ l[nl] = k .. ":" .. v
+ end
+ end
+ for k, v in table.sortedhash(reused) do
+ if v > 0 then
+ nr = nr + 1
+ r[nr] = k .. ":" .. v
+ end
+ end
+ local n = nl + nr
+ if n > 0 then
+ l = nl > 0 and concat(l) or "none"
+ r = nr > 0 and concat(r) or "none"
+ return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s",
+ statistics.elapsedtime(schemes), n, threshold, l, r)
+ else
+ return nil
+ end
+end)
+
+-- We provide a few more helpers:
+
+----- http = require("socket.http")
+local httprequest = http.request
+local toquery = url.toquery
+
+-- local function httprequest(url)
+-- return os.resultof(format("curl --silent %q", url))
+-- end
+
+local function fetchstring(url,data)
+ local q = data and toquery(data)
+ if q then
+ url = url .. "?" .. q
+ end
+ local reply = httprequest(url)
+ return reply -- just one argument
+end
+
+schemes.fetchstring = fetchstring
+
+function schemes.fetchtable(url,data)
+ local reply = fetchstring(url,data)
+ if reply then
+ local s = loadstring("return " .. reply)
+ if s then
+ return s()
+ end
+ end
+end
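+
+-- Usage sketch (the url is a placeholder): a data table is appended as a query
+-- string and fetchtable expects the reply to be a Lua table expression:
+--
+-- local reply = schemes.fetchstring("http://example.com/status",{ user = "test" })
+-- local info  = schemes.fetchtable ("http://example.com/status.lua")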
+
end -- of closure
@@ -15074,170 +15954,199 @@ if not modules then modules = { } end modules ['data-lua'] = {
license = "see context related readme files"
}
--- some loading stuff ... we might move this one to slot 2 depending
--- on the developments (the loaders must not trigger kpse); we could
--- of course use a more extensive lib path spec
+-- We overload the regular loader. We do so because we operate mostly in
+-- tds and use our own loader code. Alternatively we could use a more
+-- extensive definition of package.path and package.cpath but even then
+-- we're not done. Also, we now have better tracing.
+--
+-- -- local mylib = require("libtest")
+-- -- local mysql = require("luasql.mysql")
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local concat = table.concat
+
+local trace_libraries = false
+
+trackers.register("resolvers.libraries", function(v) trace_libraries = v end)
+trackers.register("resolvers.locating", function(v) trace_libraries = v end)
local report_libraries = logs.reporter("resolvers","libraries")
local gsub, insert = string.gsub, table.insert
+local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
local unpack = unpack or table.unpack
+local is_readable = file.is_readable
local resolvers, package = resolvers, package
-local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs'
-local clibformats = { 'lib' }
-
-local _path_, libpaths, _cpath_, clibpaths
-
-function package.libpaths()
- if not _path_ or package.path ~= _path_ then
- _path_ = package.path
- libpaths = file.splitpath(_path_,";")
+local libsuffixes = { 'tex', 'lua' }
+local clibsuffixes = { 'lib' }
+local libformats = { 'TEXINPUTS', 'LUAINPUTS' }
+local clibformats = { 'CLUAINPUTS' }
+
+local libpaths = nil
+local clibpaths = nil
+local libhash = { }
+local clibhash = { }
+local libextras = { }
+local clibextras = { }
+
+local pattern = Cs(P("!")^0 / "" * (P("/") * P(-1) / "/" + P("/")^1 / "/" + 1)^0)
+
+local function cleanpath(path) --hm, don't we have a helper for this?
+ return resolvers.resolve(lpegmatch(pattern,path))
+end
+
+local function getlibpaths()
+ if not libpaths then
+ libpaths = { }
+ for i=1,#libformats do
+ local paths = resolvers.expandedpathlistfromvariable(libformats[i])
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not libhash[path] then
+ libpaths[#libpaths+1] = path
+ libhash[path] = true
+ end
+ end
+ end
end
return libpaths
end
-function package.clibpaths()
- if not _cpath_ or package.cpath ~= _cpath_ then
- _cpath_ = package.cpath
- clibpaths = file.splitpath(_cpath_,";")
+local function getclibpaths()
+ if not clibpaths then
+ clibpaths = { }
+ for i=1,#clibformats do
+ local paths = resolvers.expandedpathlistfromvariable(clibformats[i])
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not clibhash[path] then
+ clibpaths[#clibpaths+1] = path
+ clibhash[path] = true
+ end
+ end
+ end
end
return clibpaths
end
-local function thepath(...)
- local t = { ... } t[#t+1] = "?.lua"
- local path = file.join(unpack(t))
- if trace_locating then
- report_libraries("! appending '%s' to 'package.path'",path)
+package.libpaths = getlibpaths
+package.clibpaths = getclibpaths
+
+function package.extralibpath(...)
+ local paths = { ... }
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not libhash[path] then
+ if trace_libraries then
+ report_libraries("! extra lua path '%s'",path)
+ end
+ libextras[#libextras+1] = path
+ libpaths[#libpaths +1] = path
+ end
end
- return path
end
-local p_libpaths, a_libpaths = { }, { }
-
-function package.appendtolibpath(...)
- insert(a_libpath,thepath(...))
+function package.extraclibpath(...)
+ local paths = { ... }
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not clibhash[path] then
+ if trace_libraries then
+ report_libraries("! extra lib path '%s'",path)
+ end
+ clibextras[#clibextras+1] = path
+ clibpaths[#clibpaths +1] = path
+ end
+ end
end
-function package.prependtolibpath(...)
- insert(p_libpaths,1,thepath(...))
+if not package.loaders[-2] then
+ -- use package-path and package-cpath
+ package.loaders[-2] = package.loaders[2]
end
--- beware, we need to return a loadfile result !
+local function loadedaslib(resolved,rawname)
+ return package.loadlib(resolved,"luaopen_" .. gsub(rawname,"%.","_"))
+end
-local function loaded(libpaths,name,simple)
- for i=1,#libpaths do -- package.path, might become option
- local libpath = libpaths[i]
- local resolved = gsub(libpath,"%?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.path': '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
+local function loadedbylua(name)
+ if trace_libraries then
+ report_libraries("! locating %q using normal loader",name)
end
+ return package.loaders[-2](name)
end
-package.loaders[2] = function(name) -- was [#package.loaders+1]
- if file.suffix(name) == "" then
- name = file.addsuffix(name,"lua") -- maybe a list
- if trace_locating then -- mode detail
- report_libraries("! locating '%s' with forced suffix",name)
- end
- else
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
- end
+local function loadedbyformat(name,rawname,suffixes,islib)
+ if trace_libraries then
+ report_libraries("! locating %q as %q using formats %q",rawname,name,concat(suffixes))
end
- for i=1,#libformats do
- local format = libformats[i]
+ for i=1,#suffixes do -- so we use findfile and not a lookup loop
+ local format = suffixes[i]
local resolved = resolvers.findfile(name,format) or ""
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'libformat path': '%s'",name,format)
+ if trace_libraries then
+ report_libraries("! checking for %q' using format %q",name,format)
end
if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located via environment: '%s'",name,resolved)
+ if trace_libraries then
+ report_libraries("! lib %q located on %q",name,resolved)
end
- return loadfile(resolved)
- end
- end
- -- libpaths
- local libpaths, clibpaths = package.libpaths(), package.clibpaths()
- local simple = gsub(name,"%.lua$","")
- local simple = gsub(simple,"%.","/")
- local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple)
- if resolved then
- return resolved
- end
- --
- local libname = file.addsuffix(simple,os.libsuffix)
- for i=1,#clibformats do
- -- better have a dedicated loop
- local format = clibformats[i]
- local paths = resolvers.expandedpathlistfromvariable(format)
- for p=1,#paths do
- local path = paths[p]
- local resolved = file.join(path,libname)
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'clibformat path': '%s'",libname,path)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
- end
- return package.loadlib(resolved,name)
+ if islib then
+ return loadedaslib(resolved,rawname)
+ else
+ return loadfile(resolved)
end
end
end
- for i=1,#clibpaths do -- package.path, might become option
- local libpath = clibpaths[i]
- local resolved = gsub(libpath,"?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
+end
+
+local function loadedbypath(name,rawname,paths,islib,what)
+ if trace_libraries then
+ report_libraries("! locating %q as %q on %q paths",rawname,name,what)
+ end
+ for p=1,#paths do
+ local path = paths[p]
+ local resolved = file.join(path,name)
+ if trace_libraries then -- more detail
+ report_libraries("! checking for %q using %q path %q",name,what,path)
end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
+ if is_readable(resolved) then
+ if trace_libraries then
+ report_libraries("! lib %q located on %q",name,resolved)
+ end
+ if islib then
+ return loadedaslib(resolved,rawname)
+ else
+ return loadfile(resolved)
end
- return package.loadlib(resolved,name)
- end
- end
- -- just in case the distribution is messed up
- if trace_loading then -- more detail
- report_libraries("! checking for '%s' using 'luatexlibs': '%s'",name)
- end
- local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or ""
- if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located by basename via environment: '%s'",name,resolved)
end
- return loadfile(resolved)
end
- if trace_locating then
- report_libraries('? unable to locate lib: %s',name)
- end
--- return "unable to locate " .. name
end
-resolvers.loadlualib = require
-
--- -- -- --
+local function notloaded(name)
+ if trace_libraries then
+ report_libraries("? unable to locate library %q",name)
+ end
+end
-package.obsolete = package.obsolete or { }
+package.loaders[2] = function(name)
+ local thename = gsub(name,"%.","/")
+ local luaname = file.addsuffix(thename,"lua")
+ local libname = file.addsuffix(thename,os.libsuffix)
+ return
+ loadedbyformat(luaname,name,libsuffixes, false)
+ or loadedbyformat(libname,name,clibsuffixes, true)
+ or loadedbypath (luaname,name,getlibpaths (),false,"lua")
+ or loadedbypath (luaname,name,getclibpaths(),false,"lua")
+ or loadedbypath (libname,name,getclibpaths(),true, "lib")
+ or loadedbylua (name)
+ or notloaded (name)
+end
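+
+-- In other words: require("foo.bar") now first tries foo/bar.lua via the tex and
+-- lua formats, then foo/bar.<libsuffix> as a compiled library, then the collected
+-- lua and lib paths, and only as a last resort the loader that was saved in
+-- slot -2 above.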
-package.append_libpath = appendtolibpath -- will become obsolete
-package.prepend_libpath = prependtolibpath -- will become obsolete
+-- package.loaders[3] = nil
+-- package.loaders[4] = nil
-package.obsolete.append_libpath = appendtolibpath -- will become obsolete
-package.obsolete.prepend_libpath = prependtolibpath -- will become obsolete
+resolvers.loadlualib = require
end -- of closure
@@ -15707,7 +16616,6 @@ function environment.make_format(name)
end
function environment.run_format(name,data,more)
- -- hm, rather old code here; we can now use the file.whatever functions
if name and name ~= "" then
local barename = file.removesuffix(name)
local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
@@ -15736,6 +16644,129 @@ function environment.run_format(name,data,more)
end
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['util-tpl'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code. Coming from dos and windows, I've always used %whatever%
+-- as template variables so let's stick to it. After all, it's easy to parse and stands
+-- out well. A double %% is turned into a regular %.
+
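+-- For instance (the key is made up):
+--
+--   utilities.templates.replace("hello %name%, %%100 done", { name = "world" })
+--
+-- gives "hello world, %100 done".
+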
+utilities.templates = utilities.templates or { }
+local templates = utilities.templates
+
+local trace_template = false trackers.register("templates.trace",function(v) trace_template = v end)
+local report_template = logs.reporter("template")
+
+local format = string.format
+local P, C, Cs, Carg, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.Carg, lpeg.match
+
+-- todo: make installable template.new
+
+local replacer
+
+local function replacekey(k,t,recursive)
+ local v = t[k]
+ if not v then
+ if trace_template then
+ report_template("unknown key %q",k)
+ end
+ return ""
+ else
+ if trace_template then
+ report_template("setting key %q to value %q",k,v)
+ end
+ if recursive then
+ return lpegmatch(replacer,v,1,t)
+ else
+ return v
+ end
+ end
+end
+
+local sqlescape = lpeg.replacer {
+ { "'", "''" },
+ { "\\", "\\\\" },
+ { "\r\n", "\\n" },
+ { "\r", "\\n" },
+ -- { "\t", "\\t" },
+}
+
+local escapers = {
+ lua = function(s)
+ return format("%q",s)
+ end,
+ sql = function(s)
+ return lpegmatch(sqlescape,s)
+ end,
+}
+
+local function replacekeyunquoted(s,t,how,recurse) -- ".. \" "
+ local escaper = how and escapers[how] or escapers.lua
+ return escaper(replacekey(s,t,recurse))
+end
+
+local single = P("%") -- test %test% test : resolves test
+local double = P("%%") -- test 10%% test : %% becomes %
+local lquoted = P("%[") -- test %[test]" test : resolves test with escaped "'s
+local rquoted = P("]%") --
+
+local escape = double / '%%'
+local nosingle = single / ''
+local nodouble = double / ''
+local nolquoted = lquoted / ''
+local norquoted = rquoted / ''
+
+local key = nosingle * (C((1-nosingle)^1 * Carg(1) * Carg(2) * Carg(3))/replacekey) * nosingle
+local unquoted = nolquoted * ((C((1 - norquoted)^1) * Carg(1) * Carg(2) * Carg(3))/replacekeyunquoted) * norquoted
+local any = P(1)
+
+ replacer = Cs((unquoted + escape + key + any)^0)
+
+local function replace(str,mapping,how,recurse)
+ if mapping then
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ else
+ return str
+ end
+end
+
+-- print(replace("test '%[x]%' test",{ x = [[a 'x' a]] }))
+-- print(replace("test '%[x]%' test",{ x = [[a 'x' a]] },'sql'))
+
+templates.replace = replace
+
+function templates.load(filename,mapping,how,recurse)
+ local data = io.loaddata(filename) or ""
+ if mapping and next(mapping) then
+ return replace(data,mapping,how,recurse)
+ else
+ return data
+ end
+end
+
+function templates.resolve(t,mapping,how,recurse)
+ if not mapping then
+ mapping = t
+ end
+ for k, v in next, t do
+ t[k] = replace(v,mapping,how,recurse)
+ end
+ return t
+end
+
+-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
+-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
+
+
end -- of closure
-- end library merge
@@ -15796,7 +16827,7 @@ own.libs = { -- order can be made better
-- 'data-bin.lua',
'data-zip.lua',
'data-tre.lua',
- 'data-crl.lua',
+ 'data-sch.lua',
'data-lua.lua',
'data-aux.lua', -- updater
'data-tmf.lua',
@@ -15804,6 +16835,8 @@ own.libs = { -- order can be made better
'luat-sta.lua',
'luat-fmt.lua',
+
+ 'util-tpl.lua',
}
-- We need this hack till luatex is fixed.
@@ -15824,7 +16857,7 @@ own.path = gsub(match(own.name,"^(.+)[\\/].-$") or ".","\\","/")
local ownpath, owntree = own.path, environment and environment.ownpath or own.path
-own.list = {
+own.list = { -- predictable paths
'.',
ownpath ,
ownpath .. "/../sources", -- HH's development path
@@ -15848,7 +16881,7 @@ local function locate_libs()
local filename = pth .. "/" .. lib
local found = lfs.isfile(filename)
if found then
- package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require
+ package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require (probably obsolete)
return pth
end
end
@@ -15980,6 +17013,7 @@ local helpinfo = [[
--var-value report value of variable
--find-file report file location
--find-path report path of file
+--show-package-path report package paths
--pattern=str filter variables
]]
@@ -16093,7 +17127,8 @@ function runners.execute_script(fullname,internal,nosplit)
elseif state == 'skip' then
return true
elseif state == "run" then
- local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), ""
+ local path, name, suffix = file.splitname(fullname)
+ local result = ""
if path ~= "" then
result = fullname
elseif name then
@@ -16104,7 +17139,7 @@ function runners.execute_script(fullname,internal,nosplit)
name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
- suffix = file.extname(name)
+ suffix = file.suffix(name)
end
if suffix == "" then
-- loop over known suffixes
@@ -16131,7 +17166,7 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local binary = runners.applications[file.extname(result)]
+ local binary = runners.applications[file.suffix(result)]
result = string.quoted(string.unquoted(result))
-- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
-- result = '"' .. result .. '"'
@@ -16324,7 +17359,7 @@ function resolvers.launch(str)
-- maybe we also need to test on mtxrun.launcher.suffix environment
-- variable or on windows consult the assoc and ftype vars and such
local launchers = runners.launchers[os.platform] if launchers then
- local suffix = file.extname(str) if suffix then
+ local suffix = file.suffix(str) if suffix then
local runner = launchers[suffix] if runner then
str = runner .. " " .. str
end
@@ -16383,7 +17418,7 @@ function runners.find_mtx_script(filename)
end
filename = file.addsuffix(filename,"lua")
local basename = file.removesuffix(file.basename(filename))
- local suffix = file.extname(filename)
+ local suffix = file.suffix(filename)
-- qualified path, raw name
local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename
if fullname and fullname ~= "" then
@@ -16438,7 +17473,7 @@ function runners.execute_ctx_script(filename,...)
runners.register_arguments(...)
local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
- if file.extname(fullname) == "cld" then
+ if file.suffix(fullname) == "cld" then
-- handy in editors where we force --autopdf
report("running cld script: %s",filename)
table.insert(arguments,1,fullname)
@@ -16546,6 +17581,21 @@ function runners.timed(action)
statistics.timed(action)
end
+function runners.associate(filename)
+ os.launch(filename)
+end
+
+function runners.gethelp(filename)
+ local url = environment.argument("url")
+ if url and url ~= "" then
+ local command = string.gsub(environment.argument("command") or "unknown","^%s*\\*(.-)%s*$","%1")
+ url = utilities.templates.replace(url,{ command = command })
+ os.launch(url)
+ else
+ report("no --url given")
+ end
+end
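+
+-- A typical invocation (the url is a placeholder; %command% is filled in by the
+-- template mechanism):
+--
+--   mtxrun --gethelp --url="http://example.com/explain/%command%" --command="\framed"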
+
-- this is a bit dirty ... first we store the first filename and next we
-- split the arguments so that we only see the ones meant for this script
-- ... later we will use the second half
@@ -16648,7 +17698,18 @@ else
end
-if e_argument("selfmerge") then
+if e_argument("script") or e_argument("scripts") then
+
+ -- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
+ if is_mkii_stub then
+ ok = runners.execute_script(filename,false,true)
+ else
+ ok = runners.execute_ctx_script(filename)
+ end
+
+elseif e_argument("selfmerge") then
-- embed used libraries
@@ -16671,23 +17732,25 @@ elseif e_argument("selfupdate") then
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif e_argument("ctxlua") or e_argument("internal") then
+elseif e_argument("show-package-path") or e_argument("show-package-paths") then
- -- run a script by loading it (using libs)
+ local l = package.libpaths()
+ local c = package.clibpaths()
- runners.loadbase()
- ok = runners.execute_script(filename,true)
+ for i=1,#l do
+ report("package lib path %s: %s",i,l[i])
+ end
-elseif e_argument("script") or e_argument("scripts") then
+ for i=1,#c do
+ report("package clib path %s: %s",i,c[i])
+ end
- -- run a script by loading it (using libs), pass args
+elseif e_argument("ctxlua") or e_argument("internal") then
+
+ -- run a script by loading it (using libs)
runners.loadbase()
- if is_mkii_stub then
- ok = runners.execute_script(filename,false,true)
- else
- ok = runners.execute_ctx_script(filename)
- end
+ ok = runners.execute_script(filename,true)
elseif e_argument("execute") then
@@ -16715,6 +17778,14 @@ elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
+elseif e_argument("associate") then
+
+ runners.associate(filename)
+
+elseif e_argument("gethelp") then
+
+ runners.gethelp()
+
elseif e_argument("makestubs") then
-- make stubs (deprecated)
@@ -16806,7 +17877,7 @@ elseif e_argument("find-path") then
elseif e_argument("expand-braces") then
- -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
resolvers.load("nofiles")
runners.register_arguments(filename)
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 108f2a8a1..e6bbbe2b5 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -8,6 +8,11 @@ if not modules then modules = { } end modules ['mtxrun'] = {
license = "see context related readme files"
}
+-- if not lpeg then require("lpeg") end
+-- if not md5 then require("md5") end
+-- if not lfs then require("lfs") end
+-- if not texconfig then texconfig = { } end
+
-- one can make a stub:
--
-- #!/bin/sh
@@ -150,11 +155,28 @@ function string.topattern(str,lowercase,strict)
end
end
+
+function string.valid(str,default)
+ return (type(str) == "string" and str ~= "" and str) or default or nil
+end
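+
+-- For example:
+--
+-- string.valid("")        -- nil
+-- string.valid("","x")    -- "x"
+-- string.valid("abc","x") -- "abc"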
+
-- obsolete names:
string.quote = string.quoted
string.unquote = string.unquoted
+-- handy fallback
+
+string.itself = function(s) return s end
+
+-- also handy (see utf variant)
+
+local pattern = Ct(C(1)^0)
+
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
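+
+-- Note that this splits into single bytes, not utf characters:
+--
+-- string.totable("abc") -- { "a", "b", "c" }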
+
end -- of closure
@@ -168,7 +190,8 @@ if not modules then modules = { } end modules ['l-table'] = {
license = "see context related readme files"
}
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
+local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+local table, string = table, string
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
@@ -179,6 +202,8 @@ local getinfo = debug.getinfo
-- impact on ConTeXt was not that large; the remaining ipairs already
-- have been replaced. In a similar fashion we also hardly used pairs.
--
+-- Hm, actually ipairs was retained, but we no longer use it anyway.
+--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -238,12 +263,16 @@ function table.strip(tab)
end
function table.keys(t)
- local keys, k = { }, 0
- for key, _ in next, t do
- k = k + 1
- keys[k] = key
+ if t then
+ local keys, k = { }, 0
+ for key, _ in next, t do
+ k = k + 1
+ keys[k] = key
+ end
+ return keys
+ else
+ return { }
end
- return keys
end
local function compare(a,b)
@@ -256,41 +285,49 @@ local function compare(a,b)
end
local function sortedkeys(tab)
- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- s = s + 1
- srt[s] = key
- if category == 3 then
- -- no further check
- else
- local tkey = type(key)
- if tkey == "string" then
- category = (category == 2 and 3) or 1
- elseif tkey == "number" then
- category = (category == 1 and 3) or 2
+ if tab then
+ local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
+ for key,_ in next, tab do
+ s = s + 1
+ srt[s] = key
+ if category == 3 then
+ -- no further check
else
- category = 3
+ local tkey = type(key)
+ if tkey == "string" then
+ category = (category == 2 and 3) or 1
+ elseif tkey == "number" then
+ category = (category == 1 and 3) or 2
+ else
+ category = 3
+ end
end
end
- end
- if category == 0 or category == 3 then
- sort(srt,compare)
+ if category == 0 or category == 3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
else
- sort(srt)
+ return { }
end
- return srt
end
local function sortedhashkeys(tab) -- fast one
- local srt, s = { }, 0
- for key,_ in next, tab do
- if key then
- s= s + 1
- srt[s] = key
+ if tab then
+ local srt, s = { }, 0
+ for key,_ in next, tab do
+ if key then
+ s= s + 1
+ srt[s] = key
+ end
end
+ sort(srt)
+ return srt
+ else
+ return { }
end
- sort(srt)
- return srt
end
table.sortedkeys = sortedkeys
@@ -315,7 +352,7 @@ end
table.sortedhash = sortedhash
table.sortedpairs = sortedhash
-function table.append(t, list)
+function table.append(t,list)
local n = #t
for i=1,#list do
n = n + 1
@@ -550,12 +587,26 @@ local function do_serialize(root,name,depth,level,indexed)
end
-- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- if compact then
+ -- -- NOT: for k=1,#root do (we need to quit at nil)
+ -- for k,v in ipairs(root) do -- can we use next?
+ -- if not first then first = k end
+ -- last = last + 1
+ -- end
+ -- end
+ local first, last = nil, 0
if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
+ last = #root
+ for k=1,last do
+-- if not root[k] then
+ if root[k] == nil then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
end
end
local sk = sortedkeys(root)
@@ -1027,23 +1078,27 @@ function table.reversed(t)
end
end
-function table.sequenced(t,sep,simple) -- hash only
- local s, n = { }, 0
- for k, v in sortedhash(t) do
- if simple then
- if v == true then
- n = n + 1
- s[n] = k
- elseif v and v~= "" then
+function table.sequenced(t,sep,simple) -- hash only
+ if t then
+ local s, n = { }, 0
+ for k, v in sortedhash(t) do
+ if simple then
+ if v == true then
+ n = n + 1
+ s[n] = k
+ elseif v and v~= "" then
+ n = n + 1
+ s[n] = k .. "=" .. tostring(v)
+ end
+ else
n = n + 1
s[n] = k .. "=" .. tostring(v)
end
- else
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
end
+ return concat(s, sep or " | ")
+ else
+ return ""
end
- return concat(s, sep or " | ")
end
function table.print(t,...)
@@ -1124,6 +1179,8 @@ local lpeg = require("lpeg")
-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+-- some code will move to unicode and string
+
local report = texio and texio.write_nl or print
-- local lpmatch = lpeg.match
@@ -1160,8 +1217,8 @@ local report = texio and texio.write_nl or print
-- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end
-- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end
-local type = type
-local byte, char, gmatch = string.byte, string.char, string.gmatch
+local type, next = type, next
+local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format
-- Beware, we predefine a bunch of patterns here and one reason for doing so
-- is that we get consistent behaviour in some of the visualizers.
@@ -1169,9 +1226,8 @@ local byte, char, gmatch = string.byte, string.char, string.gmatch
lpeg.patterns = lpeg.patterns or { } -- so that we can share
local patterns = lpeg.patterns
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
+local P, R, S, V, Ct, C, Cs, Cc, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp
+local lpegtype, lpegmatch = lpeg.type, lpeg.match
local utfcharacters = string.utfcharacters
local utfgmatch = unicode and unicode.utf8.gmatch
@@ -1222,6 +1278,10 @@ patterns.utf8char = utf8char
patterns.validutf8 = validutf8char
patterns.validutf8char = validutf8char
+local eol = S("\n\r")
+local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+local whitespace = eol + spacer
+
patterns.digit = digit
patterns.sign = sign
patterns.cardinal = sign^0 * digit^1
@@ -1241,16 +1301,16 @@ patterns.letter = patterns.lowercase + patterns.uppercase
patterns.space = space
patterns.tab = P("\t")
patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.eol = eol
+patterns.spacer = spacer
+patterns.whitespace = whitespace
patterns.newline = newline
patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.nonspacer = 1 - spacer
+patterns.nonwhitespace = 1 - whitespace
patterns.equal = P("=")
patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.commaspacer = P(",") * spacer^0
patterns.period = P(".")
patterns.colon = P(":")
patterns.semicolon = P(";")
@@ -1265,6 +1325,10 @@ patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
patterns.unspacer = ((patterns.spacer^1)/"")^0
+patterns.singlequoted = squote * patterns.nosquote * squote
+patterns.doublequoted = dquote * patterns.nodquote * dquote
+patterns.quoted = patterns.doublequoted + patterns.singlequoted
+
patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
patterns.beginline = #(1-newline)
@@ -1275,8 +1339,17 @@ patterns.beginline = #(1-newline)
-- print(string.unquoted('"test"'))
-- print(string.unquoted('"test"'))
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
+local function anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) }
+end
+
+lpeg.anywhere = anywhere
+
+function lpeg.instringchecker(p)
+ p = anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
end
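+
+-- For example:
+--
+-- local hascolon = lpeg.instringchecker(P(":"))
+-- hascolon("a:b") -- true
+-- hascolon("ab")  -- false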
function lpeg.splitter(pattern, action)
@@ -1325,7 +1398,7 @@ function string.splitup(str,separator)
if not separator then
separator = ","
end
- return match(splitters_m[separator] or splitat(separator),str)
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
end
@@ -1337,16 +1410,20 @@ function lpeg.split(separator,str)
c = tsplitat(separator)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
+ if separator then
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
end
- return match(c,str)
end
local spacing = patterns.spacer^0 * newline -- sort of strip
@@ -1362,7 +1439,7 @@ local linesplitter = tsplitat(newline)
patterns.linesplitter = linesplitter
function string.splitlines(str)
- return match(linesplitter,str)
+ return lpegmatch(linesplitter,str)
end
local utflinesplitter = utfbom^-1 * tsplitat(newline)
@@ -1370,7 +1447,58 @@ local utflinesplitter = utfbom^-1 * tsplitat(newline)
patterns.utflinesplitter = utflinesplitter
function string.utfsplitlines(str)
- return match(utflinesplitter,str or "")
+ return lpegmatch(utflinesplitter,str or "")
+end
+
+local utfcharsplitter_ows = utfbom^-1 * Ct(C(utf8char)^0)
+local utfcharsplitter_iws = utfbom^-1 * Ct((whitespace^1 + C(utf8char))^0)
+
+function string.utfsplit(str,ignorewhitespace) -- new
+ if ignorewhitespace then
+ return lpegmatch(utfcharsplitter_iws,str or "")
+ else
+ return lpegmatch(utfcharsplitter_ows,str or "")
+ end
+end
+
+-- inspect(string.utfsplit("a b c d"))
+-- inspect(string.utfsplit("a b c d",true))
+
+-- -- alternative 1: 0.77
+--
+-- local utfcharcounter = utfbom^-1 * Cs((utf8char/'!')^0)
+--
+-- function string.utflength(str)
+-- return #lpegmatch(utfcharcounter,str or "")
+-- end
+--
+-- -- alternative 2: 1.70
+--
+-- local n = 0
+--
+-- local utfcharcounter = utfbom^-1 * (utf8char/function() n = n + 1 end)^0 -- slow
+--
+-- function string.utflength(str)
+-- n = 0
+-- lpegmatch(utfcharcounter,str or "")
+-- return n
+-- end
+--
+-- -- alternative 3: 0.24 (native unicode.utf8.len: 0.047)
+
+local n = 0
+
+local utfcharcounter = utfbom^-1 * Cs ( (
+ Cp() * (lpeg.patterns.utf8one )^1 * Cp() / function(f,t) n = n + t - f end
+ + Cp() * (lpeg.patterns.utf8two )^1 * Cp() / function(f,t) n = n + (t - f)/2 end
+ + Cp() * (lpeg.patterns.utf8three)^1 * Cp() / function(f,t) n = n + (t - f)/3 end
+ + Cp() * (lpeg.patterns.utf8four )^1 * Cp() / function(f,t) n = n + (t - f)/4 end
+)^0 )
+
+function string.utflength(str)
+ n = 0
+ lpegmatch(utfcharcounter,str or "")
+ return n
end
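+
+-- For example:
+--
+-- string.utflength("abc") -- 3
+-- string.utflength("åäö") -- 3 (six bytes, three characters)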
@@ -1384,7 +1512,7 @@ function lpeg.checkedsplit(separator,str)
c = Ct(separator^0 * other * (separator^1 * other)^0)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
function string.checkedsplit(str,separator)
@@ -1395,7 +1523,7 @@ function string.checkedsplit(str,separator)
c = Ct(separator^0 * other * (separator^1 * other)^0)
cache[separator] = c
end
- return match(c,str)
+ return lpegmatch(c,str)
end
@@ -1440,11 +1568,11 @@ function lpeg.keeper(str)
end
function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
+ return (P(str) + P(true)) * Cs(anything^0)
end
function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
+ return Cs((1 - P(str) * endofstring)^0)
end
-- Just for fun I looked at the used bytecode and
@@ -1453,8 +1581,22 @@ end
function lpeg.replacer(one,two)
if type(one) == "table" then
local no = #one
- if no > 0 then
- local p
+ local p
+ if no == 0 then
+ for k, v in next, one do
+ local pp = P(k) / v
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ elseif no == 1 then
+ local o = one[1]
+ one, two = P(o[1]), o[2]
+ return Cs(((1-one)^1 + one/two)^0)
+ else
for i=1,no do
local o = one[i]
local pp = P(o[1]) / o[2]
@@ -1467,11 +1609,16 @@ function lpeg.replacer(one,two)
return Cs((p + 1)^0)
end
else
+ one = P(one)
two = two or ""
- return Cs((P(one)/two + 1)^0)
+ return Cs(((1-one)^1 + one/two)^0)
end
end
+-- print(lpeg.match(lpeg.replacer("e","a"),"test test"))
+-- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test"))
+-- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test"))
+
local splitters_f, splitters_s = { }, { }
function lpeg.firstofsplit(separator) -- always return value
@@ -1506,7 +1653,7 @@ local nany = utf8char/""
function lpeg.counter(pattern)
pattern = Cs((P(pattern)/" " + nany)^0)
return function(str)
- return #match(pattern,str)
+ return #lpegmatch(pattern,str)
end
end
@@ -1520,7 +1667,7 @@ if utfgmatch then
end
return n
else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
+ return #lpegmatch(Cs((P(what)/" " + nany)^0),str)
end
end
@@ -1535,9 +1682,9 @@ else
p = Cs((P(what)/" " + nany)^0)
cache[p] = p
end
- return #match(p,str)
+ return #lpegmatch(p,str)
else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
+ return #lpegmatch(Cs((P(what)/" " + nany)^0),str)
end
end
@@ -1564,7 +1711,7 @@ local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
+ return lpegmatch(simple and s or p,str)
end
-- utf extensies
@@ -1611,7 +1758,7 @@ else
p = P(uc)
end
end
- match((utf8char/f)^0,str)
+ lpegmatch((utf8char/f)^0,str)
return p
end
@@ -1627,7 +1774,7 @@ function lpeg.UR(str,more)
first = str
last = more or first
else
- first, last = match(range,str)
+ first, last = lpegmatch(range,str)
if not last then
return P(str)
end
@@ -1654,11 +1801,15 @@ end
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order
if type(list) ~= "table" then
list = { list, ... }
end
- -- sort(list) -- longest match first
+ -- table.sort(list) -- longest match first
local p = P(list[1])
for l=2,#list do
p = p + P(list[l])
@@ -1666,10 +1817,6 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
return p
end
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-- For the moment here, but it might move to utilities. Beware, we need to
-- have the longest keyword first, so 'aaa' comes before 'aa' which is why we
-- loop back from the end cq. prepend.
@@ -1827,6 +1974,24 @@ end
-- utfchar(0x205F), -- math thinspace
-- } )
+-- handy from within tex:
+
+local lpegmatch = lpeg.match
+
+local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg!
+
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
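+
+-- Handy because % is the comment character in tex; for example:
+--
+-- string.tformat("@s of @04i","page",7) -- "page of 0007"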
+
+-- strips leading and trailing spaces and collapses all other spaces
+
+local pattern = Cs(whitespace^0/"" * ((whitespace^1 * P(-1) / "") + (whitespace^1/" ") + P(1))^0)
+
+function string.collapsespaces(str)
+ return lpegmatch(pattern,str)
+end
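+
+-- For example:
+--
+-- string.collapsespaces("  too   much  space  ") -- "too much space"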
+
end -- of closure
@@ -1851,14 +2016,14 @@ else
io.fileseparator, io.pathseparator = "/" , ":"
end
-function io.loaddata(filename,textmode)
+function io.loaddata(filename,textmode) -- return nil if empty
local f = io.open(filename,(textmode and 'r') or 'rb')
if f then
local data = f:read('*all')
f:close()
- return data
- else
- return nil
+ if #data > 0 then
+ return data
+ end
end
end
@@ -1880,6 +2045,45 @@ function io.savedata(filename,data,joiner)
end
end
+function io.loadlines(filename,n) -- return nil if empty
+ local f = io.open(filename,'r')
+ if f then
+ if n then
+ local lines = { }
+ for i=1,n do
+ local line = f:read("*lines")
+ if line then
+ lines[#lines+1] = line
+ else
+ break
+ end
+ end
+ f:close()
+ lines = concat(lines,"\n")
+ if #lines > 0 then
+ return lines
+ end
+ else
+ local line = f:read("*line") or ""
+ assert(f:close())
+ if #line > 0 then
+ return line
+ end
+ end
+ end
+end
+
+function io.loadchunk(filename,n)
+ local f = io.open(filename,'rb')
+ if f then
+ local data = f:read(n or 1024)
+ f:close()
+ if data and #data > 0 then
+ return data
+ end
+ end
+end
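+
+-- For example (the file names are just an illustration):
+--
+-- io.loadlines("notes.txt")    -- the first line, or nil when it is empty
+-- io.loadlines("notes.txt",10) -- the first 10 lines as one string, or nil
+-- io.loadchunk("notes.bin",64) -- up to the first 64 bytes, or nil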
+
function io.exists(filename)
local f = io.open(filename)
if f == nil then
@@ -2107,7 +2311,7 @@ if not modules then modules = { } end modules ['l-number'] = {
-- this module will be replaced when we have the bit library
-local tostring = tostring
+local tostring, tonumber = tostring, tonumber
local format, floor, match, rep = string.format, math.floor, string.match, string.rep
local concat, insert = table.concat, table.insert
local lpegmatch = lpeg.match
@@ -2170,11 +2374,11 @@ function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
end
function number.setbit(x, p)
- return hasbit(x, p) and x or x + p
+ return (x % (p + p) >= p) and x or x + p
end
function number.clearbit(x, p)
- return hasbit(x, p) and x - p or x
+ return (x % (p + p) >= p) and x - p or x
end
@@ -2208,6 +2412,10 @@ function number.tobitstring(n,m)
end
+function number.valid(str,default)
+ return tonumber(str) or default or nil
+end
+
end -- of closure
@@ -2319,17 +2527,28 @@ if not modules then modules = { } end modules ['l-os'] = {
-- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
-- os.platform : extended os.name with architecture
+-- os.sleep() => socket.sleep()
+-- math.randomseed(tonumber(string.sub(string.reverse(tostring(math.floor(socket.gettime()*10000))),1,6)))
+
-- maybe build io.flush in os.execute
local os = os
+local date, time = os.date, os.time
local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
-local random, ceil = math.random, math.ceil
-local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+local random, ceil, randomseed = math.random, math.ceil, math.randomseed
+local rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring = rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring
-- The following code permits traversing the environment table, at least
-- in luatex. Internally all environment names are uppercase.
+-- The randomseed in Lua is not that random, although this depends on the operating system as well
+-- as the binary (Luatex is normally okay). But to be sure we set the seed anyway.
+
+math.initialseed = tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
+
+randomseed(math.initialseed)
+
if not os.__getenv__ then
os.__getenv__ = os.getenv
@@ -2433,12 +2652,14 @@ else
os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
+local launchers = {
+ windows = "start %s",
+ macosx = "open %s",
+ unix = "$BROWSER %s &> /dev/null &",
+}
+
function os.launch(str)
- if os.type == "windows" then
- os.execute("start " .. str) -- os.spawn ?
- else
- os.execute(str .. " &") -- os.spawn ?
- end
+ os.execute(format(launchers[os.name] or launchers.unix,str))
end
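+
+-- For example, os.launch("readme.pdf") now uses "start", "open" or "$BROWSER"
+-- depending on the platform (the file name is just an illustration).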
if not os.times then
@@ -2649,7 +2870,7 @@ end
local d
function os.timezone(delta)
- d = d or tonumber(tonumber(os.date("%H")-os.date("!%H")))
+ d = d or tonumber(tonumber(date("%H")-date("!%H")))
if delta then
if d > 0 then
return format("+%02i:00",d)
@@ -2661,6 +2882,44 @@ function os.timezone(delta)
end
end
+local timeformat = format("%%s%s",os.timezone(true))
+local dateformat = "!%Y-%m-%d %H:%M:%S"
+
+function os.fulltime(t,default)
+ t = tonumber(t) or 0
+ if t > 0 then
+ -- valid time
+ elseif default then
+ return default
+ else
+ t = nil
+ end
+ return format(timeformat,date(dateformat,t))
+end
+
+local dateformat = "%Y-%m-%d %H:%M:%S"
+
+function os.localtime(t,default)
+ t = tonumber(t) or 0
+ if t > 0 then
+ -- valid time
+ elseif default then
+ return default
+ else
+ t = nil
+ end
+ return date(dateformat,t)
+end
+
+function os.converttime(t,default)
+ local t = tonumber(t)
+ if t and t > 0 then
+ return date(dateformat,t)
+ else
+ return default or "-"
+ end
+end
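+
+-- The exact values depend on the clock and timezone, but the shapes are, for
+-- instance:
+--
+-- os.fulltime()  -- "2012-10-19 00:06:00+02:00" (utc time plus a fixed offset)
+-- os.localtime() -- "2012-10-19 02:06:00"       (local time, no offset)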
+
local memory = { }
local function which(filename)
@@ -2735,7 +2994,7 @@ local function nameonly(name)
return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$",""))
end
-local function extname(name,default)
+local function suffixonly(name,default)
return match(name,"^.+%.([^/\\]-)$") or default or ""
end
@@ -2744,11 +3003,16 @@ local function splitname(name)
return n or name, s or ""
end
-file.basename = basename
-file.dirname = dirname
-file.nameonly = nameonly
-file.extname = extname
-file.suffix = extname
+file.basename = basename
+
+file.pathpart = dirname
+file.dirname = dirname
+
+file.nameonly = nameonly
+
+file.suffixonly = suffixonly
+file.extname = suffixonly -- obsolete
+file.suffix = suffixonly
function file.removesuffix(filename)
return (gsub(filename,"%.[%a%d]+$",""))
@@ -2864,6 +3128,11 @@ end
file.isreadable = file.is_readable -- deprecated
file.iswritable = file.is_writable -- deprecated
+function file.size(name)
+ local a = attributes(name)
+ return a and a.size or 0
+end
+
-- todo: lpeg \\ / .. does not save much
local checkedsplit = string.checkedsplit
@@ -3001,6 +3270,7 @@ local drive = C(R("az","AZ")) * P(":")
local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
+local rest = C(P(1)^0)
drive = drive + Cc("")
path = path + Cc("")
@@ -3009,7 +3279,8 @@ suffix = suffix + Cc("")
local pattern_a = drive * path * base * suffix
local pattern_b = path * base * suffix
-local pattern_c = C(drive * path) * C(base * suffix)
+local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures
+local pattern_d = path * rest
function file.splitname(str,splitdrive)
if splitdrive then
@@ -3019,6 +3290,10 @@ function file.splitname(str,splitdrive)
end
end
+function file.splitbase(str)
+ return lpegmatch(pattern_d,str) -- returns path, base+suffix
+end
+
function file.nametotable(str,splitdrive) -- returns table
local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
if splitdrive then
@@ -3040,6 +3315,8 @@ function file.nametotable(str,splitdrive) -- returns table
end
end
+-- print(file.splitbase("a/b/c.txt"))
+
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
--
-- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
@@ -3081,15 +3358,30 @@ if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
-function file.needs_updating(oldname,newname,threshold) -- size modification access change
- local oldtime = lfs.attributes(oldname, modification)
- local newtime = lfs.attributes(newname, modification)
- if newtime >= oldtime then
- return false
- elseif oldtime - newtime < (threshold or 1) then
- return false
+function file.needsupdating(oldname,newname,threshold) -- size modification access change
+ local oldtime = lfs.attributes(oldname,"modification")
+ if oldtime then
+ local newtime = lfs.attributes(newname,"modification")
+ if not newtime then
+ return true -- no new file yet, so updating (generating) is needed
+ elseif newtime >= oldtime then
+ return false -- new file is newer, so no updating needed
+ elseif oldtime - newtime < (threshold or 1) then
+ return false -- new file is probably still okay
+ else
+ return true -- new file has to be updated
+ end
else
- return true
+ return false -- no old file, so no updating needed
+ end
+end
+
+file.needs_updating = file.needsupdating
+
+function file.syncmtimes(oldname,newname)
+ local oldtime = lfs.attributes(oldname,"modification")
+ if oldtime and lfs.isfile(newname) then
+ lfs.touch(newname,oldtime,oldtime)
end
end
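+
+-- a usage sketch (the file names are hypothetical):
+--
+-- if file.needsupdating("source.lua","derived.luc") then
+--     -- ... regenerate derived.luc ... and then, if wanted:
+--     file.syncmtimes("source.lua","derived.luc")
+-- end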
@@ -3111,7 +3403,7 @@ function file.loadchecksum(name)
return nil
end
-function file.savechecksum(name, checksum)
+function file.savechecksum(name,checksum)
if not checksum then checksum = file.checksum(name) end
if checksum then
io.savedata(name .. ".md5",checksum)
@@ -3136,7 +3428,7 @@ if not modules then modules = { } end modules ['l-url'] = {
local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
local concat = table.concat
local tonumber, type = tonumber, type
-local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
+local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V
local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
-- from wikipedia:
@@ -3169,15 +3461,19 @@ local endofstring = P(-1)
local hexdigit = R("09","AF","af")
local plus = P("+")
local nothing = Cc("")
-local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
+local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
+local escaped = (plus / " ") + escapedchar
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-- we also assume that when we have a scheme, we also have an authority
+--
+-- maybe we should already split the query (better for unescaping as = & can be part of a value)
local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
local pathstr = Cs((escaped+(1- qmark-hash))^0)
-local querystr = Cs((escaped+(1- hash))^0)
+----- querystr = Cs((escaped+(1- hash))^0)
+local querystr = Cs(( (1- hash))^0)
local fragmentstr = Cs((escaped+(1- endofstring))^0)
local scheme = schemestr * colon + nothing
@@ -3192,11 +3488,20 @@ local parser = Ct(validurl)
lpegpatterns.url = validurl
lpegpatterns.urlsplitter = parser
-local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
+local escapes = { }
-local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
+setmetatable(escapes, { __index = function(t,k)
+ local v = format("%%%02X",byte(k))
+ t[k] = v
+ return v
+end })
-lpegpatterns.urlescaper = escaper
+local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most
+local unescaper = Cs((escapedchar + 1)^0)
+
+lpegpatterns.urlunescaped = escapedchar
+lpegpatterns.urlescaper = escaper
+lpegpatterns.urlunescaper = unescaper
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
@@ -3208,8 +3513,12 @@ end
local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
local function hasscheme(str)
- local scheme = lpegmatch(isscheme,str) -- at least one character
- return scheme ~= "" and scheme or false
+ if str then
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
+ else
+ return false
+ end
end
@@ -3228,10 +3537,32 @@ local rootbased = P("/")
local barswapper = replacer("|",":")
local backslashswapper = replacer("\\","/")
+-- queries:
+
+local equal = P("=")
+local amp = P("&")
+local key = Cs(((escapedchar+1)-equal )^0)
+local value = Cs(((escapedchar+1)-amp -endofstring)^0)
+
+local splitquery = Cf ( Ct("") * P { "sequence",
+ sequence = V("pair") * (amp * V("pair"))^0,
+ pair = Cg(key * equal * value),
+}, rawset)
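+
+-- for instance (values are unescaped, the key order is irrelevant):
+--
+-- inspect(lpegmatch(splitquery,"a=1&b=2%3D3")) -- { a = "1", b = "2=3" }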
+
+-- hasher
+
local function hashed(str) -- not yet ok (/test?test)
+ if str == "" then
+ return {
+ scheme = "invalid",
+ original = str,
+ }
+ end
local s = split(str)
- local somescheme = s[1] ~= ""
- local somequery = s[4] ~= ""
+ local rawscheme = s[1]
+ local rawquery = s[4]
+ local somescheme = rawscheme ~= ""
+ local somequery = rawquery ~= ""
if not somescheme and not somequery then
s = {
scheme = "file",
@@ -3247,14 +3578,17 @@ local function hashed(str) -- not yet ok (/test?test)
local authority, path, filename = s[2], s[3]
if authority == "" then
filename = path
+ elseif path == "" then
+ filename = ""
else
filename = authority .. "/" .. path
end
s = {
- scheme = s[1],
+ scheme = rawscheme,
authority = authority,
path = path,
- query = s[4],
+ query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and =
+ queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped
fragment = s[5],
original = str,
noscheme = false,
@@ -3264,6 +3598,8 @@ local function hashed(str) -- not yet ok (/test?test)
return s
end
+-- inspect(hashed("template://test"))
+
-- Here we assume:
--
-- files: /// = relative
@@ -3306,23 +3642,65 @@ function url.construct(hash) -- dodo: we need to escape !
return lpegmatch(escaper,concat(fullurl))
end
-function url.filename(filename)
+function url.filename(filename) -- why no lpeg here ?
local t = hashed(filename)
 return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/","%1:"))) or filename
end
+local function escapestring(str)
+ return lpegmatch(escaper,str)
+end
+
+url.escape = escapestring
+
+-- function url.query(str) -- separator could be an option
+-- if type(str) == "string" then
+-- local t = { }
+-- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
+-- t[k] = v
+-- end
+-- return t
+-- else
+-- return str
+-- end
+-- end
+
function url.query(str)
if type(str) == "string" then
- local t = { }
- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
- t[k] = v
- end
- return t
+ return lpegmatch(splitquery,str) or ""
else
return str
end
end
+function url.toquery(data)
+ local td = type(data)
+ if td == "string" then
+ return #data > 0 and escapestring(data) or nil -- beware of double escaping
+ elseif td == "table" then
+ if next(data) then
+ local t = { }
+ for k, v in next, data do
+ t[#t+1] = format("%s=%s",k,escapestring(v))
+ end
+ return concat(t,"&")
+ end
+ else
+ -- nil is a signal that no query
+ end
+end
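+
+-- for instance (the key order in the result is not guaranteed):
+--
+-- url.toquery { a = "1", b = "2 3" } -- a=1&b=2%203 (or with b first)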
+
+-- /test/ | /test | test/ | test => test
+
+function url.barepath(path)
+ if not path or path == "" then
+ return ""
+ else
+ return (gsub(path,"^/?(.-)/?$","%1"))
+ end
+end
+
+
@@ -3363,6 +3741,24 @@ local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
+
-- handy
function dir.current()
@@ -3738,28 +4134,49 @@ function boolean.tonumber(b)
end
function toboolean(str,tolerant)
- if tolerant then
- local tstr = type(str)
- if tstr == "string" then
- return str == "true" or str == "yes" or str == "on" or str == "1" or str == "t"
- elseif tstr == "number" then
- return tonumber(str) ~= 0
- elseif tstr == "nil" then
- return false
- else
- return str
- end
+ if str == nil then
+ return false
+ elseif str == false then
+ return false
+ elseif str == true then
+ return true
elseif str == "true" then
return true
elseif str == "false" then
return false
+ elseif not tolerant then
+ return false
+ elseif str == 0 then
+ return false
+ elseif (tonumber(str) or 0) > 0 then
+ return true
else
- return str
+ return str == "yes" or str == "on" or str == "t"
end
end
string.toboolean = toboolean
+function string.booleanstring(str)
+ if str == nil then
+ return false
+ elseif str == false then
+ return false
+ elseif str == true then
+ return true
+ elseif str == "true" then
+ return true
+ elseif str == "false" then
+ return false
+ elseif str == 0 then
+ return false
+ elseif (tonumber(str) or 0) > 0 then
+ return true
+ else
+ return str == "yes" or str == "on" or str == "t"
+ end
+end
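+
+-- for instance:
+--
+-- toboolean("yes")      -- false (strict mode only accepts "true")
+-- toboolean("yes",true) -- true  (tolerant mode also accepts "yes", "on", "t" and positive numbers)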
+
function string.is_boolean(str,default)
if type(str) == "string" then
if str == "true" or str == "yes" or str == "on" or str == "t" then
@@ -3784,57 +4201,229 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
+-- this module will be reorganized
+
+-- todo: utf.sub replacement (used in syst-aux)
+
+local concat = table.concat
+local type = type
+local P, C, R, Cs, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local utftype = patterns.utftype
+local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
+local utfsplitlines = string.utfsplitlines
+
if not unicode then
- unicode = { utf8 = { } }
+ unicode = { }
+
+end
+
+local unicode = unicode
+
+utf = utf or unicode.utf8
+
+if not utf then
+
+ utf8 = { }
+ unicode.utf8 = utf8
+ utf = utf8
+
+end
+
+if not utf.char then
local floor, char = math.floor, string.char
- function unicode.utf8.utfchar(n)
+ function utf.char(n)
if n < 0x80 then
+ -- 0aaaaaaa : 0x80
return char(n)
elseif n < 0x800 then
+ -- 110bbbaa : 0xC0 : n >> 6
+ -- 10aaaaaa : 0x80 : n & 0x3F
return char(
0xC0 + floor(n/0x40),
0x80 + (n % 0x40)
)
elseif n < 0x10000 then
+ -- 1110bbbb : 0xE0 : n >> 12
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
return char(
0xE0 + floor(n/0x1000),
0x80 + (floor(n/0x40) % 0x40),
0x80 + (n % 0x40)
)
- elseif n < 0x40000 then
+ elseif n < 0x200000 then
+ -- 11110ccc : 0xF0 : n >> 18
+ -- 10ccbbbb : 0x80 : (n >> 12) & 0x3F
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
+ -- dddd : ccccc - 1
return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
+ 0xF0 + floor(n/0x40000),
+ 0x80 + (floor(n/0x1000) % 0x40),
0x80 + (floor(n/0x40) % 0x40),
0x80 + (n % 0x40)
)
else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
+ return ""
end
end
end
-local unicode = unicode
+if not utf.byte then
-utf = utf or unicode.utf8
+ local utf8byte = patterns.utf8byte
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
-local utfsplitlines = string.utfsplitlines
+end
+
+local utfchar, utfbyte = utf.char, utf.byte
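+
+-- for instance:
+--
+-- utf.char(0x20AC) -- "€" (the bytes E2 82 AC)
+-- utf.byte("€")    -- 0x20AC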
+
+-- As we want to get rid of the (unmaintained) utf library we implement our own
+-- variants (in due time an independent module):
+
+function unicode.filetype(data)
+ return data and lpegmatch(utftype,data) or "unknown"
+end
+
+local toentities = Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+
+
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+-- actually one of them is already utf ... sort of useless this one
+
+-- function utf.char(n)
+-- if n < 0x80 then
+-- return char(n)
+-- elseif n < 0x800 then
+-- return char(
+-- 0xC0 + floor(n/0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x10000 then
+-- return char(
+-- 0xE0 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x40000 then
+-- return char(
+-- 0xF0 + floor(n/0x40000),
+-- 0x80 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- else
+-- -- return char(
+-- -- 0xF1 + floor(n/0x1000000),
+-- -- 0x80 + floor(n/0x40000),
+-- -- 0x80 + floor(n/0x1000),
+-- -- 0x80 + (floor(n/0x40) % 0x40),
+-- -- 0x80 + (n % 0x40)
+-- -- )
+-- return "?"
+-- end
+-- end
+--
+-- merge into:
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+local validatedutf = Cs (
+ (
+ patterns.utf8one
+ + patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ + P(1) / "�"
+ )^0
+)
+
+patterns.validatedutf = validatedutf
+
+function string.validutf(str)
+ return lpegmatch(validatedutf,str)
+end
+
+
+utf.length = string.utflength
+utf.split = string.utfsplit
+utf.splitlines = string.utfsplitlines
+utf.valid = string.validutf
+
+if not utf.len then
+ utf.len = utf.length
+end
+
+-- a replacement for simple gsubs:
+
+local utf8char = patterns.utf8char
+
+function utf.remapper(mapping)
+ local pattern = Cs((utf8char/mapping)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
+end
+
+-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
+-- print(remap("abcd 1234 abcd"))
-- 0 EF BB BF UTF-8
-- 1 FF FE UTF-16-little-endian
@@ -4027,11 +4616,22 @@ local function big(c)
end
end
+-- function unicode.utf8_to_utf16(str,littleendian)
+-- if littleendian then
+-- return char(255,254) .. utfgsub(str,".",little)
+-- else
+-- return char(254,255) .. utfgsub(str,".",big)
+-- end
+-- end
+
+local _, l_remap = utf.remapper(little)
+local _, b_remap = utf.remapper(big)
+
function unicode.utf8_to_utf16(str,littleendian)
if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+ return char(255,254) .. lpegmatch(l_remap,str)
else
- return char(254,255) .. utfgsub(str,".",big)
+ return char(254,255) .. lpegmatch(b_remap,str)
end
end
@@ -4052,84 +4652,12 @@ function unicode.xstring(s)
return format("0x%05X",type(s) == "number" and s or utfbyte(s))
end
+--
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
-
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
-end
-
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
-end
-
-
-
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
-end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
+local pattern = Ct(C(patterns.utf8char)^0)
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
+function utf.totable(str)
+ return lpegmatch(pattern,str)
end
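+
+-- for instance:
+--
+-- inspect(utf.totable("öl")) -- { "ö", "l" }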
@@ -4189,10 +4717,11 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch, rep = string.format, string.gmatch, string.rep
+local format, gmatch, rep, gsub = string.format, string.gmatch, string.rep, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
+local type, next, rawset, tonumber, loadstring = type, next, rawset, tonumber, loadstring
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -4346,6 +4875,121 @@ function tables.encapsulate(core,capsule,protect)
end
end
+local function serialize(t,r,outer) -- no mixes
+ r[#r+1] = "{"
+ local n = #t
+ if n > 0 then
+ for i=1,n do
+ local v = t[i]
+ local tv = type(v)
+ if tv == "string" then
+ r[#r+1] = format("%q,",v)
+ elseif tv == "number" then
+ r[#r+1] = format("%s,",v)
+ elseif tv == "table" then
+ serialize(v,r)
+ elseif tv == "boolean" then
+ r[#r+1] = format("%s,",tostring(v))
+ end
+ end
+ else
+ for k, v in next, t do
+ local tv = type(v)
+ if tv == "string" then
+ r[#r+1] = format("[%q]=%q,",k,v)
+ elseif tv == "number" then
+ r[#r+1] = format("[%q]=%s,",k,v)
+ elseif tv == "table" then
+ r[#r+1] = format("[%q]=",k)
+ serialize(v,r)
+ elseif tv == "boolean" then
+ r[#r+1] = format("[%q]=%s,",k,tostring(v))
+ end
+ end
+ end
+ if outer then
+ r[#r+1] = "}"
+ else
+ r[#r+1] = "},"
+ end
+ return r
+end
+
+function table.fastserialize(t,prefix)
+ return concat(serialize(t,{ prefix or "return" },true))
+end
+
+function table.deserialize(str)
+ if not str or str == "" then
+ return
+ end
+ local code = loadstring(str)
+ if not code then
+ return
+ end
+ code = code()
+ if not code then
+ return
+ end
+ return code
+end
+
+-- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } })
+
+function table.load(filename)
+ if filename then
+ local t = io.loaddata(filename)
+ if t and t ~= "" then
+ t = loadstring(t)
+ if type(t) == "function" then
+ t = t()
+ if type(t) == "table" then
+ return t
+ end
+ end
+ end
+ end
+end
+
+local function slowdrop(t)
+ local r = { }
+ local l = { }
+ for i=1,#t do
+ local ti = t[i]
+ local j = 0
+ for k, v in next, ti do
+ j = j + 1
+ l[j] = format("%s=%q",k,v)
+ end
+ r[i] = format(" {%s},\n",concat(l))
+ end
+ return format("return {\n%s}",concat(r))
+end
+
+local function fastdrop(t)
+ local r = { "return {\n" }
+ for i=1,#t do
+ local ti = t[i]
+ r[#r+1] = " {"
+ for k, v in next, ti do
+ r[#r+1] = format("%s=%q",k,v)
+ end
+ r[#r+1] = "},\n"
+ end
+ r[#r+1] = "}"
+ return concat(r)
+end
+
+function table.drop(t,slow)
+ if #t == 0 then
+ return "return { }"
+ elseif slow == true then
+ return slowdrop(t) -- less memory
+ else
+ return fastdrop(t) -- some 15% faster
+ end
+end
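+
+-- for instance (the fast variant):
+--
+-- print(table.drop { { a = "1" }, { a = "2" } })
+--
+-- return {
+--  {a="1"},
+--  {a="2"},
+-- }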
+
end -- of closure
@@ -4520,11 +5164,10 @@ local concat = table.concat
local type, next = type, next
utilities = utilities or {}
-utilities.merger = utilities.merger or { } -- maybe mergers
+local merger = utilities.merger or { }
+utilities.merger = merger
utilities.report = logs and logs.reporter("system") or print
-local merger = utilities.merger
-
merger.strip_comment = true
local m_begin_merge = "begin library merge"
@@ -4570,9 +5213,11 @@ end
local function self_save(name, data)
if data ~= "" then
if merger.strip_comment then
- -- saves some 20K
local n = #data
+ -- saves some 20K .. scite comments
data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
+ -- saves some 20K .. ldx comments
+ data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-ldx%]%]%-%-","")
utilities.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
end
io.savedata(name,data)
@@ -4653,36 +5298,208 @@ if not modules then modules = { } end modules ['util-lua'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ comment = "the strip code is written by Peter Cawley",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
-utilities = utilities or {}
-utilities.lua = utilities.lua or { }
-utilities.report = logs and logs.reporter("system") or print
+local rep, sub, byte, dump, format = string.rep, string.sub, string.byte, string.dump, string.format
+local loadstring, loadfile, type = loadstring, loadfile, type
+
+utilities = utilities or {}
+utilities.lua = utilities.lua or { }
+local luautilities = utilities.lua
+
+utilities.report = logs and logs.reporter("system") or print -- can be overloaded later
+
+local tracestripping = false
+local forcestupidcompile = true -- use internal bytecode compiler
+luautilities.stripcode = true -- support stripping when asked for
+luautilities.alwaysstripcode = false -- saves 1 meg on 7 meg compressed format file (2012.08.12)
+luautilities.nofstrippedchunks = 0
+luautilities.nofstrippedbytes = 0
+
+-- The next function was posted by Peter Cawley on the lua list and strips line
+-- number information etc. from the bytecode data blob. We only apply this trick
+-- when we store data tables. Stripping makes the compressed format file about
+-- 1MB smaller (and uncompressed we save at least 6MB).
+--
+-- You can consider this feature an experiment, so it might disappear. There is
+-- no noticeable gain in runtime although the memory footprint should be somewhat
+-- smaller (and the file system has a bit less to deal with).
+--
+-- Begin of borrowed code ... works for Lua 5.1 which LuaTeX currently uses ...
+
+local function strip_code_pc(dump,name)
+ local before = #dump
+ local version, format, endian, int, size, ins, num = byte(dump,5,11)
+ local subint
+ if endian == 1 then
+ subint = function(dump, i, l)
+ local val = 0
+ for n = l, 1, -1 do
+ val = val * 256 + byte(dump,i + n - 1)
+ end
+ return val, i + l
+ end
+ else
+ subint = function(dump, i, l)
+ local val = 0
+ for n = 1, l, 1 do
+ val = val * 256 + byte(dump,i + n - 1)
+ end
+ return val, i + l
+ end
+ end
+ local strip_function
+ strip_function = function(dump)
+ local count, offset = subint(dump, 1, size)
+ local stripped, dirty = rep("\0", size), offset + count
+ offset = offset + count + int * 2 + 4
+ offset = offset + int + subint(dump, offset, int) * ins
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ local t
+ t, offset = subint(dump, offset, 1)
+ if t == 1 then
+ offset = offset + 1
+ elseif t == 4 then
+ offset = offset + size + subint(dump, offset, size)
+ elseif t == 3 then
+ offset = offset + num
+ end
+ end
+ count, offset = subint(dump, offset, int)
+ stripped = stripped .. sub(dump,dirty, offset - 1)
+ for n = 1, count do
+ local proto, off = strip_function(sub(dump,offset, -1))
+ stripped, offset = stripped .. proto, offset + off - 1
+ end
+ offset = offset + subint(dump, offset, int) * int + int
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ offset = offset + subint(dump, offset, size) + size + int * 2
+ end
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ offset = offset + subint(dump, offset, size) + size
+ end
+ stripped = stripped .. rep("\0", int * 3)
+ return stripped, offset
+ end
+ dump = sub(dump,1,12) .. strip_function(sub(dump,13,-1))
+ local after = #dump
+ local delta = before-after
+ if tracestripping then
+ utilities.report("stripped bytecode: %s, before %s, after %s, delta %s",name or "unknown",before,after,delta)
+ end
+ luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1
+ luautilities.nofstrippedbytes = luautilities.nofstrippedbytes + delta
+ return dump, delta
+end
+
+-- ... end of borrowed code.
+
+local function strippedbytecode(code,forcestrip,name)
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ return strip_code_pc(code,name)
+ else
+ return code, 0
+ end
+end
+
+luautilities.stripbytecode = strip_code_pc
+luautilities.strippedbytecode = strippedbytecode
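+
+-- a sketch (the dump is regular lua 5.1 bytecode, the name is only used in reporting):
+--
+-- local code = string.dump(loadstring("local x = 1 return x"))
+-- local stripped, saved = luautilities.strippedbytecode(code,true,"demo")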
+
+local function fatalerror(name)
+ utilities.report(format("fatal error in %q",name or "unknown"))
+end
+
+-- quite subtle ... doing this wrong incidentally can give more bytes
+
+
+function luautilities.loadedluacode(fullname,forcestrip,name)
+ -- quite subtle ... doing this wrong incidentally can give more bytes
+ name = name or fullname
+ local code = loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip) == "function" then
+ forcestrip = forcestrip(fullname)
+ end
+ if forcestrip then
+ local code, n = strip_code_pc(dump(code),name)
+ return loadstring(code), n
+ elseif luautilities.alwaysstripcode then
+ return loadstring(strip_code_pc(dump(code),name))
+ else
+ return code, 0
+ end
+ elseif luautilities.alwaysstripcode then
+ return loadstring(strip_code_pc(dump(code),name))
+ else
+ return code, 0
+ end
+end
+
+function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
+ local n = 0
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ code = loadstring(code)
+ if not code then
+ fatalerror(name)
+ end
+ code, n = strip_code_pc(dump(code),name)
+ end
+ return loadstring(code), n
+end
-local function stupidcompile(luafile,lucfile)
- local data = io.loaddata(luafile)
- if data and data ~= "" then
- data = string.dump(data)
- if data and data ~= "" then
- io.savedata(lucfile,data)
+local function stupidcompile(luafile,lucfile,strip)
+ local code = io.loaddata(luafile)
+ local n = 0
+ if code and code ~= "" then
+ code = loadstring(code)
+ if not code then
+ fatalerror()
+ end
+ code = dump(code)
+ if strip then
+ code, n = strippedbytecode(code,true,luafile) -- last one is reported
+ end
+ if code and code ~= "" then
+ io.savedata(lucfile,code)
end
end
+ return n
end
-function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
+local luac_normal = "texluac -o %q %q"
+local luac_strip = "texluac -s -o %q %q"
+
+function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
- local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
+ local done = false
if strip ~= false then
- command = "-s " .. command
+ strip = true
+ end
+ if forcestupidcompile then
+ fallback = true
+ elseif strip then
+ done = os.spawn(format(luac_strip, lucfile,luafile)) == 0
+ else
+ done = os.spawn(format(luac_normal,lucfile,luafile)) == 0
end
- local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
if not done and fallback then
- utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
- stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
- cleanup = false -- better see how worse it is
+ local n = stupidcompile(luafile,lucfile,strip)
+ if n > 0 then
+ utilities.report("lua: %s dumped into %s (%i bytes stripped)",luafile,lucfile,n)
+ else
+ utilities.report("lua: %s dumped into %s (unstripped)",luafile,lucfile)
+ end
+ cleanup = false -- better see how bad it is
end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
@@ -4697,7 +5514,6 @@ end
-
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -4710,8 +5526,10 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
-local lpegmatch = lpeg.match
+local lpeg, table, string = lpeg, table, string
+
+local P, R, V, S, C, Ct, Cs, Carg, Cc = lpeg.P, lpeg.R, lpeg.V, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg, lpeg.Cc
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
@@ -4723,29 +5541,39 @@ parsers.patterns = parsers.patterns or { }
local setmetatableindex = table.setmetatableindex
local sortedhash = table.sortedhash
+-- we share some patterns
+
+local space = P(' ')
+local equal = P("=")
+local comma = P(",")
+local lbrace = P("{")
+local rbrace = P("}")
+local period = S(".")
+local punctuation = S(".,:;")
+local spacer = patterns.spacer
+local whitespace = patterns.whitespace
+local newline = patterns.newline
+local anything = patterns.anything
+local endofstring = patterns.endofstring
+
-- we could use a Cf Cg construct
local escape, left, right = P("\\"), P('{'), P('}')
-lpeg.patterns.balanced = P {
+patterns.balanced = P {
[1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
[2] = left * V(1) * right
}
-local space = P(' ')
-local equal = P("=")
-local comma = P(",")
-local lbrace = P("{")
-local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
-local content = (1-P(-1))^0
+local content = (1-endofstring)^0
-lpeg.patterns.nested = nested -- no capture
-lpeg.patterns.argument = argument -- argument after e.g. =
-lpeg.patterns.content = content -- rest after e.g =
+patterns.nested = nested -- no capture
+patterns.argument = argument -- argument after e.g. =
+patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4764,10 +5592,6 @@ local function set(key,value)
hash[key] = value
end
-local function set(key,value)
- hash[key] = value
-end
-
local pattern_a_s = (pattern_a/set)^1
local pattern_b_s = (pattern_b/set)^1
local pattern_c_s = (pattern_c/set)^1
@@ -4818,7 +5642,7 @@ end
local separator = comma * space^0
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
-local pattern = Ct(value*(separator*value)^0)
+local pattern = spaces * Ct(value*(separator*value)^0)
-- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
@@ -4942,6 +5766,37 @@ function parsers.listitem(str)
return gmatch(str,"[^, ]+")
end
+--
+local digit = R("09")
+
+local pattern = Cs { "start",
+ start = V("one") + V("two") + V("three"),
+ rest = (Cc(",") * V("thousand"))^0 * (P(".") + endofstring) * anything^0,
+ thousand = digit * digit * digit,
+ one = digit * V("rest"),
+ two = digit * digit * V("rest"),
+ three = V("thousand") * V("rest"),
+}
+
+patterns.splitthousands = pattern -- maybe better in the parsers namespace ?
+
+function parsers.splitthousands(str)
+ return lpegmatch(pattern,str) or str
+end
+
+-- print(parsers.splitthousands("11111111111.11"))
+
+local optionalwhitespace = whitespace^0
+
+patterns.words = Ct((Cs((1-punctuation-whitespace)^1) + anything)^1)
+patterns.sentences = Ct((optionalwhitespace * Cs((1-period)^0 * period))^1)
+patterns.paragraphs = Ct((optionalwhitespace * Cs((whitespace^1*endofstring/"" + 1 - (spacer^0*newline*newline))^1))^1)
+
+-- local str = " Word1 word2. \n Word3 word4. \n\n Word5 word6.\n "
+-- inspect(lpegmatch(patterns.paragraphs,str))
+-- inspect(lpegmatch(patterns.sentences,str))
+-- inspect(lpegmatch(patterns.words,str))
+
end -- of closure
@@ -5043,7 +5898,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['util.deb'] = {
+if not modules then modules = { } end modules ['util-deb'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -5155,6 +6010,7 @@ function inspect(i) -- global function
else
print(tostring(i))
end
+ return i -- so that we can inline the inspect
end
-- from the lua book:
@@ -5194,7 +6050,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
-local write_nl = texio.write_nl
+local write_nl = texio and texio.write_nl or print
statistics = statistics or { }
local statistics = statistics
@@ -5277,7 +6133,7 @@ statistics.elapsedtime = elapsedtime
statistics.elapsedindeed = elapsedindeed
statistics.elapsedseconds = elapsedseconds
--- general function
+-- general function .. we might split this module
function statistics.register(tag,fnc)
if statistics.enable and type(fnc) == "function" then
@@ -5387,6 +6243,8 @@ if not modules then modules = { } end modules ['trac-set'] = { -- might become u
license = "see context related readme files"
}
+-- maybe this should be util-set.lua
+
local type, next, tostring = type, next, tostring
local concat = table.concat
local format, find, lower, gsub, escapedpattern = string.format, string.find, string.lower, string.gsub, string.escapedpattern
@@ -5586,7 +6444,7 @@ function setters.show(t)
local value, default, modules = functions.value, functions.default, #functions
value = value == nil and "unset" or tostring(value)
default = default == nil and "unset" or tostring(default)
- t.report("%-30s modules: %2i default: %6s value: %6s",name,modules,default,value)
+ t.report("%-50s modules: %2i default: %6s value: %6s",name,modules,default,value)
end
end
t.report()
@@ -5678,17 +6536,31 @@ end)
-- experiment
-local flags = environment and environment.engineflags
+if environment then
-if flags then
- if trackers and flags.trackers then
- setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
- -- t_enable(flags.trackers)
- end
- if directives and flags.directives then
- setters.initialize("flags","directives", settings_to_hash(flags.directives))
- -- d_enable(flags.directives)
+ -- The engineflags are known earlier than environment.arguments but maybe we
+ -- need to handle them both as the later are parsed differently. The c: prefix
+ -- is used by mtx-context to isolate the flags from those that concern luatex.
+
+ local engineflags = environment.engineflags
+
+ if engineflags then
+ if trackers then
+ local list = engineflags["c:trackers"] or engineflags["trackers"]
+ if type(list) == "string" then
+ setters.initialize("flags","trackers",settings_to_hash(list))
+ -- t_enable(list)
+ end
+ end
+ if directives then
+ local list = engineflags["c:directives"] or engineflags["directives"]
+ if type(list) == "string" then
+ setters.initialize("flags","directives", settings_to_hash(list))
+ -- d_enable(list)
+ end
+ end
end
+
end
-- here
@@ -5741,10 +6613,7 @@ local next, type = next, type
local setmetatableindex = table.setmetatableindex
---[[ldx--
-
This is a prelude to a more extensive logging module. We no longer
-provide based logging a sparsing is relatively easy anyway.
---ldx]]--
+
logs = logs or { }
local logs = logs
@@ -6560,7 +7429,8 @@ local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
local unquoted, quoted = string.unquoted, string.quoted
-local concat = table.concat
+local concat, insert, remove = table.concat, table.insert, table.remove
+local loadedluacode = utilities.lua.loadedluacode
-- precautions
@@ -6578,8 +7448,28 @@ if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaon
for k=3,#arg do
arg[k-2] = arg[k]
end
- arg[#arg] = nil -- last
- arg[#arg] = nil -- pre-last
+ remove(arg) -- last
+ remove(arg) -- pre-last
+end
+
+-- This is an ugly hack but it permits symlinking a script (say 'context') to 'mtxrun' as in:
+--
+-- ln -s /opt/minimals/tex/texmf-linux-64/bin/mtxrun context
+--
+-- The special mapping hack is needed because 'luatools' boils down to 'mtxrun --script base'
+-- but it's unlikely that there will be more of this
+
+do
+
+ local originalzero = file.basename(arg[0])
+ local specialmapping = { luatools = "base" }
+
+ if originalzero ~= "mtxrun" and originalzero ~= "mtxrun.lua" then
+ arg[0] = specialmapping[originalzero] or originalzero
+ insert(arg,0,"--script")
+ insert(arg,0,"mtxrun")
+ end
+
end
-- environment
@@ -6619,6 +7509,8 @@ local mt = {
setmetatable(environment,mt)
+-- context specific arguments (in order not to confuse the engine)
+
function environment.initializearguments(arg)
local arguments, files = { }, { }
environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
@@ -6627,10 +7519,12 @@ function environment.initializearguments(arg)
if index > 0 then
local flag, value = match(argument,"^%-+(.-)=(.-)$")
if flag then
+ flag = gsub(flag,"^c:","")
arguments[flag] = unquoted(value or "")
else
flag = match(argument,"^%-+(.+)")
if flag then
+ flag = gsub(flag,"^c:","")
arguments[flag] = true
else
files[#files+1] = argument
@@ -6650,7 +7544,7 @@ end
-- tricky: too many hits when we support partials unless we add
-- a registration of arguments so from now on we have 'partial'
-function environment.argument(name,partial)
+function environment.getargument(name,partial)
local arguments, sortedflags = environment.arguments, environment.sortedflags
if arguments[name] then
return arguments[name]
@@ -6673,6 +7567,8 @@ function environment.argument(name,partial)
return nil
end
+environment.argument = environment.getargument
+
function environment.splitarguments(separator) -- rather special, cut-off before separator
local done, before, after = false, { }, { }
local originalarguments = environment.originalarguments
@@ -6758,7 +7654,7 @@ function environment.texfile(filename)
return resolvers.findfile(filename,'tex')
end
-function environment.luafile(filename)
+function environment.luafile(filename) -- needs checking
local resolved = resolvers.findfile(filename,'tex') or ""
if resolved ~= "" then
return resolved
@@ -6770,13 +7666,16 @@ function environment.luafile(filename)
return resolvers.findfile(filename,'luatexlibs') or ""
end
-environment.loadedluacode = loadfile -- can be overloaded
+local function checkstrip(filename)
+ local modu = modules[file.nameonly(filename)]
+ return modu and modu.dataonly
+end
function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
filename = file.replacesuffix(filename, "lua")
local fullname = environment.luafile(filename)
if fullname and fullname ~= "" then
- local data = environment.loadedluacode(fullname)
+ local data = loadedluacode(fullname,checkstrip,filename)
if trace_locating then
report_lua("loading file %s%s", fullname, not data and " failed" or "")
elseif not silent then
@@ -6874,21 +7773,7 @@ local trace_entities = false trackers.register("xml.entities", function(v) trac
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
---[[ldx--
-
The parser used here is inspired by the variant discussed in the lua book, but
-handles comment and processing instructions, has a different structure, provides
-parent access; a first version used different trickery but was less optimized to we
-went this route. First we had a find based parser, now we have an based one.
-The find based parser can be found in l-xml-edu.lua along with other older code.
-
Beware, the interface may change. For instance at, ns, tg, dt may get more
-verbose names. Once the code is stable we will also remove some tracing and
-optimize the code.
-
-
I might even decide to reimplement the parser using the latest trickery
-as the current variant was written when showed up and it's easier now to
-build tables in one go.
---ldx]]--
xml = xml or { }
local xml = xml
@@ -6898,46 +7783,25 @@ local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
+local utfchar = utf.char
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
---[[ldx--
-
First a hack to enable namespace resolving. A namespace is characterized by
-a . The following function associates a namespace prefix with a
-pattern. We use , which in this case is more than twice as fast as a
-find based solution where we loop over an array of patterns. Less code and
-much cleaner.
---ldx]]--
+
xml.xmlns = xml.xmlns or { }
local check = P(false)
local parse = check
---[[ldx--
-
The next function associates a namespace prefix with an . This
-normally happens independent of parsing.
-
-xml.registerns("mml","mathml")
-
---ldx]]--
function xml.registerns(namespace, pattern) -- pattern can be an lpeg
check = check + C(P(lower(pattern))) / namespace
parse = P { P(check) + 1 * V(1) }
end
---[[ldx--
-
The next function also registers a namespace, but this time we map a
-given namespace prefix onto a registered one, using the given
-. This used for attributes like xmlns:m.
-
-xml.checkns("m","http://www.w3.org/mathml")
-
---ldx]]--
function xml.checkns(namespace,url)
local ns = lpegmatch(parse,lower(url))
@@ -6946,66 +7810,15 @@ function xml.checkns(namespace,url)
end
end
---[[ldx--
-
Next we provide a way to turn an into a registered
-namespace. This used for the xmlns attribute.
-
-resolvedns = xml.resolvens("http://www.w3.org/mathml")
-
-
-This returns mml.
---ldx]]--
function xml.resolvens(url)
return lpegmatch(parse,lower(url)) or ""
end
---[[ldx--
-
A namespace in an element can be remapped onto the registered
-one efficiently by using the xml.xmlns table.
---ldx]]--
-
---[[ldx--
-
This version uses . We follow the same approach as before, stack and top and
-such. This version is about twice as fast which is mostly due to the fact that
-we don't have to prepare the stream for cdata, doctype etc etc. This variant is
-is dedicated to Luigi Scarso, who challenged me with 40 megabyte files that
-took 12.5 seconds to load (1.5 for file io and the rest for tree building). With
-the implementation we got that down to less 7.3 seconds. Loading the 14
- interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.
-
-
Next comes the parser. The rather messy doctype definition comes in many
-disguises so it is no surprice that later on have to dedicate quite some
- code to it.
-
-
-
-
-
-
-
-
-
-
-
The code may look a bit complex but this is mostly due to the fact that we
-resolve namespaces and attach metatables. There is only one public function:
-
-
-local x = xml.convert(somestring)
-
-
-
An optional second boolean argument tells this function not to create a root
-element.
-
-
Valid entities are:
-
-
-
-
-
-
---ldx]]--
+
+
+
-- not just one big nested table capture (lpeg overflow)
@@ -7220,15 +8033,7 @@ local privates_n = {
-- keeps track of defined ones
}
-local function escaped(s)
- if s == "" then
- return ""
- else -- if utffind(s,privates_u) then
- return (utfgsub(s,".",privates_u))
- -- else
- -- return s
- end
-end
+local escaped = utf.remapper(privates_u)
local function unescaped(s)
local p = privates_n[s]
@@ -7243,13 +8048,7 @@ local function unescaped(s)
return p
end
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_p))
- end
-end
+local unprivatized = utf.remapper(privates_p)
xml.privatetoken = unescaped
xml.unprivatized = unprivatized
@@ -7589,7 +8388,12 @@ local function _xmlconvert_(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler(format("load error: %s",errorstr))
+ local currentresource = settings.currentresource
+ if currentresource and currentresource ~= "" then
+ xml.errorhandler(format("load error in [%s]: %s",currentresource,errorstr))
+ else
+ xml.errorhandler(format("load error: %s",errorstr))
+ end
end
end
else
@@ -7634,7 +8438,7 @@ function xmlconvert(data,settings)
if ok then
return result
else
- return _xmlconvert_("")
+ return _xmlconvert_("",settings)
end
end
@@ -7655,10 +8459,7 @@ function xml.inheritedconvert(data,xmldata) -- xmldata is parent
return xc
end
---[[ldx--
-
Packaging data in an xml like table is done with the following
-function. Maybe it will go away (when not used).
---ldx]]--
+
function xml.is_valid(root)
return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
@@ -7677,11 +8478,7 @@ end
xml.errorhandler = report_xml
---[[ldx--
-
We cannot load an from a filehandle so we need to load
-the whole file first. The function accepts a string representing
-a filename or a file handle.
---ldx]]--
+
function xml.load(filename,settings)
local data = ""
@@ -7695,13 +8492,17 @@ function xml.load(filename,settings)
elseif filename then -- filehandle
data = filename:read("*all")
end
- return xmlconvert(data,settings)
+ if settings then
+ settings.currentresource = filename
+ local result = xmlconvert(data,settings)
+ settings.currentresource = nil
+ return result
+ else
+ return xmlconvert(data,{ currentresource = filename })
+ end
end
---[[ldx--
-
When we inject new elements, we need to convert strings to
-valid trees, which is what the next function does.
---ldx]]--
+
local no_root = { no_root = true }
@@ -7714,11 +8515,7 @@ function xml.toxml(data)
end
end
---[[ldx--
-
For copying a tree we use a dedicated function instead of the
-generic table copier. Since we know what we're dealing with we
-can speed up things a bit. The second argument is not to be used!
---ldx]]--
+
local function copy(old,tables)
if old then
@@ -7742,13 +8539,7 @@ end
xml.copy = copy
---[[ldx--
-
In serializing the tree or parts of the tree is a major
-actitivity which is why the following function is pretty optimized resulting
-in a few more lines of code than needed. The variant that uses the formatting
-function for all components is about 15% slower than the concatinating
-alternative.
---ldx]]--
+
-- todo: add when not present
@@ -7761,15 +8552,12 @@ function xml.checkbom(root) -- can be made faster
return
end
end
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 1, { special = true, ns = "", tg = "@pi@", dt = { "xml version='1.0' standalone='yes'" } } )
insert(dt, 2, "\n" )
end
end
---[[ldx--
-
At the cost of some 25% runtime overhead you can first convert the tree to a string
-and then handle the lot.
How you deal with saving data depends on your preferences. For a 40 MB database
-file the timing on a 2.3 Core Duo are as follows (time in seconds):
-
-
-1.3 : load data from file to string
-6.1 : convert string into tree
-5.3 : saving in file using xmlsave
-6.8 : converting to string using xml.tostring
-3.6 : saving converted string in file
-
-
Beware, these were timing with the old routine but measurements will not be that
-much different I guess.
---ldx]]--
-- maybe this will move to lxml-xml
@@ -8054,10 +8828,7 @@ xml.newhandlers = newhandlers
xml.serialize = serialize
xml.tostring = xmltostring
---[[ldx--
-
The next function operated on the content only and needs a handle function
-that accepts a string.
---ldx]]--
+
local function xmlstring(e,handle)
if not handle or (e.special and e.tg ~= "@rt@") then
@@ -8076,9 +8847,7 @@ end
xml.string = xmlstring
---[[ldx--
-
A few helpers:
---ldx]]--
+
function xml.settings(e)
@@ -8122,11 +8891,7 @@ function xml.name(root)
end
end
---[[ldx--
-
The next helper erases an element but keeps the table as it is,
-and since empty strings are not serialized (effectively) it does
-not harm. Copying the table would take more time. Usage:
---ldx]]--
+
function xml.erase(dt,k)
if dt then
@@ -8138,13 +8903,7 @@ function xml.erase(dt,k)
end
end
---[[ldx--
-
The next helper assigns a tree (or string). Usage:
-
-dt[k] = xml.assign(root) or xml.assign(dt,k,root)
-
---ldx]]--
function xml.assign(dt,k,root)
if dt and k then
@@ -8157,20 +8916,14 @@ end
-- the following helpers may move
---[[ldx--
-
The next helper assigns a tree (or string). Usage:
-
-xml.tocdata(e)
-xml.tocdata(e,"error")
-
---ldx]]--
+
function xml.tocdata(e,wrapper) -- a few more in the aux module
local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
 whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
- local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e }
+ local t = { special = true, ns = "", tg = "@cd@", at = { }, rn = "", dt = { whatever }, __p__ = e }
setmetatable(t,getmetatable(e))
e.dt = { t }
end
@@ -8225,7 +8978,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['lxml-pth'] = {
+if not modules then modules = { } end modules ['lxml-lpt'] = {
version = 1.001,
comment = "this module is the basis for the lxml-* ones",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -8246,28 +8999,9 @@ local setmetatableindex = table.setmetatableindex
-- beware, this is not xpath ... e.g. position is different (currently) and
-- we have reverse-sibling as reversed preceding sibling
---[[ldx--
-
This module can be used stand alone but also inside in
-which case it hooks into the tracker code. Therefore we provide a few
-functions that set the tracers. Here we overload a previously defined
-function.
-
If I can get in the mood I will make a variant that is XSLT compliant
-but I wonder if it makes sense.
---ldx]]--
-
---[[ldx--
-
Expecially the lpath code is experimental, we will support some of xpath, but
-only things that make sense for us; as compensation it is possible to hook in your
-own functions. Apart from preprocessing content for we also need
-this module for process management, like handling and
-files.
-
-
-a/b/c /*/c
-a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
-a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
-
---ldx]]--
+
+
+
local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end
local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
@@ -8275,11 +9009,7 @@ local trace_lprofile = false if trackers then trackers.register("xml.profile",
local report_lpath = logs.reporter("xml","lpath")
---[[ldx--
-
We've now arrived at an interesting part: accessing the tree using a subset
-of and since we're not compatible we call it . We
-will explain more about its usage in other documents.
---ldx]]--
+
local xml = xml
@@ -8731,14 +9461,23 @@ local lp_builtin = P (
-- for the moment we keep namespaces with attributes
local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
-local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end)
-local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end)
+
+-- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end
+-- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end
+
+lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0"
+lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))"
+
local lp_fastpos = lp_fastpos_n + lp_fastpos_p
+
local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
-local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
- return t .. "("
-end
+-- local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
+-- return t .. "("
+-- end
+
+-- local lp_lua_function = (R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / "%0("
+local lp_lua_function = Cs((R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(")) / "%0"
local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling
if expressions[t] then
@@ -9254,9 +9993,7 @@ end
xml.applylpath = applylpath -- takes a table as first argment, which is what xml.filter will do
---[[ldx--
-
This is the main filter function. It returns whatever is asked for.
---ldx]]--
+
function xml.filter(root,pattern) -- no longer funny attribute handling here
return applylpath(root,pattern)
@@ -9354,12 +10091,12 @@ xml.selection = selection -- new method, simple handle
-- generic function finalizer (independant namespace)
-local function dofunction(collected,fnc)
+local function dofunction(collected,fnc,...)
if collected then
local f = functions[fnc]
if f then
for c=1,#collected do
- f(collected[c])
+ f(collected[c],...)
end
else
report_lpath("unknown function '%s'",fnc)
@@ -9460,21 +10197,7 @@ expressions.tag = function(e,n) -- only tg
end
end
---[[ldx--
-
Often using an iterators looks nicer in the code than passing handler
-functions. The book describes how to use coroutines for that
-purpose (). This permits
-code like:
-
-for r, d, k in xml.elements(xml.load('text.xml'),"title") do
- print(d[k]) -- old method
-end
-for e in xml.collected(xml.load('text.xml'),"title") do
- print(e) -- new one
-end
-
---ldx]]--
local wrap, yield = coroutine.wrap, coroutine.yield
@@ -9515,6 +10238,32 @@ function xml.inspect(collection,pattern)
end
end
+-- texy (see xfdf):
+
+local function split(e)
+ local dt = e.dt
+ if dt then
+ for i=1,#dt do
+ local dti = dt[i]
+ if type(dti) == "string" then
+ dti = gsub(dti,"^[\n\r]*(.-)[\n\r]*$","%1")
+ dti = gsub(dti,"[\n\r]+","\n\n")
+ dt[i] = dti
+ else
+ split(dti)
+ end
+ end
+ end
+ return e
+end
+
+function xml.finalizers.paragraphs(c)
+ for i=1,#c do
+ split(c[i])
+ end
+ return c
+end
+
end -- of closure
@@ -9539,13 +10288,7 @@ local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, l
lpegpatterns.xml = lpegpatterns.xml or { }
local xmlpatterns = lpegpatterns.xml
---[[ldx--
-
The following helper functions best belong to the lxml-ini
-module. Some are here because we need then in the mk
-document and other manuals, others came up when playing with
-this module. Since this module is also used in we've
-put them here instead of loading mode modules there then needed.
---ldx]]--
+
local function xmlgsub(t,old,new) -- will be replaced
local dt = t.dt
@@ -9731,9 +10474,7 @@ function xml.processattributes(root,pattern,handle)
return collected
end
---[[ldx--
-
The following functions collect elements and texts.
---ldx]]--
+
-- are these still needed -> lxml-cmp.lua
@@ -9772,9 +10513,7 @@ function xml.collect_tags(root, pattern, nonamespace)
end
end
---[[ldx--
-
We've now arrived at the functions that manipulate the tree.
---ldx]]--
+
local no_root = { no_root = true }
@@ -10160,9 +10899,7 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
end
---[[ldx--
-
Helper (for q2p).
---ldx]]--
+
function xml.cdatatotext(e)
local dt = e.dt
@@ -10259,9 +10996,7 @@ end
-- xml.addentitiesdoctype(x,"hexadecimal")
-- print(x)
---[[ldx--
-
Here are a few synonyms.
---ldx]]--
+
xml.all = xml.each
xml.insert = xml.insertafter
@@ -10852,7 +11587,7 @@ local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string
local concat = table.concat
local next, type = next, type
-local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
@@ -11202,12 +11937,14 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
for s in gmatch(str,"[^,]+") do
s = validate(s)
if s then
- n = n + 1 ; t[n] = s
+ n = n + 1
+ t[n] = s
end
end
else
for s in gmatch(str,"[^,]+") do
- n = n + 1 ; t[n] = s
+ n = n + 1
+ t[n] = s
end
end
if trace_expansions then
@@ -11221,7 +11958,7 @@ end
-- We could make the previous one public.
local function validate(s)
- s = collapsepath(s) -- already keeps the //
+ s = collapsepath(s) -- already keeps the trailing / and //
return s ~= "" and not find(s,"^!*unset/*$") and s
end
@@ -11559,7 +12296,7 @@ local resolvers = resolvers
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
-local fileextname = file.extname
+local suffixonly = file.suffixonly
local formats = allocate()
local suffixes = allocate()
@@ -11814,7 +12551,7 @@ function resolvers.formatofvariable(str)
end
function resolvers.formatofsuffix(str) -- of file
- return suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
+ return suffixmap[suffixonly(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
end
function resolvers.variableofformat(str)
@@ -11826,7 +12563,7 @@ function resolvers.variableofformatorsuffix(str)
if v then
return v
end
- v = suffixmap[fileextname(str)]
+ v = suffixmap[suffixonly(str)]
if v then
return formats[v]
end
@@ -11847,21 +12584,7 @@ if not modules then modules = { } end modules ['data-tmp'] = {
license = "see context related readme files"
}
---[[ldx--
-This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. Best is to set the
-following variable. When not set, the usual paths will be
-checked. Personally I prefer the (user's) temporary path.
-
-Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.
---ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
local serialize, serializetofile = table.serialize, table.tofile
@@ -12396,11 +13119,12 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local filedirname = file.dirname
local filebasename = file.basename
-local fileextname = file.extname
+local suffixonly = file.suffixonly
local filejoin = file.join
local collapsepath = file.collapsepath
local joinpath = file.joinpath
local allocate = utilities.storage.allocate
+local settings_to_array = utilities.parsers.settings_to_array
local setmetatableindex = table.setmetatableindex
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -12424,7 +13148,7 @@ resolvers.cacheversion = '1.0.1'
resolvers.configbanner = ''
resolvers.homedir = environment.homedir
resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
-resolvers.luacnfname = 'texmfcnf.lua'
+resolvers.luacnfname = "texmfcnf.lua"
resolvers.luacnfstate = "unknown"
-- The web2c tex binaries as well as kpse have built in paths for the configuration
@@ -12696,7 +13420,7 @@ end
local function identify_configuration_files()
local specification = instance.specification
if #specification == 0 then
- local cnfspec = getenv('TEXMFCNF')
+ local cnfspec = getenv("TEXMFCNF")
if cnfspec == "" then
cnfspec = resolvers.luacnfspec
resolvers.luacnfstate = "default"
@@ -12784,7 +13508,7 @@ local function load_configuration_files()
-- we push the value into the main environment (osenv) so
-- that it takes precedence over the default one and therefore
-- also over following definitions
- resolvers.setenv('TEXMFCNF',cnfspec) -- resolves prefixes
+ resolvers.setenv("TEXMFCNF",cnfspec) -- resolves prefixes
-- we now identify and load the specified configuration files
instance.specification = { }
identify_configuration_files()
@@ -12832,10 +13556,11 @@ end
local function locate_file_databases()
-- todo: cache:// and tree:// (runtime)
- local texmfpaths = resolvers.expandedpathlist('TEXMF')
+ local texmfpaths = resolvers.expandedpathlist("TEXMF")
if #texmfpaths > 0 then
for i=1,#texmfpaths do
local path = collapsepath(texmfpaths[i])
+ path = gsub(path,"/+$","") -- in case $HOME expands to something with a trailing /
local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
if stripped ~= "" then
local runtime = stripped == path
@@ -12964,9 +13689,9 @@ function resolvers.prependhash(type,name,cache)
end
function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
- local t = resolvers.splitpath(getenv('TEXMF'))
+ local t = resolvers.splitpath(getenv("TEXMF")) -- okay?
insert(t,1,specification)
- local newspec = concat(t,";")
+ local newspec = concat(t,",") -- not ;
if instance.environment["TEXMF"] then
instance.environment["TEXMF"] = newspec
elseif instance.variables["TEXMF"] then
@@ -13041,14 +13766,19 @@ function resolvers.resetextrapath()
end
function resolvers.registerextrapath(paths,subpaths)
+ paths = settings_to_array(paths)
+ subpaths = settings_to_array(subpaths)
local ep = instance.extra_paths or { }
local oldn = #ep
local newn = oldn
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
+ local nofpaths = #paths
+ local nofsubpaths = #subpaths
+ if nofpaths > 0 then
+ if nofsubpaths > 0 then
+ for i=1,nofpaths do
+ local p = paths[i]
+ for j=1,nofsubpaths do
+ local s = subpaths[j]
local ps = p .. "/" .. s
if not done[ps] then
newn = newn + 1
@@ -13058,7 +13788,8 @@ function resolvers.registerextrapath(paths,subpaths)
end
end
else
- for p in gmatch(paths,"[^,]+") do
+ for i=1,nofpaths do
+ local p = paths[i]
if not done[p] then
newn = newn + 1
ep[newn] = resolvers.cleanpath(p)
@@ -13066,10 +13797,10 @@ function resolvers.registerextrapath(paths,subpaths)
end
end
end
- elseif subpaths and subpaths ~= "" then
+ elseif nofsubpaths > 0 then
for i=1,oldn do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
+ for j=1,nofsubpaths do
+ local s = subpaths[j]
local ps = ep[i] .. "/" .. s
if not done[ps] then
newn = newn + 1
@@ -13147,18 +13878,21 @@ function resolvers.expandedpathlist(str)
return { }
elseif instance.savelists then
str = lpegmatch(dollarstripper,str)
- if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
- instance.lists[str] = expandedpathfromlist(lst)
- end
- return instance.lists[str]
+ local lists = instance.lists
+ local lst = lists[str]
+ if not lst then
+ local l = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ lst = expandedpathfromlist(l)
+ lists[str] = lst
+ end
+ return lst
else
local lst = resolvers.splitpath(resolvers.expansion(str))
return made_list(instance,expandedpathfromlist(lst))
end
end
-function resolvers.expandedpathlistfromvariable(str) -- brrr
+function resolvers.expandedpathlistfromvariable(str) -- brrr / could also have cleaner ^!! /$ //
str = lpegmatch(dollarstripper,str)
local tmp = resolvers.variableofformatorsuffix(str)
return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
@@ -13315,7 +14049,7 @@ local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
local collect_instance_files
local function find_analyze(filename,askedformat,allresults)
- local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ local filetype, wantedfiles, ext = '', { }, suffixonly(filename)
-- too tricky as filename can be bla.1.2.3:
--
-- if not suffixmap[ext] then
@@ -13393,7 +14127,7 @@ local function find_qualified(filename,allresults) -- this one will be split too
if trace_detail then
report_resolving("locating qualified file '%s'", filename)
end
- local forcedname, suffix = "", fileextname(filename)
+ local forcedname, suffix = "", suffixonly(filename)
if suffix == "" then -- why
local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
if format_suffixes then
@@ -14063,6 +14797,8 @@ local gsub = string.gsub
local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+local joinpath, basename, dirname = file.join, file.basename, file.dirname
+local getmetatable, rawset, type = getmetatable, rawset, type
-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
@@ -14104,28 +14840,43 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
+ return cleanpath(basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.dirname((fullname ~= "" and fullname) or str))
+ return cleanpath(dirname((fullname ~= "" and fullname) or str))
end
prefixes.selfautoloc = function(str)
- return cleanpath(file.join(getenv('SELFAUTOLOC'),str))
+ return cleanpath(joinpath(getenv('SELFAUTOLOC'),str))
end
prefixes.selfautoparent = function(str)
- return cleanpath(file.join(getenv('SELFAUTOPARENT'),str))
+ return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str))
end
prefixes.selfautodir = function(str)
- return cleanpath(file.join(getenv('SELFAUTODIR'),str))
+ return cleanpath(joinpath(getenv('SELFAUTODIR'),str))
end
prefixes.home = function(str)
- return cleanpath(file.join(getenv('HOME'),str))
+ return cleanpath(joinpath(getenv('HOME'),str))
+end
+
+local function toppath()
+ local pathname = dirname(inputstack[#inputstack] or "")
+ if pathname == "" then
+ return "."
+ else
+ return pathname
+ end
+end
+
+resolvers.toppath = toppath
+
+prefixes.toppath = function(str)
+ return cleanpath(joinpath(toppath(),str))
end
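+
+-- A minimal sketch (not taken from the ConTeXt sources; the filename is made
+-- up): like the other prefixes, toppath: is expanded by resolvers.resolve.
+--
+-- local name = resolvers.resolve("toppath:figures/logo.pdf")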
prefixes.env = prefixes.environment
@@ -14161,6 +14912,8 @@ function resolvers.resetresolve(str)
resolved, abstract = { }, { }
end
+-- todo: use an lpeg (see data-lua for !! / stripper)
+
local function resolve(str) -- use schemes, this one is then for the commandline only
if type(str) == "table" then
local t = { }
@@ -14186,7 +14939,7 @@ end
resolvers.resolve = resolve
resolvers.unresolve = unresolve
-if os.uname then
+if type(os.uname) == "function" then
for k, v in next, os.uname() do
if not prefixes[k] then
@@ -14198,11 +14951,17 @@ end
if os.type == "unix" then
+ -- We need to distinguish between a prefix and something else, so we
+ -- have a special repath variant for linux. Also, when a new prefix is
+ -- defined, we need to remake the matcher.
+
local pattern
local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
local colon = P(":")
- local p
for k, v in table.sortedpairs(prefixes) do
if p then
p = P(k) + p
@@ -14211,9 +14970,6 @@ if os.type == "unix" then
end
end
pattern = Cs((p * colon + colon/";" + P(1))^0)
- if t then
- t[k] = v
- end
end
makepattern()
@@ -14424,18 +15180,7 @@ local trace_cache = false trackers.register("resolvers.cache", functi
local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
---[[ldx--
-Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).
-
-Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.
-
-Examples of usage can be found in the font related code.
---ldx]]--
containers = containers or { }
local containers = containers
@@ -14670,11 +15415,7 @@ local trace_locating = false trackers.register("resolvers.locating", function(v
local report_zip = logs.reporter("resolvers","zip")
--- zip:///oeps.zip?name=bla/bla.tex
--- zip:///oeps.zip?tree=tex/texmf-local
--- zip:///texmf.zip?tree=/tex/texmf
--- zip:///texmf.zip?tree=/tex/texmf-local
--- zip:///texmf-mine.zip?tree=/tex/texmf-projects
+
local resolvers = resolvers
@@ -14999,7 +15740,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-crl'] = {
+if not modules then modules = { } end modules ['data-sch'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -15007,60 +15748,199 @@ if not modules then modules = { } end modules ['data-crl'] = {
license = "see context related readme files"
}
--- this one is replaced by data-sch.lua --
+local loadstring = loadstring
+local gsub, concat, format = string.gsub, table.concat, string.format
+local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-local gsub = string.gsub
+local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end)
+local report_schemes = logs.reporter("resolvers","schemes")
-local resolvers = resolvers
+local http = require("socket.http")
+local ltn12 = require("ltn12")
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
+local resolvers = resolvers
+local schemes = resolvers.schemes or { }
+resolvers.schemes = schemes
+
+local cleaners = { }
+schemes.cleaners = cleaners
+
+local threshold = 24 * 60 * 60
+
+directives.register("schemes.threshold", function(v) threshold = tonumber(v) or threshold end)
+
+function cleaners.none(specification)
+ return specification.original
+end
+
+function cleaners.strip(specification)
+ return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods
+end
+
+function cleaners.md5(specification)
+ return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
+end
+
+local cleaner = cleaners.strip
+
+directives.register("schemes.cleanmethod", function(v) cleaner = cleaners[v] or cleaners.strip end)
+
+function resolvers.schemes.cleanname(specification)
+ local hash = cleaner(specification)
+ if trace_schemes then
+ report_schemes("hashing %s to %s",specification.original,hash)
+ end
+ return hash
+end
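+
+-- A minimal sketch (not taken from the ConTeXt sources; the url is made up):
+-- the default strip cleaner keeps letters, digits and periods and collapses
+-- everything else into dashes.
+--
+-- cleaners.strip { original = "http://www.pragma-ade.com/general/manuals/mk.pdf" }
+-- -- gives "http-www.pragma-ade.com-general-manuals-mk.pdf"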
-resolvers.curl = resolvers.curl or { }
-local curl = resolvers.curl
+local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { }
-local cached = { }
+local function runcurl(name,cachename) -- we use sockets instead of the curl library when possible
+ local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name
+ os.spawn(command)
+end
-local function runcurl(specification)
+local function fetch(specification)
local original = specification.original
- -- local scheme = specification.scheme
- local cleanname = gsub(original,"[^%a%d%.]+","-")
- local cachename = caches.setfirstwritablefile(cleanname,"curl")
+ local scheme = specification.scheme
+ local cleanname = schemes.cleanname(specification)
+ local cachename = caches.setfirstwritablefile(cleanname,"schemes")
if not cached[original] then
- if not io.exists(cachename) then
+ statistics.starttiming(schemes)
+ if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > (thresholds[scheme] or threshold)) then
cached[original] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
- os.spawn(command)
+ local handler = handlers[scheme]
+ if handler then
+ if trace_schemes then
+ report_schemes("fetching '%s', protocol '%s', method 'built-in'",original,scheme)
+ end
+ logs.flush()
+ handler(specification,cachename)
+ else
+ if trace_schemes then
+ report_schemes("fetching '%s', protocol '%s', method 'curl'",original,scheme)
+ end
+ logs.flush()
+ runcurl(original,cachename)
+ end
end
if io.exists(cachename) then
cached[original] = cachename
+ if trace_schemes then
+ report_schemes("using cached '%s', protocol '%s', cachename '%s'",original,scheme,cachename)
+ end
else
cached[original] = ""
+ if trace_schemes then
+ report_schemes("using missing '%s', protocol '%s'",original,scheme)
+ end
end
+ loaded[scheme] = loaded[scheme] + 1
+ statistics.stoptiming(schemes)
+ else
+ if trace_schemes then
+ report_schemes("reusing '%s', protocol '%s'",original,scheme)
+ end
+ reused[scheme] = reused[scheme] + 1
end
return cached[original]
end
--- old code: we could be cleaner using specification (see schemes)
-
local function finder(specification,filetype)
- return resolvers.methodhandler("finders",runcurl(specification),filetype)
+ return resolvers.methodhandler("finders",fetch(specification),filetype)
end
local opener = openers.file
local loader = loaders.file
-local function install(scheme)
- finders[scheme] = finder
- openers[scheme] = opener
- loaders[scheme] = loader
+local function install(scheme,handler,newthreshold)
+ handlers [scheme] = handler
+ loaded [scheme] = 0
+ reused [scheme] = 0
+ finders [scheme] = finder
+ openers [scheme] = opener
+ loaders [scheme] = loader
+ thresholds[scheme] = newthreshold or threshold
end
-resolvers.curl.install = install
+schemes.install = install
+
+local function http_handler(specification,cachename)
+ local tempname = cachename .. ".tmp"
+ local f = io.open(tempname,"wb")
+ local status, message = http.request {
+ url = specification.original,
+ sink = ltn12.sink.file(f)
+ }
+ if not status then
+ os.remove(tempname)
+ else
+ os.remove(cachename)
+ os.rename(tempname,cachename)
+ end
+ return cachename
+end
-install('http')
-install('https')
+install('http',http_handler)
+install('https') -- see pod
install('ftp')
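+
+-- A minimal sketch (not taken from the ConTeXt sources; 'sftp' is a made-up
+-- example): a scheme installed without a handler falls back to curl, and the
+-- optional third argument overrides the default threshold of 24 hours.
+--
+-- install('sftp',nil,3600)
+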
+statistics.register("scheme handling time", function()
+ local l, r, nl, nr = { }, { }, 0, 0
+ for k, v in table.sortedhash(loaded) do
+ if v > 0 then
+ nl = nl + 1
+ l[nl] = k .. ":" .. v
+ end
+ end
+ for k, v in table.sortedhash(reused) do
+ if v > 0 then
+ nr = nr + 1
+ r[nr] = k .. ":" .. v
+ end
+ end
+ local n = nl + nr
+ if n > 0 then
+ l = nl > 0 and concat(l) or "none"
+ r = nr > 0 and concat(r) or "none"
+ return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s",
+ statistics.elapsedtime(schemes), n, threshold, l, r)
+ else
+ return nil
+ end
+end)
+
+-- We provide a few more helpers:
+
+----- http = require("socket.http")
+local httprequest = http.request
+local toquery = url.toquery
+
+-- local function httprequest(url)
+-- return os.resultof(format("curl --silent %q", url))
+-- end
+
+local function fetchstring(url,data)
+ local q = data and toquery(data)
+ if q then
+ url = url .. "?" .. q
+ end
+ local reply = httprequest(url)
+ return reply -- just one argument
+end
+
+schemes.fetchstring = fetchstring
+
+function schemes.fetchtable(url,data)
+ local reply = fetchstring(url,data)
+ if reply then
+ local s = loadstring("return " .. reply)
+ if s then
+ return s()
+ end
+ end
+end
+
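+-- A minimal sketch (not taken from the ConTeXt sources; url and fields are
+-- made up): fetchtable expects the server to reply with a serialized lua table.
+--
+-- local status = schemes.fetchtable("http://localhost:8088/status", { format = "lua" })
+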
end -- of closure
@@ -15074,170 +15954,199 @@ if not modules then modules = { } end modules ['data-lua'] = {
license = "see context related readme files"
}
--- some loading stuff ... we might move this one to slot 2 depending
--- on the developments (the loaders must not trigger kpse); we could
--- of course use a more extensive lib path spec
+-- We overload the regular loader. We do so because we operate mostly in
+-- tds and use our own loader code. Alternatively we could use a more
+-- extensive definition of package.path and package.cpath but even then
+-- we're not done. Also, we now have better tracing.
+--
+-- -- local mylib = require("libtest")
+-- -- local mysql = require("luasql.mysql")
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local concat = table.concat
+
+local trace_libraries = false
+
+trackers.register("resolvers.libraries", function(v) trace_libraries = v end)
+trackers.register("resolvers.locating", function(v) trace_libraries = v end)
local report_libraries = logs.reporter("resolvers","libraries")
local gsub, insert = string.gsub, table.insert
+local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
local unpack = unpack or table.unpack
+local is_readable = file.is_readable
local resolvers, package = resolvers, package
-local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs'
-local clibformats = { 'lib' }
-
-local _path_, libpaths, _cpath_, clibpaths
-
-function package.libpaths()
- if not _path_ or package.path ~= _path_ then
- _path_ = package.path
- libpaths = file.splitpath(_path_,";")
+local libsuffixes = { 'tex', 'lua' }
+local clibsuffixes = { 'lib' }
+local libformats = { 'TEXINPUTS', 'LUAINPUTS' }
+local clibformats = { 'CLUAINPUTS' }
+
+local libpaths = nil
+local clibpaths = nil
+local libhash = { }
+local clibhash = { }
+local libextras = { }
+local clibextras = { }
+
+local pattern = Cs(P("!")^0 / "" * (P("/") * P(-1) / "/" + P("/")^1 / "/" + 1)^0)
+
+local function cleanpath(path) --hm, don't we have a helper for this?
+ return resolvers.resolve(lpegmatch(pattern,path))
+end
+
+local function getlibpaths()
+ if not libpaths then
+ libpaths = { }
+ for i=1,#libformats do
+ local paths = resolvers.expandedpathlistfromvariable(libformats[i])
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not libhash[path] then
+ libpaths[#libpaths+1] = path
+ libhash[path] = true
+ end
+ end
+ end
end
return libpaths
end
-function package.clibpaths()
- if not _cpath_ or package.cpath ~= _cpath_ then
- _cpath_ = package.cpath
- clibpaths = file.splitpath(_cpath_,";")
+local function getclibpaths()
+ if not clibpaths then
+ clibpaths = { }
+ for i=1,#clibformats do
+ local paths = resolvers.expandedpathlistfromvariable(clibformats[i])
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not clibhash[path] then
+ clibpaths[#clibpaths+1] = path
+ clibhash[path] = true
+ end
+ end
+ end
end
return clibpaths
end
-local function thepath(...)
- local t = { ... } t[#t+1] = "?.lua"
- local path = file.join(unpack(t))
- if trace_locating then
- report_libraries("! appending '%s' to 'package.path'",path)
+package.libpaths = getlibpaths
+package.clibpaths = getclibpaths
+
+function package.extralibpath(...)
+ local paths = { ... }
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not libhash[path] then
+ if trace_libraries then
+ report_libraries("! extra lua path '%s'",path)
+ end
+ libextras[#libextras+1] = path
+ libpaths[#libpaths +1] = path
+ end
end
- return path
end
-local p_libpaths, a_libpaths = { }, { }
-
-function package.appendtolibpath(...)
- insert(a_libpath,thepath(...))
+function package.extraclibpath(...)
+ local paths = { ... }
+ for i=1,#paths do
+ local path = cleanpath(paths[i])
+ if not clibhash[path] then
+ if trace_libraries then
+ report_libraries("! extra lib path '%s'",path)
+ end
+ clibextras[#clibextras+1] = path
+ clibpaths[#clibpaths +1] = path
+ end
+ end
end
-function package.prependtolibpath(...)
- insert(p_libpaths,1,thepath(...))
+if not package.loaders[-2] then
+ -- use package-path and package-cpath
+ package.loaders[-2] = package.loaders[2]
end
--- beware, we need to return a loadfile result !
+local function loadedaslib(resolved,rawname)
+ return package.loadlib(resolved,"luaopen_" .. gsub(rawname,"%.","_"))
+end
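+
+-- So a require("luasql.mysql") that ends up here is opened through its
+-- luaopen_luasql_mysql entry point.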
-local function loaded(libpaths,name,simple)
- for i=1,#libpaths do -- package.path, might become option
- local libpath = libpaths[i]
- local resolved = gsub(libpath,"%?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.path': '%s'",name,resolved)
- end
- return loadfile(resolved)
- end
+local function loadedbylua(name)
+ if trace_libraries then
+ report_libraries("! locating %q using normal loader",name)
end
+ local resolved = package.loaders[-2](name)
end
-package.loaders[2] = function(name) -- was [#package.loaders+1]
- if file.suffix(name) == "" then
- name = file.addsuffix(name,"lua") -- maybe a list
- if trace_locating then -- mode detail
- report_libraries("! locating '%s' with forced suffix",name)
- end
- else
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
- end
+local function loadedbyformat(name,rawname,suffixes,islib)
+ if trace_libraries then
+ report_libraries("! locating %q as %q using formats %q",rawname,name,concat(suffixes))
end
- for i=1,#libformats do
- local format = libformats[i]
+ for i=1,#suffixes do -- so we use findfile and not a lookup loop
+ local format = suffixes[i]
local resolved = resolvers.findfile(name,format) or ""
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'libformat path': '%s'",name,format)
+ if trace_libraries then
+ report_libraries("! checking for %q using format %q",name,format)
end
if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located via environment: '%s'",name,resolved)
+ if trace_libraries then
+ report_libraries("! lib %q located on %q",name,resolved)
end
- return loadfile(resolved)
- end
- end
- -- libpaths
- local libpaths, clibpaths = package.libpaths(), package.clibpaths()
- local simple = gsub(name,"%.lua$","")
- local simple = gsub(simple,"%.","/")
- local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple)
- if resolved then
- return resolved
- end
- --
- local libname = file.addsuffix(simple,os.libsuffix)
- for i=1,#clibformats do
- -- better have a dedicated loop
- local format = clibformats[i]
- local paths = resolvers.expandedpathlistfromvariable(format)
- for p=1,#paths do
- local path = paths[p]
- local resolved = file.join(path,libname)
- if trace_locating then -- mode detail
- report_libraries("! checking for '%s' using 'clibformat path': '%s'",libname,path)
- end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
- end
- return package.loadlib(resolved,name)
+ if islib then
+ return loadedaslib(resolved,rawname)
+ else
+ return loadfile(resolved)
end
end
end
- for i=1,#clibpaths do -- package.path, might become option
- local libpath = clibpaths[i]
- local resolved = gsub(libpath,"?",simple)
- if trace_locating then -- more detail
- report_libraries("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
+end
+
+local function loadedbypath(name,rawname,paths,islib,what)
+ if trace_libraries then
+ report_libraries("! locating %q as %q on %q paths",rawname,name,what)
+ end
+ for p=1,#paths do
+ local path = paths[p]
+ local resolved = file.join(path,name)
+ if trace_libraries then -- more detail
+ report_libraries("! checking for %q using %q path %q",name,what,path)
end
- if file.is_readable(resolved) then
- if trace_locating then
- report_libraries("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
+ if is_readable(resolved) then
+ if trace_libraries then
+ report_libraries("! lib %q located on %q",name,resolved)
+ end
+ if islib then
+ return loadedaslib(resolved,rawname)
+ else
+ return loadfile(resolved)
end
- return package.loadlib(resolved,name)
- end
- end
- -- just in case the distribution is messed up
- if trace_loading then -- more detail
- report_libraries("! checking for '%s' using 'luatexlibs': '%s'",name)
- end
- local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or ""
- if resolved ~= "" then
- if trace_locating then
- report_libraries("! lib '%s' located by basename via environment: '%s'",name,resolved)
end
- return loadfile(resolved)
end
- if trace_locating then
- report_libraries('? unable to locate lib: %s',name)
- end
--- return "unable to locate " .. name
end
-resolvers.loadlualib = require
-
--- -- -- --
+local function notloaded(name)
+ if trace_libraries then
+ report_libraries("? unable to locate library %q",name)
+ end
+end
-package.obsolete = package.obsolete or { }
+package.loaders[2] = function(name)
+ local thename = gsub(name,"%.","/")
+ local luaname = file.addsuffix(thename,"lua")
+ local libname = file.addsuffix(thename,os.libsuffix)
+ return
+ loadedbyformat(luaname,name,libsuffixes, false)
+ or loadedbyformat(libname,name,clibsuffixes, true)
+ or loadedbypath (luaname,name,getlibpaths (),false,"lua")
+ or loadedbypath (luaname,name,getclibpaths(),false,"lua")
+ or loadedbypath (libname,name,getclibpaths(),true, "lib")
+ or loadedbylua (name)
+ or notloaded (name)
+end
-package.append_libpath = appendtolibpath -- will become obsolete
-package.prepend_libpath = prependtolibpath -- will become obsolete
+-- package.loaders[3] = nil
+-- package.loaders[4] = nil
-package.obsolete.append_libpath = appendtolibpath -- will become obsolete
-package.obsolete.prepend_libpath = prependtolibpath -- will become obsolete
+resolvers.loadlualib = require
end -- of closure
@@ -15707,7 +16616,6 @@ function environment.make_format(name)
end
function environment.run_format(name,data,more)
- -- hm, rather old code here; we can now use the file.whatever functions
if name and name ~= "" then
local barename = file.removesuffix(name)
local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
@@ -15736,6 +16644,129 @@ function environment.run_format(name,data,more)
end
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['util-tpl'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code. Coming from dos and windows, I've always used %whatever%
+-- as template variables so let's stick to it. After all, it's easy to parse and stands
+-- out well. A double %% is turned into a regular %.
+
+utilities.templates = utilities.templates or { }
+local templates = utilities.templates
+
+local trace_template = false trackers.register("templates.trace",function(v) trace_template = v end)
+local report_template = logs.reporter("template")
+
+local format = string.format
+local P, C, Cs, Carg, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.Carg, lpeg.match
+
+-- todo: make installable template.new
+
+local replacer
+
+local function replacekey(k,t,recursive)
+ local v = t[k]
+ if not v then
+ if trace_template then
+ report_template("unknown key %q",k)
+ end
+ return ""
+ else
+ if trace_template then
+ report_template("setting key %q to value %q",k,v)
+ end
+ if recursive then
+ return lpegmatch(replacer,v,1,t)
+ else
+ return v
+ end
+ end
+end
+
+local sqlescape = lpeg.replacer {
+ { "'", "''" },
+ { "\\", "\\\\" },
+ { "\r\n", "\\n" },
+ { "\r", "\\n" },
+ -- { "\t", "\\t" },
+}
+
+local escapers = {
+ lua = function(s)
+ return format("%q",s)
+ end,
+ sql = function(s)
+ return lpegmatch(sqlescape,s)
+ end,
+}
+
+local function replacekeyunquoted(s,t,how,recurse) -- ".. \" "
+ local escaper = how and escapers[how] or escapers.lua
+ return escaper(replacekey(s,t,recurse))
+end
+
+local single = P("%") -- test %test% test : resolves test
+local double = P("%%") -- test 10%% test : %% becomes %
+local lquoted = P("%[") -- test %[test]% test : resolves test with escaped "'s
+local rquoted = P("]%") --
+
+local escape = double / '%%'
+local nosingle = single / ''
+local nodouble = double / ''
+local nolquoted = lquoted / ''
+local norquoted = rquoted / ''
+
+local key = nosingle * (C((1-nosingle)^1 * Carg(1) * Carg(2) * Carg(3))/replacekey) * nosingle
+local unquoted = nolquoted * ((C((1 - norquoted)^1) * Carg(1) * Carg(2) * Carg(3))/replacekeyunquoted) * norquoted
+local any = P(1)
+
+ replacer = Cs((unquoted + escape + key + any)^0)
+
+local function replace(str,mapping,how,recurse)
+ if mapping then
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ else
+ return str
+ end
+end
+
+-- print(replace("test '%[x]%' test",{ x = [[a 'x' a]] }))
+-- print(replace("test '%[x]%' test",{ x = [[a 'x' a]] },'sql'))
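+--
+-- With the default lua escaper the quoted form gives a %q-formatted string,
+-- e.g. (sketch, not from the sources):
+--
+-- print(replace([[values (%[x]%)]],{ x = [[a "x" a]] })) -- values ("a \"x\" a")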
+
+templates.replace = replace
+
+function templates.load(filename,mapping,how,recurse)
+ local data = io.loaddata(filename) or ""
+ if mapping and next(mapping) then
+ return replace(data,mapping,how,recurse)
+ else
+ return data
+ end
+end
+
+function templates.resolve(t,mapping,how,recurse)
+ if not mapping then
+ mapping = t
+ end
+ for k, v in next, t do
+ t[k] = replace(v,mapping,how,recurse)
+ end
+ return t
+end
+
+-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
+-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
+
+
end -- of closure
-- end library merge
@@ -15796,7 +16827,7 @@ own.libs = { -- order can be made better
-- 'data-bin.lua',
'data-zip.lua',
'data-tre.lua',
- 'data-crl.lua',
+ 'data-sch.lua',
'data-lua.lua',
'data-aux.lua', -- updater
'data-tmf.lua',
@@ -15804,6 +16835,8 @@ own.libs = { -- order can be made better
'luat-sta.lua',
'luat-fmt.lua',
+
+ 'util-tpl.lua',
}
-- We need this hack till luatex is fixed.
@@ -15824,7 +16857,7 @@ own.path = gsub(match(own.name,"^(.+)[\\/].-$") or ".","\\","/")
local ownpath, owntree = own.path, environment and environment.ownpath or own.path
-own.list = {
+own.list = { -- predictable paths
'.',
ownpath ,
ownpath .. "/../sources", -- HH's development path
@@ -15848,7 +16881,7 @@ local function locate_libs()
local filename = pth .. "/" .. lib
local found = lfs.isfile(filename)
if found then
- package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require
+ package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require (probably obsolete)
return pth
end
end
@@ -15980,6 +17013,7 @@ local helpinfo = [[
--var-value report value of variable
--find-file report file location
--find-path report path of file
+--show-package-path report package paths
--pattern=str filter variables
]]
@@ -16093,7 +17127,8 @@ function runners.execute_script(fullname,internal,nosplit)
elseif state == 'skip' then
return true
elseif state == "run" then
- local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), ""
+ local path, name, suffix = file.splitname(fullname)
+ local result = ""
if path ~= "" then
result = fullname
elseif name then
@@ -16104,7 +17139,7 @@ function runners.execute_script(fullname,internal,nosplit)
name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
- suffix = file.extname(name)
+ suffix = file.suffix(name)
end
if suffix == "" then
-- loop over known suffixes
@@ -16131,7 +17166,7 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local binary = runners.applications[file.extname(result)]
+ local binary = runners.applications[file.suffix(result)]
result = string.quoted(string.unquoted(result))
-- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
-- result = '"' .. result .. '"'
@@ -16324,7 +17359,7 @@ function resolvers.launch(str)
-- maybe we also need to test on mtxrun.launcher.suffix environment
-- variable or on windows consult the assoc and ftype vars and such
local launchers = runners.launchers[os.platform] if launchers then
- local suffix = file.extname(str) if suffix then
+ local suffix = file.suffix(str) if suffix then
local runner = launchers[suffix] if runner then
str = runner .. " " .. str
end
@@ -16383,7 +17418,7 @@ function runners.find_mtx_script(filename)
end
filename = file.addsuffix(filename,"lua")
local basename = file.removesuffix(file.basename(filename))
- local suffix = file.extname(filename)
+ local suffix = file.suffix(filename)
-- qualified path, raw name
local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename
if fullname and fullname ~= "" then
@@ -16438,7 +17473,7 @@ function runners.execute_ctx_script(filename,...)
runners.register_arguments(...)
local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
- if file.extname(fullname) == "cld" then
+ if file.suffix(fullname) == "cld" then
-- handy in editors where we force --autopdf
report("running cld script: %s",filename)
table.insert(arguments,1,fullname)
@@ -16546,6 +17581,21 @@ function runners.timed(action)
statistics.timed(action)
end
+function runners.associate(filename)
+ os.launch(filename)
+end
+
+function runners.gethelp(filename)
+ local url = environment.argument("url")
+ if url and url ~= "" then
+ local command = string.gsub(environment.argument("command") or "unknown","^%s*\\*(.-)%s*$","%1")
+ url = utilities.templates.replace(url,{ command = command })
+ os.launch(url)
+ else
+ report("no --url given")
+ end
+end
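+
+-- A minimal sketch (not taken from the ConTeXt sources; the url is made up):
+-- the %command% key in --url is filled from --command, so something like
+--
+-- mtxrun --gethelp --url="http://example.org/Command/%command%" --command=framed
+--
+-- opens http://example.org/Command/framed in the associated application.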
+
-- this is a bit dirty ... first we store the first filename and next we
-- split the arguments so that we only see the ones meant for this script
-- ... later we will use the second half
@@ -16648,7 +17698,18 @@ else
end
-if e_argument("selfmerge") then
+if e_argument("script") or e_argument("scripts") then
+
+ -- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
+ if is_mkii_stub then
+ ok = runners.execute_script(filename,false,true)
+ else
+ ok = runners.execute_ctx_script(filename)
+ end
+
+elseif e_argument("selfmerge") then
-- embed used libraries
@@ -16671,23 +17732,25 @@ elseif e_argument("selfupdate") then
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif e_argument("ctxlua") or e_argument("internal") then
+elseif e_argument("show-package-path") or e_argument("show-package-paths") then
- -- run a script by loading it (using libs)
+ local l = package.libpaths()
+ local c = package.clibpaths()
- runners.loadbase()
- ok = runners.execute_script(filename,true)
+ for i=1,#l do
+ report("package lib path %s: %s",i,l[i])
+ end
-elseif e_argument("script") or e_argument("scripts") then
+ for i=1,#c do
+ report("package clib path %s: %s",i,c[i])
+ end
- -- run a script by loading it (using libs), pass args
+elseif e_argument("ctxlua") or e_argument("internal") then
+
+ -- run a script by loading it (using libs)
runners.loadbase()
- if is_mkii_stub then
- ok = runners.execute_script(filename,false,true)
- else
- ok = runners.execute_ctx_script(filename)
- end
+ ok = runners.execute_script(filename,true)
elseif e_argument("execute") then
@@ -16715,6 +17778,14 @@ elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
+elseif e_argument("associate") then
+
+ runners.associate(filename)
+
+elseif e_argument("gethelp") then
+
+ runners.gethelp()
+
elseif e_argument("makestubs") then
-- make stubs (deprecated)
@@ -16806,7 +17877,7 @@ elseif e_argument("find-path") then
elseif e_argument("expand-braces") then
- -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
resolvers.load("nofiles")
runners.register_arguments(filename)
diff --git a/tex/context/base/anch-bar.mkiv b/tex/context/base/anch-bar.mkiv
index 9f9770fb6..c7c6190be 100644
--- a/tex/context/base/anch-bar.mkiv
+++ b/tex/context/base/anch-bar.mkiv
@@ -58,7 +58,9 @@
\installcommandhandler \??sidebar {sidebar} \??sidebar
\newcount\c_anch_sidebars_n
-\newdimen\c_anch_sidebars_distance
+\newcount\c_anch_sidebars_current % local
+\newdimen\d_anch_sidebars_distance
+\newcount\c_anch_sidebars_level
% \setupMPvariables
% [mpos:sidebar]
@@ -67,24 +69,60 @@
% distance=5pt]
\setupsidebar
- [\c!rulethickness=2pt,
+ [\c!rulethickness=\dimexpr\bodyfontsize/6\relax, % 2pt default
\c!rulecolor=\s!black,
\c!alternative=0,
- \c!topoffset=0pt,
- \c!bottomoffset=0pt,
- \c!distance=.5\bodyfontsize]
+ \c!topoffset=\zeropoint,
+ \c!bottomoffset=\zeropoint,
+ \c!distance=.5\bodyfontsize,
+ \c!level=,
+ \c!leftmargindistance=\zeropoint]
\let\setupsidebars\setupsidebar
\unexpanded\def\startsidebar
{\dosingleempty\anch_sidebars_start}
-\def\anch_sidebars_start[#1]%
+\unexpanded\def\startsidebar
+ {\dodoubleempty\anch_sidebars_start}
+
+\def\anch_sidebars_start[#1][#2]%
{\bgroup
- \def\currentsidebar{#1}%
\dontleavehmode
+ \advance\c_anch_sidebars_level\plusone
\global\advance\c_anch_sidebars_n\plusone
- \advance\c_anch_sidebars_distance\sidebarparameter\c!distance
+ \c_anch_sidebars_current\c_anch_sidebars_n\relax % relax needed
+ \doifassignmentelse{#1}
+ {\edef\currentsidebar{\the\c_anch_sidebars_level}%
+ \checksidebarparent
+ \setupcurrentsidebar[#1]}
+ {\def\currentsidebar{#1}%
+ \setupcurrentsidebar[#2]}%
+ \scratchdistance\sidebarparameter\c!distance\relax
+ \edef\m_level{\sidebarparameter\c!level}%
+ \ifx\m_level\empty
+ \ifnum\c_anch_sidebars_level=\plusone
+ \scratchdimen\sidebarparameter\c!leftmargindistance\relax
+ \ifdim\scratchdimen=\zeropoint
+ \advance\d_anch_sidebars_distance\scratchdistance\relax
+ \else
+ \d_anch_sidebars_distance\scratchdimen
+ \fi
+ \else
+ \advance\d_anch_sidebars_distance\scratchdistance\relax
+ \fi
+ \else
+ \ifnum\m_level=\plusone
+ \scratchdimen\sidebarparameter\c!leftmargindistance\relax
+ \ifdim\scratchdimen=\zeropoint
+ \advance\d_anch_sidebars_distance\scratchdistance\relax
+ \else
+ \d_anch_sidebars_distance\scratchdimen
+ \fi
+ \else
+ \d_anch_sidebars_distance\dimexpr\scratchdimen+\numexpr\m_level-\plusone\relax\dimexpr\scratchdistance\relax\relax
+ \fi
+ \fi
\startpositionoverlay{text-1}%
\normalexpanded{\setMPpositiongraphicrange % maybe expand in definition
{b:sidebar:\the\c_anch_sidebars_n}%
@@ -94,15 +132,17 @@
linewidth=\sidebarparameter\c!rulethickness,
linecolor=\sidebarparameter\c!rulecolor,
alternative=\sidebarparameter\c!alternative,
- topoffset=\sidebarparameter\c!topoffset,
- bottomoffset=\sidebarparameter\c!bottomoffset,
- distance=\the\c_anch_sidebars_distance}}%
+ topoffset=\the\dimexpr\sidebarparameter\c!topoffset,
+ bottomoffset=\the\dimexpr\sidebarparameter\c!bottomoffset,
+ distance=\the\d_anch_sidebars_distance}%
+ }%
\stoppositionoverlay
- \bpos{sidebar:\the\c_anch_sidebars_n}}
+ \bpos{sidebar:\the\c_anch_sidebars_current}%
+ \ignorespaces}
\unexpanded\def\stopsidebar
{\removelastspace
- \epos{sidebar:\the\c_anch_sidebars_n}
+ \epos{sidebar:\the\c_anch_sidebars_current}
\carryoverpar\egroup}
%D Let's keep this nice and simple (okay, we could pass the 6 variables in
@@ -121,49 +161,73 @@
\MPpositiongraphic{mpos:sidebar}{}%
\stopMPpositionmethod
-%D We now reimplement the margin rules handler defined in
-%D \type {core-rul}:
+%D We now reimplement the \MKII\ margin rules handler in a more
+%D modern way.
%D
-%D \setupmarginrules[level=5]
+%D \setupmarginrules
+%D [rulecolor=darkred,
+%D rulethickness=2pt]
%D
-%D \startmarginrule[1]
-%D First we set the level at~5. Next we typeset this first
-%D paragraph as a level~1 one. As expected no rule show up.
-%D \stopmarginrule
+%D \setupmarginrules % sidebar
+%D [2]
+%D [rulecolor=darkblue]
%D
-%D \startmarginrule[5]
-%D The second paragraph is a level~5 one. As we can see here,
-%D the marginal rule gets a width according to its level.
+%D \startmarginrule[1]
+%D \input ward
+%D \startmarginrule[2]
+%D \input ward
+%D \startmarginrule[3]
+%D \input ward
+%D \startmarginrule[level=6,rulecolor=darkgreen]
+%D \input ward
+%D \stopmarginrule
+%D \input ward
+%D \stopmarginrule
+%D \input ward
+%D \stopmarginrule
+%D \input ward
%D \stopmarginrule
%D
-%D \startmarginrule[8]
-%D It will of course be no surprise that this third paragraph
-%D has a even thicker margin rule. This behavior can be
-%D overruled by specifying the width explictly.
-%D \stopmarginrule
+%D Compared to the old mechanism we now can pass settings too.
\definesidebar
[\v!margin]
- [\c!rulethickness=\@@karulethickness,
- \c!distance=\dimexpr\leftmargindistance-\@@karulethickness/2\relax]
+ [\c!leftmargindistance=\dimexpr\leftmargindistance+\sidebarparameter\c!rulethickness/2\relax]
-\definecomplexorsimple\startmarginrule
+\dorecurse{5}{\definesidebar[\v!margin:#1][\v!margin]} % let's be nice and predefine 5 levels
-\def\simplestartmarginrule
- {\complexstartmarginrule[1]}
+\unexpanded\def\setupmarginrule
+ {\dodoubleargument\anch_marginrules_setup}
-\def\complexstartmarginrule[#1]%
- {\bgroup
- \ifnum#1<\@@kalevel\relax
- \let\stopmarginrule\egroup
+\def\anch_marginrules_setup[#1][#2]%
+ {\ifsecondargument
+ \setupsidebar[\v!margin:#1][#2]%
\else
- \def\@@kadefaultwidth{#1}%
- \let\stopmarginrule\dostopmarginrule
- \normalexpanded{\startsidebar[\v!margin]}% why expanded
+ \setupsidebar[\v!margin][#1]%
\fi}
-\def\dostopmarginrule
- {\stopsidebar
- \egroup}
+\let\setupmarginrules\setupmarginrule
+
+\unexpanded\def\startmarginrule
+ {\dosingleempty\anch_marginrules_start}
+
+\def\anch_marginrules_start[#1]% pretty inefficient checking
+ {\edef\m_anch_marginrules_kind{#1}%
+ \ifx\m_anch_marginrules_kind\empty
+ \anch_sidebars_start[\v!margin][]%
+ \else
+ \doifassignmentelse\m_anch_marginrules_kind
+ {\anch_sidebars_start[\v!margin][#1]}%
+ {\anch_marginrules_check{#1}%
+ \anch_sidebars_start[\v!margin:#1][\c!level=#1]}%
+ \fi}
+
+\def\anch_marginrules_check#1%
+ {\doifnotcommandhandler\??sidebar{\v!margin:#1}{\definesidebar[\v!margin:#1][\v!margin]}}
+
+\let\stopmarginrule\stopsidebar
\protect \endinput
diff --git a/tex/context/base/anch-bck.mkvi b/tex/context/base/anch-bck.mkvi
index 8ec056468..79e42dc0a 100644
--- a/tex/context/base/anch-bck.mkvi
+++ b/tex/context/base/anch-bck.mkvi
@@ -276,7 +276,7 @@
\kern\textbackgroundskip\nobreak
\fi \fi
\nobreak
- \vskip-\dimexpr\lineheight+\parskip\relax
+ \vskip-\dimexpr\lineheight+\parskip\relax % problem: we lose the hangindent
\nobreak
\endgroup
\begingroup
@@ -298,7 +298,7 @@
\endgraf % new
\textbackgroundparameter\c!after}
-\unexpanded\def\checkpositionoverlays
+\unexpanded\def\checkpositionoverlays % overloads \relax in anch-pgr
{\ifproductionrun
\enabletextarearegistration
\enablehiddenbackground
@@ -307,7 +307,7 @@
\setuptextbackground
[\c!mp=mpos:region:draw,
- \c!method=mpos:region,
+ \c!method=mpos:region, % mpos:regionshape
\c!state=\v!start,
\c!location=\v!text,
\c!leftoffset=\!!zeropoint, % 1em,
@@ -397,6 +397,14 @@
\includeMPgraphic{mpos:region:anchor} ;
\stopMPpositiongraphic
+\startMPpositiongraphic{mpos:regionshape}{fillcolor,filloffset,linecolor,gridcolor,linewidth,gridwidth,gridshift,lineradius,lineoffset}
+ \includeMPgraphic{mpos:region:setup} ;
+ \includeMPgraphic{mpos:region:extra} ;
+ \MPgetmultishapes{\MPvar{self}}{\MPanchorid} ;
+ \includeMPgraphic{\MPvar{mp}} ;
+ \includeMPgraphic{mpos:region:anchor} ;
+\stopMPpositiongraphic
+
\startMPpositionmethod{mpos:region}
\MPpositiongraphic{mpos:region}{}%
\stopMPpositionmethod
diff --git a/tex/context/base/anch-pgr.lua b/tex/context/base/anch-pgr.lua
index bf4dcbe02..aba61794b 100644
--- a/tex/context/base/anch-pgr.lua
+++ b/tex/context/base/anch-pgr.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['anch-pgr'] = {
-- todo: we need to clean up lists (of previous pages)
+local commands, context = commands, context
+
local format = string.format
local abs = math.abs
local concat, sort = table.concat, table.sort
@@ -47,34 +49,48 @@ end
local eps = 2
-local function add(t,x,y,last)
+local function add(t,x,y,last,direction)
local n = #t
if n == 0 then
t[n+1] = { x, y }
- elseif n == 1 then
- local tn = t[1]
- if abs(tn[1]-x) <= eps or abs(tn[2]-y) <= eps then
- t[n+1] = { x, y }
- end
else
- local tm = t[n-1]
local tn = t[n]
local lx = tn[1]
local ly = tn[2]
- if abs(lx-tm[1]) <= eps and abs(lx-x) <= eps then
- if abs(ly-y) > eps then
- tn[2] = y
+ if x == lx and y == ly then
+ -- quick skip
+ elseif n == 1 then
+-- if abs(lx-x) <= eps or abs(ly-y) <= eps then
+ if abs(lx-x) > eps or abs(ly-y) > eps then
+ t[n+1] = { x, y }
end
- elseif abs(ly-tm[2]) <= eps and abs(ly-y) <= eps then
- if abs(lx-x) > eps then
- tn[1] = x
+ else
+ local tm = t[n-1]
+ local px = tm[1]
+ local py = tm[2]
+if (direction == "down" and y > ly) or (direction == "up" and y < ly) then
+ -- move back from too much hang
+else
+ if abs(lx-px) <= eps and abs(lx-x) <= eps then
+ if abs(ly-y) > eps then
+ tn[2] = y
+ end
+ elseif abs(ly-py) <= eps and abs(ly-y) <= eps then
+ if abs(lx-x) > eps then
+ tn[1] = x
+ end
+ elseif not last then
+ t[n+1] = { x, y }
end
- elseif not last then
- t[n+1] = { x, y }
+end
end
end
end
+-- local function add(t,x,y,last)
+-- t[#t+1] = { x, y }
+-- end
+
local function finish(t)
local n = #t
if n > 1 then
@@ -109,105 +125,103 @@ end
-- todo: mark regions and free paragraphs in collected
-local function shapes(r,rx,ry,rw,rh,rd,lytop,lybot,rytop,rybot)
+local function shapes(r,rx,ry,rw,rh,rd,lytop,lybot,rytop,rybot,obeyhang)
-- we assume that we only hang per page and not cross pages
-- which makes sense as hanging is only used in special cases
--
-- we can remove data as soon as a page is done so we could
-- remember per page and discard areas after each shipout
local leftshape, rightshape
--- leftshape = r.leftshape
--- rightshape = r.rightshape
--- if not leftshape then
- leftshape = { { rx, rh } }
- rightshape = { { rw, rh } }
- local paragraphs = r.paragraphs
- local extending = false
- if paragraphs then
- for i=1,#paragraphs do
- local p = paragraphs[i]
- local ha = p.ha
- if ha and ha ~= 0 then
+ leftshape = { { rx, rh } } -- spikes get removed so we can start at the edge
+ rightshape = { { rw, rh } } -- even if we hang next
+ local paragraphs = r.paragraphs
+ local extending = false
+ if paragraphs then
+ for i=1,#paragraphs do
+ local p = paragraphs[i]
+ local ha = p.ha
+ if obeyhang and ha and ha ~= 0 then
+ local py = p.y
+ local ph = p.h
+ local pd = p.d
+ local hi = p.hi
+ local hang = ha * (ph + pd)
+ local py_ph = py + ph
+ -- ha < 0 hi < 0 : right top
+ -- ha < 0 hi > 0 : left top
+ if ha < 0 then
+ if hi < 0 then -- right
+ add(rightshape,rw, py_ph,"up")
+ add(rightshape,rw + hi,py_ph,"up")
+ add(rightshape,rw + hi,py_ph + hang,"up")
+ add(rightshape,rw, py_ph + hang,"up")
+ else
+ -- left
+ add(leftshape,rx,py_ph,"down")
+ add(leftshape,rx + hi,py_ph,"down")
+ add(leftshape,rx + hi,py_ph + hang,"down")
+ add(leftshape,rx,py_ph + hang,"down")
+ end
+ else
+ -- maybe some day
+ end
+ extending = true -- false
+ else -- we need to clip to the next par
+ local ps = p.ps
+ if ps then
local py = p.y
local ph = p.h
local pd = p.d
- local hi = p.hi
- local hang = ha * (ph + pd)
+ local step = ph + pd
+ local size = #ps * step
local py_ph = py + ph
- -- ha < 0 hi < 0 : right top
- -- ha < 0 hi > 0 : left top
- if ha < 0 then
- if hi < 0 then -- right
- add(rightshape,rw , py_ph)
- add(rightshape,rw + hi, py_ph)
- add(rightshape,rw + hi, py_ph + hang)
- add(rightshape,rw , py_ph + hang)
- else
- -- left
- add(leftshape,rx, py_ph)
- add(leftshape,rx + hi, py_ph)
- add(leftshape,rx + hi, py_ph + hang)
- add(leftshape,rx, py_ph + hang)
- end
- end
-extending = false
- else -- we need to clip to the next par
- local ps = p.ps
- if ps then
- local py = p.y
- local ph = p.h
- local pd = p.d
- local step = ph + pd
- local size = #ps * step
- local py_ph = py + ph
- add(leftshape,rx,py_ph)
- add(rightshape,rw,py_ph)
- for i=1,#ps do
- local p = ps[i]
- local l = p[1]
- local w = p[2]
- add(leftshape,rx + l, py_ph)
- add(rightshape,rx + l + w, py_ph)
- py_ph = py_ph - step
- add(leftshape,rx + l, py_ph)
- add(rightshape,rx + l + w, py_ph)
- end
- extending = true
--- add(left,rx,py_ph)
--- add(right,rw,py_ph)
- else
- if extending then
- local py = p.y
- local ph = p.h
- local pd = p.d
- local py_ph = py + ph
- local py_pd = py - pd
- add(leftshape,leftshape[#leftshape][1],py_ph)
- add(rightshape,rightshape[#rightshape][1],py_ph)
- add(leftshape,rx,py_ph)
- add(rightshape,rw,py_ph)
-extending = false
- end
+ add(leftshape,rx,py_ph,"up")
+ add(rightshape,rw,py_ph,"down")
+ for i=1,#ps do
+ local p = ps[i]
+ local l = p[1]
+ local w = p[2]
+ add(leftshape,rx + l, py_ph,"up")
+ add(rightshape,rx + l + w, py_ph,"down")
+ py_ph = py_ph - step
+ add(leftshape,rx + l, py_ph,"up")
+ add(rightshape,rx + l + w, py_ph,"down")
end
+ extending = true
+ elseif extending then
+ local py = p.y
+ local ph = p.h
+ local pd = p.d
+ local py_ph = py + ph
+ local py_pd = py - pd
+ add(leftshape,leftshape[#leftshape][1],py_ph,"up")
+ add(rightshape,rightshape[#rightshape][1],py_ph,"down")
+ add(leftshape,rx,py_ph,"up") -- shouldn't this be py_pd
+ add(rightshape,rw,py_ph,"down") -- shouldn't this be py_pd
+ extending = false
end
end
end
- -- we can have a simple variant when no paragraphs
- if extending then
- -- not ok
- leftshape[#leftshape][2] = rd
- rightshape[#rightshape][2] = rw
- else
- add(leftshape,rx,rd)
- add(rightshape,rw,rd)
- end
--- r.leftshape = leftshape
--- r.rightshape = rightshape
--- end
+ end
+ -- we can have a simple variant when no paragraphs
+ if extending then
+ -- not ok
+ leftshape[#leftshape][2] = rd
+ rightshape[#rightshape][2] = rw
+ else
+ add(leftshape,rx,rd,"up")
+ add(rightshape,rw,rd,"down")
+ end
return clip(leftshape,lytop,lybot), clip(rightshape,rytop,rybot)
end
-local function singlepart(b,e,r,left,right)
+-- local function shapes(r,rx,ry,rw,rh,rd,lytop,lybot,rytop,rybot,obeyhang)
+-- local leftshape = { { rx, rh }, { rx, rd } }
+-- local rightshape = { { rw, rh }, { rw, rd } }
+-- return clip(leftshape,lytop,lybot), clip(rightshape,rytop,rybot)
+-- end
+
+local function singlepart(b,e,r,left,right,obeyhang)
local bx, by = b.x, b.y
local ex, ey = e.x, e.y
local rx, ry = r.x, r.y
@@ -238,7 +252,7 @@ local function singlepart(b,e,r,left,right)
}
else
area = { }
- local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,ed,bh,eh)
+ local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,ed,bh,eh,obeyhang)
add(area,bx,bh-ry)
for i=1,#rightshapes do
local ri = rightshapes[i]
@@ -265,7 +279,7 @@ local function singlepart(b,e,r,left,right)
}
end
-local function firstpart(b,r,left,right)
+local function firstpart(b,r,left,right,obeyhang)
local bx, by = b.x, b.y
local rx, ry = r.x, r.y
local rw = rx + r.w
@@ -278,7 +292,7 @@ local function firstpart(b,r,left,right)
local bh = by + b.h
local bd = by - b.d
local area = { }
- local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,rd,bh,rd)
+ local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,bd,rd,bh,rd,obeyhang)
add(area,bx,bh-ry)
for i=1,#rightshapes do
local ri = rightshapes[i]
@@ -302,7 +316,7 @@ local function firstpart(b,r,left,right)
}
end
-local function middlepart(r,left,right)
+local function middlepart(r,left,right,obeyhang)
local rx, ry = r.x, r.y
local rw = rx + r.w
local rh = ry + r.h
@@ -312,7 +326,7 @@ local function middlepart(r,left,right)
rw = rw - right
end
local area = { }
- local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,rd,rh,rd)
+ local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,rd,rh,rd,obeyhang)
for i=#leftshapes,1,-1 do
local li = leftshapes[i]
add(area,li[1],li[2]-ry)
@@ -333,7 +347,7 @@ local function middlepart(r,left,right)
}
end
-local function lastpart(e,r,left,right)
+local function lastpart(e,r,left,right,obeyhang)
local ex, ey = e.x, e.y
local rx, ry = r.x, r.y
local rw = rx + r.w
@@ -347,7 +361,7 @@ local function lastpart(e,r,left,right)
local ed = ey - e.d
local area = { }
-- two cases: till end and halfway e line
- local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,ed,rh,eh)
+ local leftshapes, rightshapes = shapes(r,rx,ry,rw,rh,rd,rh,ed,rh,eh,obeyhang)
for i=1,#rightshapes do
local ri = rightshapes[i]
add(area,ri[1],ri[2]-ry)
@@ -375,7 +389,7 @@ local backgrounds = { }
graphics.backgrounds = backgrounds
-local function calculatemultipar(tag)
+local function calculatemultipar(tag,obeyhang)
local collected = jobpositions.collected
local b = collected[format("b:%s",tag)]
local e = collected[format("e:%s",tag)]
@@ -429,13 +443,13 @@ local function calculatemultipar(tag)
--
if bindex == eindex then
return {
- list = { [b.p] = { singlepart(b,e,collected[br],left,right) } },
+ list = { [b.p] = { singlepart(b,e,collected[br],left,right,obeyhang) } },
bpos = b,
epos = e,
}
else
local list = {
- [b.p] = { firstpart(b,collected[br],left,right) },
+ [b.p] = { firstpart(b,collected[br],left,right,obeyhang) },
}
for i=bindex+1,eindex-1 do
br = format("%s:%s",btag,i)
@@ -446,18 +460,18 @@ local function calculatemultipar(tag)
local p = r.p
local pp = list[p]
if pp then
- pp[#pp+1] = middlepart(r,left,right)
+ pp[#pp+1] = middlepart(r,left,right,obeyhang)
else
- list[p] = { middlepart(r,left,right) }
+ list[p] = { middlepart(r,left,right,obeyhang) }
end
end
end
local p = e.p
local pp = list[p]
if pp then
- pp[#pp+1] = lastpart(e,collected[er],left,right)
+ pp[#pp+1] = lastpart(e,collected[er],left,right,obeyhang)
else
- list[p] = { lastpart(e,collected[er],left,right) }
+ list[p] = { lastpart(e,collected[er],left,right,obeyhang) }
end
return {
list = list,
@@ -537,10 +551,10 @@ local template_d = [[
setbounds currentpicture to multibox ;
]]
-function backgrounds.fetchmultipar(n,anchor,page)
+function backgrounds.fetchmultipar(n,anchor,page,obeyhang)
local data = pbg[n]
if not data then
- data = calculatemultipar(n)
+ data = calculatemultipar(n,obeyhang)
pbg[n] = data -- can be replaced by register
-- register(data.list,n,anchor)
end
@@ -590,6 +604,10 @@ function commands.fetchmultipar(n,anchor,page)
context(backgrounds.fetchmultipar(n,anchor,page))
end
+function commands.fetchmultishape(n,anchor,page)
+ context(backgrounds.fetchmultipar(n,anchor,page,true))
+end
+
local template_a = [[
path posboxes[], posregions[] ;
numeric pospages[] ;
@@ -642,10 +660,10 @@ end
local doifelse = commands.doifelse
-function commands.doifelsemultipar(n,page)
+function commands.doifelsemultipar(n,page,obeyhang)
local data = pbg[n]
if not data then
- data = calculatemultipar(n)
+ data = calculatemultipar(n,obeyhang)
pbg[n] = data
end
if page then
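
The hunks above thread a new obeyhang flag from commands.fetchmultishape down through calculatemultipar into the shape helpers, reusing the same per-tag cache (pbg). A minimal, standalone Lua sketch of that flag-plus-cache pattern follows; the names here are illustrative stand-ins, not the actual ConTeXt functions.

-- illustrative sketch, not the patched code itself
local cache = { }

local function calculate(tag,obeyhang)
    -- stand-in for calculatemultipar: the flag selects the variant
    return { tag = tag, shape = obeyhang and "hang-aware" or "plain" }
end

local function fetch(tag,obeyhang)
    local data = cache[tag]
    if not data then
        data = calculate(tag,obeyhang)
        cache[tag] = data             -- cached per tag, like pbg[n]
    end
    return data
end

print(fetch("A").shape)        -- plain
print(fetch("B",true).shape)   -- hang-aware (what fetchmultishape asks for)
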
diff --git a/tex/context/base/anch-pgr.mkiv b/tex/context/base/anch-pgr.mkiv
index a417d26e3..01ef25dc4 100644
--- a/tex/context/base/anch-pgr.mkiv
+++ b/tex/context/base/anch-pgr.mkiv
@@ -33,7 +33,7 @@
\installcorenamespace{positionaction}
\installcorenamespace{positioncleanup}
-\unexpanded\def\dosetpositionaction#1%
+\unexpanded\def\anch_positions_set_action#1%
{\expandafter\gdef\csname\??positionaction#1\endcsname} % nicely gobbles spaces
\unexpanded\def\doifpositionaction#1%
@@ -111,7 +111,7 @@
{\begingroup
\edef\currentpositionanchor
{\ifx\currentpositionoverlay\empty#3\else\currentpositionoverlay::\MPanchoridentifier\fi}%
- \normalexpanded{\dosetpositionaction{\currentpositionanchor}{\noexpand\getvalue{\??positioncleanup\currentpositionanchor}}}%
+ \normalexpanded{\anch_positions_set_action{\currentpositionanchor}{\noexpand\getvalue{\??positioncleanup\currentpositionanchor}}}%
\let#1\relax
\ifcsname\??positioncleanup\currentpositionanchor\endcsname
\setxvalue{\??positioncleanup\currentpositionanchor}%
@@ -140,9 +140,6 @@
\fi
\fi\fi}
-% \def\anch_positions_register_page_indeed#1%
-% {\setbox#1\hbox{\hpos\pageanchor{\box#1}}}
-
\def\anch_positions_register_page_indeed#1% maybe like text
{\ifvbox#1\setbox#1\hbox{\box#1}\fi
\anch_make_page_box{#1}}
@@ -218,7 +215,6 @@
%D \typebuffer[graphic]
\def\MPanchoridentifier{mpa} % {mp-anchor}
-%def\MPoverlayposprefix{MO::} % not used
%D The rest of the definitions concerning such overlays may
%D look complicated,
@@ -237,8 +233,8 @@
\def\textbackgroundoverlay#1{\v!text#1}
\def\MPanchornumber {\the\realpageno}
-\def\positionoverlay % the test prevents too many redundant positions
- {\ifpositioning % in (not used) text* position layers
+\unexpanded\def\positionoverlay % the test prevents too many redundant positions
+ {\ifpositioning % in (not used) text* position layers
\expandafter\anch_positions_overlay_indeed
\else % also \iftrialtypesetting test here?
\expandafter\gobbleoneargument
@@ -269,7 +265,7 @@
\box\scratchbox
\vfill}}
-\def\positionregionoverlay % shares regions
+\unexpanded\def\positionregionoverlay % shares regions
{\ifpositioning
\expandafter\anch_positions_region_overlay_indeed
\else % also \iftrialtypesetting test here?
@@ -308,9 +304,15 @@
\expandafter\anch_positions_overlay_start_yes
\fi}
+\let\stoppositionoverlay\relax
+
\def\anch_positions_overlay_start_nop#1\stoppositionoverlay
{}
+\ifdefined\checkpositionoverlays \else \let\checkpositionoverlays\relax \fi
+
+\let\currentpositionoverlay\empty
+
\def\anch_positions_overlay_start_yes#1%
{\checkpositionoverlays
\edef\currentpositionoverlay{#1}}
@@ -318,9 +320,11 @@
\unexpanded\def\stoppositionoverlay
{\let\currentpositionoverlay\empty}
+% needs checking if still needed
+%
% \def\resetpositionoverlay#1%
-% {\dosetpositionaction{#1::\MPanchoridentifier::}{}}
-
+% {\anch_positions_set_action{#1::\MPanchoridentifier::}{}}
+%
% \def\handlepositionboxes#1#2#3%
% {\handlepositionaction\dohandlepositionboxes\with{#1}{#2}{#3}\on{#2}}
%
@@ -332,15 +336,15 @@
% \appendtoks
% \let\dohandlepositionboxes\doinsertpositionboxes % was handle ?
% \to \everyinsertpositionaction
-
-\def\docleanpositionboxes#1#2#3% pos tag setups
- {\ifnum\MPp{#1}<\realpageno \else
- \noexpand \dohandlepositionboxes{#1}{#2}{#3}% reinsert
- \fi}
-
-\appendtoks
- \let\dohandlepositionboxes\docleanpositionboxes
-\to \everycleanpositionaction
+%
+% \def\docleanpositionboxes#1#2#3% pos tag setups
+% {\ifnum\MPp{#1}<\realpageno \else
+% \noexpand \dohandlepositionboxes{#1}{#2}{#3}% reinsert
+% \fi}
+%
+% \appendtoks
+% \let\dohandlepositionboxes\docleanpositionboxes
+% \to \everycleanpositionaction
%D A position graphic is a normal (non||reused) \METAPOST\
%D graphic, used immediately, with zero dimensions, so that a
@@ -350,7 +354,7 @@
\installcorenamespace{positionmethod}
%installcorenamespace{graphicvariable}
-\newbox\positiongraphicbox
+\newbox\b_anch_positions_graphic
\def\startMPpositiongraphic % id setups
{\dodoublegroupempty\anch_positions_meta_graphic_start}
@@ -361,11 +365,11 @@
\let\stopMPpositiongraphic\relax
\def\anch_positions_meta_graphic_prepare
- {\ifcsname\??gv\currentmpvariableclass:self\endcsname \else
- \letvalue{\??gv\currentmpvariableclass:self}\currentposition
+ {\ifcsname\??graphicvariable\currentmpvariableclass:self\endcsname \else
+ \letvalue{\??graphicvariable\currentmpvariableclass:self}\currentposition
\fi
- \ifcsname\??gv\currentmpvariableclass:from\endcsname \else
- \letvalue{\??gv\currentmpvariableclass:from}\currentposition
+ \ifcsname\??graphicvariable\currentmpvariableclass:from\endcsname \else
+ \letvalue{\??graphicvariable\currentmpvariableclass:from}\currentposition
\fi}
\def\anch_positions_meta_graphic_use#1#2#3%
@@ -375,7 +379,7 @@
\startMPcode#3\stopMPcode
\endgroup}
-\def\MPpositiongraphic
+\unexpanded\def\MPpositiongraphic
{\dodoublegroupempty\anch_positions_meta_graphic_direct}
\def\anch_positions_meta_graphic_direct#1% tag setups
@@ -402,9 +406,9 @@
\anch_positions_meta_graphic_prepare
\obeyMPboxorigin % do we also set the size ? when needed this must be done in mp ... might change
\def\anch_positions_meta_graphic_direct{\anch_positions_meta_graphic_nested{#3}}% takes two extra arguments
- \setbox\positiongraphicbox\hbox{\ignorespaces\csname#1#2\endcsname\removelastspace}%
- \smashbox\positiongraphicbox
- \box\positiongraphicbox
+ \setbox\b_anch_positions_graphic\hbox{\ignorespaces\csname#1#2\endcsname\removelastspace}%
+ \smashbox\b_anch_positions_graphic
+ \box\b_anch_positions_graphic
\endgroup}
\def\anch_positions_meta_graphic_nested#1#2#3% nesting used in prikkels / pascal (might go away)
@@ -422,77 +426,77 @@
%D Simple one position graphics.
-\def\setMPpositiongraphic
- {\dotriplegroupempty\dosetMPpositiongraphic}
+\unexpanded\def\setMPpositiongraphic
+ {\dotriplegroupempty\anch_positions_meta_graphic_set}
-\def\dosetMPpositiongraphic#1#2#3% pos tag vars
+\def\anch_positions_meta_graphic_set#1#2#3% pos tag vars
{\ifx\currentpositionoverlay\empty
- \dosetpositionaction{#1}{\MPpositiongraphic{#2}{#3}}%
+ \anch_positions_set_action{#1}{\MPpositiongraphic{#2}{#3}}%
\else % silly can be one
- \handlepositiongraphics{#1}{#2}{#3}%
+ \anch_positions_meta_graphic_handle{#1}{#2}{#3}%
\fi}
-\def\handlepositiongraphics#1#2#3% combine with boxes
- {\handlepositionaction\dohandleMPpositiongraphic\with{#1}{#2}{#3}\on{#2}}
+\def\anch_positions_meta_graphic_handle#1#2#3% combine with boxes
+ {\handlepositionaction\anch_positions_meta_graphic_handle_indeed\with{#1}{#2}{#3}\on{#2}}
-\def\doinsertMPpositiongraphic#1#2#3% pos tag setups
+\def\anch_positions_meta_graphic_insert#1#2#3% pos tag setups
 {\ifnum\MPp{#1}=\realpageno\relax % extra safeguard
\def\currentposition{#1}\MPpositiongraphic{#2}{#3}%
\fi}
\appendtoks
- \let\dohandleMPpositiongraphic\doinsertMPpositiongraphic
+ \let\anch_positions_meta_graphic_handle_indeed\anch_positions_meta_graphic_insert
\to \everyinsertpositionaction
-\def\docleanMPpositiongraphic#1#2#3% pos tag setups
+\def\anch_positions_meta_graphic_cleanup#1#2#3% pos tag setups
{\ifnum\MPp{#1}<\realpageno \else
- \noexpand\dohandleMPpositiongraphic{#1}{#2}{#3}%
+ \noexpand\anch_positions_meta_graphic_handle_indeed{#1}{#2}{#3}%
\fi}
\appendtoks
- \let\dohandleMPpositiongraphic\docleanMPpositiongraphic
+ \let\anch_positions_meta_graphic_handle_indeed\anch_positions_meta_graphic_cleanup
\to \everycleanpositionaction
%D Graphics that span two positions (beware, does not cross pages).
\unexpanded\def\setMPpositiongraphicrange
- {\doquadruplegroupempty\dosetMPpositiongraphicrange}
+ {\doquadruplegroupempty\anch_positions_meta_graphic_set_range}
-\def\dosetMPpositiongraphicrange#1#2#3#4% bpos epos tag vars
+\def\anch_positions_meta_graphic_set_range#1#2#3#4% bpos epos tag vars
{\ifx\currentpositionoverlay\empty
- \dosetpositionaction{#1}{\MPpositiongraphic{#3}{#4}}%
+ \anch_positions_set_action{#1}{\MPpositiongraphic{#3}{#4}}%
\else
- \handlepositiongraphicsrange{#1}{#2}{#3}{#4}%
+ \anch_positions_meta_graphic_handle_range{#1}{#2}{#3}{#4}%
\fi}
-\def\handlepositiongraphicsrange#1#2#3#4%
- {\handlepositionaction\dohandleMPpositiongraphicrange\with{#1}{#2}{#3}{#4}\on{#2}}
+\def\anch_positions_meta_graphic_handle_range#1#2#3#4%
+ {\handlepositionaction\anch_positions_meta_graphic_handle_range_indeed\with{#1}{#2}{#3}{#4}\on{#2}}
-\def\doinsertMPpositiongraphicrange#1#2#3#4% pos pos tag setups
+\def\anch_positions_meta_graphic_insert_range#1#2#3#4% pos pos tag setups
{\ctxcommand{doifelserangeonpage("#1","#2",\number\realpageno)}%
- {%\writestatus{YES}{#1/#2 => #3}%
- \def\currentposition{#1}%
+ {\def\currentposition{#1}%
\MPpositiongraphic{#3}{#4}}%
- {}}%\writestatus{NOP}{#1/#2 = #3}}}
+ {}}
\appendtoks
- \let\dohandleMPpositiongraphicrange\doinsertMPpositiongraphicrange
+ \let\anch_positions_meta_graphic_handle_range_indeed\anch_positions_meta_graphic_insert_range
\to \everyinsertpositionaction
-\def\docleanMPpositiongraphicrange#1#2#3#4% pos tag setups
+\def\anch_positions_meta_graphic_cleanup_range#1#2#3#4% pos tag setups
{\ifnum\MPp{#2}<\realpageno \else
- \noexpand \dohandleMPpositiongraphicrange{#1}{#2}{#3}{#4}%
+ \noexpand \anch_positions_meta_graphic_handle_range_indeed{#1}{#2}{#3}{#4}%
\fi}
\appendtoks
- \let\dohandleMPpositiongraphicrange\docleanMPpositiongraphicrange
+ \let\anch_positions_meta_graphic_handle_range_indeed\anch_positions_meta_graphic_cleanup_range
\to \everycleanpositionaction
-\let\dohandleMPpositiongraphicrange\gobblefourarguments
+\let\anch_positions_meta_graphic_handle_range_indeed\gobblefourarguments
% Helpers:
-\def\MPgetposboxes #1#2{\ctxcommand{fetchposboxes("#1","#2",\the\realpageno)}}
-\def\MPgetmultipars#1#2{\ctxcommand{fetchmultipar("#1","#2",\the\realpageno)}}
+\def\MPgetposboxes #1#2{\ctxcommand{fetchposboxes("#1","#2",\the\realpageno)}}
+\def\MPgetmultipars #1#2{\ctxcommand{fetchmultipar("#1","#2",\the\realpageno)}}
+\def\MPgetmultishapes#1#2{\ctxcommand{fetchmultishape("#1","#2",\the\realpageno)}}
\protect \endinput
diff --git a/tex/context/base/anch-pos.lua b/tex/context/base/anch-pos.lua
index 5d01a6e46..6fa916291 100644
--- a/tex/context/base/anch-pos.lua
+++ b/tex/context/base/anch-pos.lua
@@ -17,6 +17,8 @@ more efficient.
-- maybe replace texsp by our own converter (stay at the lua end)
-- eventually mp will have large numbers so we can use sp there too
+local commands, context = commands, context
+
local tostring, next, rawget, setmetatable = tostring, next, rawget, setmetatable
local concat, sort = table.concat, table.sort
local format, gmatch, match = string.format, string.gmatch, string.match
@@ -969,35 +971,35 @@ function commands.MPxywhd(id)
end
end
--- is testcase already defined? if so, then local
+local doif, doifelse = commands.doif, commands.doifelse
function commands.doifpositionelse(name)
- commands.doifelse(collected[name])
+ doifelse(collected[name])
end
function commands.doifposition(name)
- commands.doif(collected[name])
+ doif(collected[name])
end
function commands.doifpositiononpage(name,page) -- probably always realpageno
local c = collected[name]
- commands.testcase(c and c.p == page)
+ doifelse(c and c.p == page)
end
function commands.doifoverlappingelse(one,two,overlappingmargin)
- commands.testcase(overlapping(one,two,overlappingmargin))
+ doifelse(overlapping(one,two,overlappingmargin))
end
function commands.doifpositionsonsamepageelse(list,page)
- commands.testcase(onsamepage(list))
+ doifelse(onsamepage(list))
end
function commands.doifpositionsonthispageelse(list)
- commands.testcase(onsamepage(list,tostring(tex.count.realpageno)))
+ doifelse(onsamepage(list,tostring(tex.count.realpageno)))
end
function commands.doifelsepositionsused()
- commands.testcase(next(collected))
+ doifelse(next(collected))
end
commands.markcolumnbox = jobpositions.markcolumnbox
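
The anch-pos.lua hunks replace repeated commands.doifelse / commands.testcase lookups with the locals doif and doifelse. A small standalone sketch of that localization idiom, with a stand-in table rather than the real commands namespace:

-- illustrative sketch with stand-ins
local commands = { }

function commands.doif    (b) if b then print("yes") end end
function commands.doifelse(b) print(b and "yes" or "no")  end

local doif, doifelse = commands.doif, commands.doifelse   -- one lookup, reused

local collected = { here = { p = 1 } }

local function doifposition(name)
    doif(collected[name])              -- upvalue call, no table indexing
end

local function doifpositionelse(name)
    doifelse(collected[name])
end

doifposition("here")        -- yes
doifpositionelse("gone")    -- no
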
diff --git a/tex/context/base/anch-pos.mkiv b/tex/context/base/anch-pos.mkiv
index 2e13eeba2..7ecaa296e 100644
--- a/tex/context/base/anch-pos.mkiv
+++ b/tex/context/base/anch-pos.mkiv
@@ -361,9 +361,9 @@
\startcolor[blue]%
\llap{\infofont\number\c_anch_positions_paragraph}%
\vrule
- \!!width 4\onepoint
- \!!height2\onepoint
- \!!depth 2\onepoint
+ \s!width 4\onepoint
+ \s!height2\onepoint
+ \s!depth 2\onepoint
\stopcolor
\hss}}
@@ -377,7 +377,7 @@
{\smashedhbox
{#1{\infofont#2#3}%
\kern-\onepoint
- \vrule\!!width2\onepoint\!!height\halfapoint\!!depth\halfapoint}}
+ \vrule\s!width2\onepoint\s!height\halfapoint\s!depth\halfapoint}}
\unexpanded\def\anch_positions_trace_left_indeed
{\anch_positions_trace\llap\darkmagenta{\currentposition>}}
diff --git a/tex/context/base/anch-tab.mkiv b/tex/context/base/anch-tab.mkiv
index 4bdaa2ef9..2fb6938a2 100644
--- a/tex/context/base/anch-tab.mkiv
+++ b/tex/context/base/anch-tab.mkiv
@@ -13,6 +13,9 @@
\writestatus{loading}{ConTeXt Anchoring Macros / Table Extensions}
+%D This is just a playground and functionality might change or even
+%D disappear in favour of better solutions.
+
\unprotect
\newcount\c_anch_tabs
@@ -57,23 +60,22 @@
\global\advance\c_anch_tabs\plusone
\to \everytabulate
-%D Beware, the following code is somewhat weird and experimental
-%D and might be dropped or become a loadable module.
+%D Beware, the following code is somewhat weird and experimental and might be
+%D dropped or become a loadable module.
%D \macros
%D {GFC, GTC, GSC}
%D
-%D The next macros extend tables and tabulation with
-%D backgrounds and position related features. Areas are
-%D specified with symbolic names, and symbolic references to
-%D the graphics involved. Each table has its own namespace.
+%D The next macros extend tables and tabulation with backgrounds and position
+%D related features. Areas are specified with symbolic names, and symbolic
+%D references to the graphics involved. Each table has its own namespace.
\newconditional\tablehaspositions
-\newcount\noftabpositions
-\newtoks \posXCtoks
+\newcount \noftabpositions
+\newtoks \posXCtoks
-\def\dotablebpos{\bpos}
-\def\dotableepos{\epos}
+\def\anch_tabulate_bpos{\bpos}
+\def\anch_tabulate_epos{\epos}
\installcorenamespace{positiontables}
@@ -85,74 +87,80 @@
\let\tabulatepos\tablepos
-\def\dodododoGSC[#1:#2]%
- {\remappositionframed{#2}{\tbPOSprefix#1}%
- \dotablebpos{\tbPOSprefix#1}%
- \doglobal\appendtoks\@EA\dotableepos\@EA{\tbPOSprefix#1}\to\posXCtoks}
+\unexpanded\def\tbXC {\dosingleempty\anch_tables_XC }
+\unexpanded\def\tbGSC{\dosingleempty\anch_tables_GSC}
+\unexpanded\def\tbGFC{\dosingleempty\anch_tables_GFC}
+\unexpanded\def\tbGTC{\dosingleempty\anch_tables_GTC}
-\def\dododoGSC[#1:#2:#3]%
- {\doglobal\appendtoks\dodododoGSC[#1:#2]\to\posXCtoks\NC}
+\def\anch_table_check_state
+ {\iftrialtypesetting
+ \global\settrue\tablehaspositions
+ \firstargumentfalse
+ \fi}
-\def\dodoGSC[#1]%
- {\def\docommand##1{\dododoGSC[##1:##1]}%
- \processcommalist[#1]\docommand}
+\def\anch_tables_XC [#1]{\anch_table_check_state\iffirstargument\anch_tables_indeed_XC [#1]\else\expandafter\fi\NC}
+\def\anch_tables_GSC[#1]{\anch_table_check_state\iffirstargument\anch_tables_indeed_GSC[#1]\else\expandafter\NC\fi}
+\def\anch_tables_GFC[#1]{\anch_table_check_state\iffirstargument\anch_tables_indeed_GFC[#1]\else\expandafter\NC\fi}
+\def\anch_tables_GTC[#1]{\anch_table_check_state\iffirstargument\anch_tables_indeed_GTC[#1]\else\expandafter\NC\fi}
-\def\dodododoGFC[#1:#2:#3]%
- {\remappositionframed{#2}{\tbPOSprefix#1}%
- \dotablebpos{\tbPOSprefix#1}}
+\def\anch_tables_indeed_XC[#1]%
+ {{\let\NC\relax\processcommalist[#1]\anch_tables_step_XC}}
-\def\dododoGFC[#1]%
- {\def\docommand##1{\dodododoGFC[##1:##1]}%
- \processcommalist[#1]\docommand}
+\def\anch_tables_step_XC#1%
+ {\anch_tables_step_indeed_XC[#1]}
-\def\dodoGFC[#1]%
- {\doglobal\appendtoks\dododoGFC[#1]\to\posXCtoks\NC}
+\def\anch_tables_step_indeed_XC[#1#2]%
+ {\if#1>\anch_tables_indeed_GFC [#2:#2]\else
+ \if#1+\anch_tables_indeed_GFC [#2:#2]\else
+ \if#1<\anch_tables_indeed_GTC [#2:#2]\else
+ \if#1-\anch_tables_indeed_GTC [#2:#2]\else
+ \if#1=\anch_tables_indeed_GSC [#2:#2]\else
+ \anch_tables_indeed_GSC[#1#2:#1#2]\fi\fi\fi\fi\fi}
-\def\dododododoGTC[#1:#2]%
- {\dotableepos{\tbPOSprefix#1}}
+\def\anch_tables_indeed_GSC[#1]%
+ {\processcommalist[#1]\anch_tables_step_GSC}
-\def\dodododoGTC[#1]%
- {\def\docommand##1{\dododododoGTC[##1:##1]}%
- \processcommalist[#1]\docommand}
+\def\anch_tables_step_GSC#1%
+ {\anch_tables_append_GSC[#1:#1]}
-\def\dododoGTC[#1]%
- {\doglobal\appendtoks\dodododoGTC[#1]\to\posXCtoks}
+\def\anch_tables_append_GSC[#1:#2:#3]%
+ {\doglobal\appendtoks\anch_tables_process_GSC[#1:#2]\to\posXCtoks\NC}
-\def\dodoGTC[#1]%
- {\doglobal\appendtoks\dododoGTC[#1]\to\posXCtoks\NC}
+\def\anch_tables_process_GSC[#1:#2]%
+ {\remappositionframed{#2}{\tbPOSprefix#1}%
+ \anch_tabulate_bpos{\tbPOSprefix#1}%
+ \doglobal\appendtoks\@EA\anch_tabulate_epos\@EA{\tbPOSprefix#1}\to\posXCtoks}
-\def\dodododoXC[#1#2]%
- {\if#1>\dodoGFC [#2:#2]\else
- \if#1+\dodoGFC [#2:#2]\else
- \if#1<\dodoGTC [#2:#2]\else
- \if#1-\dodoGTC [#2:#2]\else
- \if#1=\dodoGSC [#2:#2]\else
- \dodoGSC[#1#2:#1#2]\fi\fi\fi\fi\fi}
+\def\anch_tables_indeed_GFC[#1]%
+ {\doglobal\appendtoks\anch_tables_delayed_GFC[#1]\to\posXCtoks\NC}
-\def\dododoXC#1%
- {\dodododoXC[#1]}
+\def\anch_tables_delayed_GFC[#1]%
+ {\processcommalist[#1]\anch_tables_step_GFC}
-\def\dodoXC[#1]%
- {{\let\NC\relax\processcommalist[#1]\dododoXC}}
+\def\anch_tables_step_GFC#1%
+ {\anch_tables_process_GFC[#1:#1]}
-\def\@@checktablepositionstate
- {\iftrialtypesetting
- \global\settrue\tablehaspositions
- \firstargumentfalse
- \fi}
+\def\anch_tables_process_GFC[#1:#2:#3]%
+ {\remappositionframed{#2}{\tbPOSprefix#1}%
+ \anch_tabulate_bpos{\tbPOSprefix#1}}
+
+\def\anch_tables_indeed_GTC[#1]%
+ {\doglobal\appendtoks\anch_tables_delayed_GTC[#1]\to\posXCtoks\NC}
-\def\doGSC[#1]{\@@checktablepositionstate\iffirstargument\dodoGSC[#1]\else\expandafter\NC\fi}
-\def\doGFC[#1]{\@@checktablepositionstate\iffirstargument\dodoGFC[#1]\else\expandafter\NC\fi}
-\def\doGTC[#1]{\@@checktablepositionstate\iffirstargument\dodoGTC[#1]\else\expandafter\NC\fi}
-\def\doXC [#1]{\@@checktablepositionstate\iffirstargument\dodoXC [#1]\else\expandafter\fi\NC}
+\def\anch_tables_delayed_GTC[#1]%
+ {\doglobal\appendtoks\anch_tables_process_GTC[#1]\to\posXCtoks}
-\def\tbGSC{\dosingleempty\doGSC}
-\def\tbGFC{\dosingleempty\doGFC}
-\def\tbGTC{\dosingleempty\doGTC}
-\def\tbXC {\dosingleempty\doXC }
+\def\anch_tables_process_GTC[#1]%
+ {\processcommalist[#1]\anch_tables_step_GTC}
-%D The amount of code to support tables and tabulation is
-%D rather minimalistic.
+\def\anch_tables_step_GTC#1%
+ {\anch_tables_step_indeed_GTC[#1:#1]}
+
+\def\anch_tables_step_indeed_GTC[#1:#2]%
+ {\anch_tabulate_epos{\tbPOSprefix#1}}
+
+%D The amount of code to support tables and tabulation is rather
+%D minimalistic.
\let\tabulatepos\tablepos
@@ -187,31 +195,30 @@
% We need to handle paragraphs as well.
-\let\doflushtabulateepos\relax
+\let\anch_tabulate_flush_epos\relax
-\def\dotabulatebpos
+\def\anch_tabulate_bpos_indeed
{\bpos}
-\def\dotabulateepos#1%
+\def\anch_tabulate_epos_indeed#1%
{\ifvoid\b_tabl_tabulate_current\c_tabl_tabulate_column
\epos{#1}%
- \glet\doflushtabulateepos\relax
+ \glet\anch_tabulate_flush_epos\relax
\else
- \gdef\doflushtabulateepos{\epos{#1}}%
+ \gdef\anch_tabulate_flush_epos{\epos{#1}}%
\fi}
\def\flushtabulatesplitbox
{\box\b_tabl_tabulate
- \iftrialtypesetting\else\ifconditional\tablehaspositions\doflushtabulateepos\fi\fi}
+ \iftrialtypesetting\else\ifconditional\tablehaspositions\anch_tabulate_flush_epos\fi\fi}
\appendtoks
- \let\dotablebpos\dotabulatebpos % ?
- \let\dotableepos\dotabulateepos % ?
- \glet\doflushtabulateepos\relax
+ \let\anch_tabulate_bpos\anch_tabulate_bpos_indeed % ?
+ \let\anch_tabulate_epos\anch_tabulate_epos_indeed % ?
+ \glet\anch_tabulate_flush_epos\relax
\to \everytabulate
-%D In order to prevent potential clashes with abbreviations,
-%D postpone the mapping.
+%D In order to prevent potential clashes with abbreviations, postpone the mapping.
\appendtoks
\let\GSC\tbGSC
@@ -223,8 +230,8 @@
%D \macros
%D {definepositionframed}
%D
-%D The next example show how to provide backgrounds to table
-%D cells. First we define some framed backgrounds.
+%D The next example shows how to provide backgrounds to table cells. First we define
+%D some framed backgrounds.
%D
%D \startbuffer
%D \definepositionframed[x][background=color,backgroundcolor=red]
@@ -244,8 +251,7 @@
%D \stoptabulate
%D \stopbuffer
%D
-%D The table itself defines three areas (a, b and~c) using
-%D these frames.
+%D The table itself defines three areas (a, b and~c) using these frames.
%D
%D \typebuffer
%D % \getbuffer
@@ -270,9 +276,8 @@
\let\XC \tbXC
\to \everytable
-%D In the previous example, we could have provided an overlay to
-%D the framed definition. A more direct approach is demonstrated
-%D below:
+%D In the previous example, we could have provided an overlay to the framed definition.
+%D A more direct approach is demonstrated below:
%D
%D \startbuffer
%D \def\cw#1{\color[white]{#1}}
@@ -291,8 +296,7 @@
%D
%D \typebuffer \getbuffer
%D
-%D The definition of the table looks about the same as the
-%D previous one:
+%D The definition of the table looks about the same as the previous one:
%D
%D \startbuffer
%D \starttable[|c|c|c|]
@@ -347,22 +351,26 @@
% \definepositionframed[y][background=color,fillcolor=green]
% \definepositionframed[z][background=color,fillcolor=blue]
-\def\remappositionframed#1#2% from to
+\unexpanded\def\remappositionframed#1#2% from to
{\copyposition{b:#1}{b:#2}%
\copyposition{e:#1}{e:#2}%
- \dosetpositionaction{b:#2}{\dopositionaction{b:#1}}}
+ \anch_positions_set_action{b:#2}{\dopositionaction{b:#1}}}
\unexpanded\def\definepositionframed
- {\dodoubleargument\dodefinepositionframed}
+ {\dodoubleargument\anch_framed_define}
-\def\dodefinepositionframed[#1][#2]%
- {\dosetpositionaction{b:#1}{\dopositionframed[#1][#2]}}
+\def\anch_framed_define[#1][#2]%
+ {\anch_positions_set_action{b:#1}{\anch_framed_handle[#1][#2]}}
+
+\unexpanded\def\anch_framed_handle
+ {\bgroup
+ \anch_framed_indeed}
\unexpanded\def\positionframed
{\bgroup
- \dodoubleempty\dopositionframed}
+ \dodoubleempty\anch_framed_indeed}
-\def\dopositionframed[#1][#2]%
+\def\anch_framed_indeed[#1][#2]%
{\setbox\scratchbox\hbox
{\scratchwidth \dimexpr\MPx{e:#1}-\MPx{b:#1}\relax
\scratchdepth \dimexpr\MPy{b:#1}-\MPy{e:#1}+\MPd{e:#1}\relax
diff --git a/tex/context/base/attr-col.lua b/tex/context/base/attr-col.lua
index c592d1dc1..18182ba85 100644
--- a/tex/context/base/attr-col.lua
+++ b/tex/context/base/attr-col.lua
@@ -15,8 +15,11 @@ if not modules then modules = { } end modules ['attr-col'] = {
local type = type
local format = string.format
local concat = table.concat
+local min, max, floor = math.min, math.max, math.floor
-local attributes, nodes = attributes, nodes
+local attributes, nodes, utilities, logs, backends, storage = attributes, nodes, utilities, logs, backends, storage
+local commands, context, interfaces = commands, context, interfaces
+local tex = tex
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -31,11 +34,13 @@ local report_transparencies = logs.reporter("transparencies","support")
-- nb: attributes: color etc is much slower than normal (marks + literals) but ...
-- nb. too many "0 g"s
-local states = attributes.states
-local tasks = nodes.tasks
-local nodeinjections = backends.nodeinjections
-local registrations = backends.registrations
-local unsetvalue = attributes.unsetvalue
+local states = attributes.states
+local tasks = nodes.tasks
+local nodeinjections = backends.nodeinjections
+local registrations = backends.registrations
+local unsetvalue = attributes.unsetvalue
+
+local registerstorage = storage.register
-- We can distinguish between rules and glyphs but it's not worth the trouble. A
-- first implementation did that and while it saves a bit for glyphs and rules, it
@@ -63,7 +68,7 @@ local unsetvalue = attributes.unsetvalue
-- colors.strings[color] = "return colors." .. colorspace .. "(" .. concat({...},",") .. ")"
-- end
--
--- storage.register("attributes/colors/data", colors.strings, "attributes.colors.data") -- evaluated
+-- registerstorage("attributes/colors/data", colors.strings, "attributes.colors.data") -- evaluated
--
-- We assume that only processcolors are defined in the format.
@@ -83,9 +88,17 @@ colors.default = 1
colors.main = nil
colors.triggering = true
colors.supported = true
+colors.model = "all"
+
+local data = colors.data
+local values = colors.values
+local registered = colors.registered
-storage.register("attributes/colors/values", colors.values, "attributes.colors.values")
-storage.register("attributes/colors/registered", colors.registered, "attributes.colors.registered")
+local numbers = attributes.numbers
+local list = attributes.list
+
+registerstorage("attributes/colors/values", values, "attributes.colors.values")
+registerstorage("attributes/colors/registered", registered, "attributes.colors.registered")
local templates = {
rgb = "r:%s:%s:%s",
@@ -104,17 +117,6 @@ local models = {
cmyk = 4,
}
-colors.model = "all"
-
-local data = colors.data
-local values = colors.values
-local registered = colors.registered
-
-local numbers = attributes.numbers
-local list = attributes.list
-
-local min, max, floor = math.min, math.max, math.floor
-
local function rgbtocmyk(r,g,b) -- we could reduce
return 1-r, 1-g, 1-b, 0
end
@@ -125,9 +127,9 @@ end
local function rgbtogray(r,g,b)
if colors.weightgray then
- return .30*r+.59*g+.11*b
+ return .30*r + .59*g + .11*b
else
- return r/3+g/3+b/3
+ return r/3 + g/3 + b/3
end
end
@@ -135,6 +137,17 @@ local function cmyktogray(c,m,y,k)
return rgbtogray(cmyktorgb(c,m,y,k))
end
+-- not critical so not needed:
+--
+-- local function cmyktogray(c,m,y,k)
+-- local r, g, b = 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k)
+-- if colors.weightgray then
+-- return .30*r + .59*g + .11*b
+-- else
+-- return r/3 + g/3 + b/3
+-- end
+-- end
+
-- http://en.wikipedia.org/wiki/HSI_color_space
-- http://nl.wikipedia.org/wiki/HSV_(kleurruimte)
@@ -363,15 +376,15 @@ transparencies.triggering = true
transparencies.attribute = a_transparency
transparencies.supported = true
-storage.register("attributes/transparencies/registered", transparencies.registered, "attributes.transparencies.registered")
-storage.register("attributes/transparencies/values", transparencies.values, "attributes.transparencies.values")
+local registered = transparencies.registered -- we could use a 2 dimensional table instead
+local data = transparencies.data
+local values = transparencies.values
+local template = "%s:%s"
-local registered = transparencies.registered -- we could use a 2 dimensional table instead
-local data = transparencies.data
-local values = transparencies.values
-local template = "%s:%s"
+registerstorage("attributes/transparencies/registered", registered, "attributes.transparencies.registered")
+registerstorage("attributes/transparencies/values", values, "attributes.transparencies.values")
-local function inject_transparency (...)
+local function inject_transparency(...)
inject_transparency = nodeinjections.transparency
return inject_transparency(...)
end
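
For reference, the conversions touched above can be written out as a standalone sketch. The formulas (1-r, 1-g, 1-b, 0 for CMYK, the .30/.59/.11 weights or the plain average for gray, and the min(1,c+k) style CMYK-to-RGB step from the commented variant) are the ones in the hunks; weightgray here is a plain local standing in for colors.weightgray.

-- standalone sketch of the conversions, values in [0,1]
local weightgray = true

local function rgbtocmyk(r,g,b)
    return 1-r, 1-g, 1-b, 0
end

local function rgbtogray(r,g,b)
    if weightgray then
        return .30*r + .59*g + .11*b   -- perceptual weights
    else
        return r/3 + g/3 + b/3         -- plain average
    end
end

local function cmyktorgb(c,m,y,k)
    return 1.0 - math.min(1.0,c+k), 1.0 - math.min(1.0,m+k), 1.0 - math.min(1.0,y+k)
end

local function cmyktogray(c,m,y,k)
    return rgbtogray(cmyktorgb(c,m,y,k))
end

print(rgbtocmyk(1,0,0))     -- 0  1  1  0
print(rgbtogray(1,0,0))     -- 0.3
print(cmyktogray(0,1,1,0))  -- 0.3
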
diff --git a/tex/context/base/attr-eff.lua b/tex/context/base/attr-eff.lua
index 023d1c51b..60e0c08ac 100644
--- a/tex/context/base/attr-eff.lua
+++ b/tex/context/base/attr-eff.lua
@@ -8,7 +8,9 @@ if not modules then modules = { } end modules ['attr-eff'] = {
local format = string.format
-local attributes, nodes = attributes, nodes
+local commands, interfaces = commands, interfaces
+local attributes, nodes, backends, utilities = attributes, nodes, backends, utilities
+local tex = tex
local states = attributes.states
local tasks = nodes.tasks
@@ -17,6 +19,9 @@ local settexattribute = tex.setattribute
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
+local variables = interfaces.variables
+local v_normal = variables.normal
+
attributes.effects = attributes.effects or { }
local effects = attributes.effects
@@ -27,14 +32,14 @@ effects.values = effects.values or { }
effects.registered = effects.registered or { }
effects.attribute = a_effect
-storage.register("attributes/effects/registered", effects.registered, "attributes.effects.registered")
-storage.register("attributes/effects/values", effects.values, "attributes.effects.values")
+local data = effects.data
+local registered = effects.registered
+local values = effects.values
-local template = "%s:%s:%s"
+local template = "%s:%s:%s"
-local data = effects.data
-local registered = effects.registered
-local values = effects.values
+storage.register("attributes/effects/registered", registered, "attributes.effects.registered")
+storage.register("attributes/effects/values", values, "attributes.effects.values")
-- valid effects: normal inner outer both hidden (stretch,rulethickness,effect)
@@ -66,12 +71,22 @@ effects.handler = nodes.installattributehandler {
processor = states.process,
}
-local function register(effect,stretch,rulethickness)
- local stamp = format(template,effect,stretch,rulethickness)
+local function register(specification)
+ local alternative, stretch, rulethickness
+ if specification then
+ alternative = specification.alternative or v_normal
+ stretch = specification.stretch or 0
+ rulethickness = specification.rulethickness or 0
+ else
+ alternative = v_normal
+ stretch = 0
+ rulethickness = 0
+ end
+ local stamp = format(template,alternative,stretch,rulethickness)
local n = registered[stamp]
if not n then
n = #values + 1
- values[n] = { effect, stretch, rulethickness }
+ values[n] = { alternative, stretch, rulethickness }
registered[stamp] = n
end
return n
@@ -88,10 +103,10 @@ effects.enable = enable
local enabled = false
-function commands.triggereffect(effect,stretch,rulethickness)
+function commands.triggereffect(specification)
if not enabled then
enable()
enabled = true
end
- settexattribute(a_effect,register(effect,stretch,rulethickness))
+ settexattribute(a_effect,register(specification))
end
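
The new register(specification) normalizes its input, builds a stamp with the "%s:%s:%s" template, and reuses slot numbers per stamp. A self-contained sketch of that scheme, with the literal "normal" standing in for the v_normal interface constant:

-- standalone sketch; "normal" stands in for v_normal
local format = string.format

local values     = { }
local registered = { }
local template   = "%s:%s:%s"

local function register(specification)
    local alternative, stretch, rulethickness
    if specification then
        alternative   = specification.alternative   or "normal"
        stretch       = specification.stretch       or 0
        rulethickness = specification.rulethickness or 0
    else
        alternative, stretch, rulethickness = "normal", 0, 0
    end
    local stamp = format(template,alternative,stretch,rulethickness)
    local n = registered[stamp]
    if not n then
        n = #values + 1
        values[n] = { alternative, stretch, rulethickness }
        registered[stamp] = n
    end
    return n
end

print(register { alternative = "outer", rulethickness = 16384 })  -- 1
print(register { alternative = "outer", rulethickness = 16384 })  -- 1 (same stamp)
print(register())                                                 -- 2 (all defaults)
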
diff --git a/tex/context/base/attr-eff.mkiv b/tex/context/base/attr-eff.mkiv
index c5c94537c..43f575a7a 100644
--- a/tex/context/base/attr-eff.mkiv
+++ b/tex/context/base/attr-eff.mkiv
@@ -17,38 +17,39 @@
\unprotect
-\gdef\dotriggereffect#1#2#3%
- {\ctxcommand{triggereffect('#1',#2,\number\dimexpr#3\relax)}}
+\installcorenamespace{effect}
-\unexpanded\def\setupeffect
- {\dodoubleargument\dosetupeffect}
+\installcommandhandler \??effect {effect} \??effect
-\def\dosetupeffect[#1][#2]%
- {\getparameters[\??et#1][#2]}
+\setupeffect
+ [\c!method=\v!none,
+ \c!stretch=\zerocount,
+ \c!rulethickness=\zeropoint,
+ \c!alternative=\v!normal]
-\unexpanded\def\defineeffect
- {\dodoubleargument\dodefineeffect}
-
-\def\dodefineeffect[#1][#2]%
- {\getparameters[\??et#1][\c!method=\v!none,\c!stretch=0,\c!rulethickness=\zeropoint,\c!alternative=\v!normal,#2]%
- \doif{\getvalue{\??et#1\c!method}}\v!command
- {\setugvalue{\e!start#1}{\starteffect[#1]}%
- \setugvalue{\e!stop #1}{\stopeffect}}}
-
-% yes or no grouped
+\appendtoks
+ \edef\p_method{\effectparameter\c!method}%
+ \ifx\p_method\v!method
+ \setuxvalue{\e!start\currenteffect}{\starteffect[#1]}%
+ \setuxvalue{\e!stop \currenteffect}{\stopeffect}%
+ \fi
+\to \everydefineeffect
\unexpanded\def\starteffect[#1]%
- {\dotriggereffect
- {\csname\??et#1\c!alternative \endcsname}%
- {\csname\??et#1\c!stretch \endcsname}%
- {\csname\??et#1\c!rulethickness\endcsname}}
+ {\ctxcommand{triggereffect{
+ alternative = "\namedeffectparameter{#1}\c!alternative",
+ stretch = \number\namedeffectparameter{#1}\c!stretch,
+ rulethickness = \number\dimexpr\namedeffectparameter{#1}\c!rulethickness\relax
+ }}}
\unexpanded\def\stopeffect % can be special
- {\dotriggereffect\v!normal0\zeropoint}
+ {\ctxcommand{triggereffect()}} % v!normal 0 0
\unexpanded\def\effect[#1]%
{\groupedcommand{\starteffect[#1]}{\stopeffect}}
+% yes or no grouped
+
\defineeffect [\v!inner] [\c!alternative=\v!inner,\c!rulethickness=.25pt]
\defineeffect [\v!outer] [\c!alternative=\v!outer,\c!rulethickness=.25pt]
\defineeffect [\v!both] [\c!alternative=\v!both, \c!rulethickness=.25pt]
diff --git a/tex/context/base/attr-ini.lua b/tex/context/base/attr-ini.lua
index f3714fcb4..5a3bafce3 100644
--- a/tex/context/base/attr-ini.lua
+++ b/tex/context/base/attr-ini.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['attr-ini'] = {
license = "see context related readme files"
}
+local commands, context, nodes, storage = commands, context, nodes, storage
+
local next, type = next, type
--[[ldx--
@@ -13,11 +15,10 @@ local next, type = next, type
symbolic names later on.
--ldx]]--
-attributes = attributes or { }
-
-local attributes, nodes = attributes, nodes
+attributes = attributes or { }
+local attributes = attributes
--- todo: local and then gobals ... first loaded anyway
+local sharedstorage = storage.shared
attributes.names = attributes.names or { }
attributes.numbers = attributes.numbers or { }
@@ -26,15 +27,14 @@ attributes.states = attributes.states or { }
attributes.handlers = attributes.handlers or { }
attributes.unsetvalue = -0x7FFFFFFF
-local names, numbers, list = attributes.names, attributes.numbers, attributes.list
+local names = attributes.names
+local numbers = attributes.numbers
+local list = attributes.list
storage.register("attributes/names", names, "attributes.names")
storage.register("attributes/numbers", numbers, "attributes.numbers")
storage.register("attributes/list", list, "attributes.list")
-names [0] = "fontdynamic"
-numbers["fontdynamic"] = 0
-
function attributes.define(name,number) -- at the tex end
if not numbers[name] then
numbers[name] = number
@@ -43,13 +43,19 @@ function attributes.define(name,number) -- at the tex end
end
end
+--[[ldx--
+
We reserve this one as we really want it to be always set (faster).
We can use the attributes in the range 127-255 (outside user space). These
are only used when no attribute is set at the \TEX\ end which normally
happens in .
--ldx]]--
-storage.shared.attributes_last_private = storage.shared.attributes_last_private or 127
+sharedstorage.attributes_last_private = sharedstorage.attributes_last_private or 127
-- to be considered (so that we can use an array access):
--
@@ -57,10 +63,10 @@ storage.shared.attributes_last_private = storage.shared.attributes_last_private
--
-- setmetatable(private, {
-- __index = function(t,name)
--- local number = storage.shared.attributes_last_private or 127
+-- local number = sharedstorage.attributes_last_private
-- if number < 1023 then -- tex.count.minallocatedattribute - 1
-- number = number + 1
--- storage.shared.attributes_last_private = number
+-- sharedstorage.attributes_last_private = number
-- end
-- numbers[name], names[number], list[number] = number, name, { }
-- private[name] = number
@@ -74,12 +80,12 @@ storage.shared.attributes_last_private = storage.shared.attributes_last_private
function attributes.private(name) -- at the lua end (hidden from user)
local number = numbers[name]
if not number then
- local last = storage.shared.attributes_last_private or 127
+ local last = sharedstorage.attributes_last_private
if last < 1023 then -- tex.count.minallocatedattribute - 1
last = last + 1
- storage.shared.attributes_last_private = last
+ sharedstorage.attributes_last_private = last
else
- report_attribute("no more room for private attributes") -- fatal
+ report_attribute("no more room for private attributes")
os.exit()
end
number = last
@@ -88,7 +94,7 @@ function attributes.private(name) -- at the lua end (hidden from user)
return number
end
--- new (actually a tracer)
+-- tracers
local report_attribute = logs.reporter("attributes")
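
The attributes.private refactoring above caches storage.shared in sharedstorage and hands out private attribute numbers in the range 128..1023. A standalone sketch of the allocator, with a plain table standing in for the shared storage and error() standing in for the reporter call plus os.exit():

-- standalone sketch with stand-ins
local sharedstorage = { attributes_last_private = 127 }

local numbers, names, list = { }, { }, { }

local function private(name)
    local number = numbers[name]
    if not number then
        local last = sharedstorage.attributes_last_private
        if last < 1023 then                 -- tex.count.minallocatedattribute - 1
            last = last + 1
            sharedstorage.attributes_last_private = last
        else
            error("no more room for private attributes")
        end
        number = last
        numbers[name], names[number], list[number] = number, name, { }
    end
    return number
end

print(private("state"))    -- 128
print(private("color"))    -- 129
print(private("state"))    -- 128 (already allocated)
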
diff --git a/tex/context/base/attr-ini.mkiv b/tex/context/base/attr-ini.mkiv
index a1550b4be..8a41dd427 100644
--- a/tex/context/base/attr-ini.mkiv
+++ b/tex/context/base/attr-ini.mkiv
@@ -14,8 +14,8 @@
\writestatus{loading}{ConTeXt Attribute Macros / Initialization}
%D Although it's still somewhat experimental, here we introduce code
-%D related to attributes. Housekeeping will move completely to Lua
-%D and \newattribute will go away.
+%D related to attributes. Housekeeping will move completely to \LUA\
+%D and \type {\newattribute} will go away.
\unprotect
@@ -27,14 +27,16 @@
\unexpanded\def\pushattribute#1%
{\global\advance\csname\??attributestack\string#1\endcsname\plusone
- \global\expandafter\mathchardef\csname\??attributestack\string#1:\number\csname\??attributestack\string#1\endcsname\endcsname\attribute#1}
+ \expandafter\xdef\csname\??attributestack\string#1:\number\csname\??attributestack\string#1\endcsname\endcsname{\number\attribute#1}}
\unexpanded\def\popattribute#1%
- {\attribute#1\csname\??attributestack\string#1:\number\csname\??attributestack\string#1\endcsname\endcsname
+ {\attribute#1\csname\??attributestack\string#1:\number\csname\??attributestack\string#1\endcsname\endcsname\relax
\global\advance\csname\??attributestack\string#1\endcsname\minusone}
\unexpanded\def\installattributestack#1%
- {\expandafter\newcount\csname\??attributestack\string#1\endcsname}
+ {\ifcsname\??attributestack\string#1\endcsname \else
+ \expandafter\newcount\csname\??attributestack\string#1\endcsname
+ \fi}
\newtoks \attributesresetlist
diff --git a/tex/context/base/attr-lay.lua b/tex/context/base/attr-lay.lua
index bcdc541f7..5e5e81ff1 100644
--- a/tex/context/base/attr-lay.lua
+++ b/tex/context/base/attr-lay.lua
@@ -10,9 +10,15 @@ if not modules then modules = { } end modules ['attr-lay'] = {
-- but when we need it stacked layers might show up too; the next function based
-- approach can be replaced by static (metatable driven) resolvers
+-- maybe use backends.registrations here too
+
local type = type
local format = string.format
-local insert, remove = table.insert, table.remove
+local insert, remove, concat = table.insert, table.remove, table.concat
+
+local attributes, nodes, utilities, logs, backends = attributes, nodes, utilities, logs, backends
+local commands, context, interfaces = commands, context, interfaces
+local tex = tex
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -26,8 +32,6 @@ local report_viewerlayers = logs.reporter("viewerlayers")
-- nb. too many "0 g"s
-- nb: more local tables
-local attributes, nodes = attributes, nodes
-
attributes.viewerlayers = attributes.viewerlayers or { }
local viewerlayers = attributes.viewerlayers
@@ -57,22 +61,42 @@ local texgetattribute = tex.getattribute
local texsettokenlist = tex.settoks
local unsetvalue = attributes.unsetvalue
-storage.register("attributes/viewerlayers/registered", viewerlayers.registered, "attributes.viewerlayers.registered")
-storage.register("attributes/viewerlayers/values", viewerlayers.values, "attributes.viewerlayers.values")
-storage.register("attributes/viewerlayers/scopes", viewerlayers.scopes, "attributes.viewerlayers.scopes")
+local nodepool = nodes.pool
+
+local data = viewerlayers.data
+local values = viewerlayers.values
+local listwise = viewerlayers.listwise
+local registered = viewerlayers.registered
+local scopes = viewerlayers.scopes
+
+local template = "%s"
-local data = viewerlayers.data
-local values = viewerlayers.values
-local listwise = viewerlayers.listwise
-local registered = viewerlayers.registered
-local scopes = viewerlayers.scopes
-local template = "%s"
+storage.register("attributes/viewerlayers/registered", registered, "attributes.viewerlayers.registered")
+storage.register("attributes/viewerlayers/values", values, "attributes.viewerlayers.values")
+storage.register("attributes/viewerlayers/scopes", scopes, "attributes.viewerlayers.scopes")
+
+local layerstacker = utilities.stacker.new("layers") -- experiment
+
+layerstacker.mode = "stack"
+layerstacker.unset = attributes.unsetvalue
+
+viewerlayers.resolve_begin = layerstacker.resolve_begin
+viewerlayers.resolve_step = layerstacker.resolve_step
+viewerlayers.resolve_end = layerstacker.resolve_end
+
+function commands.cleanuplayers()
+ layerstacker.clean()
+ -- todo
+end
-- stacked
+local function startlayer(...) startlayer = nodeinjections.startlayer return startlayer(...) end
+local function stoplayer (...) stoplayer = nodeinjections.stoplayer return stoplayer (...) end
+
local function extender(viewerlayers,key)
if viewerlayers.supported and key == "none" then
- local d = nodeinjections.stoplayer()
+ local d = stoplayer()
viewerlayers.none = d
return d
end
@@ -82,7 +106,7 @@ local function reviver(data,n)
if viewerlayers.supported then
local v = values[n]
if v then
- local d = nodeinjections.startlayer(v)
+ local d = startlayer(v)
data[n] = d
return d
else
@@ -91,8 +115,14 @@ local function reviver(data,n)
end
end
-setmetatableindex(viewerlayers, extender)
-setmetatableindex(viewerlayers.data, reviver)
+setmetatableindex(viewerlayers,extender)
+setmetatableindex(viewerlayers.data,reviver)
+
+-- !!!! TEST CODE !!!!
+
+layerstacker.start = function(...) local f = nodeinjections.startstackedlayer layerstacker.start = f return f(...) end
+layerstacker.stop = function(...) local f = nodeinjections.stopstackedlayer layerstacker.stop = f return f(...) end
+layerstacker.change = function(...) local f = nodeinjections.changestackedlayer layerstacker.change = f return f(...) end
local function initializer(...)
return states.initialize(...)
@@ -103,7 +133,8 @@ attributes.viewerlayers.handler = nodes.installattributehandler {
namespace = viewerlayers,
initializer = initializer,
finalizer = states.finalize,
- processor = states.stacked,
+ -- processor = states.stacked,
+ processor = states.stacker,
}
local stack, enabled, global = { }, false, false
@@ -149,12 +180,16 @@ function viewerlayers.setfeatures(hasorder)
viewerlayers.hasorder = hasorder
end
+local usestacker = true -- new, experimental
+
function viewerlayers.start(name)
--- if not enabled then
--- viewerlayers.enable(true)
--- end
- insert(stack,texgetattribute(a_viewerlayer))
- local a = register(name) or unsetvalue
+ local a
+ if usestacker then
+ a = layerstacker.push(register(name) or unsetvalue)
+ else
+ insert(stack,texgetattribute(a_viewerlayer))
+ a = register(name) or unsetvalue
+ end
if global or scopes[name] == v_global then
scopes[a] = v_global -- messy but we don't know the attributes yet
texsetattribute("global",a_viewerlayer,a)
@@ -165,14 +200,21 @@ function viewerlayers.start(name)
end
function viewerlayers.stop()
- local a = remove(stack)
- if a >= 0 then
+ local a
+ if usestacker then
+ a = layerstacker.pop()
+ else
+ a = remove(stack)
+ end
+ if not a then
+ -- error
+ elseif a >= 0 then
if global or scopes[a] == v_global then
texsetattribute("global",a_viewerlayer,a)
else
texsetattribute(a_viewerlayer,a)
end
- texsettokenlist("currentviewerlayertoks",values[a])
+ texsettokenlist("currentviewerlayertoks",values[a] or "")
else
if global or scopes[a] == v_global then
texsetattribute("global",a_viewerlayer,unsetvalue)
@@ -197,9 +239,9 @@ function viewerlayers.define(settings)
end
end
-commands.defineviewerlayer = viewerlayers.define
-commands.startviewerlayer = viewerlayers.start
-commands.stopviewerlayer = viewerlayers.stop
+commands.defineviewerlayer = viewerlayers.define
+commands.startviewerlayer = viewerlayers.start
+commands.stopviewerlayer = viewerlayers.stop
function commands.definedviewerlayer(settings)
viewerlayers.define(settings)
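
The startlayer/stoplayer wrappers and the layerstacker.start/stop/change assignments above all use the same lazy-binding trick: a stub that, on first call, replaces itself with the real backend injection and forwards the call. An illustration with a stand-in nodeinjections table (not the real backend):

-- illustrative sketch
local nodeinjections = { }     -- filled in later by a backend

local function startlayer(...)
    startlayer = nodeinjections.startlayer   -- rebind on first use
    return startlayer(...)
end

-- the real injection only becomes available after the stub is defined
function nodeinjections.startlayer(name)
    return "start:" .. name
end

print(startlayer("one"))   -- start:one (this call binds the real function)
print(startlayer("two"))   -- start:two (direct call, no further lookup)
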
diff --git a/tex/context/base/attr-lay.mkiv b/tex/context/base/attr-lay.mkiv
index 67cd204e2..9c9c3318e 100644
--- a/tex/context/base/attr-lay.mkiv
+++ b/tex/context/base/attr-lay.mkiv
@@ -13,51 +13,51 @@
\writestatus{loading}{ConTeXt Attribute Macros / Viewerlayers}
+%D Currently there is a limitation in mixed inline usage. This has to do with the fact
+%D that we have a stacked model but cannot determine where to revert to (as we can
+%D have AABBCCAA ranges). Maybe I'll solve that one day. It only affects nested inline
+%D layers and these do not make much sense anyway. We'd have to store the complete nesting
+%D stack in the attribute in order to be able to cross pages and that demands a new
+%D mechanism.
+
\unprotect
\registerctxluafile{attr-lay}{1.001}
% needs to work over stopitemize grouping etc
-% \def\registerviewerlayer#1#2% global !
-% {\setxvalue{\??ql:#1}{\global\attribute\viewerlayerattribute\ctxlua{tex.write(attributes.viewerlayers.register('#2'))} }}
-%
-% \setevalue{\??ql:}{\global\attribute\viewerlayerattribute\attributeunsetvalue}
+\installcorenamespace{viewerlayer}
+
+\installcommandhandler \??viewerlayer {viewerlayer} \??viewerlayer
-\getparameters
- [\??lr]
+\setupviewerlayer
[\c!state=\v!start,
\c!title=,
\c!printable=\v!yes,
\c!scope=\v!local, % maybe global but needs checking with layout
\c!method=\v!none]
-\def\defineviewerlayer
- {\dodoubleargument\dodefineviewerlayer}
-
-\def\dodefineviewerlayer[#1][#2]% document wide properties
- {\begingroup
- \getparameters[\??lr][#2]%
- \ctxcommand{defineviewerlayer{
- tag = "#1",
- title = "\@@lrtitle",
- visible = "\@@lrstate",
- editable = "\v!yes",
- printable = "\@@lrprintable",
- scope = "\@@lrscope"
- }}%
- \doif\@@lrmethod\v!command
- {\setugvalue{\e!start#1}{\startviewerlayer[#1]}%
- \setugvalue{\e!stop #1}{\stopviewerlayer }}%
- \endgroup}
-
-\unexpanded\def\startviewerlayer[#1]{\ctxcommand{startviewerlayer("#1")}} % not grouped
-\unexpanded\def\stopviewerlayer {\ctxcommand{stopviewerlayer()}} % not grouped
+\appendtoks
+ \ctxcommand{defineviewerlayer{
+ tag = "\currentviewerlayer",
+ title = "\viewerlayerparameter\c!title",
+ visible = "\viewerlayerparameter\c!state",
+ editable = "\v!yes",
+ printable = "\viewerlayerparameter\c!printable",
+ scope = "\viewerlayerparameter\c!scope"
+ }}%
+ \doif{\viewerlayerparameter\c!method}\v!command
+ {\setuxvalue{\e!start#1}{\startviewerlayer[\currentviewerlayer]}%
+ \setuxvalue{\e!stop #1}{\stopviewerlayer}}%
+\to \everydefineviewerlayer
+
+\unexpanded\def\startviewerlayer[#1]{\ctxcommand{startviewerlayer("#1")}} % not grouped
+\unexpanded\def\stopviewerlayer {\ctxcommand{stopviewerlayer()}} % not grouped
\unexpanded\def\viewerlayer [#1]{\groupedcommand{\startviewerlayer[#1]}{\stopviewerlayer}} % grouped
% some day we will keep this at the lua end as the info is only needed there
-\let\currentviewerlayer\empty \newtoks\currentviewerlayertoks % soon we can set macros at the lua end
+\newtoks\currentviewerlayertoks % soon we can set macros at the lua end
\def\currentviewerlayer{\the\currentviewerlayertoks}
@@ -67,7 +67,9 @@
% layout components are implemented rather directly (speed)
-\def\doinitializelayoutcomponent#1%
+\installcorenamespace{layoutcomponentattribute}
+
+\def\attr_layoutcomponent_initialize#1%
{\edef\layoutcomponentboxattribute{\ctxcommand{definedviewerlayer{%
tag = "#1",
title = utilities.strings.nice("#1"), % only here as in steps we have step:
@@ -76,24 +78,28 @@
printable = "\v!yes"
}}}%
\edef\layoutcomponentboxattribute{attr \viewerlayerattribute \layoutcomponentboxattribute\relax}%
- \expandafter\glet\csname\??lr:a:#1\endcsname\layoutcomponentboxattribute}
+ \expandafter\glet\csname\??layoutcomponentattribute#1\endcsname\layoutcomponentboxattribute}
-\def\dosetlayoutcomponentattribute#1% make this faster
- {\expandafter\let\expandafter\layoutcomponentboxattribute\csname\??lr:a:#1\endcsname
+\def\attr_layoutcomponent_set#1% make this faster
+ {\expandafter\let\expandafter\layoutcomponentboxattribute\csname\??layoutcomponentattribute#1\endcsname
\ifx\layoutcomponentboxattribute\relax
- \doinitializelayoutcomponent{#1}% get rid of { }
+ \attr_layoutcomponent_initialize{#1}% get rid of { }
\fi}
-\def\doresetlayoutcomponentattribute
+\def\attr_layoutcomponent_reset
{\let\layoutcomponentboxattribute\empty}
\let\setlayoutcomponentattribute \gobbleoneargument
\let\resetlayoutcomponentattribute\relax
\let\layoutcomponentboxattribute \empty
-\def\showlayoutcomponents
- {\ctxlua{attributes.viewerlayers.enable()}% will go
- \let\setlayoutcomponentattribute \dosetlayoutcomponentattribute
- \let\resetlayoutcomponentattribute\doresetlayoutcomponentattribute}
+\unexpanded\def\showlayoutcomponents
+ {%\ctxlua{attributes.viewerlayers.enable()}% automatic
+ \let\setlayoutcomponentattribute \attr_layoutcomponent_set
+ \let\resetlayoutcomponentattribute\attr_layoutcomponent_reset}
+
+\appendtoks
+ \ctxcommand{cleanuplayers()}%
+\to \everyshipout
\protect \endinput
diff --git a/tex/context/base/attr-neg.lua b/tex/context/base/attr-neg.lua
index 4d89cb49b..c32cec956 100644
--- a/tex/context/base/attr-neg.lua
+++ b/tex/context/base/attr-neg.lua
@@ -11,8 +11,9 @@ if not modules then modules = { } end modules ['attr-neg'] = {
local format = string.format
-
-local attributes, nodes = attributes, nodes
+local attributes, nodes, utilities, logs, backends = attributes, nodes, utilities, logs, backends
+local commands, context, interfaces = commands, context, interfaces
+local tex = tex
local states = attributes.states
local tasks = nodes.tasks
diff --git a/tex/context/base/attr-neg.mkiv b/tex/context/base/attr-neg.mkiv
index 0fc5070f1..102b220ba 100644
--- a/tex/context/base/attr-neg.mkiv
+++ b/tex/context/base/attr-neg.mkiv
@@ -19,12 +19,12 @@
% positive and negative are preregistered
-\def\dotriggernegative#1{\ctxcommand{triggernegative('#1')}}
+\unexpanded\def\startnegative{\attr_trigger_negative\v!negative}
+\unexpanded\def\stopnegative {\attr_trigger_negative\v!positive}
-\unexpanded\def\startnegative{\dotriggernegative\v!negative}
-\unexpanded\def\stopnegative {\dotriggernegative\v!positive}
+\unexpanded\def\startpositive{\attr_trigger_negative\v!positive}
+\unexpanded\def\stoppositive {\attr_trigger_negative\v!negative}
-\unexpanded\def\startpositive{\dotriggernegative\v!positive}
-\unexpanded\def\stoppositive {\dotriggernegative\v!negative}
+\def\attr_trigger_negative#1{\ctxcommand{triggernegative('#1')}}
\protect \endinput
diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua
index 3ce40a6c5..46ce4f96c 100644
--- a/tex/context/base/back-exp.lua
+++ b/tex/context/base/back-exp.lua
@@ -20,8 +20,9 @@ if not modules then modules = { } end modules ['back-exp'] = {
local next, type = next, type
local format, match, concat, rep, sub, gsub, gmatch, find = string.format, string.match, table.concat, string.rep, string.sub, string.gsub, string.gmatch, string.find
+local validstring = string.valid
local lpegmatch = lpeg.match
-local utfchar, utfbyte, utfsub, utfgsub = utf.char, utf.byte, utf.sub, utf.gsub
+local utfchar, utfbyte = utf.char, utf.byte
local insert, remove = table.insert, table.remove
local topoints = number.topoints
local utfvalues = string.utfvalues
@@ -154,6 +155,8 @@ local somespace = { [0x20] = true, [" "] = true } -- for testing
local entities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;" }
local attribentities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;", ['"'] = "&quot;" }
+local entityremapper = utf.remapper(entities)
+
local alignmapping = {
flushright = "right",
middle = "center",
@@ -300,7 +303,7 @@ local usedstyles = { }
local documenttemplate = [[
document {
- font-size : %s !important ;
+ font-size : %s !important ;
max-width : %s !important ;
text-align : %s !important ;
hyphens : %s !important ;
@@ -382,7 +385,7 @@ local function allusedimages(xmlfile)
for element, details in sortedhash(usedimages) do
for detail, data in sortedhash(details) do
local name = data.name
- if file.extname(name) == "pdf" then
+ if file.suffix(name) == "pdf" then
-- temp hack .. we will have a remapper
name = file.replacesuffix(name,"svg")
end
@@ -397,7 +400,7 @@ local function uniqueusedimages()
for element, details in next, usedimages do
for detail, data in next, details do
local name = data.name
- if file.extname(name) == "pdf" then
+ if file.suffix(name) == "pdf" then
unique[file.replacesuffix(name,"svg")] = name
else
unique[name] = name
@@ -1351,7 +1354,7 @@ local function begintag(result,element,nature,depth,di,skip)
end
result[#result+1] = format("%s<metadata>\n",spaces[depth])
for k, v in table.sortedpairs(metadata) do
- v = utfgsub(v,".",entities)
+ v = entityremapper(v)
result[#result+1] = format("%s<metavariable name=%q>%s</metavariable>\n",spaces[depth+1],k,v)
end
result[#result+1] = format("%s</metadata>\n",spaces[depth])
@@ -1409,7 +1412,7 @@ local function flushtree(result,data,nature,depth)
-- whatever
elseif di.content then
-- already has breaks
- local content = utfgsub(di.content,".",entities)
+ local content = entityremapper(di.content)
if i == nofdata and sub(content,-1) == "\n" then -- move check
-- can be an end of line in par but can also be the last line
if trace_spacing then
@@ -2362,12 +2365,21 @@ local function stopexport(v)
report_export("saving xhtml variant in '%s",xhtmlfile)
local xmltree = cleanxhtmltree(xml.convert(results))
xml.save(xmltree,xhtmlfile)
+ -- looking at identity is somewhat redundant as we also inherit from interaction
+ -- at the tex end
+ local identity = interactions.general.getidentity()
local specification = {
name = file.removesuffix(v),
identifier = os.uuid(),
images = uniqueusedimages(),
root = xhtmlfile,
files = files,
+ language = languagenames[tex.count.mainlanguagenumber],
+ title = validstring(finetuning.title) or validstring(identity.title),
+ subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle),
+ author = validstring(finetuning.author) or validstring(identity.author),
+ firstpage = validstring(finetuning.firstpage),
+ lastpage = validstring(finetuning.lastpage),
}
report_export("saving specification in '%s' (mtxrun --script epub --make %s)",specificationfilename,specificationfilename)
io.savedata(specificationfilename,table.serialize(specification,true))
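
The export code now builds entityremapper once with utf.remapper(entities) instead of calling utfgsub(v,".",entities) per string. What the remapping amounts to can be sketched with plain string.gsub; the real helper is utf-aware and created only once.

-- plain-Lua sketch of the remapping
local entities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;" }

local function entityremapper(s)
    return (string.gsub(s,"[&<>]",entities))   -- parentheses drop gsub's count
end

print(entityremapper('if x < y & y > z then'))
-- if x &lt; y &amp; y &gt; z then
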
diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv
index 871d08b19..9e65633d4 100644
--- a/tex/context/base/back-exp.mkiv
+++ b/tex/context/base/back-exp.mkiv
@@ -132,6 +132,11 @@
[\c!align=\number\raggedstatus,
\c!bodyfont=\bodyfontsize,
\c!width=\textwidth,
+ \c!title={\directinteractionparameter\c!title},
+ \c!subtitle={\directinteractionparameter\c!subtitle},
+ \c!author={\directinteractionparameter\c!author},
+ % \c!firstpage=, % imagename
+ % \c!lastpage=, % imagename
\c!hyphen=\v!no]
\def\dosynchronizeexport
@@ -141,12 +146,21 @@
bodyfont = \number\dimexpr\exportparameter\c!bodyfont,
width = \number\dimexpr\exportparameter\c!width,
hyphen = "\exportparameter\c!hyphen",
+ title = \!!bs\exportparameter\c!title\!!es,
+ subtitle = \!!bs\exportparameter\c!subtitle\!!es,
+ author = \!!bs\exportparameter\c!author\!!es,
+ firstpage = "\exportparameter\c!firstpage",
+ lastpage = "\exportparameter\c!lastpage",
}}}
\appendtoks
- \doifsomething{\backendparameter\c!export}{\dosynchronizeexport}%
+ \doifsomething{\backendparameter\c!export}\dosynchronizeexport
\to \everystarttext
+\appendtoks
+ \doifsomething{\backendparameter\c!export}\dosynchronizeexport % in case it is done inside \starttext
+\to \everysetupdocument
+
\appendtoks
\doifsomething{\backendparameter\c!xhtml}
{\enabledirectives[backend.export.xhtml=\backendparameter\c!xhtml]}%
@@ -155,11 +169,11 @@
\to \everysetupbackend
\appendtoks
- \doifsomething{\backendparameter\c!export}
- {\setuptagging
- [\c!state=\v!start]%
- \enabledirectives
- [backend.export=\backendparameter\c!export]}%
+ \doifelsenothing{\backendparameter\c!export}
+ {\resetsystemmode\v!export}
+ {\setuptagging[\c!state=\v!start]%
+ \enabledirectives[backend.export=\backendparameter\c!export]%
+ \setsystemmode\v!export}%
\to \everysetupbackend
\protect \endinput
diff --git a/tex/context/base/back-ini.lua b/tex/context/base/back-ini.lua
index 0c02e201a..f76b85438 100644
--- a/tex/context/base/back-ini.lua
+++ b/tex/context/base/back-ini.lua
@@ -93,9 +93,9 @@ tables.vfspecials = allocate {
stopslant = comment,
}
--- -- experimental code --
+-- experimental code --
-function commands.pdfrotation(a)
+function commands.pdfrotation(a) -- somewhat weird here
local s, c = sind(a), cosd(a)
context("%s %s %s %s",c,s,-s,c)
end
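For reference, commands.pdfrotation above emits the four rotation entries of a PDF transformation matrix. A standalone sketch of the same computation; the real code uses the degree based helpers sind/cosd and writes via context(), here math.rad/math.sin keep it runnable in plain lua:

    local function pdfrotation(a)
        local r    = math.rad(a)
        local s, c = math.sin(r), math.cos(r)
        return string.format("%s %s %s %s",c,s,-s,c) -- c s -s c, the rotation part of the matrix
    end

    print(pdfrotation(90)) -- 6.12...e-17 1 -1 6.12...e-17, i.e. 0 1 -1 0 up to rounding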
diff --git a/tex/context/base/back-ini.mkiv b/tex/context/base/back-ini.mkiv
index 8ece8f6e1..fc8759c14 100644
--- a/tex/context/base/back-ini.mkiv
+++ b/tex/context/base/back-ini.mkiv
@@ -61,13 +61,13 @@
\let \dotransformnextbox\gobblesixarguments % and pass last box
%D \macros
-%D {doovalbox}
+%D {back_ovalbox}
%D
%D When we look at the implementation, this is a complicated
%D one. There are seven arguments.
%D
%D \starttyping
-%D \doovalbox {w} {h} {d} {linewidth} {radius} {stroke} {fill} {variant}
+%D \back_ovalbox {w} {h} {d} {linewidth} {radius} {stroke} {fill} {variant}
%D \stoptyping
%D
%D This command has to return a \type{\vbox} which can be used
@@ -75,7 +75,7 @@
%D degrees, the stroke and fill are~\type{1} (true) of~\type{0}
%D (false).
-\let \doovalbox \gobbleeightarguments
+\let\back_ovalbox \gobbleeightarguments
%D \macros
%D {dostartclipping,dostopclipping}
diff --git a/tex/context/base/back-pdf.mkiv b/tex/context/base/back-pdf.mkiv
index 9e441e224..7e910f07f 100644
--- a/tex/context/base/back-pdf.mkiv
+++ b/tex/context/base/back-pdf.mkiv
@@ -68,16 +68,17 @@
{\ctxcommand{setxmpfile("\backendparameter{xmpfile}")}}%
\to \everysetupbackend
+% \doifsomething{\backendparameter\c!format} .. at the lua end
+
\appendtoks
- \doifsomething{\backendparameter\c!format}
- {\ctxcommand{setformat {
+ \ctxcommand{setformat {
format = "\backendparameter\c!format",
level = "\backendparameter\c!level",
option = "\backendparameter\c!option",
profile = "\backendparameter\c!profile",
intent = "\backendparameter\c!intent",
file = "\backendparameter\c!file",
- }}}%
+ }}%
\to \everysetupbackend
%D For the moment we keep these.
@@ -220,12 +221,12 @@
\newbox\objectbox
-\def\dostartobject#1#2#3#4#5%
+\unexpanded\def\dostartobject#1#2#3#4#5% needs to be \unexpanded
{\bgroup
\setbox\objectbox\vbox\bgroup
\def\back_object_stop{\egroup\back_object_register{#1}{#2}}}
-\def\dostopobject
+\unexpanded\def\dostopobject % needs to be \unexpanded
{\back_object_stop
\egroup}
@@ -237,9 +238,6 @@
\immediate\pdfxform resources {\pdfbackendcurrentresources}\objectbox
\dosetobjectreference{#1}{#2}{\the\pdflastxform}}
-\def\doresetobjects
- {}
-
\let\m_back_object_reference\empty
\def\doinsertobject#1#2%
@@ -263,7 +261,7 @@
% for the moment here
%D \macros
-%D {doovalbox}
+%D {back_ovalbox}
%D
%D Drawing frames with round corners is inherited from the
%D main module.
@@ -276,7 +274,7 @@
% \def\back_oval_calculate#1#2#3%
% {\PointsToBigPoints{\dimexpr#2+#3\relax}#1}
-\unexpanded\def\doovalbox#1#2#3#4#5#6#7#8%
+\unexpanded\def\back_ovalbox#1#2#3#4#5#6#7#8%
{\forcecolorhack
\bgroup
% \scratchdimen#4%
@@ -306,16 +304,16 @@
\PointsToBigPoints{\dimexpr #2-\scratchdimen}\yymax
\PointsToBigPoints{\dimexpr-#3+\scratchdimen}\yymin
%
- \edef\dostroke{#6}%
- \edef\dofill{#7}%
- \edef\mode{\number#8 \space}%
+ \edef\dostroke{\number#6}%
+ \edef\dofill{\number#7}%
+ \edef\mode{\number#8}%
% no \ifcase, else \relax in pdfcode
\setbox\scratchbox\hbox
{\ifnum\dostroke\dofill>\zerocount
\pdfliteral
{q
\stroke\space w
- \ifcase\mode
+ \ifcase\mode\space
\xxmin\space \ymin \space m
\xxmax\space \ymin \space l
\xmax \space \ymin \space \xmax \space \yymin\space y
@@ -495,7 +493,7 @@
\xmin \space \ymin \space \xmin \space \yymin\space y
\or % 28
\fi
- \ifnum\mode>8
+ \ifnum\mode>8\space
S
\else
\ifnum\dostroke=\plusone S \fi
diff --git a/tex/context/base/back-swf.mkiv b/tex/context/base/back-swf.mkiv
index 1d225b550..09745e0f9 100644
--- a/tex/context/base/back-swf.mkiv
+++ b/tex/context/base/back-swf.mkiv
@@ -11,12 +11,12 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% This is only a placeholder that demonstrates the usage of swf
-% resources.
-
-%D \starttyping
-%D \starttext
+%D This is only a placeholder that demonstrates the usage of swf resources.
+%D There is no need to include this file in the format. The module was
+%D tested by Luigi and Willi, and based on their suggestions the functionality
+%D was improved.
%D
+%D \starttyping
%D \enabletrackers[graphics.locating]
%D \enabletrackers[backend.swf]
%D
@@ -41,8 +41,6 @@
%D \stopTEXpage
%D \stoptyping
-\stoptext
-
\endinput
\starttext
@@ -72,8 +70,6 @@
},
\stopluaparameterset
-\starttext
-
% preview=swf:myset:display:1
% controls=swf:myset:controls:1
% resources=swf:myset:resources:1
diff --git a/tex/context/base/back-u3d.mkiv b/tex/context/base/back-u3d.mkiv
index dfe8a90c2..89d26ee41 100644
--- a/tex/context/base/back-u3d.mkiv
+++ b/tex/context/base/back-u3d.mkiv
@@ -11,10 +11,9 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% This is only a placeholder that demonstrates the usage of u3d
-% resources. The user interface is rather messy an might be
-% improved. The files and setup is derived from an example by
-% Michael Vidiassov.
+%D This is only a placeholder that demonstrates the usage of u3d resources. The
+%D user interface is rather messy and might be improved. The files and setup are
+%D derived from an example by Michael Vidiassov.
\endinput
diff --git a/tex/context/base/bibl-bib.mkiv b/tex/context/base/bibl-bib.mkiv
index 56007d21c..7ca6799cf 100644
--- a/tex/context/base/bibl-bib.mkiv
+++ b/tex/context/base/bibl-bib.mkiv
@@ -626,7 +626,7 @@
\doifelse{\bibtexpublicationsparameter\c!method}\v!local
{\ctxlua{bibtex.hacks.reset(1)}}% function can take method
{\ctxlua{bibtex.hacks.reset(2)}}%
- \doplacestructurelist
+ \strc_lists_place_current
{\currentbibtexsession}
{\currentbibtexcriterium}
{\namedlistparameter\currentbibtexsession\c!number}%
diff --git a/tex/context/base/bibl-tra.lua b/tex/context/base/bibl-tra.lua
index 6341898ee..ca6403c44 100644
--- a/tex/context/base/bibl-tra.lua
+++ b/tex/context/base/bibl-tra.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['bibl-bib'] = {
+if not modules then modules = { } end modules ['bibl-tra'] = {
version = 1.001,
comment = "this module is the basis for the lxml-* ones",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -115,7 +115,7 @@ function hacks.registerplaced(str)
end
function hacks.doifalreadyplaced(str)
- commands.testcase(used[str])
+ commands.doifelse(used[str])
end
-- we ask for :tag but when we can't find it we go back
diff --git a/tex/context/base/bibl-tra.mkiv b/tex/context/base/bibl-tra.mkiv
index 3531703ad..08d8eb6fe 100644
--- a/tex/context/base/bibl-tra.mkiv
+++ b/tex/context/base/bibl-tra.mkiv
@@ -755,7 +755,7 @@
\ctxlua{bibtex.hacks.filterall()}}
{\doif{\listparameter\c!criterium}\v!cite
{\setuplist[pubs][\c!criterium=\v!here]}%
- \doplacestructurelist
+ \strc_lists_place_current
{pubs}%
{\listparameter\c!criterium}%
{\listparameter\c!number}%
@@ -1244,7 +1244,7 @@
%D \type{\ixbibauthoryearref} stores the data in the macros
%D \type{\currentbibauthor} and \type{\currentbibyear}.
-\def\ifbibinteractionelse
+\def\doifbibinteractionelse
{\iflocation
\edef\test{\bibalternative\c!interaction}%
\ifx\test\v!stop
@@ -1256,20 +1256,11 @@
\@EA\secondoftwoarguments
\fi}
-\def\ifbibinteractionelse
- {\iflocation
- \doifelse{\bibalternative\c!interaction}\v!stop
- {\@EA\secondoftwoarguments}
- {\@EA\firstoftwoarguments}%
- \else
- \@EA\secondoftwoarguments
- \fi}
-
\def\bibmaybeinteractive#1#2%
{\ifbibcitecompress
#2%
\else
- \ifbibinteractionelse{\gotobiblink{#2}[#1]}{#2}%
+ \doifbibinteractionelse{\gotobiblink{#2}[#1]}{#2}%
\fi}
\def\bibauthoryearref[#1]%
@@ -1360,7 +1351,7 @@
\gotobiblink{#1}[#1]}
\def\bibgotoDOI#1#2%
- {\ifbibinteractionelse
+ {\doifbibinteractionelse
{\useURL[bibfooDoi#1][#2]%
\useURL[bibfoo#1][http://dx.doi.org/#2]%
\goto{\url[bibfooDoi#1]}[url(bibfoo#1)]}
@@ -1383,7 +1374,7 @@
\bibalternative\v!right}
\def\bibgotoURL#1#2%
- {\ifbibinteractionelse
+ {\doifbibinteractionelse
{\useURL[bibfoo#1][#2]\goto{\url[bibfoo#1]}[url(bibfoo#1)]}
{\hyphenatedurl{#2}}}
@@ -1411,7 +1402,7 @@
\def\dobibpageref#1%
{\bibinsertrefsep
- \ifbibinteractionelse
+ \doifbibinteractionelse
{\atbiblink[#1]}
{{\referencingfalse\at[#1]}}}
@@ -1484,8 +1475,6 @@
%D And some defaults are loaded from bibl-apa:
-\def\c!monthconversion{monthconversion} % todo
-
\setuppublications
[\c!monthconversion=,
\c!alternative=apa,
@@ -1502,4 +1491,6 @@
% \appendtoks \preloadbiblist \to \everysetuppublications
% \appendtoks \preloadbiblist \to \everystarttext
+\let\ifbibinteractionelse\doifbibinteractionelse
+
\protect \endinput
diff --git a/tex/context/base/blob-ini.lua b/tex/context/base/blob-ini.lua
index b97485b1b..4debaf94c 100644
--- a/tex/context/base/blob-ini.lua
+++ b/tex/context/base/blob-ini.lua
@@ -74,7 +74,7 @@ function blobs.dispose(t)
end
end
-function blobs.append(t,str) -- will be link nodes.link
+function blobs.append(t,str) -- compare concat and link
local typ = type(str)
local dummy = nil
if typ == "number" then
diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua
index 7098679ca..11d7cc9f6 100644
--- a/tex/context/base/buff-ini.lua
+++ b/tex/context/base/buff-ini.lua
@@ -13,21 +13,23 @@ local trace_visualize = false trackers.register("buffers.visualize", function(v
local report_buffers = logs.reporter("buffers","usage")
local report_grabbing = logs.reporter("buffers","grabbing")
+local context, commands = context, commands
+
local concat = table.concat
local type, next = type, next
local sub, format, match, find = string.sub, string.format, string.match, string.find
-local count, splitlines = string.count, string.splitlines
+local count, splitlines, validstring = string.count, string.splitlines, string.valid
-local variables = interfaces.variables
+local variables = interfaces.variables
local settings_to_array = utilities.parsers.settings_to_array
-local ctxcatcodes = tex.ctxcatcodes
-local txtcatcodes = tex.txtcatcodes
+local catcodenumbers = catcodes.numbers
-buffers = { }
+local ctxcatcodes = catcodenumbers.ctxcatcodes
+local txtcatcodes = catcodenumbers.txtcatcodes
+buffers = buffers or { }
local buffers = buffers
-local context = context
local cache = { }
@@ -100,14 +102,17 @@ commands.assignbuffer = assign
local P, patterns, lpegmatch = lpeg.P, lpeg.patterns, lpeg.match
+local anything = patterns.anything
+local alwaysmatched = patterns.alwaysmatched
+
local function countnesting(b,e)
local n
local g = P(b) / function() n = n + 1 end
+ P(e) / function() n = n - 1 end
- + patterns.anything
- local p = patterns.alwaysmatched / function() n = 0 end
+ + anything
+ local p = alwaysmatched / function() n = 0 end
* g^0
- * patterns.alwaysmatched / function() return n end
+ * alwaysmatched / function() return n end
return p
end
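The countnesting pattern above returns the net nesting depth of a begin/end pair in a string. A self-contained sketch with plain lpeg; P(1) and P(true) are assumed to be equivalent to the patterns.anything and patterns.alwaysmatched shortcuts:

    local lpeg = require("lpeg")
    local P, match = lpeg.P, lpeg.match

    local function countnesting(b,e)
        local n
        local g = P(b) / function() n = n + 1 end
                + P(e) / function() n = n - 1 end
                + P(1)
        local p = P(true) / function() n = 0 end
                * g^0
                * P(true) / function() return n end
        return p
    end

    print(match(countnesting("{","}"),"{{a}{b{c}}")) -- 1, one more opening than closing brace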
@@ -150,12 +155,10 @@ function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes) -- maybe
else
if continue then
dn = dn .. sub(bufferdata,2,-2) -- no \r, \n is more generic
+ elseif dn == "" then
+ dn = sub(bufferdata,2,-2)
else
- if dn == "" then
- dn = sub(bufferdata,2,-2)
- else
- dn = dn .. "\n" .. sub(bufferdata,2,-2) -- no \r, \n is more generic
- end
+ dn = dn .. "\n" .. sub(bufferdata,2,-2) -- no \r, \n is more generic
end
local last = sub(dn,-1)
if last == "\n" or last == "\r" then -- \n is unlikely as \r is the endlinechar
@@ -184,7 +187,7 @@ function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes) -- maybe
end
end
assign(name,dn,catcodes)
- commands.testcase(more)
+ commands.doifelse(more)
end
-- The optional prefix hack is there for the typesetbuffer feature and
@@ -232,12 +235,12 @@ end
function commands.getbuffer(name)
local str = getcontent(name)
if str ~= "" then
- context.viafile(str)
+ context.viafile(str,format("buffer.%s",validstring(name,"noname")))
end
end
function commands.getbuffermkvi(name) -- rather direct !
- context.viafile(resolvers.macros.preprocessed(getcontent(name)))
+ context.viafile(resolvers.macros.preprocessed(getcontent(name)),format("buffer.%s.mkiv",validstring(name,"noname")))
end
function commands.gettexbuffer(name)
@@ -265,7 +268,7 @@ function commands.getbufferctxlua(name)
end
function commands.doifelsebuffer(name)
- commands.testcase(exists(name))
+ commands.doifelse(exists(name))
end
-- This only used for mp buffers and is a kludge. Don't change the
diff --git a/tex/context/base/buff-ini.mkiv b/tex/context/base/buff-ini.mkiv
index 239a274c0..5f9d1d1e6 100644
--- a/tex/context/base/buff-ini.mkiv
+++ b/tex/context/base/buff-ini.mkiv
@@ -199,4 +199,28 @@
\def\getbufferdata[#1]{\buff_get_stored_indeed{#1}}
+%D This is a weird one, moved from cont-new. Do we really need it? If not,
+%D it will go away.
+
+\bgroup \permitcircumflexescape
+
+\obeylines % don't remove %'s !
+
+\gdef\collapsedspace#1%
+ {\ifx#1^^M%
+ \expandafter\collapsedspace
+ \else
+ \space
+ \expandafter#1%
+ \fi}
+
+\unexpanded\gdef\collapsespaces
+ {\prependtoksonce\relax\to\everyeof%
+ \ignorelines%
+ \ignoretabs%
+ \let\obeyedspace\collapsedspace%
+ \obeyspaces}
+
+\egroup
+
\protect \endinput
diff --git a/tex/context/base/buff-par.lua b/tex/context/base/buff-par.lua
index 2015b0bc0..29742f497 100644
--- a/tex/context/base/buff-par.lua
+++ b/tex/context/base/buff-par.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['buff-ini'] = {
+if not modules then modules = { } end modules ['buff-par'] = {
version = 1.001,
comment = "companion to buff-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,21 +6,26 @@ if not modules then modules = { } end modules ['buff-ini'] = {
license = "see context related readme files"
}
-local trace_parallel = false trackers.register("buffers.parallel", function(v) trace_parallel = v end)
-
-local report_parallel = logs.reporter("buffers","parallel")
+local context, commands = context, commands
local insert, remove, find, gmatch = table.insert, table.remove, string.find, string.gmatch
local strip, format = string.strip, string.format
-local variables = interfaces.variables
+local trace_parallel = false trackers.register("buffers.parallel", function(v) trace_parallel = v end)
+
+local report_parallel = logs.reporter("buffers","parallel")
+
+local variables = interfaces.variables
+
+local parallel = buffers.parallel or { }
+buffers.parallel = parallel
-buffers.parallel = { } local parallel = buffers.parallel
+local settings_to_array = utilities.parsers.settings_to_array
-local data = { }
+local data = { }
function parallel.define(category,tags)
- local tags = utilities.parsers.settings_to_array(tags)
+ local tags = settings_to_array(tags)
local entries = { }
data[category] = {
tags = tags,
@@ -38,7 +43,7 @@ function parallel.reset(category,tags)
if not tags or tags == "" or tags == variables.all then
tags = table.keys(entries)
else
- tags = utilities.parsers.settings_to_array(tags)
+ tags = settings_to_array(tags)
end
for i=1,#tags do
entries[tags[i]] = {
@@ -76,7 +81,7 @@ function parallel.save(category,tag,content)
end
-- maybe no strip
-- use lpeg
- if find(content,"^%s*%[") then
+ if find(content,"%s*%[") then
local done = false
for label, content in gmatch(content,"%s*%[(.-)%]%s*([^%[]+)") do
if done then
@@ -175,5 +180,5 @@ commands.placeparallel = parallel.place
commands.resetparallel = parallel.reset
function commands.doifelseparallel(category,tags)
- commands.testcase(parallel.hassomecontent(category,tags))
+ commands.doifelse(parallel.hassomecontent(category,tags))
end
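parallel.save, partly visible above, splits the grabbed buffer into labeled chunks with the gmatch pattern shown in the context lines. A runnable sketch of what that split yields (the input string is made up):

    local content = "[alpha] first chunk [beta] second chunk"
    for label, text in string.gmatch(content,"%s*%[(.-)%]%s*([^%[]+)") do
        print(label,text)
    end
    -- alpha   first chunk      (the space before the next "[" is still attached here)
    -- beta    second chunk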
diff --git a/tex/context/base/buff-par.mkiv b/tex/context/base/buff-par.mkiv
deleted file mode 100644
index 7d35676bd..000000000
--- a/tex/context/base/buff-par.mkiv
+++ /dev/null
@@ -1,151 +0,0 @@
-%D \module
-%D [ file=buff-par,
-%D version=2010.12.05,
-%D title=\CONTEXT\ Buffer Macros,
-%D subtitle=Parallel,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Buffer Macros / Parallel}
-
-\registerctxluafile{buff-par}{1.001}
-
-%D This module is developped for Thomas Schmitz as part of
-%D a project.
-
-\unprotect
-
-\letvalue{\??px:}\empty
-
-\def\parallelparameter#1%
- {\csname\??px:%
- \ifcsname\??px:\currentparallel:\currentparallelinstance:#1\endcsname
- \currentparallel:\currentparallelinstance:#1%
- \else\ifcsname\??px:\currentparallel:#1\endcsname
- \currentparallel:#1%
- \else\ifcsname\??px:#1\endcsname
- #1%
- \fi\fi\fi
- \endcsname}
-
-\unexpanded\def\defineparallel
- {\dodoubleargument\dodefineparallel}
-
-\def\dodefineparallel[#1][#2]%
- {\ctxcommand{defineparallel("#1","#2")}%
- \processcommalist[#2]\dododefineparallel
- \setuvalue{\e!start#1}{\dostartparallelset{#1}}%
- \setuvalue{\e!stop #1}{\dostopparallelset}}
-
-\def\dododefineparallel#1%
- {\definebuffer[#1]%%
- \setuvalue{\e!stop#1}{\dowithparallel{#1}}}
-
-\def\dostartparallelset#1%
- {\def\currentparallel{#1}%
- \ctxcommand{nextparallel("\currentparallel")}}
-
-\def\dostopparallelset#1%
- {}
-
-\def\dowithparallel#1% defined moet ook aan de lua kant kunnen
- {\ctxcommand{saveparallel("\currentparallel","#1",buffers.raw("\thedefinedbuffer{#1}"))}}
-
-\unexpanded\def\placeparallel
- {\dotripleempty\doplaceparallel}
-
-\def\doplaceparallel[#1][#2][#3]%
- {\begingroup
- \def\currentparallel{#1}%
- \ctxcommand{placeparallel("\currentparallel","#2","#3")}%
- \endgroup}
-
-% was: \parallelparameter\c!command}
-
-\def\doflushparallel#1#2#3#4#5% {instance}{status}{line}{label}{content}
- {\begingroup
- \def\currentparallelinstance{#1}%
- \def\currentparallelnumber {#2}%
- \def\currentparallelline {#3}%
- \def\currentparallellabel {#4}%
- \def\currentparallelcontent {#5}%
- \ifcase#2\relax
- \expandafter\noflushparalleldefault
- \or
- \expandafter\doflushparalleldefault
- \fi
- \endgroup}
-
-\def\noflushparalleldefault{}
-\def\doflushparalleldefault{\directsetup{\parallelparameter\c!setups}}
-
-\startsetups parallel:place:default
- \hangafter\plusone
- \hangindent4em
- \dontleavehmode
- \hbox to 3em \bgroup
- \hss
- \bf
- \doifsomething \currentparallellabel {
- \textreference[\currentparallellabel]{\currentparallelline}
- }
- \currentparallelline
- \quad
- \egroup
- \currentparallelcontent
- \par
-\stopsetups
-
-\unexpanded\def\setupparallel
- {\dotripleempty\dosetupparallel}
-
-\def\dosetupparallel[#1][#2][#3]%
- {\ifthirdargument
- \getparameters[\??px:#1:#2:][#3]%
- \else\ifsecondargument
- \getparameters[\??px:#1:][#2]%
- \else
- \getparameters[\??px:][#1]% maybe no : here
- \fi\fi}
-
-\setupparallel
-% [\c!command=\doflushparalleldefault]
- [\c!setups=parallel:place:default]
-
-\def\doifelseparallel#1#2%
- {\cldcontext{commands.doifelseparallel("#1","#2")}}
-
-\def\resetparallel
- {\dodoubleempty\doresetparallel}
-
-\def\resetparallel[#1][#2]%
- {\ctxcommand{resetparallel("#1","#2"))}}
-
-% default
-
-% \def\doflushparalleldefault#1#2#3#4% todo: setups instead
-% {\ifcase#1\or
-% \begingroup
-% \hangafter1
-% \hangindent4em
-% \dontleavehmode
-% \hbox to 3em{\hss\bf\doifsomething{#3}{\textreference[#3]{#2}}#2\quad}#4\par
-% \endgroup
-% \fi}
-
-\protect \endinput
-
-% \def\dododefineparallel#1%
-% {\setuvalue{\e!stop #1}{}%
-% \setuvalue{\e!start#1}{\dostartparallel{#1}}}
-%
-% \def\dostartparallel#1%
-% {\grabuntil{\e!stop#1}{\dododostartparallel{#1}}}
-%
-% \def\dododostartparallel#1#2%
-% {\ctxcommand{saveparallel("\currentparallel","#1",\!!bs\detokenize{#2}\!!es)}}
diff --git a/tex/context/base/buff-par.mkvi b/tex/context/base/buff-par.mkvi
new file mode 100644
index 000000000..404fa8ef3
--- /dev/null
+++ b/tex/context/base/buff-par.mkvi
@@ -0,0 +1,131 @@
+%D \module
+%D [ file=buff-par,
+%D version=2010.12.05,
+%D title=\CONTEXT\ Buffer Macros,
+%D subtitle=Parallel,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Buffer Macros / Parallel}
+
+\registerctxluafile{buff-par}{1.001}
+
+%D This module is developed for Thomas Schmitz as part of a project. There is
+%D no documentation yet.
+%D
+%D \starttyping
+%D \defineparallel[main][one,two]
+%D
+%D \startmain
+%D \startone
+%D first 1
+%D [reference] first 2
+%D first 3
+%D \stopone
+%D \starttwo
+%D second 1
+%D \stoptwo
+%D \stopmain
+%D
+%D \placeparallel[main][one,two][criterium=all]
+%D \stoptyping
+
+%D criterium=all start= n=
+
+\unprotect
+
+\installcorenamespace{parallel}
+
+\installcommandhandler \??parallel {parallel} \??parallel
+
+\setupparallel
+ [\c!setups=parallel:place:default]
+
+\let\buff_parallel_define_saved\defineparallel
+
+\unexpanded\def\defineparallel
+ {\dodoubleargument\buff_parallel_define}
+
+\def\buff_parallel_define[#name][#instances]%
+ {\buff_parallel_define_saved[#name]
+ \ctxcommand{defineparallel("#name","#instances")}%
+ \processcommalist[#instances]\buff_parallel_define_instance
+ \setuevalue{\e!start#name}{\buff_parallel_start{#name}}%
+ \setuevalue{\e!stop #name}{\buff_parallel_stop}}
+
+\def\buff_parallel_define_instance#instance%
+ {\normalexpanded{\buff_parallel_define_saved[#instance][\currentparallel]}%
+ \definebuffer[#instance]%
+ \setuevalue{\e!stop#instance}{\buff_parallel_save{#instance}}}
+
+\unexpanded\def\buff_parallel_start#name%
+ {\pushmacro\currentparallel
+ \edef\currentparallel{#name}%
+ \ctxcommand{nextparallel("\currentparallel")}}
+
+\unexpanded\def\buff_parallel_stop
+ {\popmacro\currentparallel}
+
+\unexpanded\def\buff_parallel_save#instance% defining this should also be possible at the lua end
+ {\ctxcommand{saveparallel("\currentparallel","#instance",buffers.raw("\thedefinedbuffer{#instance}"))}}
+
+\unexpanded\def\placeparallel
+ {\dotripleempty\buff_parallel_place}
+
+\def\buff_parallel_place[#name][#instance][#settings]%
+ {\begingroup
+ \edef\currentparallel{#name}%
+ \ctxcommand{placeparallel("\currentparallel","#instance","#settings")}% -- todo: pass options as k/v
+ \endgroup}
+
+\def\doflushparallel#instance#status#line#label#content% called at lua end
+ {\begingroup
+ \def\currentparallelinstance{#instance}%
+ \def\currentparallelnumber {#status}%
+ \def\currentparallelline {#line}%
+ \def\currentparallellabel {#label}%
+ \def\currentparallelcontent {#content}%
+ \ifcase#status\relax
+ \expandafter\buff_parallel_flush_nop
+ \or
+ \expandafter\buff_parallel_flush_yes
+ \fi
+ \endgroup}
+
+\def\buff_parallel_flush_nop
+ {}
+
+\def\buff_parallel_flush_yes
+ {\directsetup{\namedparallelparameter\currentparallelinstance\c!setups}}
+
+\unexpanded\def\doifelseparallel#name#instance%
+ {\ctxcommand{doifelseparallel("#name","#instance")}}
+
+\unexpanded\def\resetparallel
+ {\dodoubleempty\buff_parallel_reset}
+
+\def\buff_parallel_reset[#name][#instance]%
+ {\ctxcommand{resetparallel("#name","#instance")}}
+
+\startsetups parallel:place:default
+ \hangafter\plusone
+ \hangindent4em
+ \dontleavehmode
+ \hbox to 3em \bgroup
+ \hss
+ \bf
+ \doifsomething \currentparallellabel {
+ \textreference[\currentparallellabel]{\currentparallelline}
+ }
+ \currentparallelline
+ \quad
+ \egroup
+ \currentparallelcontent
+ \par
+\stopsetups
+
+\protect \endinput
diff --git a/tex/context/base/buff-ver.lua b/tex/context/base/buff-ver.lua
index 004d89858..5748d9298 100644
--- a/tex/context/base/buff-ver.lua
+++ b/tex/context/base/buff-ver.lua
@@ -18,6 +18,8 @@ local concat = table.concat
local C, P, R, S, V, Carg, Cc, Cs = lpeg.C, lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Carg, lpeg.Cc, lpeg.Cs
local patterns, lpegmatch, is_lpeg = lpeg.patterns, lpeg.match, lpeg.is_lpeg
+local context, commands = context, commands
+
local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end)
local report_visualizers = logs.reporter("buffers","visualizers")
@@ -30,7 +32,6 @@ visualizers.specifications = specifications
local tabtospace = utilities.strings.tabtospace
local variables = interfaces.variables
local settings_to_array = utilities.parsers.settings_to_array
-local verbatim = context.verbatim
local variables = interfaces.variables
local findfile = resolvers.findfile
local addsuffix = file.addsuffix
@@ -52,6 +53,7 @@ local dodisplayverbatimemptyline = context.dodisplayverbatimemptyline
local dodisplayverbatimstart = context.dodisplayverbatimstart
local dodisplayverbatimstop = context.dodisplayverbatimstop
+local verbatim = context.verbatim
local doverbatimspace = context.doverbatimspace
local CargOne = Carg(1)
@@ -688,7 +690,7 @@ local getlines = buffers.getlines
-- interface
function commands.doifelsevisualizer(name)
- commands.testcase(specifications[lower(name)])
+ commands.doifelse(specifications[lower(name)])
end
commands.loadvisualizer = visualizers.load
diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv
index 5336c4458..d18883faf 100644
--- a/tex/context/base/buff-ver.mkiv
+++ b/tex/context/base/buff-ver.mkiv
@@ -70,21 +70,22 @@
\csname\??typingspace\typeparameter\c!space\endcsname
\relax\the\everyinitializeverbatim\relax}
+\unexpanded\def\doinitializeverbatim % for use elsewhere .. temp hack (see lxml-ini)
+ {\buff_verbatim_initialize_type_one
+ \buff_verbatim_initialize_type_two}
+
\let\buff_verbatim_set_line_margin\relax
\def\buff_verbatim_set_line_margin_indeed
- {\getpagestatus
- \hskip\ifrightpage\typingparameter\c!oddmargin\else\typingparameter\c!evenmargin\fi\relax}
+ {\hskip\doifoddpageelse{\typingparameter\c!oddmargin}{\typingparameter\c!evenmargin}\relax}
\def\buff_verbatim_check_margins
{\scratchskip\typingparameter\c!oddmargin\relax
- \ifzeropt\scratchskip
- \else
+ \ifzeropt\scratchskip \else
\let\buff_verbatim_set_line_margin\buff_verbatim_set_line_margin_indeed
\fi
\scratchskip\typingparameter\c!evenmargin\relax
- \ifzeropt\scratchskip
- \else
+ \ifzeropt\scratchskip \else
\let\buff_verbatim_set_line_margin\buff_verbatim_set_line_margin_indeed
\fi
\ifx\buff_verbatim_set_line_margin\relax
@@ -331,7 +332,7 @@
\unexpanded\def\specialfixedspace {\kern\interwordspace\relax}
\unexpanded\def\specialobeyedspace {\hskip\interwordspace\relax} % better than spaceskip
-\unexpanded\def\specialstretchedspace{\hskip.5\interwordspace\!!plus.125\interwordstretch\relax} % more but not less
+\unexpanded\def\specialstretchedspace{\hskip.5\interwordspace\s!plus.125\interwordstretch\relax} % more but not less
\unexpanded\def\specialcontrolspace {\hskip\zeropoint\hbox{\normalcontrolspace}\hskip\zeropoint\relax}
\unexpanded\def\obeyhyphens
@@ -369,7 +370,7 @@
\let\inlinemathmarker \textdollar
\def\displaymathmarker{\textdollar\textdollar}
-\def\buff_verbatim_special_type#1#2%
+\def\buff_verbatim_special_type#1#2#% # gobbles spaces
{\dontleavehmode\bgroup
\buff_verbatim_initialize_type_one
\catcode\leftbraceasciicode \begingroupcatcode
@@ -390,6 +391,8 @@
\unexpanded\def\astype{\bgroup\usetypestyleandcolor\c!style\c!color\let\nexttoken}
+\unexpanded\def\asciistr#1{\dontleavehmode{\verbatimfont\detokenize{#1}}} % use in some old styles
+
%D The basic display verbatim commands are defined in an indirect way. As we
%D will see, they are a specific case of a more general mechanism.
@@ -641,9 +644,9 @@
\c!text=\v!no,
\c!style=\tt,
\c!indentnext=\v!yes,
- \c!margin=\!!zeropoint,
- \c!evenmargin=\!!zeropoint,
- \c!oddmargin=\!!zeropoint,
+ \c!margin=\zeropoint,
+ \c!evenmargin=\zeropoint,
+ \c!oddmargin=\zeropoint,
\c!blank=\v!line,
%\c!escape=, % yes | no | {START,STOP} | default when yes: {BTEX,ETEX}
\c!numbering=\v!no,
@@ -836,4 +839,9 @@
% \usevisualizerstyleandcolor\c!style\c!color
% \let\next}
+\appendtoks
+ \def\type#1{\letterbackslash\checkedstrippedcsname#1}% or maybe detokenize
+ \def\tex #1{\letterbackslash#1}%
+\to \everysimplifycommands
+
\protect \endinput
diff --git a/tex/context/base/catc-ctx.mkiv b/tex/context/base/catc-ctx.mkiv
index bd5c16d69..ddade7f52 100644
--- a/tex/context/base/catc-ctx.mkiv
+++ b/tex/context/base/catc-ctx.mkiv
@@ -128,11 +128,18 @@
% for the moment here:
-\def\starttexcode
+\normalprotected\def\starttexcode
{\pushcatcodetable
\catcodetable\prtcatcodes}
-\def\stoptexcode
+\normalprotected\def\stoptexcode
+ {\popcatcodetable}
+
+\normalprotected\def\startcontextcode
+ {\pushcatcodetable
+ \catcodetable\ctxcatcodes}
+
+\normalprotected\def\stopcontextcode
{\popcatcodetable}
\endinput
diff --git a/tex/context/base/catc-def.mkiv b/tex/context/base/catc-def.mkiv
index 26e8cb11e..cfbaed171 100644
--- a/tex/context/base/catc-def.mkiv
+++ b/tex/context/base/catc-def.mkiv
@@ -123,7 +123,7 @@
%D shortcuts to their character representation.
\chardef \^ = \circumflexasciicode
-\chardef \_ = \underscoreasciicode % but way too wide in lm, so ... until that's fixed:
+\chardef \_ = \underscoreasciicode
\chardef \& = \ampersandasciicode
\chardef \% = \commentasciicode
\chardef \# = \hashasciicode
@@ -133,8 +133,9 @@
\chardef \\ = \backslashasciicode
\chardef \| = \barasciicode
-%def\_{\leavevmode \kern.06em \vbox{\hrule width.3em}}
-\def\_{\dontleavehmode \kern.06em \vbox{\hrule width.3em}} % this will become a \chardef
+% way too wide in lm, so one can also use:
+%
+% \def\_{\dontleavehmode \kern.06em \vbox{\hrule width.3em}} % this will become a \chardef
%D From now on we can use the protection mechanisms.
diff --git a/tex/context/base/catc-ini.lua b/tex/context/base/catc-ini.lua
index b2c793a6a..d4f9b65af 100644
--- a/tex/context/base/catc-ini.lua
+++ b/tex/context/base/catc-ini.lua
@@ -6,8 +6,6 @@ if not modules then modules = { } end modules ['catc-ini'] = {
license = "see context related readme files"
}
--- todo: everywhere replace tex.ctxcatcodes -> catcodes.numbers.ctxcatcodes
-
catcodes = catcodes or { }
catcodes.numbers = catcodes.numbers or { }
catcodes.names = catcodes.names or { }
@@ -15,26 +13,29 @@ catcodes.names = catcodes.names or { }
storage.register("catcodes/numbers", catcodes.numbers, "catcodes.numbers")
storage.register("catcodes/names", catcodes.names, "catcodes.names")
+local numbers = catcodes.numbers
+local names = catcodes.names
+
-- this only happens at initime
function catcodes.register(name,number)
- catcodes.numbers[name] = number
- local cnn = catcodes.names[number]
+ numbers[name] = number
+ local cnn = names[number]
if cnn then
cnn[#cnn+1] = name
else
- catcodes.names[number] = { name }
+ names[number] = { name }
end
- tex[name] = number
+ tex[name] = number -- downward compatible
end
-- this only happens at runtime
-for k, v in next, catcodes.numbers do
- tex[k] = v
+for k, v in next, numbers do
+ tex[k] = v -- downward compatible
end
-- nasty
-table.setmetatableindex(catcodes.numbers,function(t,k) if type(k) == "number" then t[k] = k return k end end)
-table.setmetatableindex(catcodes.names, function(t,k) if type(k) == "string" then t[k] = k return k end end)
+table.setmetatableindex(numbers,function(t,k) if type(k) == "number" then t[k] = k return k end end)
+table.setmetatableindex(names, function(t,k) if type(k) == "string" then t[k] = k return k end end)
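catcodes.register, reshuffled above, keeps a two way administration: name to number, number to a list of names, plus the downward compatible tex[name] assignment. A standalone sketch with a stub tex table and invented numbers:

    local tex     = { } -- stub standing in for the real tex table
    local numbers = { }
    local names   = { }

    local function register(name,number)
        numbers[name] = number
        local cnn = names[number]
        if cnn then
            cnn[#cnn+1] = name
        else
            names[number] = { name }
        end
        tex[name] = number -- downward compatible
    end

    register("ctxcatcodes",4) -- the number is invented for the illustration
    register("prtcatcodes",4) -- several names may end up at one number
    print(numbers.ctxcatcodes)        -- 4
    print(table.concat(names[4],",")) -- ctxcatcodes,prtcatcodes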
diff --git a/tex/context/base/catc-ini.mkiv b/tex/context/base/catc-ini.mkiv
index 26c3ceee9..791ce31c4 100644
--- a/tex/context/base/catc-ini.mkiv
+++ b/tex/context/base/catc-ini.mkiv
@@ -87,6 +87,17 @@
\xdef\outputnewlinechar{^^J}%
\endgroup}
+%D We predefine some prefixes ahead of syst-aux and mult-sys. We reserve 8 slots
+%D for catcodes.
+
+\def\??catcodelet {1>>} % let : \let
+\def\??catcodedef {2>>} % def : \def
+\def\??catcodeued {3>>} % ued : \unexpanded\def
+\def\??catcodeget {4>>} % \meaning
+
+\def\??catcodetablet{5>>}
+\def\??catcodetablen{6>>}
+
\newcount\c_syst_catcodes_n \c_syst_catcodes_n\zerocount % 0 = signal, so advance before allocate
\newcount\c_syst_catcodes_a
\newcount\c_syst_catcodes_b
@@ -94,7 +105,7 @@
\normalprotected\def\newcatcodetable#1% we could move the cctdefcounter to lua
{\global\advance\c_syst_catcodes_n\plusone
- \expandafter\xdef\csname\??qm:n:\number\c_syst_catcodes_n\endcsname{\string#1}% logging
+ \expandafter\xdef\csname\??catcodetablen\number\c_syst_catcodes_n\endcsname{\string#1}% logging
\newconstant#1%
#1\c_syst_catcodes_n
\ctxlua{catcodes.register("\expandafter\gobbleoneargument\string#1",\number#1)}}
@@ -151,12 +162,7 @@
\setnewconstant\c_syst_catcodes_hack\tildeasciicode
%D Once a catcode is assigned, the next assignments will happen
-%D faster. We predefine some prefixes ahead of mult-sys.
-
-\def\??ql{@@ql} % let : \let
-\def\??qd{@@qd} % def : \def
-\def\??qu{@@qu} % ued : \unexpanded\def
-\def\??qm{@@qm} % \meaning
+%D faster.
\def\letcatcodecommand{\afterassignment\syst_catcodes_let_a\c_syst_catcodes_a}
\def\defcatcodecommand{\afterassignment\syst_catcodes_def_a\c_syst_catcodes_a}
@@ -167,46 +173,46 @@
\def\syst_catcodes_ued_a{\afterassignment\syst_catcodes_ued_b\c_syst_catcodes_b}
\def\syst_catcodes_let_b % each time
- {\ifcsname\??ql:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
- \csname\??ql:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
+ {\ifcsname\??catcodelet\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
+ \csname\??catcodelet\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
\else
\expandafter\syst_catcodes_let_c
\fi}
\def\syst_catcodes_def_b % each time
- {\ifcsname\??qd:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
- \csname\??qd:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
+ {\ifcsname\??catcodedef\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
+ \csname\??catcodedef\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
\else
\expandafter\syst_catcodes_def_c
\fi}
\def\syst_catcodes_ued_b % each time
- {\ifcsname\??qu:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
- \csname\??qu:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
+ {\ifcsname\??catcodeued\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname
+ \csname\??catcodeued\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
\else
\expandafter\syst_catcodes_ued_c
\fi}
\def\syst_catcodes_let_c % only first time
- {\expandafter\gdef\csname\??ql:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname\expandafter
- {\expandafter\let\csname\??qm:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}%
+ {\expandafter\gdef\csname\??catcodelet\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname\expandafter
+ {\expandafter\let\csname\??catcodeget\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}%
\syst_catcodes_reinstate_unexpanded
- \csname\??ql:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
+ \csname\??catcodelet\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
\def\syst_catcodes_def_c % only first time (we could use \normalexpanded here)
- {\expandafter\gdef\csname\??qd:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
+ {\expandafter\gdef\csname\??catcodedef\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
\expandafter##\expandafter1\expandafter
- {\expandafter\def\csname\??qm:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname{##1}}%
+ {\expandafter\def\csname\??catcodeget\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname{##1}}%
\syst_catcodes_reinstate_normal
- \csname\??qd:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
+ \csname\??catcodedef\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
\def\syst_catcodes_ued_c % only first time
- {\expandafter\gdef\csname\??qu:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
+ {\expandafter\gdef\csname\??catcodeued\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\expandafter\endcsname
\expandafter##\expandafter1\expandafter
- {\expandafter\normalprotected\expandafter\def\csname\??qm:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname{##1}}%
+ {\expandafter\normalprotected\expandafter\def\csname\??catcodeget\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname{##1}}%
\syst_catcodes_reinstate_unexpanded
- \csname\??qu:\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
+ \csname\??catcodeued\number\c_syst_catcodes_a:\number\c_syst_catcodes_b\endcsname}
\def\reinstatecatcodecommand{\afterassignment\syst_catcodes_reinstate_normal\c_syst_catcodes_b}
@@ -227,8 +233,8 @@
\newconstant\defaultcatcodetable
\def\catcodecommand#1%
- {\csname\??qm:\number
- \ifcsname\??qm:\number\currentcatcodetable:\number#1\endcsname
+ {\csname\??catcodeget\number
+ \ifcsname\??catcodeget\number\currentcatcodetable:\number#1\endcsname
\currentcatcodetable \else \defaultcatcodetable
\fi
:\number#1\endcsname}
@@ -251,13 +257,13 @@
\normalprotected\def\pushcatcodetable
{\advance\c_syst_catcodes_level\plusone
\syst_catcodes_trace_push
- \expandafter\chardef\csname\??qm:t:\number\c_syst_catcodes_level\endcsname\currentcatcodetable}
+ \expandafter\chardef\csname\??catcodetablet\number\c_syst_catcodes_level\endcsname\currentcatcodetable}
\normalprotected\def\popcatcodetable
{\ifcase\c_syst_catcodes_level
\syst_catcodes_trace_nesting_error
\else
- \expandafter\catcodetable\csname\??qm:t:\number\c_syst_catcodes_level\endcsname
+ \expandafter\catcodetable\csname\??catcodetablet\number\c_syst_catcodes_level\endcsname
\syst_catcodes_trace_pop
\advance\c_syst_catcodes_level\minusone
\fi}
@@ -269,7 +275,7 @@
\normalprotected\def\restorecatcodes % takes previous level
{\ifnum\c_syst_catcodes_level>\plusone
- \expandafter\catcodetable\csname\??qm:t:\number\numexpr\c_syst_catcodes_level-1\relax\endcsname
+ \expandafter\catcodetable\csname\??catcodetablet\number\numexpr\c_syst_catcodes_level-1\relax\endcsname
\fi}
% \newtoks\everycatcodetable
@@ -294,14 +300,14 @@
\def\syst_catcodes_prev
{\ifnum\numexpr\c_syst_catcodes_level-1\relax>\zerocount
- \csname\??qm:n:\number\csname\??qm:t:\number\numexpr\c_syst_catcodes_level-1\relax\endcsname\endcsname
+ \csname\??catcodetablen\number\csname\??catcodetablet\number\numexpr\c_syst_catcodes_level-1\relax\endcsname\endcsname
\else
-%
\fi}
\def\catcodetablename
{\ifnum\currentcatcodetable>\zerocount
- \csname\??qm:n:\number\currentcatcodetable\endcsname
+ \csname\??catcodetablen\number\currentcatcodetable\endcsname
\else
-%
\fi}
diff --git a/tex/context/base/char-cjk.lua b/tex/context/base/char-cjk.lua
index b077f4a3e..3d7de1423 100644
--- a/tex/context/base/char-cjk.lua
+++ b/tex/context/base/char-cjk.lua
@@ -12,7 +12,8 @@ local floor = math.floor
local format = string.format
local utfchar = utf.char
-local ranges = characters.ranges
+local ranges = characters.ranges
+local allocate = utilities.storage.allocate
-- Hangul Syllable
@@ -209,7 +210,7 @@ local remapped = { -- this might be merged into char-def.lua
[0x11C2] = 0x314E, -- H
}
-characters.hangul = {
+characters.hangul = allocate {
decomposed = decomposed,
description = description,
leadconsonant = leadconsonant,
@@ -226,24 +227,6 @@ local hangul_syllable_basetable = {
linebreak = "h2",
}
---~ local hangul_syllable_metatable = {
---~ __index = function(t,k)
---~ local u = t.unicodeslot
---~ if k == "fscode" then
---~ -- no need to cache this as we normally use fscodes
---~ return leadconsonant(u)
---~ elseif k == "shcode" then
---~ return { decomposed(u) }
---~ elseif k == "specials" then
---~ return { "char", decomposed(u) }
---~ elseif k == "description" then
---~ return description(u)
---~ else
---~ return hangul_syllable_basetable[k]
---~ end
---~ end
---~ }
-
local hangul_syllable_metatable = {
__index = function(t,k)
local u = t.unicodeslot
diff --git a/tex/context/base/char-def.lua b/tex/context/base/char-def.lua
index 804468c2d..6c73cf008 100644
--- a/tex/context/base/char-def.lua
+++ b/tex/context/base/char-def.lua
@@ -3,7 +3,8 @@ if not modules then modules = { } end modules ['char-def'] = {
comment = "companion to char-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
+ dataonly = true,
}
--[[
@@ -56832,6 +56833,16 @@ characters.data={
description="OVERLINE",
direction="on",
linebreak="al",
+ mathspec={
+ {
+ class="topaccent",
+ name="overbar",
+ },
+ {
+ class="botaccent",
+ name="underbar",
+ },
+ },
specials={ "compat", 0x0020, 0x0305 },
unicodeslot=0x203E,
},
@@ -57244,6 +57255,8 @@ characters.data={
description="SUPERSCRIPT PLUS SIGN",
direction="es",
linebreak="al",
+ mathclass="binary",
+ mathname ="positivesign",
specials={ "super", 0x002B },
unicodeslot=0x207A,
},
@@ -57252,6 +57265,8 @@ characters.data={
description="SUPERSCRIPT MINUS",
direction="es",
linebreak="al",
+ mathclass="binary",
+ mathname ="negativesign",
specials={ "super", 0x2212 },
unicodeslot=0x207B,
},
@@ -58669,7 +58684,7 @@ characters.data={
description="TURNED AMPERSAND",
direction="on",
linebreak="al",
- mathclass="bin",
+ mathclass="binary",
mathname="upand",
unicodeslot=0x214B,
},
@@ -60887,11 +60902,11 @@ characters.data={
linebreak="ai",
mathspec={
{
- class="bin",
+ class="binary",
name="vee",
},
{
- class="bin",
+ class="binary",
name="lor",
},
},
@@ -60932,15 +60947,16 @@ characters.data={
description="INTEGRAL",
direction="on",
linebreak="ai",
+ mathclass="limop",
mathspec={
- {
- class="nothing",
- name="intop",
- },
{
class="limop",
name="int",
},
+ {
+ class="nothing",
+ name="intop",
+ },
},
unicodeslot=0x222B,
},
@@ -60951,15 +60967,16 @@ characters.data={
description="DOUBLE INTEGRAL",
direction="on",
linebreak="ai",
+ mathclass="limop",
mathspec={
- {
- class="nothing",
- name="iintop",
- },
{
class="limop",
name="iint",
},
+ {
+ class="nothing",
+ name="iintop",
+ },
},
specials={ "compat", 0x222B, 0x222B },
unicodeslot=0x222C,
@@ -60969,15 +60986,16 @@ characters.data={
description="TRIPLE INTEGRAL",
direction="on",
linebreak="al",
+ mathclass="limop",
mathspec={
- {
- class="nothing",
- name="iiintop",
- },
{
class="limop",
name="iiint",
},
+ {
+ class="nothing",
+ name="iiintop",
+ },
},
specials={ "compat", 0x222B, 0x222B, 0x222B },
unicodeslot=0x222D,
@@ -62402,7 +62420,7 @@ characters.data={
description="NORMAL SUBGROUP OF",
direction="on",
linebreak="al",
- mathclass="bin",
+ mathclass="binary",
mirror=0x22B3,
unicodeslot=0x22B2,
},
@@ -62412,7 +62430,7 @@ characters.data={
description="CONTAINS AS NORMAL SUBGROUP",
direction="on",
linebreak="al",
- mathclass="bin",
+ mathclass="binary",
mirror=0x22B2,
unicodeslot=0x22B3,
},
@@ -68388,6 +68406,8 @@ characters.data={
description="BLACK UP-POINTING TRIANGLE",
direction="on",
linebreak="ai",
+ mathclass="binary",
+ mathname="blacktriangle",
unicodeslot=0x25B2,
},
[0x25B3]={
@@ -68432,7 +68452,7 @@ characters.data={
description="BLACK RIGHT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
- mathclass="bin",
+ mathclass="binary",
mathname="blacktriangleright",
unicodeslot=0x25B6,
},
@@ -68443,7 +68463,7 @@ characters.data={
description="WHITE RIGHT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
- mathclass="bin",
+ mathclass="binary",
mathname="triangleright",
unicodeslot=0x25B7,
},
@@ -68452,8 +68472,6 @@ characters.data={
description="BLACK RIGHT-POINTING SMALL TRIANGLE",
direction="on",
linebreak="al",
- mathclass="bin",
- mathname="blacktriangleleft",
unicodeslot=0x25B8,
},
[0x25B9]={
@@ -68486,6 +68504,8 @@ characters.data={
description="BLACK DOWN-POINTING TRIANGLE",
direction="on",
linebreak="ai",
+ mathclass="binary",
+ mathname="blacktriangledown",
unicodeslot=0x25BC,
},
[0x25BD]={
@@ -68495,8 +68515,16 @@ characters.data={
description="WHITE DOWN-POINTING TRIANGLE",
direction="on",
linebreak="ai",
- mathclass="binary",
- mathname="bigtriangledown",
+ mathspec={
+ {
+ class="binary",
+ name="triangledown",
+ },
+ {
+ class="binary",
+ name="bigtriangledown",
+ },
+ },
unicodeslot=0x25BD,
},
[0x25BE]={
@@ -68521,6 +68549,8 @@ characters.data={
description="BLACK LEFT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
+ mathclass="binary",
+ mathname="blacktriangleleft",
unicodeslot=0x25C0,
},
[0x25C1]={
@@ -68530,7 +68560,7 @@ characters.data={
description="WHITE LEFT-POINTING TRIANGLE",
direction="on",
linebreak="ai",
- mathclass="bin",
+ mathclass="binary",
mathname="triangleleft",
unicodeslot=0x25C1,
},
diff --git a/tex/context/base/char-enc.lua b/tex/context/base/char-enc.lua
index ef6805e54..048837eec 100644
--- a/tex/context/base/char-enc.lua
+++ b/tex/context/base/char-enc.lua
@@ -1,9 +1,10 @@
-if not modules then modules = { } end modules ['char-syn'] = {
+if not modules then modules = { } end modules ['char-enc'] = {
version = 1.001,
comment = "companion to char-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
+ -- dataonly = true,
}
-- Thanks to tex4ht for these mappings.
@@ -144,25 +145,25 @@ characters.synonyms = allocate { -- afm mess
Yen = 0x00A5,
}
---~ if not characters.enccodes then
---~
---~ local enccodes = { } characters.enccodes = enccodes
---~
---~ for unicode, data in next, characters.data do
---~ local encname = data.adobename or data.contextname
---~ if encname then
---~ enccodes[encname] = unicode
---~ end
---~ end
---~
---~ for name, unicode in next, characters.synonyms do
---~ if not enccodes[name] then enccodes[name] = unicode end
---~ end
---~
---~
---~ end
---~
---~ storage.register("characters.enccodes", characters.enccodes, "characters.enccodes")
+-- if not characters.enccodes then
+--
+-- local enccodes = { } characters.enccodes = enccodes
+--
+-- for unicode, data in next, characters.data do
+-- local encname = data.adobename or data.contextname
+-- if encname then
+-- enccodes[encname] = unicode
+-- end
+-- end
+--
+-- for name, unicode in next, characters.synonyms do
+-- if not enccodes[name] then enccodes[name] = unicode end
+-- end
+--
+--
+-- end
+--
+-- storage.register("characters.enccodes", characters.enccodes, "characters.enccodes")
-- As this table is seldom used, we can delay its definition. Beware, this means
-- that table.print would not work on this file unless it is accessed once. This
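The trailing comment describes delaying the definition of the enccodes table until it is first accessed. The delayed definition itself falls outside this hunk; a generic sketch of that pattern, with purely illustrative names and values:

    -- define-on-first-access via a metatable; the real filler walks
    -- characters.data and characters.synonyms (not shown in this hunk)
    local enccodes = setmetatable({ }, {
        __index = function(t,k)
            for name, unicode in pairs { AElig = 0x00C6, AMP = 0x0026 } do -- illustrative
                rawset(t,name,unicode)
            end
            setmetatable(t,nil) -- behaves like a plain table afterwards
            return rawget(t,k)
        end,
    })

    print(enccodes.AMP) -- 38, the table is filled on this first access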
diff --git a/tex/context/base/char-ent.lua b/tex/context/base/char-ent.lua
index d2ac22bbf..ef55be7e4 100644
--- a/tex/context/base/char-ent.lua
+++ b/tex/context/base/char-ent.lua
@@ -1,15 +1,14 @@
-if not modules then modules = { } end modules ['math-ent'] = {
+if not modules then modules = { } end modules ['char-ent'] = {
version = 1.001,
comment = "companion to math-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "derived from the mathml 2.0 specification",
+ dataonly = true,
}
-- http://www.w3.org/2003/entities/2007/w3centities-f.ent
-- http://www.w3.org/2003/entities/2007/htmlmathml-f.ent
--- this might go into char-def
-
local entities = utilities.storage.allocate {
["AElig"] = "Æ", -- U+000C6
["AMP"] = "&", -- U+00026
diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua
index 244b8d2a7..778035ff4 100644
--- a/tex/context/base/char-ini.lua
+++ b/tex/context/base/char-ini.lua
@@ -11,10 +11,9 @@ if not modules then modules = { } end modules ['char-ini'] = {
-- we can remove the tag range starting at 0xE0000 (special applications)
local tex = tex
-local utf = unicode.utf8
local utfchar, utfbyte, utfvalues = utf.char, utf.byte, string.utfvalues
-local ustring = unicode.ustring
+local ustring, utf = unicode.ustring, unicode.utf8
local concat, unpack, tohash = table.concat, table.unpack, table.tohash
local next, tonumber, type, rawget, rawset = next, tonumber, type, rawget, rawset
local format, lower, gsub, match, gmatch = string.format, string.lower, string.gsub, string.match, string.match, string.gmatch
@@ -28,8 +27,8 @@ local texsetsfcode = tex.setsfcode
local texsetcatcode = tex.setcatcode
local contextsprint = context.sprint
-local ctxcatcodes = tex.ctxcatcodes
-local texcatcodes = tex.texcatcodes
+local ctxcatcodes = catcodes.numbers.ctxcatcodes
+local texcatcodes = catcodes.numbers.texcatcodes
local setmetatableindex = table.setmetatableindex
@@ -48,7 +47,6 @@ loaded!
characters = characters or { }
local characters = characters
-
local data = characters.data
if data then
@@ -455,28 +453,39 @@ table we derive a few more.
if not characters.fallbacks then
- -- we could the definition by using a metatable
-
- characters.fallbacks = { }
- characters.directions = { }
+ characters.fallbacks = { } -- not that many
- local fallbacks = characters.fallbacks
- local directions = characters.directions
+ local fallbacks = characters.fallbacks
- for k,v in next, data do
- local specials = v.specials
- if specials and specials[1] == "compat" and specials[2] == 0x0020 and specials[3] then
+ for k, d in next, data do
+ local specials = d.specials
+ if specials and specials[1] == "compat" and specials[2] == 0x0020 then
local s = specials[3]
- fallbacks[k] = s
- fallbacks[s] = k
+ if s then
+ fallbacks[k] = s
+ fallbacks[s] = k
+ end
end
- directions[k] = v.direction
end
end
-storage.register("characters/fallbacks", characters.fallbacks, "characters.fallbacks") -- accents and such
-storage.register("characters/directions", characters.directions, "characters.directions")
+storage.register("characters/fallbacks", characters.fallbacks, "characters.fallbacks") -- accents and such
+
+characters.directions = { }
+
+setmetatableindex(characters.directions,function(t,k)
+ local d = data[k]
+ if d then
+ local v = d.direction
+ if v then
+ t[k] = v
+ return v
+ end
+ end
+ t[k] = false -- maybe 'l'
+ return false
+end)
--[[ldx--
The context namespace is used to store methods and data
@@ -488,7 +497,7 @@ which is rather specific to .
use the table. After all, we have this information available anyway.
--ldx]]--
-function characters.makeactive(n,name) -- let ?
+function characters.makeactive(n,name) --
contextsprint(ctxcatcodes,format("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name))
-- context("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name)
end
@@ -503,7 +512,7 @@ function tex.uprint(c,n)
end
end
-local temphack = tohash {
+local forbidden = tohash { -- at least for now
0x00A0,
0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x200B, 0x200C, 0x200D,
0x202F,
@@ -539,12 +548,10 @@ function characters.define(tobelettered, tobeactivated) -- catcodetables
else
contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s
end
- elseif is_command[category] then
-if not temphack[u] then
+ elseif is_command[category] and not forbidden[u] then
contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}")
a = a + 1
activated[a] = u
-end
end
end
end
@@ -632,15 +639,17 @@ function characters.setcodes()
end
else
local lc, uc = chr.lccode, chr.uccode
- if not lc then chr.lccode, lc = code, code end
- if not uc then chr.uccode, uc = code, code end
- texsetcatcode(code,11) -- letter
- if type(lc) == "table" then
+ if not lc then
+ chr.lccode, lc = code, code
+ elseif type(lc) == "table" then
lc = code
end
- if type(uc) == "table" then
+ if not uc then
+ chr.uccode, uc = code, code
+ elseif type(uc) == "table" then
uc = code
end
+ texsetcatcode(code,11) -- letter
texsetlccode(code,lc,uc)
if cc == "lu" then
texsetsfcode(code,999)
@@ -815,8 +824,7 @@ function characters.unicodechar(asked)
if n then
return n
elseif type(asked) == "string" then
- asked = gsub(asked," ","")
- return descriptions[asked]
+ return descriptions[asked] or descriptions[gsub(asked," ","")]
end
end
@@ -882,17 +890,21 @@ end
function characters.uccode(n) return uccodes[n] end -- obsolete
function characters.lccode(n) return lccodes[n] end -- obsolete
-function characters.flush(n,direct)
+function characters.safechar(n)
local c = data[n]
if c and c.contextname then
- c = "\\" .. c.contextname
+ return "\\" .. c.contextname
else
- c = utfchar(n)
+ return utfchar(n)
end
- if direct then
- return c
+end
+
+function commands.safechar(n)
+ local c = data[n]
+ if c and c.contextname then
+ contextsprint("\\" .. c.contextname) -- context[c.contextname]()
else
- contextsprint(c)
+ contextsprint(utfchar(n))
end
end
diff --git a/tex/context/base/char-map.lua b/tex/context/base/char-map.lua
index 376ebf343..749da5289 100644
--- a/tex/context/base/char-map.lua
+++ b/tex/context/base/char-map.lua
@@ -3,7 +3,8 @@ if not modules then modules = { } end modules ['char-map'] = {
comment = "companion to char-ini.mkiv",
author = "Hans Hagen & Arthur Reutenauer",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
+ dataonly = true,
}
-- not yet used
diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua
index a411c2d82..52fdfc0d0 100644
--- a/tex/context/base/char-utf.lua
+++ b/tex/context/base/char-utf.lua
@@ -19,7 +19,7 @@ in special kinds of output (for instance ).
over a string.
--ldx]]--
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
+local utfchar, utfbyte = utf.char, utf.byte
local concat, gmatch, gsub, find = table.concat, string.gmatch, string.gsub, string.find
local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
local allocate = utilities.storage.allocate
@@ -76,6 +76,7 @@ local decomposed = allocate {
["ﬖ"] = "վն",
["ﬗ"] = "մխ",
}
+
characters.decomposed = decomposed
local function initialize() -- maybe only 'mn'
@@ -209,9 +210,13 @@ end
private.set = set
-function private.escape (str) return gsub(str,"(.)", escapes) end
-function private.replace(str) return utfgsub(str,"(.)", low ) end
-function private.revert (str) return utfgsub(str,"(.)", high ) end
+-- function private.escape (str) return gsub(str,"(.)", escapes) end
+-- function private.replace(str) return utfgsub(str,"(.)", low ) end
+-- function private.revert (str) return utfgsub(str,"(.)", high ) end
+
+private.escape = utf.remapper(escapes)
+private.replace = utf.remapper(low)
+private.revert = utf.remapper(high)
for ch in gmatch(special,".") do set(ch) end
@@ -481,6 +486,14 @@ if sequencers then
sequencers.enableaction(textfileactions,"characters.filters.utf.decompose")
end
+ directives.register("filters.utf.collapse", function(v)
+ sequencers[v and "enableaction" or "disableaction"](textfileactions,"characters.filters.utf.collapse")
+ end)
+
+ directives.register("filters.utf.decompose", function(v)
+ sequencers[v and "enableaction" or "disableaction"](textfileactions,"characters.filters.utf.decompose")
+ end)
+
end
--[[ldx--
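The private escape/replace/revert one liners above are folded into utf.remapper calls. A plain lua sketch of the underlying idea; string.gsub keeps it standalone, while the real helper is assumed to remap utf characters rather than single bytes:

    -- build a remapping function over a character -> replacement table,
    -- mirroring the removed gsub based definitions
    local function remapper(t)
        return function(str)
            return (string.gsub(str,".",t)) -- byte wise here, utf aware in the real code
        end
    end

    local toupper = remapper { a = "A", b = "B" } -- made up mapping
    print(toupper("abc")) -- ABc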
diff --git a/tex/context/base/chem-ini.lua b/tex/context/base/chem-ini.lua
index 4d47982a2..192953ec9 100644
--- a/tex/context/base/chem-ini.lua
+++ b/tex/context/base/chem-ini.lua
@@ -17,8 +17,8 @@ local report_chemistry = logs.reporter("chemistry")
local context = context
-chemicals = chemicals or { }
-local chemicals = chemicals
+chemistry = chemistry or { }
+local chemistry = chemistry
--[[
The next code is an adaptation of code from Wolfgang Schuster
@@ -62,9 +62,9 @@ local high = Cc("\\high{%s}") * superscript * content
local justtext = (1 - somescript)^1
local parser = Cs((csname + lowhigh + highlow + low + high + sign + any)^0)
-chemicals.moleculeparser = parser -- can be used to avoid functioncall
+chemistry.moleculeparser = parser -- can be used to avoid functioncall
-function chemicals.molecule(str)
+function chemistry.molecule(str)
return lpegmatch(parser,str)
end
diff --git a/tex/context/base/chem-str.lua b/tex/context/base/chem-str.lua
index fb325ccea..db1849c5a 100644
--- a/tex/context/base/chem-str.lua
+++ b/tex/context/base/chem-str.lua
@@ -1,12 +1,15 @@
if not modules then modules = { } end modules ['chem-str'] = {
version = 1.001,
comment = "companion to chem-str.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ author = "Hans Hagen and Alan Braslau",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
--- This module in incomplete and experimental.
+-- The original \PPCHTEX\ code was written in pure \TEX\, although later we made
+-- the move from \PICTEX\ to \METAPOST\. The current implementation is a mix between
+-- \TEX\, \LUA\ and \METAPOST\. Although the first objective is to get a compatible
+-- but better implementation, later versions might provide more.
-- We can push snippets into an mp instance.
@@ -26,12 +29,12 @@ local P, R, S, C, Cs, Ct, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Ct,
local variables = interfaces.variables
local context = context
-chemicals = chemicals or { }
-local chemicals = chemicals
+chemistry = chemistry or { }
+local chemistry = chemistry
-chemicals.instance = "metafun" -- "ppchtex"
-chemicals.format = "metafun"
-chemicals.structures = 0
+chemistry.instance = "metafun" -- "ppchtex"
+chemistry.format = "metafun"
+chemistry.structures = 0
local remapper = {
["+"] = "p",
@@ -52,7 +55,9 @@ local common_keys = {
mid = "fixed", mids = "fixed", midz = "text",
z = "text", rz = "text", mrz = "text", prz = "text", crz = "text",
rt = "text", rtt = "text", rbt = "text", zt = "text", zn = "number",
- mov = "transform", rot = "transform", adj = "transform", dir = "transform", sub = "transform",
+ zbt = "text", zbn = "number", ztt = "text", ztn = "number",
+ mov = "transform", rot = "transform", adj = "transform", sub = "transform",
+ off = "transform",
}
local front_keys = {
@@ -60,9 +65,11 @@ local front_keys = {
sb = "line", msb = "line", psb = "line",
r = "line", pr = "line", mr = "line",
z = "text", mrz = "text", prz = "text",
+ zt = "text", zn = "number",
}
local one_keys = {
+ b = "line", msb = "line", psb = "line",
sb = "line", db = "line", tb = "line",
ep = "line", es = "line", ed = "line", et = "line",
sd = "line", ldd = "line", rdd = "line",
@@ -81,15 +88,16 @@ local syntax = {
one = {
n = 1, max = 8, keys = one_keys,
align = {
- z = { { "r", "r_b", "b", "l_b", "l", "l_t", "t", "r_t" } },
---~ z = { { "r", "r", "b", "l", "l", "l", "t", "r" } },
+ -- z = { { "r", "r_b", "b", "l_b", "l", "l_t", "t", "r_t" } },
+ -- z = { { "r", "r", "b", "l", "l", "l", "t", "r" } },
}
},
three = {
n = 3, max = 3, keys = common_keys,
align = {
mrz = { { "r","b","l" }, { "b","l","t" }, { "l","t","r" }, { "t","r","b" } },
- rz = { { "r","l_b","l_t" }, { "b","l_t","r_t" }, { "l","r_t","r_b" }, { "t","r_b","l_b" } },
+ rz = { { "auto","auto","auto" }, { "auto","auto","auto" }, { "auto","auto","auto" }, { "auto","auto","auto" } },
+ -- rz = { { "r_t","r_b","l" }, { "r_b","l_b","t" }, { "l_b","l_t","r" }, { "l_t","r_t","b" } },
prz = { { "r","l","t" }, { "b","t","r" }, { "l","r","b" }, { "t","b","l" } },
}
},
@@ -97,7 +105,8 @@ local syntax = {
n = 4, max = 4, keys = common_keys,
align = {
mrz = { { "t","r","b","l" }, { "r","b","l","t" }, { "b","l","t","r" }, { "l","t","r","b" } },
- rz = { { "r_t","r_b","l_b","l_t" }, { "r_b","l_b","l_t","r_t" }, { "l_b","l_t","r_t","r_b" }, { "l_t","r_t","r_b","l_b" } },
+ rz = { { "auto","auto","auto","auto" }, { "auto","auto","auto","auto" }, { "auto","auto","auto","auto" }, { "auto","auto","auto","auto" } },
+ -- rz = { { "r_t","r_b","l_b","l_t" }, { "r_b","l_b","l_t","r_t" }, { "l_b","l_t","r_t","r_b" }, { "l_t","r_t","r_b","l_b" } },
prz = { { "r","b","l","t" }, { "b","l","t","r" }, { "l","t","r","b" }, { "t","r","b","l" } },
}
},
@@ -105,7 +114,8 @@ local syntax = {
n = 5, max = 5, keys = common_keys,
align = {
mrz = { { "t","r","b","b","l" }, { "r","b","l","l","t" }, { "b","l","t","r","r" }, { "l","t","r","r","b" } },
- rz = { { "r","r","b","l","t" }, { "b","b","l","t","r" }, { "l","l","t","r","b" }, { "t","t","r","b","l" } },
+ rz = { { "auto","auto","auto","auto","auto" }, { "auto","auto","auto","auto","auto" }, { "auto","auto","auto","auto","auto" }, { "auto","auto","auto","auto","auto" } },
+ -- rz = { { "r","r","b","l","t" }, { "b","b","l","t","r" }, { "l","l","t","r","b" }, { "t","t","r","b","l" } },
prz = { { "r","b","l","t","t" }, { "b","l","t","r","r" }, { "l","t","r","b","b" }, { "t","r","b","l","l" } },
}
},
@@ -113,7 +123,8 @@ local syntax = {
n = 6, max = 6, keys = common_keys,
align = {
mrz = { { "t","t","r","b","b","l" }, { "r","b","b","l","t","t" }, { "b","b","l","t","t","r" }, { "l","t","t","r","b","b" } },
- rz = { { "r","r","b","l","l","t" }, { "b","b","l","t","t","r" }, { "l","l","t","r","r","b" }, { "t","t","r","b","b","l" } },
+ rz = { { "auto","auto","auto","auto","auto","auto" }, { "auto","auto","auto","auto","auto","auto" }, { "auto","auto","auto","auto","auto","auto" }, { "auto","auto","auto","auto","auto","auto" } },
+ -- rz = { { "r","r","b","l","l","t" }, { "b","b","l","t","t","r" }, { "l","l","t","r","r","b" }, { "t","t","r","b","b","l" } },
prz = { { "r","b","l","l","t","r" }, { "b","l","t","t","r","b" }, { "l","t","r","r","b","l" }, { "t","r","b","b","l","t" } },
}
},
@@ -121,7 +132,8 @@ local syntax = {
n = 8, max = 8, keys = common_keys,
align = { -- todo
mrz = { { "t","r","r","b","b","l","l","t" }, { "r","b","b","l","l","t","t","r" }, { "b","l","l","t","t","r","r","b" }, { "l","t","t","r","r","b","b","l" } },
- rz = { { "r","r","b","b","l","l","t","t" }, { "b","b","l","l","t","t","r","r" }, { "l","l","t","t","r","r","b","b" }, { "t","t","r","r","b","b","l","l" } },
+ rz = { { "auto","auto","auto","auto","auto","auto","auto","auto" }, { "auto","auto","auto","auto","auto","auto","auto","auto" }, { "auto","auto","auto","auto","auto","auto","auto","auto" }, { "auto","auto","auto","auto","auto","auto","auto","auto" } },
+ -- rz = { { "r","r","b","b","l","l","t","t" }, { "b","b","l","l","t","t","r","r" }, { "l","l","t","t","r","r","b","b" }, { "t","t","r","r","b","b","l","l" } },
prz = { { "r","b","b","l","l","t","t","r" }, { "b","l","l","t","t","r","r","b" }, { "l","t","t","r","r","b","b","l" }, { "t","r","r","b","b","l","l","t" } },
}
},
@@ -147,11 +159,11 @@ local syntax = {
local definitions = { }
-function chemicals.undefine(name)
+function chemistry.undefine(name)
definitions[lower(name)] = nil
end
-function chemicals.define(name,spec,text)
+function chemistry.define(name,spec,text)
name = lower(name)
local dn = definitions[name]
if not dn then dn = { } definitions[name] = dn end
@@ -162,7 +174,7 @@ function chemicals.define(name,spec,text)
end
local metacode, variant, keys, bonds, max, txt, textsize, rot, pstack
-local molecule = chemicals.molecule -- or use lpegmatch(chemicals.moleculeparser,...)
+local molecule = chemistry.molecule -- or use lpegmatch(chemistry.moleculeparser,...)
local function fetch(txt)
local st = stack[txt]
@@ -196,35 +208,46 @@ local text = (equal * C(P(1)^0)) + Cc(false)
local pattern =
(amount + Cc(1)) *
- operation *
- special * (
+ Cs(operation/lower) *
+ Cs(special/lower) * (
+-- operation *
+-- special * (
range * Cc(false) * text +
Cc(false) * Cc(false) * set * text +
single * Cc(false) * Cc(false) * text +
Cc(false) * Cc(false) * Cc(false) * text
)
---~ local n, operation, index, upto, set, text = lpegmatch(pattern,"RZ1357")
+-- local n, operation, index, upto, set, text = lpegmatch(pattern,"RZ1357")
---~ print(lpegmatch(pattern,"RZ=x")) 1 RZ false false false x
---~ print(lpegmatch(pattern,"RZ1=x")) 1 RZ 1 false false x
---~ print(lpegmatch(pattern,"RZ1..3=x")) 1 RZ 1 3 false x
---~ print(lpegmatch(pattern,"RZ13=x")) 1 RZ false false table x
+-- print(lpegmatch(pattern,"RZ=x")) -- 1 RZ false false false x
+-- print(lpegmatch(pattern,"RZ1=x")) -- 1 RZ 1 false false x
+-- print(lpegmatch(pattern,"RZ1..3=x")) -- 1 RZ 1 3 false x
+-- print(lpegmatch(pattern,"RZ13=x")) -- 1 RZ false false table x
local function process(spec,text,n,rulethickness,rulecolor,offset)
insert(stack,{ spec=spec, text=text, n=n })
local txt = #stack
local m = #metacode
for i=1,#spec do
- local s = spec[i]
+ local step = spec[i]
+ local s = lower(step)
local d = definitions[s]
if d then
+ if trace_structure then
+ report_chemistry("%s => definition: %s",step,s)
+ end
for i=1,#d do
local di = d[i]
process(di.spec,di.text,1,rulethickness,rulecolor)
end
else
- local rep, operation, special, index, upto, set, text = lpegmatch(pattern,s)
+ local rep, operation, special, index, upto, set, text = lpegmatch(pattern,step)
+ if trace_structure then
+ local set = set and concat(set," ") or "-"
+ report_chemistry("%s => rep: %s, operation: %s, special: %s, index: %s, upto: %s, set: %s, text: %s",
+                    step,rep or "-",operation or "-",(special and special ~= "" and special) or "-",index or "-",upto or "-",set or "-",text or "-")
+ end
if operation == "pb" then
insert(pstack,variant)
m = m + 1 ; metacode[m] = syntax.pb.direct
@@ -339,7 +362,8 @@ local function process(spec,text,n,rulethickness,rulecolor,offset)
if not t then txt, t = fetch(txt) end
if t then
t = molecule(processor_tostring(t))
- m = m + 1 ; metacode[m] = format('chem_%s_zero("\\chemicaltext{%s}");',operation,t)
+ m = m + 1 ; metacode[m] = format('chem_%s(%s,%s,"\\chemicaltext{%s}");',operation,bonds,index,t)
+ -- m = m + 1 ; metacode[m] = format('chem_%s_zero("\\chemicaltext{%s}");',operation,t)
end
elseif index then
local t = text
@@ -388,8 +412,8 @@ end
--
-- rulethickness in points
-function chemicals.start(settings)
- chemicals.structures = chemicals.structures + 1
+function chemistry.start(settings)
+ chemistry.structures = chemistry.structures + 1
local textsize, rulethickness, rulecolor = settings.size, settings.rulethickness, settings.rulecolor
local width, height, scale, offset = settings.width or 0, settings.height or 0, settings.scale or "medium", settings.offset or 0
local l, r, t, b = settings.left or 0, settings.right or 0, settings.top or 0, settings.bottom or 0
@@ -445,63 +469,73 @@ function chemicals.start(settings)
scale = 0.75 * scale/625
--
metacode[#metacode+1] = format("chem_start_structure(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) ;",
- chemicals.structures,
+ chemistry.structures,
l/25, r/25, t/25, b/25, scale,
tostring(settings.axis == variables.on), tostring(width), tostring(height), tostring(offset)
)
--
- variant, keys, bonds, stack, rot, pstack = "six", { }, 6, { }, 1, { }
+ -- variant, keys, bonds, stack, rot, pstack = "six", { }, 6, { }, 1, { }
+ variant, keys, bonds, stack, rot, pstack = "one", { }, 1, { }, 1, { }
end
-function chemicals.stop()
+function chemistry.stop()
metacode[#metacode+1] = "chem_stop_structure ;"
--
local mpcode = concat(metacode,"\n")
if trace_structure then
report_chemistry("metapost code:\n%s", mpcode)
end
- metapost.graphic(chemicals.instance,chemicals.format,mpcode)
+ metapost.graphic(chemistry.instance,chemistry.format,mpcode)
metacode = nil
end
-function chemicals.component(spec,text,settings)
+function chemistry.component(spec,text,settings)
rulethickness, rulecolor, offset = settings.rulethickness, settings.rulecolor
- local spec = settings_to_array(lower(spec))
+-- local spec = settings_to_array(lower(spec))
+ local spec = settings_to_array(spec)
local text = settings_to_array(text)
metacode[#metacode+1] = "chem_start_component ;"
process(spec,text,1,rulethickness,rulecolor)
metacode[#metacode+1] = "chem_stop_component ;"
end
+statistics.register("chemical formulas", function()
+ if chemistry.structures > 0 then
+ return format("%s chemical structure formulas",chemistry.structures) -- no timing needed, part of metapost
+ end
+end)
+
+-- interfaces
+
+commands.undefinechemical = chemistry.undefine
+commands.definechemical = chemistry.define
+commands.startchemical = chemistry.start
+commands.stopchemical = chemistry.stop
+commands.chemicalcomponent = chemistry.component
+
+-- todo: top / bottom
+
local inline = {
["single"] = "\\chemicalsinglebond", ["-"] = "\\chemicalsinglebond",
- ["double"] = "\\chemicaldoublebond", ["--"] = "\\chemicaldoublebond",
+ ["double"] = "\\chemicaldoublebond", ["--"] = "\\chemicaldoublebond", -- also =? and unicode triple?
["triple"] = "\\chemicaltriplebond", ["---"] = "\\chemicaltriplebond",
["gives"] = "\\chemicalgives", ["->"] = "\\chemicalgives",
["equilibrium"] = "\\chemicalequilibrium", ["<->"] = "\\chemicalequilibrium",
["mesomeric"] = "\\chemicalmesomeric", ["<>"] = "\\chemicalmesomeric",
- ["plus"] = "\\chemicalsplus", ["+"] = "\\chemicalsplus",
- ["minus"] = "\\chemicalsminus",
- ["space"] = "\\chemicalsspace",
+ ["plus"] = "\\chemicalplus", ["+"] = "\\chemicalplus",
+ ["minus"] = "\\chemicalminus",
+ ["space"] = "\\chemicalspace",
}
--- todo: top / bottom
-
-function chemicals.inline(spec)
+function commands.inlinechemical(spec)
local spec = settings_to_array(spec)
for i=1,#spec do
local s = spec[i]
local inl = inline[lower(s)]
if inl then
- context(inl)
+ context(inl) -- could be a fast context.sprint
else
context.chemicalinline(molecule(s))
end
end
end
-
-statistics.register("chemical formulas", function()
- if chemicals.structures > 0 then
- return format("%s chemical structure formulas",chemicals.structures) -- no timing needed, part of metapost
- end
-end)
diff --git a/tex/context/base/chem-str.mkiv b/tex/context/base/chem-str.mkiv
index 1d60a293e..c4b03dd1e 100644
--- a/tex/context/base/chem-str.mkiv
+++ b/tex/context/base/chem-str.mkiv
@@ -2,7 +2,7 @@
%D [ file=chem-ini,
%D version=2009.05.13,
%D subtitle=Chemistry,
-%D author=Hans Hagen,
+%D author=Hans Hagen \& Alan Braslau,
%D date=\currentdate,
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
@@ -10,8 +10,10 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D This module in incomplete and experimental. Eventually this code
-%D will replace \PPCHTEX.
+%D The original \PPCHTEX\ code was written in pure \TEX\, although later we made
+%D the move from \PICTEX\ to \METAPOST\. The current implementation is a mix of
+%D \TEX\, \LUA\ and \METAPOST. Although the first objective is to get a compatible
+%D but better implementation, later versions might provide more.
\writestatus{loading}{ConTeXt Chemistry Macros / Structure}
@@ -26,24 +28,24 @@
% Here we use chemicalformula instead, so no longer a mix:
%
% \startchemicalformula
-% \chemical{H_2}{top}{bottom}
+% \chemical{2H_2}{top}{bottom}
% \chemical{PLUS}{top}{bottom}
-% \chemical{O}{top}{bottom}
+% \chemical{O_2}{top}{bottom}
% \chemical{GIVES}{top}{bottom}
-% \chemical{H_2O}{top}{bottom}
+% \chemical{2H_2O}{top}{bottom}
% \stopchemicalformula
%
% \startchemicalformula
-% \chemical{H_2}
+% \chemical{2H_2}
% \chemical{PLUS}
-% \chemical{O}
+% \chemical{O_2}
% \chemical{GIVES}
-% \chemical{H_2O}
+% \chemical{2H_2O}
% \stopchemicalformula
%
% The inline variant has only one argument:
%
-% \chemical{H_2,PLUS,O,GIVES,H_2O}
+% \chemical{2H_2,PLUS,O_2,GIVES,2H_2O}
% todo: seven | eight | frontsix | fontfive | carbon | newmans | chair
@@ -58,11 +60,17 @@
\let\setupchemicals\setupchemical
-\unexpanded\def\setupchemicalframed
- {\dosingleempty\dosetupchemicalframed}
+%D We use a dedicated framed macro instead of inheriting one. There are both
+%D historical and practical reasons for this (like shared keys with different
+%D meanings that could clash, e.g.\ align).
+
+\defineframed
+ [\??chemicalframed]
+ [\c!align=\v!normal,
+ \c!strut=\v!no]
-\def\dosetupchemicalframed
- {\getparameters[\??chemicalframed]}
+\unexpanded\def\setupchemicalframed
+ {\setupframed[\??chemicalframed]}
\unexpanded\def\definechemical % is global (so we don't use the commandhandler)
{\dosingleargument\chem_define}
@@ -71,7 +79,7 @@
{\startnointerference
\edef\currentdefinedchemical{#1}%
\let\chemical\chem_chemical_nested
- \ctxlua{chemicals.undefine("#1")}%
+ \ctxcommand{undefinechemical("#1")}%
#2% flush
\stopnointerference}
@@ -79,7 +87,7 @@
{\dodoubleempty\chem_chemical_nested_indeed}
\def\chem_chemical_nested_indeed[#1][#2]%
- {\ctxlua{chemicals.define("\currentdefinedchemical",\!!bs#1\!!es,\!!bs\detokenize{#2}\!!es)}}
+ {\ctxcommand{definechemical("\currentdefinedchemical",\!!bs#1\!!es,\!!bs\detokenize{#2}\!!es)}}
% chemical symbols
@@ -90,20 +98,15 @@
{\setvalue{\??chemicalsymbol#1}{#2}}
\unexpanded\def\chemicalsymbol[#1]%
- {\csname\??chemicalsymbol\ifcsname\??chemicalsymbol#1\endcsname\s!unknown\else#1\fi\endcsname}
+ {\csname\??chemicalsymbol\ifcsname\??chemicalsymbol#1\endcsname#1\else\s!unknown\fi\endcsname}
-\definechemicalsymbol[\s!unknown][] % empty
+\definechemicalsymbol[\s!unknown][] % \char"FFFD empty
% size (small medium big)
\edef\chemicaltoplocation{t}
\edef\chemicalbotlocation{b}
-% \unexpanded\def\chemicaltext#1% in ppchtex we had a more clever alignment
-% {\usechemicalstyleandcolor\c!style\c!color
-% \strut
-% #1} % maybe also \setstrut
-
\unexpanded\def\chemicaltext#1%
{\mathematics
{\usechemicalstyleandcolor\c!style\c!color
@@ -147,7 +150,7 @@
\fi\fi
\the\everystructurechemical
\setbox\b_chem_result\hbox\bgroup
- \ctxlua{chemicals.start {
+ \ctxcommand{startchemical {
width = "\chemicalparameter\c!width",
height = "\chemicalparameter\c!height",
left = \chemicalparameter\c!left,
@@ -162,7 +165,7 @@
\unexpanded\def\stopchemical
{\stopnointerference
- \ctxlua{chemicals.stop()}%
+ \ctxcommand{stopchemical()}%
\egroup
\d_chem_width \wd\b_chem_result
\d_chem_height\ht\b_chem_result
@@ -171,15 +174,16 @@
\doifelsenothing{\chemicalparameter\c!frame}\chem_framed_nop\chem_framed_yes
\egroup}
-\def\chem_framed_yes
- {\localframed%
+\unexpanded\def\chem_framed_yes
+ {\localframedwithsettings
[\??chemicalframed]%
- [\c!frame=\chemicalparameter\c!frame,\c!align=\v!normal,\c!strut=\v!no]{\vbox{\box\b_chem_result\vss}}} % remove depth
+ [\c!frame=\chemicalparameter\c!frame]%
+ {\vbox{\box\b_chem_result\vss}}} % remove depth
-\def\chem_framed_nop
- {\localframed%
+\unexpanded\def\chem_framed_nop
+ {\directlocalframed
[\??chemicalframed]%
- [\c!align=\v!normal,\c!strut=\v!no]{\vbox{\box\b_chem_result\vss}}} % remove depth
+ {\vbox{\box\b_chem_result\vss}}} % remove depth
\let\startstructurechemical\startchemical
\let\stopstructurechemical \stopchemical
@@ -200,14 +204,14 @@
\def\strc_chem_indeed_three[#1][#2][#3]%
{\writestatus\m!chemicals{hyperlinked chemicals not yet supported}% todo reference, for the moment ignored
- \ctxlua{chemicals.component(\!!bs#2\!!es, \!!bs\detokenize{#3}\!!es, { % maybe also pass first two args this way
+ \ctxcommand{chemicalcomponent(\!!bs#2\!!es, \!!bs\detokenize{#3}\!!es, { % maybe also pass first two args this way
rulethickness = "\the\dimexpr\chemicalparameter\c!rulethickness\relax", % todo: scaled points
rulecolor = "\MPcolor{\chemicalparameter\c!rulecolor}" % we can precalculate this for speedup
} ) }%
\ignorespaces}
\def\strc_chem_indeed_two[#1][#2]%
- {\ctxlua{chemicals.component(\!!bs#1\!!es,\!!bs\detokenize{#2}\!!es, { % maybe also pass first two args this way
+ {\ctxcommand{chemicalcomponent(\!!bs#1\!!es,\!!bs\detokenize{#2}\!!es, { % maybe also pass first two args this way
rulethickness = "\the\dimexpr\chemicalparameter\c!rulethickness\relax", % todo: scaled points
rulecolor = "\MPcolor{\chemicalparameter\c!rulecolor}" % we can precalculate this for speedup
} ) }%
@@ -289,8 +293,8 @@
\def\chem_arrow_construct#1#2#3%
{\enspace
\mathematics{#1%
- {\strut\hbox \!!spread 2\emwidth{\hss\ctxlua{chemicals.inline(\!!bs#2\!!es)}\hss}}% {\strut\hbox \!!spread 2em{\hss#2\hss}}%
- {\strut\hbox \!!spread 2\emwidth{\hss\ctxlua{chemicals.inline(\!!bs#3\!!es)}\hss}}}% {\strut\hbox \!!spread 2em{\hss#3\hss}}}%
+ {\strut\hbox \s!spread 2\emwidth{\hss\ctxcommand{inlinechemical(\!!bs#3\!!es)}\hss}}% {\strut\hbox \s!spread 2em{\hss#3\hss}}}%
+ {\strut\hbox \s!spread 2\emwidth{\hss\ctxcommand{inlinechemical(\!!bs#2\!!es)}\hss}}}% {\strut\hbox \s!spread 2em{\hss#2\hss}}%
\enspace}
% special macros (probably needs some more work)
@@ -428,9 +432,9 @@
\usechemicalstyleandcolor\c!style\c!color
\ifthirdargument
\ifsecondargument
- \halign{&\hss##\hss\cr#2\cr\molecule{#1}\cr#3\cr}%
+ \halign{\aligntab\hss\alignmark\alignmark\hss\cr#2\cr\molecule{#1}\cr#3\cr}%
\else
- \halign{&\hss##\hss\cr\molecule{#1}\cr#2\cr}%
+ \halign{\aligntab\hss\alignmark\alignmark\hss \cr\molecule{#1}\cr#2\cr}%
\fi
\else
\hbox{\molecule{#1}}%
@@ -440,30 +444,30 @@
\unexpanded\def\inlinechemical#1%
{\dontleavehmode
- \hbox{\usechemicalstyleandcolor\c!style\c!color\ctxlua{chemicals.inline(\!!bs#1\!!es)}}}
+ \hbox{\usechemicalstyleandcolor\c!style\c!color\ctxcommand{inlinechemical(\!!bs#1\!!es)}}}
\unexpanded\def\chemicalbondrule
- {\hbox{\vrule\!!height.75ex\!!depth-\dimexpr.75ex-\linewidth\relax\!!width1em\relax}}
+ {\hbox{\vrule\s!height.75\exheight\s!depth-\dimexpr.75\exheight-\linewidth\relax\s!width\emwidth\relax}}
\definechemicalsymbol[i:space] [\enspace\quad\enspace]
\definechemicalsymbol[i:plus] [\enspace\mathematics{+}\enspace]
\definechemicalsymbol[i:minus] [\enspace\mathematics{-}\enspace]
\definechemicalsymbol[i:gives] [\enspace\mathematics{\xrightarrow{}{}}\enspace]
-\definechemicalsymbol[i:equilibrium] [\enspace\mathematics{\xrightpverleftarrow{}{}}\enspace]
+\definechemicalsymbol[i:equilibrium] [\enspace\mathematics{\xrightoverleftarrow{}{}}\enspace]
\definechemicalsymbol[i:mesomeric] [\enspace\mathematics{\xleftrightarrow{}{}}\enspace]
\definechemicalsymbol[i:single] [\chemicalbondrule]
-\definechemicalsymbol[i:tripple] [\hbox{\lower.5ex\chemicalbondrule\hskip-1em\raise.5ex\chemicalbondrule}]
-\definechemicalsymbol[i:double] [\hbox{\chemicalbondrule\hskip-1em\lower.5ex\chemicalbondrule\hskip-1em\raise.5ex\chemicalbondrule}]
+\definechemicalsymbol[i:double] [\hbox{\lower.5ex\chemicalbondrule\hskip-1em\raise.5ex\chemicalbondrule}]
+\definechemicalsymbol[i:triple] [\hbox{\chemicalbondrule\hskip-1em\lower.5ex\chemicalbondrule\hskip-1em\raise.5ex\chemicalbondrule}]
\unexpanded\def\chemicalsinglebond {\chemicalsymbol[i:single]}
-\unexpanded\def\chemicaldoublebond {\chemicalsymbol[i:tripple]}
-\unexpanded\def\chemicaltriplebond {\chemicalsymbol[i:double]}
+\unexpanded\def\chemicaldoublebond {\chemicalsymbol[i:double]}
+\unexpanded\def\chemicaltriplebond {\chemicalsymbol[i:triple]}
\unexpanded\def\chemicalgives {\chemicalsymbol[i:gives]}
\unexpanded\def\chemicalmesomeric {\chemicalsymbol[i:mesomeric]}
\unexpanded\def\chemicalequilibrium{\chemicalsymbol[i:equilibrium]}
-\unexpanded\def\chemicalsplus {\chemicalsymbol[i:plus]}
-\unexpanded\def\chemicalsminus {\chemicalsymbol[i:minus]}
-\unexpanded\def\chemicalsspace {\chemicalsymbol[i:space]}
+\unexpanded\def\chemicalplus {\chemicalsymbol[i:plus]}
+\unexpanded\def\chemicalminus {\chemicalsymbol[i:minus]}
+\unexpanded\def\chemicalspace {\chemicalsymbol[i:space]}
\unexpanded\def\chemicalinline #1{#1}
% display
@@ -491,61 +495,133 @@
\setfalse\c_chem_has_bot}
\unexpanded\def\stopchemicalformula
- {\tabskip1em\relax
+ {\tabskip\emwidth\relax
\nointerlineskip
\ifconditional\c_chem_has_top
\ifconditional\c_chem_has_bot
- \halign{&\hss\usechemicalstyleandcolor\c!style\c!color##\hss\cr\the\t_chem_top\cr\the\t_chem_mid\cr\the\t_chem_bot\cr}%
+ \halign{\aligntab\hss\usechemicalstyleandcolor\c!style\c!color\alignmark\alignmark\hss\cr\the\t_chem_top\cr\the\t_chem_mid\cr\the\t_chem_bot\cr}%
\else
- \halign{&\hss\usechemicalstyleandcolor\c!style\c!color##\hss\cr\the\t_chem_top\cr\the\t_chem_mid\cr}%
+ \halign{\aligntab\hss\usechemicalstyleandcolor\c!style\c!color\alignmark\alignmark\hss\cr\the\t_chem_top\cr\the\t_chem_mid\cr}%
\fi
\else
\ifconditional\c_chem_has_bot
- \halign{&\hss\usechemicalstyleandcolor\c!style\c!color##\hss\cr\the\t_chem_mid\cr\the\t_chem_bot\cr}%
+ \halign{\aligntab\hss\usechemicalstyleandcolor\c!style\c!color\alignmark\alignmark\hss\cr\the\t_chem_mid\cr\the\t_chem_bot\cr}%
\else
- \halign{&\hss\usechemicalstyleandcolor\c!style\c!color##\hss\cr\the\t_chem_mid\cr}%
+ \halign{\aligntab\hss\usechemicalstyleandcolor\c!style\c!color\alignmark\alignmark\hss\cr\the\t_chem_mid\cr}%
\fi
\fi
\egroup}
+% for the moment we have a special set
+
+\definechemicalsymbol[d:space] [\enspace\quad\enspace]
+\definechemicalsymbol[d:plus] [\enspace+\enspace]
+\definechemicalsymbol[d:minus] [\enspace-\enspace]
+\definechemicalsymbol[d:gives] [\rightarrowfill] % \chem_arrow_construct\xrightarrow
+\definechemicalsymbol[d:equilibrium] [\rightoverleftarrowfill] % \chem_arrow_construct\xrightoverleftarrow
+\definechemicalsymbol[d:mesomeric] [\leftarrowfill] % \chem_arrow_construct\xleftrightarrow
+\definechemicalsymbol[d:opencomplex] [\mathematics{\Bigg[}] % not yet ok
+\definechemicalsymbol[d:closecomplex][\mathematics{\Bigg]}] % not yet ok
+
+\definechemicalsymbol[d:SPACE] [{\chemicalsymbol[d:space]}]
+\definechemicalsymbol[d:PLUS] [{\chemicalsymbol[d:plus]}]
+\definechemicalsymbol[d:MINUS] [{\chemicalsymbol[d:minus]}]
+\definechemicalsymbol[d:GIVES] [{\chemicalsymbol[d:gives]}]
+\definechemicalsymbol[d:EQUILIBRIUM] [{\chemicalsymbol[d:equilibrium]}]
+\definechemicalsymbol[d:MESOMERIC] [{\chemicalsymbol[d:mesomeric]}]
+\definechemicalsymbol[d:OPENCOMPLEX] [{\chemicalsymbol[d:opencomplex]}]
+\definechemicalsymbol[d:CLOSECOMPLEX][{\chemicalsymbol[d:closecomplex]}]
+
\unexpanded\def\formulachemical
{\relax\dotriplegroupempty\chem_formula}
+% \def\chem_formula#1#2#3% we could do hboxes and measure
+% {\ifcsname\??chemicalsymbol\detokenize{#1}\endcsname
+% \t_chem_mid\expandafter{\the\t_chem_mid\chem_formula_mid{#1}{#2}{#3}}%
+% \else
+% \ifthirdargument
+% \doifelsenothing{#2}\chem_formula_top_nop{\chem_formula_top_yes{#2}}%
+% \doifelsenothing{#3}\chem_formula_bot_nop{\chem_formula_bot_yes{#3}}%
+% \else\ifsecondargument
+% \chem_formula_top_nop
+% \doifelsenothing{#2}\chem_formula_bot_nop{\chem_formula_bot_yes{#2}}%
+% \else
+% \chem_formula_top_nop
+% \chem_formula_bot_nop
+% \fi\fi
+% \t_chem_mid\expandafter{\the\t_chem_mid\molecule{#1}\aligntab}%
+% \fi}
+
\def\chem_formula#1#2#3% we could do hboxes and measure
- {\ifcsname\??chemicalsymbol\detokenize{#1}\endcsname
- \t_chem_mid\expandafter{\the\t_chem_mid\chem_formula_mid{#1}{#2}{#3}}%
+ {\ifthirdargument
+ \doifelsenothing{#2}\chem_formula_top_nop{\chem_formula_top_yes{#2}}%
+ \doifelsenothing{#3}\chem_formula_bot_nop{\chem_formula_bot_yes{#3}}%
+ \else\ifsecondargument
+ \chem_formula_top_nop
+ \doifelsenothing{#2}\chem_formula_bot_nop{\chem_formula_bot_yes{#2}}%
\else
- \ifthirdargument
- \doifelsenothing{#2}\chem_formula_top_nop{\chem_formula_top_yes{#2}}%
- \doifelsenothing{#3}\chem_formula_bot_nop{\chem_formula_bot_yes{#3}}%
- \else\ifsecondargument
- \chem_formula_top_nop
- \doifelsenothing{#2}\chem_formula_bot_nop{\chem_formula_bot_yes{#2}}%
- \else
- \chem_formula_top_nop
- \chem_formula_bot_nop
- \fi\fi
- \t_chem_mid\expandafter{\the\t_chem_mid\molecule{#1}&}%
+ \chem_formula_top_nop
+ \chem_formula_bot_nop
+ \fi\fi
+ \ifcsname\??chemicalsymbol d:\detokenize{#1}\endcsname
+ \t_chem_mid\expandafter{\the\t_chem_mid\chemicalsymbol[d:#1]\aligntab}%
+ \else
+ \t_chem_mid\expandafter{\the\t_chem_mid\molecule{#1}\aligntab}%
\fi}
\def\chem_formula_mid#1%
{\csname\??chemicalsymbol\detokenize{#1}\endcsname}
-\def\chem_formula_top_nop {\t_chem_top\expandafter{\the\t_chem_top&}}
-\def\chem_formula_bot_nop {\t_chem_bot\expandafter{\the\t_chem_bot&}}
-\def\chem_formula_top_yes#1{\t_chem_top\expandafter{\the\t_chem_top\chem_formula_top_indeed{#1}&}\settrue\c_chem_has_top}
-\def\chem_formula_bot_yes#1{\t_chem_bot\expandafter{\the\t_chem_bot\chem_formula_bot_indeed{#1}&}\settrue\c_chem_has_bot}
+\def\chem_formula_top_nop {\t_chem_top\expandafter{\the\t_chem_top\aligntab}}
+\def\chem_formula_bot_nop {\t_chem_bot\expandafter{\the\t_chem_bot\aligntab}}
+\def\chem_formula_top_yes#1{\t_chem_top\expandafter{\the\t_chem_top\chem_formula_top_indeed{#1}\aligntab}\settrue\c_chem_has_top}
+\def\chem_formula_bot_yes#1{\t_chem_bot\expandafter{\the\t_chem_bot\chem_formula_bot_indeed{#1}\aligntab}\settrue\c_chem_has_bot}
\def\chem_formula_top_indeed#1{\strut#1}
\def\chem_formula_bot_indeed#1{\strut#1}
+% Experimental: defaults might change.
+
+\definefloat
+ [\v!chemical]
+ [\v!chemicals]
+
+\setuplabeltext
+ [\v!chemical=]
+
+\setupfloat
+ [\v!chemical]
+ [\c!location=\v!here,
+ \c!inner=\hsize.8\textwidth\dontleavehmode, % brr
+ \c!align={\v!flushleft,\v!lohi}]
+
+\setupcaption
+ [\v!chemical]
+ [\c!location=\v!right,
+ \c!distance=\zeropoint,
+ \c!width=.2\textwidth,
+ \c!align=\v!flushright]
+
+% Can be used like displayed math (cf.\ \startplaceformula ...) to place a chemical
+% formula or a chemical structure:
+%
+% \startplacechemical
+% \startchemicalformula
+% \chemical{2H_2}
+% \chemical{PLUS}
+% \chemical{O_2}
+% \chemical{GIVES}
+% \chemical{2H_2O}
+% \stopchemicalformula
+% \stopplacechemical
+
% gone: state option resolution offset (now frame offset) alternative
\setupchemicalframed
[\c!align=\v!normal,
\c!strut=\v!no,
\c!offset=\v!overlay,
- \c!frame=off]
+ \c!frame=\v!off]
\setupchemical
[\c!frame=,
diff --git a/tex/context/base/cldf-bas.lua b/tex/context/base/cldf-bas.lua
index 30a9265bc..9cf8dcd4a 100644
--- a/tex/context/base/cldf-bas.lua
+++ b/tex/context/base/cldf-bas.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['cldf-ini'] = {
+if not modules then modules = { } end modules ['cldf-bas'] = {
version = 1.001,
comment = "companion to cldf-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -22,6 +22,8 @@ if not modules then modules = { } end modules ['cldf-ini'] = {
-- flush(ctxcatcodes,"}")
-- end
+-- maybe use context.generics
+
local type = type
local format = string.format
local utfchar = utf.char
@@ -32,13 +34,16 @@ local generics = context.generics
local variables = interfaces.variables
local new_rule = nodes.pool.rule
+local texcount = tex.count
function context.char(k) -- used as escape too, so don't change to utf
if type(k) == "table" then
- -- for i=1,#k do
- -- context(format([[\char%s\relax]],k[i]))
- -- end
- context([[\char%s\relax]],concat(k,[[\relax\char]]))
+ local n = #k
+ if n == 1 then
+ context([[\char%s\relax]],k[1])
+ elseif n > 0 then
+ context([[\char%s\relax]],concat(k,[[\relax\char]]))
+ end
elseif k then
context([[\char%s\relax]],k)
end
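+
+-- Usage sketch (added for illustration only): with the branches above a single
+-- table entry avoids the concat call.
+--
+-- context.char(65)     -- flushes \char65\relax
+-- context.char{72,105} -- flushes \char72\relax\char105\relax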
@@ -98,22 +103,22 @@ context.vrule = context.hrule
-- not yet used ... but will get variant at the tex end as well
-function context.sethboxregister (n) context("\\setbox %s\\hbox",n) end
-function context.setvboxregister (n) context("\\setbox %s\\vbox",n) end
+function context.sethboxregister(n) context([[\setbox %s\hbox]],n) end
+function context.setvboxregister(n) context([[\setbox %s\vbox]],n) end
function context.starthboxregister(n)
if type(n) == "number" then
- context("\\setbox%s\\hbox\\bgroup",n)
+ context([[\setbox%s\hbox{]],n)
else
- context("\\setbox\\%s\\hbox\\bgroup",n)
+ context([[\setbox\%s\hbox{]],n)
end
end
function context.startvboxregister(n)
if type(n) == "number" then
- context("\\setbox%s\\vbox\\bgroup",n)
+ context([[\setbox%s\vbox{]],n)
else
- context("\\setbox\\%s\\vbox\\bgroup",n)
+ context([[\setbox\%s\vbox{]],n)
end
end
@@ -122,19 +127,36 @@ context.stopvboxregister = context.egroup
function context.flushboxregister(n)
if type(n) == "number" then
- context("\\box%s ",n)
+ context([[\box%s ]],n)
else
- context("\\box\\%s",n)
+ context([[\box\%s]],n)
end
end
function context.beginvbox()
- context("\\vbox\\bgroup") -- we can do \bvbox ... \evbox (less tokens)
+ context([[\vbox{]]) -- we can do \bvbox ... \evbox (less tokens)
end
function context.beginhbox()
- context("\\hbox\\bgroup") -- todo: use fast one
+ context([[\hbox{]]) -- todo: use fast one
end
context.endvbox = context.egroup
context.endhbox = context.egroup
+
+local function allocate(name,what,cmd)
+ local a = format("c_syst_last_allocated_%s",what)
+ local n = texcount[a] + 1
+ if n <= texcount.c_syst_max_allocated_register then
+ texcount[a] = n
+ end
+ context("\\global\\expandafter\\%sdef\\csname %s\\endcsname %s\\relax",cmd or what,name,n)
+ return n
+end
+
+function context.newdimen (name) return allocate(name,"dimen") end
+function context.newskip (name) return allocate(name,"skip") end
+function context.newcount (name) return allocate(name,"count") end
+function context.newmuskip(name) return allocate(name,"muskip") end
+function context.newtoks (name) return allocate(name,"toks") end
+function context.newbox (name) return allocate(name,"box","mathchar") end
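+
+-- Illustrative sketch only (not in this commit); the macro name below is made up.
+-- The allocator defines a register at the \TEX\ end and returns its number:
+--
+-- local n = context.newdimen("MyScratchDimen")
+-- -- emits \global\expandafter\dimendef\csname MyScratchDimen\endcsname <n>\relax
+-- context("\\MyScratchDimen=10pt")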
diff --git a/tex/context/base/cldf-com.lua b/tex/context/base/cldf-com.lua
index bacbbeafd..d9062594e 100644
--- a/tex/context/base/cldf-com.lua
+++ b/tex/context/base/cldf-com.lua
@@ -11,8 +11,8 @@ local context = context
local generics = context.generics -- needs documentation
local variables = interfaces.variables
-generics.starttabulate = "start" .. variables.tabulate -- todo: e!start
-generics.stoptabulate = "stop" .. variables.tabulate -- todo: e!stop
+generics.starttabulate = "starttabulate" -- "start" .. variables.tabulate -- todo: e!start
+generics.stoptabulate = "stoptabulate" -- "stop" .. variables.tabulate -- todo: e!stop
local NC, NR = context.NC, context.NR
diff --git a/tex/context/base/cldf-ini.lua b/tex/context/base/cldf-ini.lua
index ed86c2923..ad5f14855 100644
--- a/tex/context/base/cldf-ini.lua
+++ b/tex/context/base/cldf-ini.lua
@@ -25,7 +25,7 @@ local tex = tex
context = context or { }
local context = context
-local format, find, gmatch, gsub = string.format, string.find, string.gmatch, string.gsub
+local format, find, gmatch, gsub, validstring = string.format, string.find, string.gmatch, string.gsub, string.valid
local next, type, tostring, tonumber, setmetatable = next, type, tostring, tonumber, setmetatable
local insert, remove, concat = table.insert, table.remove, table.concat
local lpegmatch, lpegC, lpegS, lpegP, lpegCc = lpeg.match, lpeg.C, lpeg.S, lpeg.P, lpeg.Cc
@@ -40,12 +40,14 @@ local isnode = node.is_node -- after 0.65 just node.type
local writenode = node.write
local copynodelist = node.copy_list
-local ctxcatcodes = tex.ctxcatcodes
-local prtcatcodes = tex.prtcatcodes
-local texcatcodes = tex.texcatcodes
-local txtcatcodes = tex.txtcatcodes
-local vrbcatcodes = tex.vrbcatcodes
-local xmlcatcodes = tex.xmlcatcodes
+local catcodenumbers = catcodes.numbers
+
+local ctxcatcodes = catcodenumbers.ctxcatcodes
+local prtcatcodes = catcodenumbers.prtcatcodes
+local texcatcodes = catcodenumbers.texcatcodes
+local txtcatcodes = catcodenumbers.txtcatcodes
+local vrbcatcodes = catcodenumbers.vrbcatcodes
+local xmlcatcodes = catcodenumbers.xmlcatcodes
local flush = texsprint
local flushdirect = texprint
@@ -344,9 +346,9 @@ end
local methodhandler = resolvers.methodhandler
-function context.viafile(data)
+function context.viafile(data,tag)
if data and data ~= "" then
- local filename = resolvers.savers.byscheme("virtual","viafile",data)
+ local filename = resolvers.savers.byscheme("virtual",validstring(tag,"viafile"),data)
-- context.startregime { "utf" }
context.input(filename)
-- context.stopregime()
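+        -- A hedged example of the optional tag (the tag string here is made up);
+        -- the data ends up in a virtual file whose name is derived from the tag:
+        --
+        -- context.viafile("\\starttext hello\\stoptext","snippet")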
@@ -410,7 +412,11 @@ local function writer(parent,command,first,...) -- already optimized before call
done = true
end
end
- flush(currentcatcodes,"]")
+ if done then
+ flush(currentcatcodes,"]")
+ else
+ flush(currentcatcodes,"[]")
+ end
elseif tn == 1 then -- some 20% faster than the next loop
local tj = ti[1]
if type(tj) == "function" then
diff --git a/tex/context/base/cldf-int.lua b/tex/context/base/cldf-int.lua
index 55db9fa0b..6cbfd666f 100644
--- a/tex/context/base/cldf-int.lua
+++ b/tex/context/base/cldf-int.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['mult-clm'] = {
+if not modules then modules = { } end modules ['cldf-int'] = {
version = 1.001,
comment = "companion to mult-clm.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -14,9 +14,12 @@ if not modules then modules = { } end modules ['mult-clm'] = {
local format, insert, remove, concat = string.format, table.insert, table.remove, table.concat
local unpack = unpack or table.unpack
-local contextsprint = context.sprint
-local ctxcatcodes = tex.ctxcatcodes
-local vrbcatcodes = tex.vrbcatcodes
+local catcodenumbers = catcodes.numbers
+
+local ctxcatcodes = catcodenumbers.ctxcatcodes
+local vrbcatcodes = catcodenumbers.vrbcatcodes
+
+local contextsprint = context.sprint
local trace_define = false trackers.register("context.define", function(v) trace_define = v end)
diff --git a/tex/context/base/cldf-ver.lua b/tex/context/base/cldf-ver.lua
index 237078157..b48fd253a 100644
--- a/tex/context/base/cldf-ver.lua
+++ b/tex/context/base/cldf-ver.lua
@@ -6,6 +6,10 @@ if not modules then modules = { } end modules ['cldf-ver'] = {
license = "see context related readme files"
}
+-- We have a better verbatim mechanism (context.verbatim), so that needs to be
+-- looked into. We can also store directly in buffers, although this variant
+-- works better when mixed with other code (synchronization issue).
+
local concat, tohandle = table.concat, table.tohandle
local find, splitlines = string.find, string.splitlines
local tostring, type = tostring, type
@@ -41,7 +45,7 @@ table .tocontext = t_tocontext
string .tocontext = s_tocontext
boolean.tocontext = b_tocontext
-function tocontext(first,...)
+function context.tocontext(first,...)
local t = type(first)
if t == "string" then
s_tocontext(first,...)
diff --git a/tex/context/base/colo-ext.mkiv b/tex/context/base/colo-ext.mkiv
index af6c3830e..8878da485 100644
--- a/tex/context/base/colo-ext.mkiv
+++ b/tex/context/base/colo-ext.mkiv
@@ -29,18 +29,10 @@
%D
%D will negate the colors in box zero.
-% \unexpanded\def\negatecolorbox#1%
-% {\setbox#1\hbox
-% {\startnegative % might change
-% \startcolor[\s!white]\vrule\!!height\ht#1\!!depth\dp#1\!!width\wd#1\stopcolor
-% \hskip-\wd#1%
-% \box#1%
-% \stopnegative}}
-
\unexpanded\def\negatecolorbox#1%
{\setbox#1\hbox
{\startnegative % might change
-% \startcolor[\s!white]\vrule\!!height\ht#1\!!depth\dp#1\!!width\wd#1\stopcolor
+ % \startcolor[\s!white]\vrule\s!height\ht#1\s!depth\dp#1\s!width\wd#1\stopcolor
\blackrule[\c!color=\s!white,\c!height=\ht#1,\c!depth=\dp#1,\c!width=\wd#1]%
\hskip-\wd#1%
\box#1%
diff --git a/tex/context/base/colo-icc.lua b/tex/context/base/colo-icc.lua
index 904d42143..4ab28eb68 100644
--- a/tex/context/base/colo-icc.lua
+++ b/tex/context/base/colo-icc.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['colo-ini'] = {
+if not modules then modules = { } end modules ['colo-icc'] = {
version = 1.000,
comment = "companion to colo-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
diff --git a/tex/context/base/colo-ini.lua b/tex/context/base/colo-ini.lua
index 747e2116f..e08f3d387 100644
--- a/tex/context/base/colo-ini.lua
+++ b/tex/context/base/colo-ini.lua
@@ -16,7 +16,8 @@ local trace_define = false trackers.register("colors.define",function(v) trace_
local report_colors = logs.reporter("colors","defining")
-local attributes, context, commands = attributes, context, commands
+local attributes, backends, storage = attributes, backends, storage
+local context, commands = context, commands
local settings_to_hash_strict = utilities.parsers.settings_to_hash_strict
@@ -666,83 +667,51 @@ function colors.defineintermediatecolor(name,fraction,c_one,c_two,a_one,a_two,sp
end
end
---~ local function f(one,two,i,fraction_one,fraction_two)
---~ local otf = fraction_one * one[i] + fraction_two * two[i]
---~ if otf > 1 then
---~ otf = 1
---~ end
---~ return otf
---~ end
-
---~ function colors.defineduocolor(name,fraction_one,c_one,fraction_two,c_two,global,freeze)
---~ local one, two = colorvalues[c_one], colorvalues[c_two]
---~ if one and two then
---~ fraction_one = tonumber(fraction_one) or 1
---~ fraction_two = tonumber(fraction_two) or 1
---~ local csone, cstwo = one[1], two[1]
---~ local ca
---~ if csone == 2 then
---~ ca = register_color(name,'gray',f(one,two,2,fraction_one,fraction_two))
---~ elseif csone == 3 then
---~ ca = register_color(name,'rgb', f(one,two,3,fraction_one,fraction_two),
---~ f(one,two,4,fraction_one,fraction_two),
---~ f(one,two,5,fraction_one,fraction_two))
---~ elseif csone == 4 then
---~ ca = register_color(name,'cmyk',f(one,two,6,fraction_one,fraction_two),
---~ f(one,two,7,fraction_one,fraction_two),
---~ f(one,two,8,fraction_one,fraction_two),
---~ f(one,two,9,fraction_one,fraction_two))
---~ else
---~ ca = register_color(name,'gray',f(one,two,2,fraction_one,fraction_two))
---~ end
---~ definecolor(name,ca,global,freeze)
---~ end
---~ end
-
- local function f(i,colors,fraction)
- local otf = 0
- for c=1,#colors do
- otf = otf + (tonumber(fraction[c]) or 1) * colors[c][i]
- end
- if otf > 1 then
- otf = 1
- end
- return otf
+local function f(i,colors,fraction)
+ local otf = 0
+ for c=1,#colors do
+ otf = otf + (tonumber(fraction[c]) or 1) * colors[c][i]
+ end
+ if otf > 1 then
+ otf = 1
end
+ return otf
+end
- function colors.definemixcolor(name,fractions,cs,global,freeze)
- local values = { }
- for i=1,#cs do -- do fraction in here
- local v = colorvalues[cs[i]]
- if not v then
- return
- end
- values[i] = v
- end
- local csone = values[1][1]
- local ca
- if csone == 2 then
- ca = register_color(name,'gray',f(2,values,fractions))
- elseif csone == 3 then
- ca = register_color(name,'rgb', f(3,values,fractions),
- f(4,values,fractions),
- f(5,values,fractions))
- elseif csone == 4 then
- ca = register_color(name,'cmyk',f(6,values,fractions),
- f(7,values,fractions),
- f(8,values,fractions),
- f(9,values,fractions))
- else
- ca = register_color(name,'gray',f(2,values,fractions))
+function colors.definemixcolor(name,fractions,cs,global,freeze)
+ local values = { }
+ for i=1,#cs do -- do fraction in here
+ local v = colorvalues[cs[i]]
+ if not v then
+ return
end
- definecolor(name,ca,global,freeze)
+ values[i] = v
end
+ local csone = values[1][1]
+ local ca
+ if csone == 2 then
+ ca = register_color(name,'gray',f(2,values,fractions))
+ elseif csone == 3 then
+ ca = register_color(name,'rgb', f(3,values,fractions),
+ f(4,values,fractions),
+ f(5,values,fractions))
+ elseif csone == 4 then
+ ca = register_color(name,'cmyk',f(6,values,fractions),
+ f(7,values,fractions),
+ f(8,values,fractions),
+ f(9,values,fractions))
+ else
+ ca = register_color(name,'gray',f(2,values,fractions))
+ end
+ definecolor(name,ca,global,freeze)
+end
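+
+-- Worked example (added for clarity, not part of the commit): for two rgb colors
+-- and fractions {0.6,0.4}, f computes per component 0.6*one[i] + 0.4*two[i],
+-- clipped to 1; mixing pure red (1,0,0) with pure blue (0,0,1) this way gives
+-- (0.6,0,0.4).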
-- for the moment downward compatible
local patterns = { "colo-imp-%s.mkiv", "colo-imp-%s.tex", "colo-%s.mkiv", "colo-%s.tex" }
local function action(name,foundname)
+ -- could be one command
context.startreadingfile()
context.startcolorset { name }
context.input(foundname)
@@ -877,3 +846,22 @@ end
-- context.popcatcodes()
-- end
+-- handy
+
+local models = storage.allocate { "all", "gray", "rgb", "cmyk", "spot" }
+
+colors.models = models -- check for usage elsewhere
+
+function colors.spec(name)
+ local l = attributes_list[a_color]
+ local t = colorvalues[l[name]] or colorvalues[l.black]
+ return {
+ model = models[t[1]] or models[1],
+ s = t[2],
+ r = t[3], g = t[4], b = t[5],
+ c = t[6], m = t[7], y = t[8], k = t[9],
+ }
+end
+
+-- inspect(attributes.colors.spec("red"))
+-- inspect(attributes.colors.spec("red socks"))
diff --git a/tex/context/base/colo-ini.mkiv b/tex/context/base/colo-ini.mkiv
index 5721bb513..629f2b96a 100644
--- a/tex/context/base/colo-ini.mkiv
+++ b/tex/context/base/colo-ini.mkiv
@@ -242,8 +242,8 @@
\setfalse\c_colo_convert_gray
\getvalue{\??colorconversions\directcolorsparameter\c!conversion}% could be a nice \ifcsname
% too often:
- \ifconditional\c_colo_rgb_supported \colo_helpers_show_message\m!colors9\v!rgb \fi
- \ifconditional\c_colo_cmyk_supported\colo_helpers_show_message\m!colors9\v!cmyk\fi
+ \ifconditional\c_colo_rgb_supported \colo_helpers_show_message\m!colors{10}\v!rgb \fi
+ \ifconditional\c_colo_cmyk_supported\colo_helpers_show_message\m!colors{10}\v!cmyk\fi
\colo_helpers_set_current_model
\ifproductionrun
\edef\p_pagecolormodel{\directcolorsparameter\c!pagecolormodel}%
@@ -357,16 +357,26 @@
\normalexpanded{\colo_palets_define[#1][\csname\??colorpaletspecification#2\endcsname]}%
\fi}}
-\def\colo_palets_define_one#1#2% get rid of { } in #2
- {\colo_palets_define_two{#1}[#2]}%
+% \def\colo_palets_define_one#1#2% get rid of { } in #2
+% {\colo_palets_define_two{#1}[#2]}%
+
+\def\colo_palets_define_one#1#2% get rid of { }
+ {\doifassignmentelse{#2} % catch empty entries
+ {\colo_palets_define_two{#1}[#2]}
+ {\colo_palets_define_three{#1}{#2}}}
\def\colo_palets_define_two#1[#2=#3]%
{\edef\m_colo_palets_tmp{\ifx\m_colo_palets_tmp\empty\else\m_colo_palets_tmp,\fi#2}%
\colo_palets_define_set{#1}{#2}{#3}}%
+\def\colo_palets_define_three#1#2%
+ {\ifcsname\??colorpaletspecification#2\endcsname
+ \processcommacommand[\csname\??colorpaletspecification#2\endcsname]{\colo_palets_define_one{#1}}%
+ \fi}
+
\let\paletsize\!!zerocount
-\def\getpaletsize[#1]%
+\unexpanded\def\getpaletsize[#1]% only works for valid k=v definitions
{\getcommacommandsize[\csname\??colorpaletspecification#1\endcsname]%
\edef\paletsize{\number\commalistsize}}
@@ -802,9 +812,19 @@
\def\defaulttextcolor {black}
\def\s!themaintextcolor{themaintextcolor}
+\unexpanded\def\inheritmaintextcolor
+ {\ifx\maintextcolor\empty\else\colo_helpers_activate\maintextcolor\fi}
+
+\unexpanded\def\onlyinheritmaintextcolor
+ {\ifx\maintextcolor\empty
+ \deactivatecolor
+ \else
+ \colo_helpers_activate\maintextcolor
+ \fi}
+
\appendtoks
\deactivatecolor % public?
- \ifx\maintextcolor\empty\else\colo_helpers_activate\maintextcolor\fi
+ \inheritmaintextcolor
\to \everybeforeoutput
\def\colo_helpers_switch_to_maintextcolor#1%
@@ -961,7 +981,7 @@
% ignores in attribute handler
%
-% \def\forcecolorhack{\vrule\!!width\zeropoint\!!height\zeropoint\!!depth\zeropoint}
+% \def\forcecolorhack{\vrule\s!width\zeropoint\s!height\zeropoint\s!depth\zeropoint}
% \normal added else fails in metafun manual (leaders do a hard scan)
diff --git a/tex/context/base/colo-run.lua b/tex/context/base/colo-run.lua
index 4f1916d5a..27f7c6b12 100644
--- a/tex/context/base/colo-run.lua
+++ b/tex/context/base/colo-run.lua
@@ -6,8 +6,10 @@ if not modules then modules = { } end modules ['colo-run'] = {
license = "see context related readme files"
}
--- For historic reasons the core has a couple of tracing
--- features. Nowadays these would end up in modules.
+-- For historic reasons the core has a couple of tracing features. Nowadays
+-- these would end up in modules.
+
+local colors, commands, context, utilities = colors, commands, context, utilities
local colors= attributes.colors
diff --git a/tex/context/base/colo-run.mkiv b/tex/context/base/colo-run.mkiv
index c330accf3..5084fdd35 100644
--- a/tex/context/base/colo-run.mkiv
+++ b/tex/context/base/colo-run.mkiv
@@ -42,9 +42,9 @@
%D Palets
\unexpanded\gdef\showpalet
- {\dodoubleargument\doshowpalet}
+ {\dodoubleargument\colo_show_palet}
-\gdef\doshowpalet[#1][#2]%
+\gdef\colo_show_palet[#1][#2]%
{\ifcsname\??colorpalet#1\endcsname
\doifinsetelse\v!vertical{#2} \colo_palets_show_vertical \colo_palets_show_horizontal [#1][#2]%
\fi}
@@ -59,8 +59,8 @@
\tabskip\zeropoint
\def\colo_palets_show_palet##1%
{\doifinsetelse\v!number{#2}{##1\hskip.5em}{}&
- \color[##1]{\vrule\!!width3em\!!height\strutht\!!depth\strutdp}%
- \graycolor[##1]{\vrule\!!width3em\!!height\strutht\!!depth\strutdp}&
+ \color[##1]{\vrule\s!width3em\s!height\strutht\s!depth\strutdp}%
+ \graycolor[##1]{\vrule\s!width3em\s!height\strutht\s!depth\strutdp}&
\doifinset\v!value{#2}{\hskip.5em\colorvalue{##1}}\crcr}
\halign
{\hss##&\hss##\hss#\cr
@@ -92,11 +92,11 @@
\colo_palets_process[#1]\colo_palets_show_palet}\cr
\doifinset\v!name{#2}{#1\hskip.5em}%
\def\colo_palets_show_palet##1%
- {&\strut\color[##1]{\vrule\!!width\!!widtha\!!height\strutht\!!depth\zeropoint}}%
+ {&\strut\color[##1]{\vrule\s!width\!!widtha\s!height\strutht\s!depth\zeropoint}}%
\colo_palets_process[#1]\colo_palets_show_palet\crcr
\noalign{\vskip-\strutdepth}%
\def\colo_palets_show_palet##1%
- {&\graycolor[##1]{\vrule\!!width\!!widtha\!!height\zeropoint\!!depth\strutdp}}%
+ {&\graycolor[##1]{\vrule\s!width\!!widtha\s!height\zeropoint\s!depth\strutdp}}%
\colo_palets_process[#1]\colo_palets_show_palet\crcr
\doifinset\v!value{#2}
{\def\colo_palets_show_palet##1%
@@ -137,13 +137,13 @@
\def\colo_palets_compare##1%
{\hbox
{\setbox0\hbox
- {#1[##1]{\vrule\!!width\hsize\!!height3ex}}%
+ {#1[##1]{\vrule\s!width\hsize\s!height3ex}}%
\wd0\zeropoint
\box0
\hbox to \hsize
{\def\colo_palets_compare####1%
{\hbox to \!!widtha
- {\hss#1[####1]{\vrule\!!width.5\!!widtha\!!height2.25ex\!!depth-.75ex}\hss}}%
+ {\hss#1[####1]{\vrule\s!width.5\!!widtha\s!height2.25ex\s!depth-.75ex}\hss}}%
\processcommacommand[\getvalue{\??colorpalet#2}]\colo_palets_compare}}
\endgraf}
\processcommacommand[\getvalue{\??colorpalet#2}]\colo_palets_compare}}
@@ -171,8 +171,8 @@
{\halign
{\hss####\hss\cr
\doifinset\v!number{#2}{\strut##1}\cr
- \color[#1:##1]{\vrule\!!width4em\!!height\strutht\!!depth\zeropoint}\cr
- \graycolor[#1:##1]{\vrule\!!width4em\!!height\zeropoint\!!depth\strutdp}\cr
+ \color[#1:##1]{\vrule\s!width4em\s!height\strutht\s!depth\zeropoint}\cr
+ \graycolor[#1:##1]{\vrule\s!width4em\s!height\zeropoint\s!depth\strutdp}\cr
\doifinset\v!value{#2}{\colorvalue{#1:##1}\strut}\crcr}}}}%
\hbox
{\doifinset\v!name{#2}
@@ -195,8 +195,8 @@
\def\colo_groups_show_group##1%
{\doifcolor{#1:##1}
{\doifinset\v!number{#2}{##1\hskip.5em}&
- \color[#1:##1]{\vrule\!!width2.5em\!!height\strutht\!!depth\strutdp}%
- \graycolor[#1:##1]{\vrule\!!width2.5em\!!height\strutht\!!depth\strutdp}&
+ \color[#1:##1]{\vrule\s!width2.5em\s!height\strutht\s!depth\strutdp}%
+ \graycolor[#1:##1]{\vrule\s!width2.5em\s!height\strutht\s!depth\strutdp}&
\doifinset\v!value{#2}{\hskip.5em\colorvalue{#1:##1}}\crcr}}%
\halign
{\hss##&\hss##\hss#\hss\cr
@@ -228,11 +228,11 @@
\def\colo_groups_compare_step#1#2#3%
{\hbox to \hsize
{\setbox0\hbox
- {#1[#2:#3]{\vrule\!!width\hsize\!!height3ex}}%
+ {#1[#2:#3]{\vrule\s!width\hsize\s!height3ex}}%
\wd0\zeropoint
\box0
\hbox to \hsize
- {\hss\dorecurse\!!counta{#1[#2:\recurselevel]{\vrule\!!width.5\!!widtha\!!height2.25ex\!!depth-.75ex}\hss}}}
+ {\hss\dorecurse\!!counta{#1[#2:\recurselevel]{\vrule\s!width.5\!!widtha\s!height2.25ex\s!depth-.75ex}\hss}}}
\endgraf}
\protect \endinput
diff --git a/tex/context/base/cont-log.mkiv b/tex/context/base/cont-log.mkiv
index 67647920d..5d4133143 100644
--- a/tex/context/base/cont-log.mkiv
+++ b/tex/context/base/cont-log.mkiv
@@ -118,7 +118,7 @@
\logofont}
\def\syst_logos_meta_hyphen % there is no hyphenchar in this font
- {\discretionary{\vrule\!!height.33em\!!depth-.27em\!!width.33em}{}{}}
+ {\discretionary{\vrule\s!height.33em\s!depth-.27em\s!width.33em}{}{}}
\unexpanded\def\MetaFont
{\dontleavehmode
@@ -270,4 +270,12 @@
\unexpanded\def\MPII{MpII}
\unexpanded\def\MPIV{MpIV}
+\appendtoks
+ \def\ConTeXt {ConTeXt}%
+ \def\MetaPost{MetaPost}%
+ \def\MetaFont{MetaFont}%
+ \def\MetaFun {MetaFun}%
+ \def\TeX {TeX}%
+\to \everysimplifycommands
+
\protect \endinput
diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii
index b4958762f..2c07401d8 100644
--- a/tex/context/base/cont-new.mkii
+++ b/tex/context/base/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2012.05.30 11:26}
+\newcontextversion{2012.10.19 00:06}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 5a28f8e29..e791d3ba8 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,515 +11,60 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2012.05.30 11:26}
+\newcontextversion{2012.10.19 00:06}
-%D This file is loaded at runtime, thereby providing an
-%D excellent place for hacks, patches, extensions and new
-%D features.
+%D This file is loaded at runtime, thereby providing an excellent place for
+%D hacks, patches, extensions and new features.
\unprotect
\writestatus\m!system{beware: some patches loaded from cont-new.mkiv}
-\def\dividedsize#1#2#3% size gap n
- {\dimexpr
- \ifnum\dimexpr#1\relax>\plusone
- (\dimexpr#1\relax-\numexpr#3-1\relax\dimexpr#2\relax)/#3\else#1%
- \fi
- \relax}
-
-\def\singlewidened #1{\hbox spread 1em{\hss#1\hss}}
-\def\complexwidened[#1]#2{\hbox spread #1{\hss#2\hss}}
+%D Maybe:
-\definecomplexorsimple\widened
-
-\let\active\activecatcode % for a while (tikz)
-
-% todo
-%
-% \def\definelocation{\dodoubleargument\dodefinelocation}
-% \def\dodefinelocation[#1][#2]{\setvalue{loc:#1}{#2}}
-%
-% \definelocation[lt] [\v!left\v!top]
-% \definelocation[tl] [\v!left\v!top]
-% \definelocation[\v!top\v!left][\v!left\v!top]
-%
-% \def\getlocation#1{\executeifdefined{loc:#1}{#1}}
-
-% \let\cs\getvalue % no, we want \cs to be czech
-
-% experimental so this may change
+\unexpanded\def\tightvbox{\dowithnextbox{\dp\nextbox\zeropoint\box\nextbox}\vbox}
+\unexpanded\def\tightvtop{\dowithnextbox{\ht\nextbox\zeropoint\box\nextbox}\vtop}
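+
+% A tiny illustration (not in the original file): \tightvbox zeroes the depth of
+% the box it builds and \tightvtop zeroes its height, e.g.
+%
+% \ruledhbox{\tightvbox{\hbox{gap}}} % box keeps its height but has zero depth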
-\def\startdescriptions
- {\dosingleempty\dostartdescriptions}
+%D Needs some work:
-\def\dostartdescriptions[#1]%
- {\begingroup
- \def\item{\getvalue{#1}}%
- \let\dostoppairdescription \donothing
- \let\@@description \dostartpairdescription
- \let\@@startsomedescription\dostartsomedescription}
+\unexpanded\def\startgridcorrection
+ {\dosingleempty\spac_grid_correction_start}
-\def\stopdescriptions
- {\dostoppairdescription
- \endgroup}
-
-\def\dostartpairdescription[#1][#2]%
- {\dostoppairdescription
- \def\dostoppairdescription{\@@stopdescription{#1}}%
- \bgroup
- \def\currentdescription{#1}%
- \doifelse{\descriptionparameter{\s!do\c!state}}\v!start
- {\@@makedescription{#1}[#2]{}}
- {\@@makedescription{#1}[#2]}}
-
-\def\dostartsomedescription% #1[#2]#3%
- {\bgroup
- \@@makedescription} % {#1}[#2]{#3}}
-
-% \starttext
-%
-% \definedescription[test]
-%
-% \startdescriptions
-% \test{Foo} Bar bar bar
-% \test{Foo} Bar bar bar
-% \test{Foo} Bar bar bar
-% \stopdescriptions
-%
-% \startdescriptions[test]
-% \item{Foo} Bar bar bar
-% \item{Foo} Bar bar bar
-% \item{Foo} Bar bar bar
-% \stopdescriptions
-%
-% \startdescriptions
-% \starttest{Foo} Bar bar bar \stoptest
-% \starttest{Foo} Bar bar bar \stoptest
-% \starttest{Foo} Bar bar bar \stoptest
-% \stopdescriptions
-%
-% \startdescriptions[test]
-% \item{Foo} Bar bar bar
-% \item{Foo} Bar bar bar
-% \item{Foo} Bar bar bar
-% \stopdescriptions
-%
-% \stoptext
-
-% this will be activated when
-%
-% \newinsert\thispageinsert % <- installinsertion
-%
-% \def\flushatthispage
-% {\bgroup
-% \dowithnextbox{\insert\thispageinsert{\box\nextbox}\egroup}%
-% \hbox}
-%
-% \appendtoks
-% \ifvoid\thispageinsert\else\hbox{\smashedbox\thispageinsert}\fi
-% \to \everyshipout
-
-% \definemarkedpage[nobackgrounds]
-% \markpage[nobackgrounds]
-% \doifmarkedpageelse{nobackgrounds}
-
-% Just a simple and fast hanger, for usage in macros.
-
-\def\setuphanging
- {\dodoubleempty\getparameters[\??ha]}
-
-\setuphanging
- [\c!distance=.5em]
-
-\def\starthanging
- {\noindent\bgroup
- \dowithnextbox
- {\setbox\nextbox\hbox{\flushnextbox\hskip\@@hadistance}%
- \hangindent\nextboxwd
- \hangafter\plusone
- \flushnextbox\ignorespaces}
- \hbox}
-
-\def\stophanging
- {\endgraf
- \egroup}
-
-% experimental
-
-\def\stophangaround
- {\endgraf
- \egroup}
-
-\def\starthangaround
- {\noindent\bgroup
- \dowithnextbox
- {\ifdim\nextboxht>\strutht\setbox\nextbox\tbox{\flushnextbox}\fi
- \setbox\nextbox\hbox{\flushnextbox\hskip\@@hadistance}%
- \getboxheight\scratchdimen\of\box\nextbox
- \getnoflines\scratchdimen
- \nextboxht\strutht
- \nextboxdp\strutdp
- \hangindent\nextboxwd
- \hangafter-\noflines
- \llap{\flushnextbox}\ignorespaces}
- \hbox}
-
-\def\modevalue#1#2#3%
- {\@EA\ifx\csname\@mode@\systemmodeprefix#1\endcsname\endcsname\enabledmode#2\else#2\fi}
-
-\def\systemmodevalue#1%
- {\modevalue{\systemmodeprefix#1}}
-
-% new, still to be improved
-%
-% \dorecurse{10}
-% {\input thuan
-% \placefigure{}{\framed[height=1.5cm]{test}}
-% \placefloatplaceholder}
-
-\def\placefloatplaceholder
- {\ifroomforfloat \else
- \scratchdimen\pagegoal
- \advance\scratchdimen-\pagetotal
- \advance\scratchdimen-3\lineheight
- \ifdim\scratchdimen>\zeropoint
- \startlinecorrection[blank]
- \mhbox{\inframed{\labeltexts{placeholder}{\lastcaptiontag}}}%
- \stoplinecorrection
- \else
- \allowbreak
- \fi
- \fi}
-
-\setuplabeltext
- [placeholder={, moved}]
-
-% move to support module, and then use context(...)
-
-\startluacode
- function commands.percentageof(str,dim)
- local n = str:match("^(.*)%%$")
- context.sprint(tex.ctxcatcodes,(n and (tonumber(n)/100)*dim .. "sp") or str)
- end
-\stopluacode
-
-\gdef\setpercentdimen#1#2%
- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax}
-
-% \scratchdimen=100pt \setpercentdimen\scratchdimen{10\letterpercent} \the\scratchdimen
-% \scratchdimen=100pt \setpercentdimen\scratchdimen{5pt} \the\scratchdimen
-
-\bgroup \permitcircumflexescape
-
-\obeylines % don't remove %'s !
-
-\gdef\collapsedspace#1%
- {\ifx#1^^M%
- \expandafter\collapsedspace
- \else
- \space
- \expandafter#1%
- \fi}
-
-\gdef\collapsespaces
- {\prependtoksonce\relax\to\everyeof%
- \ignorelines%
- \ignoretabs%
- \let\obeyedspace\collapsedspace%
- \obeyspaces}
-
-\egroup
-
-\def\inlinedbox
- {\bgroup
- \dowithnextbox
- {\scratchdimen\nextboxht
- \advance\scratchdimen\nextboxdp
- \advance\scratchdimen-\lineheight
- \divide\scratchdimen\plustwo
- \advance\scratchdimen\strutdepth
- \setbox\nextbox\hbox{\lower\scratchdimen\flushnextbox}%
- \nextboxht\strutht
- \nextboxdp\strutdp
- \flushnextbox
- \egroup}%
- \hbox}
-
-\def\dimenratio#1#2% etex only
- {\withoutpt\the\dimexpr2\dimexpr(#1)/\dimexpr(#2)/32768\relax\relax}
-
-\def\doxprecurse#1#2%
- {\ifnum#1=\zerocount % no \ifcase
- \expandafter\gobblethreearguments
- \else
- #2\expandafter\expandafter\expandafter\doxprecurse\expandafter
- \fi\expandafter{\the\numexpr#1-1\relax}{#2}}
-
-\def\buttonframed{\dodoubleempty\localframed[\??bt]} % goodie
-
-\unexpanded\def\asciistr#1{\dontleavehmode{\defconvertedargument\ascii{#1}\verbatimfont\ascii}}
-
-\def\shapefill{\vskip\zeropoint\!!plus\lineheight\!!minus\lineheight\relax}
-
-% \ruledhbox
-% {\startignorespaces
-% \def\oeps{a}
-% \startignorespaces
-% \def\oeps{a}
-% \stopignorespaces
-% \def\oeps{a}
-% \stopignorespaces
-% \oeps}
-
-\newsignal\boissignal
-\newcount \boislevel
-
-\long\def\startignorespaces
- {\advance\boislevel\plusone
- \ifcase\boislevel\or \ifhmode
- \hskip\boissignal
- \fi \fi
- \ignorespaces}
-
-\long\def\stopignorespaces
- {\ifcase\boislevel\or \ifhmode
- \doloop
- {\ifdim\lastskip=\zeropoint
- \exitloop
- \else\ifdim\lastskip=\boissignal
- \unskip
- \exitloop
- \else
- \unskip
- \fi\fi}%
- \fi \fi
- \advance\boislevel\minusone}
-
-\def\minimalhbox#1#%
- {\dowithnextbox
- {\bgroup
- \setbox\scratchbox\hbox#1{\hss}%
- \ifdim\nextboxwd<\wd\scratchbox\nextboxwd\wd\scratchbox\fi
- \flushnextbox
- \egroup}
- \hbox}
-
-\def\gobbleuntilempty#1\empty{}
-
-\def\dodimchoice#1#2#3%
- {\ifdim#1#2%
- #3\@EA\gobbleuntilempty
- \else
- \@EA\dodimchoice
- \fi{#1}}
-
-\def\donumchoice#1#2#3%
- {\ifnum#1#2%
- #3\@EA\gobbleuntilempty
- \else
- \@EA\dodimchoice
- \fi{#1}}
-
-\def\dimchoice#1#2{\dodimchoice{#1}#2{=#1}{#1}\empty}
-\def\numchoice#1#2{\donumchoice{#1}#2{=#1}{#1}\empty}
-
-% \the\dimexpr(\dimchoice {7pt}{{<10pt}{8pt}{<12pt}{9pt}{<15pt}{10pt}{=11pt}{12pt}})
-% \the\dimexpr(\dimchoice{11pt}{{<10pt}{8pt}{<12pt}{9pt}{<15pt}{10pt}{=11pt}{12pt}})
-% \the\dimexpr(\dimchoice{14pt}{{<10pt}{8pt}{<12pt}{9pt}{<15pt}{10pt}{=11pt}{12pt}})
-
-\def\tabulaterule % to be redone, not correct
- {\dotabulaterule
- {\hrule\!!height.5\scratchdimen\!!depth.5\scratchdimen\relax
- \doifvalue{\??tt\currenttabulate\c!distance}\v!grid
- {\kern-\scratchdimen}}} % experimental tm-prikkels
-% so far
-
-% between alignment lines certain rules apply, and even a
-% simple test can mess up a table, which is why we have a
-% special test facility
-%
-% \ruledvbox
-% {\starttabulate[|l|p|]
-% \NC 1test \NC test \NC \NR
-% \tableifelse{\doifelse{a}{a}}{\NC Xtest \NC test \NC \NR}{}%
-% \stoptabulate}
-
-\long\def\tableifelse#1%
- {\tablenoalign
- {#1%
- {\aftergroup \firstoftwoarguments}%
- {\aftergroup\secondoftwoarguments}}}
-
-\long \def\tableiftextelse#1{\tableifelse{\doiftextelse{#1}}}
-
-\def\tightvbox{\dowithnextbox{\nextboxdp\zeropoint\flushnextbox}\vbox}
-\def\tightvtop{\dowithnextbox{\nextboxht\zeropoint\flushnextbox}\vtop}
-
-% what is this stupid macro meant for:
-
-\def\hyphenationpoint
- {\hskip\zeropoint}
-
-\def\hyphenated#1%
- {\bgroup
- \!!counta\zerocount
- \def\hyphenated##1{\advance\!!counta\plusone}%
- \handletokens#1\with\hyphenated
- \!!countb\plusone
- \def\hyphenated##1%
- {##1%
- \advance\!!countb\plusone\relax
- \ifnum\!!countb>2 \ifnum\!!countb<\!!counta
- \hyphenationpoint
- \fi\fi}%
- \handletokens#1\with\hyphenated
- \egroup}
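+% For the record, a usage sketch of the macro removed above (illustrative,
+% not from the original source): \handletokens walks the argument twice,
+% first counting the tokens, then reinjecting them with a potential
+% breakpoint (\hskip\zeropoint) between the inner ones:
+%
+% \hyphenated{averylongcompoundword}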
-
-\def\obeysupersubletters
- {\let\super\normalsuper
- \let\suber\normalsuber
- \let\normalsuper\letterhat
- \let\normalsuber\letterunderscore
- \enablesupersub}
-
-\def\obeysupersubmath
- {\let\normalsuper\letterhat
- \let\normalsuber\letterunderscore
- \enablesupersub}
-
-\def\startgridcorrection
- {\dosingleempty\dostartgridcorrection}
-
-\def\dostartgridcorrection[#1]%
+\def\spac_grid_correction_start[#1]%
{\ifgridsnapping
\snaptogrid[#1]\vbox\bgroup
\else
\startbaselinecorrection
\fi}
-\def\stopgridcorrection
+\unexpanded\def\stopgridcorrection
{\ifgridsnapping
\egroup
\else
\stopbaselinecorrection
\fi}
-
-\def\checkgridsnapping
+
+\unexpanded\def\checkgridsnapping
{\lineskip\ifgridsnapping\zeropoint\else\normallineskip\fi}
-
-\def\startplaatsen
- {\dosingleempty\dostartplaatsen}
-\def\dostartplaatsen[#1]% later also n*left etc
- {\endgraf
- \noindent\bgroup
- \setlocalhsize
- \hbox to \localhsize\bgroup
- \doifnot{#1}\v!left\hss
- \def\stopplaatsen
- {\unskip\unskip\unskip
- \doifnot{#1}\v!right\hss
- \egroup
- \egroup
- \endgraf}%
- \gobblespacetokens}
+%D Probably obsolete:
-% \startplaatsen[links] bla \stopplaatsen
-
-\def\startcolumnmakeup % don't change
+\unexpanded\def\startcolumnmakeup % don't change
{\bgroup
- \getrawnoflines\textheight % the text height can have topskip, hence raw
- \scratchdimen\noflines\lineheight
- \advance\scratchdimen-\lineheight
- \advance\scratchdimen\topskip
- \setbox\scratchbox
- \ifcase\showgridstate\vbox\else\ruledvbox\fi to \scratchdimen\bgroup
- \forgetall} % ! don't change
+ \getrawnoflines\textheight % raw, as the text height can include topskip
+ \setbox\scratchbox\vbox to \dimexpr\noflines\lineheight-\lineheight+\topskip\relax
+ \bgroup
+ \forgetall}
-\def\stopcolumnmakeup
+\unexpanded\def\stopcolumnmakeup
{\egroup
\dp\scratchbox\zeropoint
\wd\scratchbox\textwidth
\box\scratchbox
\egroup
\page_otr_command_synchronize_hsize}
-
-\long\def\startexternalfigure
- {\dotripleempty\dostartexternalfigure}
-
-\long\def\dostartexternalfigure[#1][#2][#3]#4\stopexternalfigure
- {\gdef\figuredescription{#4}%
- \externalfigure[#1][#2][#3]%
- \globallet\figuredescription\empty}
-
-\let\figuredescription\empty
-
-% incomplete, will be a special case of float placement
-
-\def\startfixed{\dosingleempty\dostartfixed}
-
-\def\dostartfixed[#1]%
- {\expanded{\dowithnextbox{\noexpand\dodofixed{\ifhmode0\else1\fi}{#1}}}%
- \vbox\bgroup
- \setlocalhsize}
-
-\def\stopfixed
- {\egroup}
-
-\def\dodofixed#1#2%
- {\ifcase#1\relax
- \processaction
- [#2]
- [ \v!high=>\bbox {\flushnextbox},
- \v!low=>\tbox {\flushnextbox},
- \v!middle=>\vcenter{\flushnextbox},
- \v!lohi=>\vcenter{\flushnextbox},
- \s!unknown=>\tbox {\flushnextbox},
- \s!default=>\tbox {\flushnextbox}]%
- \else
- \startbaselinecorrection
- \noindent\flushnextbox
- \stopbaselinecorrection
- \fi}
-
-% \startitemize
-%
-% \item \externalfigure[koe][height=2cm]
-% \item \externalfigure[koe][height=2cm]
-% \item \externalfigure[koe][height=2cm]
-% \item \externalfigure[koe][height=2cm]
-%
-% \page
-%
-% \item \startfixed \externalfigure[koe][height=2cm]\stopfixed
-% \item \startfixed[high]\externalfigure[koe][height=2cm]\stopfixed
-% \item \startfixed[low] \externalfigure[koe][height=2cm]\stopfixed
-% \item \startfixed[lohi]\externalfigure[koe][height=2cm]\stopfixed
-%
-% \page
-%
-% \item test \startfixed \externalfigure[koe][height=2cm]\stopfixed
-% \item test \startfixed[high]\externalfigure[koe][height=2cm]\stopfixed
-% \item test \startfixed[low] \externalfigure[koe][height=2cm]\stopfixed
-% \item test \startfixed[lohi]\externalfigure[koe][height=2cm]\stopfixed
-%
-% \page
-%
-% \item test \par \startfixed \externalfigure[koe][height=2cm]\stopfixed
-% \item test \par \startfixed[high]\externalfigure[koe][height=2cm]\stopfixed
-% \item test \par \startfixed[low] \externalfigure[koe][height=2cm]\stopfixed
-% \item test \par \startfixed[lohi]\externalfigure[koe][height=2cm]\stopfixed
-%
-% \stopitemize
-
-\def\obeyfollowingtoken{{}} % end \cs scanning
-
-% potential new defaults:
-%
-% \setbreakpoints[compound]
-% till we fixed all styles:
+%D Until all styles have been fixed:
\let\\=\crlf
diff --git a/tex/context/base/cont-nop.mkiv b/tex/context/base/cont-nop.mkiv
new file mode 100644
index 000000000..c8188503e
--- /dev/null
+++ b/tex/context/base/cont-nop.mkiv
@@ -0,0 +1,22 @@
+%D \module
+%D [ file=cont-nop,
+%D version=2012.06.01,
+%D title=\CONTEXT\ Miscellaneous Macros,
+%D subtitle=Startup Dummy,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\writestatus\m!system{loading dummy replacement for jobname}
+
+\protect
+
+\finishjob
+
+\endinput
diff --git a/tex/context/base/cont-yes.mkiv b/tex/context/base/cont-yes.mkiv
new file mode 100644
index 000000000..2a032fc0b
--- /dev/null
+++ b/tex/context/base/cont-yes.mkiv
@@ -0,0 +1,80 @@
+%D \module
+%D [ file=cont-yes,
+%D version=2012.06.01,
+%D title=\CONTEXT\ Miscellaneous Macros,
+%D subtitle=Startup Stub,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% At some point I will reconsider the \starttext .. \stoptext
+% wrapping as we can assume proper styling. It's a left-over from
+% mkii that we need to get rid of.
+
+\startluacode
+
+ -- When a style is loaded there is a good chance that we never enter
+ -- this code.
+
+ environment.initializefilenames()
+
+ local arguments = environment.arguments
+ local suffix = environment.suffix
+ local filename = environment.filename
+
+ if suffix == "xml" or arguments.forcexml then
+
+ -- Maybe we should move the preamble parsing here as it
+ -- can be part of (any) loaded (sub) file. The \starttext
+ -- wrapping might go away.
+
+ context.starttext()
+ context.xmlprocess("main",filename,"")
+ context.stoptext()
+
+ elseif suffix == "cld" or arguments.forcecld then
+
+ context.runfile(filename)
+
+ elseif suffix == "lua" or arguments.forcelua then
+
+ -- The wrapping might go away. Why is it there in the
+ -- first place?
+
+ context.starttext()
+ context.ctxlua(string.format('dofile("%s")',filename))
+ context.stoptext()
+
+ elseif suffix == "mp" or arguments.forcemp then
+
+ context.starttext()
+ context.processMPfigurefile(filename)
+ context.stoptext()
+
+ -- elseif suffix == "prep" then
+ --
+ -- -- Why do we wrap here? Because it can be xml? Let's get rid
+ -- -- of prepping in general.
+ --
+ -- context.starttext()
+ -- context.input(filename)
+ -- context.stoptext()
+
+ else
+
+ -- We have a regular tex file, so no \starttext yet: the preamble
+ -- can still load fonts.
+
+ context.input(filename)
+
+ end
+
+ context.finishjob()
+
+\stopluacode
+
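+% A dispatch sketch (hedged; file names are placeholders and the --force*
+% flags are assumed to map onto the argument names checked above):
+%
+%   context document.xml            % xml branch, wrapped in \starttext
+%   context --forcecld graphic.cld  % cld branch, handled by context.runfile
+%   context --forcelua helpers.lua  % lua branch, dofile inside \starttext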
+\endinput
diff --git a/tex/context/base/context-base.lmx b/tex/context/base/context-base.lmx
index 09817463b..2b093c3e1 100644
--- a/tex/context/base/context-base.lmx
+++ b/tex/context/base/context-base.lmx
@@ -19,7 +19,9 @@
+
+
+
diff --git a/tex/context/base/context-help.lmx b/tex/context/base/context-help.lmx
index 5401fb65d..939b70cb6 100644
--- a/tex/context/base/context-help.lmx
+++ b/tex/context/base/context-help.lmx
@@ -19,6 +19,7 @@