From b8ac6d7b7fdb16293c28034c349efd5b0b7b20b3 Mon Sep 17 00:00:00 2001 From: Marius Date: Sun, 20 Oct 2013 01:21:09 +0300 Subject: beta 2013.10.20 07:09 --- tex/context/base/anch-bck.mkvi | 26 +- tex/context/base/anch-pgr.lua | 4 +- tex/context/base/anch-pgr.mkiv | 4 +- tex/context/base/anch-pos.lua | 53 +- tex/context/base/attr-eff.lua | 4 +- tex/context/base/attr-ini.lua | 9 +- tex/context/base/attr-lay.lua | 2 +- tex/context/base/attr-lay.mkiv | 5 +- tex/context/base/back-exp.lua | 23 +- tex/context/base/back-exp.mkiv | 2 +- tex/context/base/back-ini.lua | 9 - tex/context/base/back-pdf.lua | 72 + tex/context/base/back-pdf.mkiv | 96 +- tex/context/base/bibl-tra.lua | 84 +- tex/context/base/bibl-tra.mkiv | 7 +- tex/context/base/blob-ini.lua | 110 +- tex/context/base/blob-ini.mkiv | 14 +- tex/context/base/buff-imp-lua.lua | 9 +- tex/context/base/buff-imp-mp.lua | 2 + tex/context/base/buff-imp-tex.lua | 4 +- tex/context/base/buff-ini.lua | 158 +- tex/context/base/buff-ini.mkiv | 17 + tex/context/base/buff-ver.lua | 32 +- tex/context/base/buff-ver.mkiv | 66 +- tex/context/base/catc-def.mkiv | 2 + tex/context/base/catc-sym.mkiv | 3 +- tex/context/base/char-cjk.lua | 365 --- tex/context/base/char-def.lua | 104 +- tex/context/base/char-ini.lua | 66 +- tex/context/base/char-ini.mkiv | 2 + tex/context/base/char-utf.lua | 284 +- tex/context/base/chem-str.lua | 30 +- tex/context/base/chem-str.mkiv | 83 +- tex/context/base/cldf-bas.lua | 9 +- tex/context/base/cldf-com.lua | 2 + tex/context/base/cldf-ini.lua | 146 +- tex/context/base/cldf-ini.mkiv | 1 + tex/context/base/cldf-int.lua | 3 +- tex/context/base/cldf-prs.lua | 2 + tex/context/base/colo-ini.lua | 143 +- tex/context/base/colo-ini.mkiv | 38 +- tex/context/base/cont-log.mkiv | 13 +- tex/context/base/cont-new.mkii | 2 +- tex/context/base/cont-new.mkiv | 12 +- tex/context/base/cont-new.tmp | 83 - tex/context/base/context-help.lmx | 79 +- tex/context/base/context-version.pdf | Bin 4111 -> 4106 bytes tex/context/base/context.mkii | 2 +- tex/context/base/context.mkiv | 47 +- tex/context/base/context.tmp | 513 ---- tex/context/base/core-con.lua | 21 +- tex/context/base/core-con.mkiv | 5 + tex/context/base/core-dat.lua | 28 +- tex/context/base/core-def.mkiv | 15 +- tex/context/base/core-env.lua | 20 +- tex/context/base/core-fil.mkii | 4 + tex/context/base/core-uti.lua | 34 +- tex/context/base/data-exp.lua | 47 +- tex/context/base/data-met.lua | 12 +- tex/context/base/data-res.lua | 2 +- tex/context/base/data-tmp.lua | 26 + tex/context/base/data-vir.lua | 1 + tex/context/base/file-ini.lua | 6 +- tex/context/base/file-job.mkvi | 18 +- tex/context/base/file-mod.lua | 2 + tex/context/base/file-mod.mkvi | 7 +- tex/context/base/font-afm.lua | 11 +- tex/context/base/font-chk.lua | 56 +- tex/context/base/font-col.lua | 113 +- tex/context/base/font-col.mkvi | 3 +- tex/context/base/font-con.lua | 53 +- tex/context/base/font-ctx.lua | 236 +- tex/context/base/font-def.lua | 37 +- tex/context/base/font-emp.mkvi | 2 + tex/context/base/font-ext.lua | 4 +- tex/context/base/font-fea.mkvi | 10 +- tex/context/base/font-gds.lua | 146 +- tex/context/base/font-hsh.lua | 54 +- tex/context/base/font-ini.mkvi | 49 +- tex/context/base/font-lib.mkvi | 3 +- tex/context/base/font-map.lua | 59 +- tex/context/base/font-mat.mkvi | 46 +- tex/context/base/font-mis.lua | 2 +- tex/context/base/font-nod.lua | 13 +- tex/context/base/font-odv.lua | 351 ++- tex/context/base/font-ota.lua | 11 +- tex/context/base/font-otc.lua | 8 +- tex/context/base/font-otd.lua | 26 +- 
tex/context/base/font-otf.lua | 351 ++- tex/context/base/font-otn.lua | 693 +++-- tex/context/base/font-otx.lua | 11 +- tex/context/base/font-pat.lua | 32 +- tex/context/base/font-pre.mkiv | 74 +- tex/context/base/font-sel.lua | 675 ++++ tex/context/base/font-sel.mkvi | 367 +++ tex/context/base/font-sol.lua | 2 +- tex/context/base/font-sty.mkvi | 6 +- tex/context/base/font-sym.mkvi | 4 + tex/context/base/font-syn.lua | 423 ++- tex/context/base/font-tfm.lua | 2 +- tex/context/base/font-tra.mkiv | 1 + tex/context/base/font-trt.lua | 71 +- tex/context/base/grph-fig.mkiv | 26 +- tex/context/base/grph-inc.lua | 66 +- tex/context/base/grph-inc.mkiv | 7 +- tex/context/base/java-ini.lua | 30 +- tex/context/base/l-boolean.lua | 4 +- tex/context/base/l-dir.lua | 39 +- tex/context/base/l-file.lua | 26 +- tex/context/base/l-io.lua | 3 +- tex/context/base/l-lpeg.lua | 150 +- tex/context/base/l-os.lua | 107 +- tex/context/base/l-package.lua | 24 +- tex/context/base/l-pdfview.lua | 129 +- tex/context/base/l-string.lua | 2 + tex/context/base/l-table.lua | 390 +-- tex/context/base/l-unicode.lua | 368 ++- tex/context/base/l-url.lua | 16 +- tex/context/base/lang-def.mkiv | 14 +- tex/context/base/lang-ini.mkiv | 25 +- tex/context/base/lang-rep.lua | 189 ++ tex/context/base/lang-url.lua | 4 +- tex/context/base/lang-wrd.lua | 18 +- tex/context/base/layo-ini.lua | 25 +- tex/context/base/lpdf-ano.lua | 37 +- tex/context/base/lpdf-epa.lua | 9 +- tex/context/base/lpdf-fmt.lua | 2 +- tex/context/base/lpdf-nod.lua | 46 +- tex/context/base/lpdf-tag.lua | 11 +- tex/context/base/lpdf-wid.lua | 74 +- tex/context/base/luat-cbk.lua | 2 +- tex/context/base/luat-env.lua | 6 +- tex/context/base/luat-fio.lua | 20 - tex/context/base/luat-mac.lua | 20 +- tex/context/base/lxml-css.lua | 15 +- tex/context/base/lxml-ctx.lua | 6 +- tex/context/base/lxml-dir.lua | 11 +- tex/context/base/lxml-ini.mkiv | 19 +- tex/context/base/lxml-sor.lua | 21 +- tex/context/base/lxml-tab.lua | 3 +- tex/context/base/lxml-tex.lua | 16 +- tex/context/base/m-chart.lua | 6 +- tex/context/base/m-database.lua | 81 +- tex/context/base/m-database.mkiv | 6 +- tex/context/base/m-graph.mkiv | 9 +- tex/context/base/m-hemistich.mkiv | 112 + tex/context/base/m-nodechart.lua | 175 ++ tex/context/base/m-nodechart.mkvi | 192 +- tex/context/base/m-punk.mkiv | 5 +- tex/context/base/m-r.mkii | 174 ++ tex/context/base/m-r.tex | 174 -- tex/context/base/m-spreadsheet.lua | 2 +- tex/context/base/m-spreadsheet.mkiv | 3 + tex/context/base/m-translate.mkiv | 1 - tex/context/base/m-zint.mkiv | 33 +- tex/context/base/math-acc.mkvi | 181 ++ tex/context/base/math-act.lua | 344 ++- tex/context/base/math-ali.mkiv | 76 +- tex/context/base/math-def.mkiv | 277 +- tex/context/base/math-dir.lua | 145 + tex/context/base/math-fbk.lua | 233 +- tex/context/base/math-fen.mkiv | 420 ++- tex/context/base/math-frc.mkiv | 47 +- tex/context/base/math-ini.lua | 179 +- tex/context/base/math-ini.mkiv | 226 +- tex/context/base/math-map.lua | 77 +- tex/context/base/math-noa.lua | 387 ++- tex/context/base/math-pln.mkiv | 32 +- tex/context/base/math-rad.mkvi | 287 ++ tex/context/base/math-ren.lua | 4 - tex/context/base/math-stc.mkvi | 375 ++- tex/context/base/math-vfu.lua | 485 +-- tex/context/base/meta-fig.mkiv | 2 +- tex/context/base/meta-fnt.lua | 269 ++ tex/context/base/meta-fnt.mkiv | 36 + tex/context/base/meta-imp-txt.mkiv | 44 +- tex/context/base/meta-ini.mkiv | 79 +- tex/context/base/meta-pag.mkiv | 4 +- tex/context/base/meta-pdf.lua | 6 +- tex/context/base/meta-pdf.mkiv | 2 +- 
tex/context/base/mlib-ctx.lua | 1 + tex/context/base/mlib-pdf.lua | 77 +- tex/context/base/mlib-pps.lua | 343 ++- tex/context/base/mlib-pps.mkiv | 70 +- tex/context/base/mlib-run.lua | 34 + tex/context/base/mtx-context-xml.tex | 75 + tex/context/base/mult-aux.lua | 2 + tex/context/base/mult-de.mkii | 32 + tex/context/base/mult-def.lua | 77 + tex/context/base/mult-def.mkiv | 7 + tex/context/base/mult-en.mkii | 32 + tex/context/base/mult-fr.mkii | 32 + tex/context/base/mult-fun.lua | 4 + tex/context/base/mult-ini.lua | 3 + tex/context/base/mult-it.mkii | 32 + tex/context/base/mult-low.lua | 26 +- tex/context/base/mult-mps.lua | 6 +- tex/context/base/mult-nl.mkii | 32 + tex/context/base/mult-pe.mkii | 32 + tex/context/base/mult-ro.mkii | 32 + tex/context/base/mult-sys.mkiv | 14 +- tex/context/base/node-acc.lua | 114 +- tex/context/base/node-aux.lua | 39 +- tex/context/base/node-fin.lua | 716 +---- tex/context/base/node-fin.mkiv | 8 +- tex/context/base/node-fnt.lua | 297 +- tex/context/base/node-ini.lua | 219 +- tex/context/base/node-ini.mkiv | 4 + tex/context/base/node-inj.lua | 11 +- tex/context/base/node-ltp.lua | 3207 ++++++++++++++++++++ tex/context/base/node-met.lua | 669 ++++ tex/context/base/node-pro.lua | 9 +- tex/context/base/node-ref.lua | 121 +- tex/context/base/node-res.lua | 44 +- tex/context/base/node-rul.lua | 30 +- tex/context/base/node-rul.mkiv | 44 +- tex/context/base/node-ser.lua | 99 +- tex/context/base/node-shp.lua | 84 +- tex/context/base/node-tra.lua | 81 +- tex/context/base/node-tsk.lua | 16 +- tex/context/base/node-typ.lua | 58 +- tex/context/base/norm-ltx.mkii | 2 +- tex/context/base/pack-com.mkiv | 56 +- tex/context/base/pack-obj.lua | 3 +- tex/context/base/pack-rul.lua | 37 +- tex/context/base/pack-rul.mkiv | 110 +- tex/context/base/page-app.mkiv | 4 +- tex/context/base/page-brk.mkiv | 2 +- tex/context/base/page-flt.lua | 29 +- tex/context/base/page-imp.mkiv | 2 +- tex/context/base/page-inj.lua | 13 +- tex/context/base/page-lay.mkiv | 14 +- tex/context/base/page-lin.lua | 14 +- tex/context/base/page-lin.mkiv | 48 +- tex/context/base/page-mix.lua | 375 ++- tex/context/base/page-mix.mkiv | 41 +- tex/context/base/page-mrk.mkiv | 1 + tex/context/base/page-mul.mkiv | 28 +- tex/context/base/page-one.mkiv | 2 +- tex/context/base/page-pst.lua | 14 +- tex/context/base/page-pst.mkiv | 1 + tex/context/base/page-str.lua | 30 +- tex/context/base/page-txt.mkvi | 2 +- tex/context/base/phys-dim.lua | 43 +- tex/context/base/s-abr-01.tex | 2 + tex/context/base/s-abr-04.tex | 316 +- tex/context/base/s-fonts-features.mkiv | 2 +- tex/context/base/s-fonts-goodies.mkiv | 2 +- tex/context/base/s-fonts-missing.lua | 16 +- tex/context/base/s-fonts-missing.mkiv | 2 +- tex/context/base/s-fonts-shapes.mkiv | 2 +- tex/context/base/s-fonts-tables.mkiv | 2 +- tex/context/base/s-fonts-vectors.lua | 4 +- tex/context/base/s-fonts-vectors.mkiv | 2 +- tex/context/base/s-math-coverage.lua | 2 + tex/context/base/s-present-tiles.mkiv | 33 +- tex/context/base/s-sql-tables.mkiv | 2 +- tex/context/base/scrn-but.lua | 3 + tex/context/base/scrn-fld.lua | 4 +- tex/context/base/scrn-hlp.lua | 9 +- tex/context/base/scrn-wid.lua | 24 +- tex/context/base/scrn-wid.mkvi | 32 +- tex/context/base/scrp-ini.lua | 306 +- tex/context/base/scrp-ini.mkiv | 1 + tex/context/base/scrp-tha.lua | 57 + tex/context/base/sort-lan.lua | 1 + tex/context/base/spac-ali.lua | 14 +- tex/context/base/spac-ali.mkiv | 25 + tex/context/base/spac-cha.mkiv | 191 -- tex/context/base/spac-chr.lua | 90 +- tex/context/base/spac-chr.mkiv | 7 +- 
tex/context/base/spac-hor.lua | 4 + tex/context/base/spac-hor.mkiv | 59 +- tex/context/base/spac-ver.lua | 202 +- tex/context/base/spac-ver.mkiv | 50 +- tex/context/base/status-files.pdf | Bin 24734 -> 24545 bytes tex/context/base/status-lua.pdf | Bin 212009 -> 224981 bytes tex/context/base/status-mkiv.lua | 95 +- tex/context/base/strc-blk.lua | 5 +- tex/context/base/strc-con.mkvi | 67 +- tex/context/base/strc-doc.lua | 155 +- tex/context/base/strc-flt.mkvi | 3 + tex/context/base/strc-ini.lua | 12 +- tex/context/base/strc-itm.lua | 14 +- tex/context/base/strc-itm.mkvi | 19 +- tex/context/base/strc-lev.lua | 3 + tex/context/base/strc-lnt.mkvi | 11 +- tex/context/base/strc-lst.lua | 35 +- tex/context/base/strc-lst.mkvi | 2 + tex/context/base/strc-mar.lua | 30 +- tex/context/base/strc-not.lua | 16 +- tex/context/base/strc-not.mkvi | 35 +- tex/context/base/strc-num.lua | 8 +- tex/context/base/strc-pag.lua | 21 +- tex/context/base/strc-ref.lua | 127 +- tex/context/base/strc-ref.mkvi | 23 +- tex/context/base/strc-reg.lua | 85 +- tex/context/base/strc-reg.mkiv | 22 +- tex/context/base/strc-ren.mkiv | 10 +- tex/context/base/strc-sec.mkiv | 14 +- tex/context/base/supp-box.lua | 124 +- tex/context/base/supp-box.mkiv | 117 +- tex/context/base/symb-ini.lua | 1 + tex/context/base/syst-aux.lua | 32 +- tex/context/base/syst-aux.mkiv | 168 +- tex/context/base/syst-lua.lua | 5 +- tex/context/base/tabl-ntb.mkiv | 37 +- tex/context/base/tabl-nte.mkiv | 8 +- tex/context/base/tabl-tbl.lua | 8 +- tex/context/base/tabl-tbl.mkiv | 51 +- tex/context/base/tabl-xtb.lua | 72 +- tex/context/base/tabl-xtb.mkvi | 5 +- tex/context/base/task-ini.lua | 25 +- tex/context/base/toks-ini.lua | 1 + tex/context/base/trac-deb.lua | 25 +- tex/context/base/trac-inf.lua | 29 +- tex/context/base/trac-jus.lua | 71 +- tex/context/base/trac-lmx.lua | 5 +- tex/context/base/trac-log.lua | 190 +- tex/context/base/trac-vis.lua | 175 +- tex/context/base/trac-vis.mkiv | 2 + tex/context/base/type-imp-dejavu.mkiv | 7 + tex/context/base/type-imp-euler.mkiv | 80 +- tex/context/base/type-imp-hgz.mkiv | 2 +- tex/context/base/type-imp-mathdigits.mkiv | 53 + tex/context/base/type-ini.mkvi | 2 + tex/context/base/type-set.mkiv | 6 + tex/context/base/typo-bld.lua | 70 + tex/context/base/typo-brk.lua | 15 +- tex/context/base/typo-cap.lua | 329 +- tex/context/base/typo-cap.mkiv | 126 +- tex/context/base/typo-cln.lua | 18 +- tex/context/base/typo-del.mkiv | 2 + tex/context/base/typo-dha.lua | 398 +++ tex/context/base/typo-dig.lua | 21 +- tex/context/base/typo-dir.lua | 536 +--- tex/context/base/typo-dir.mkiv | 75 +- tex/context/base/typo-drp.lua | 208 ++ tex/context/base/typo-drp.mkiv | 118 + tex/context/base/typo-dua.lua | 758 +++++ tex/context/base/typo-dub.lua | 870 ++++++ tex/context/base/typo-fln.lua | 271 ++ tex/context/base/typo-fln.mkiv | 112 + tex/context/base/typo-itc.lua | 27 +- tex/context/base/typo-krn.lua | 245 +- tex/context/base/typo-krn.mkiv | 54 +- tex/context/base/typo-mar.lua | 42 +- tex/context/base/typo-par.lua | 181 -- tex/context/base/typo-par.mkiv | 107 - tex/context/base/typo-prc.lua | 2 +- tex/context/base/typo-rep.lua | 28 +- tex/context/base/typo-spa.lua | 18 +- tex/context/base/typo-tal.lua | 265 ++ tex/context/base/typo-tal.mkiv | 112 + tex/context/base/typo-txt.mkvi | 4 +- tex/context/base/util-dim.lua | 32 +- tex/context/base/util-env.lua | 1 + tex/context/base/util-jsn.lua | 16 +- tex/context/base/util-lua.lua | 325 +- tex/context/base/util-prs.lua | 30 +- tex/context/base/util-seq.lua | 4 +- tex/context/base/util-soc.lua 
| 16 +- tex/context/base/util-sql-imp-swiglib.lua | 3 +- tex/context/base/util-sql-users.lua | 6 +- tex/context/base/util-sto.lua | 58 +- tex/context/base/util-str.lua | 90 +- tex/context/base/util-tab.lua | 444 ++- tex/context/base/util-tpl.lua | 34 +- tex/context/base/x-asciimath.lua | 2 + tex/context/base/x-mathml.lua | 1 + tex/context/base/x-mathml.mkiv | 349 ++- tex/context/base/x-set-12.mkiv | 13 + tex/context/fonts/ebgaramond.lfg | 53 + tex/context/fonts/euler-math.lfg | 23 + tex/context/fonts/lm.lfg | 7 +- tex/context/fonts/px-math.lfg | 2 +- tex/context/fonts/treatments.lfg | 59 +- tex/context/fonts/unifraktur.lfg | 23 + tex/context/interface/keys-cs.xml | 32 + tex/context/interface/keys-de.xml | 32 + tex/context/interface/keys-en.xml | 32 + tex/context/interface/keys-fr.xml | 32 + tex/context/interface/keys-it.xml | 32 + tex/context/interface/keys-nl.xml | 32 + tex/context/interface/keys-pe.xml | 32 + tex/context/interface/keys-ro.xml | 32 + tex/context/patterns/word-xx.lua | 14 + tex/generic/context/luatex/luatex-basics-gen.lua | 28 +- tex/generic/context/luatex/luatex-basics-nod.lua | 79 +- tex/generic/context/luatex/luatex-fonts-merged.lua | 2936 +++++++++++++----- tex/generic/context/luatex/luatex-fonts-syn.lua | 4 + tex/generic/context/luatex/luatex-fonts.lua | 8 +- tex/generic/context/luatex/luatex-test.tex | 4 + 393 files changed, 25323 insertions(+), 10161 deletions(-) delete mode 100644 tex/context/base/char-cjk.lua delete mode 100644 tex/context/base/cont-new.tmp delete mode 100644 tex/context/base/context.tmp create mode 100644 tex/context/base/font-sel.lua create mode 100644 tex/context/base/font-sel.mkvi create mode 100644 tex/context/base/lang-rep.lua create mode 100644 tex/context/base/m-hemistich.mkiv create mode 100644 tex/context/base/m-nodechart.lua create mode 100644 tex/context/base/m-r.mkii delete mode 100644 tex/context/base/m-r.tex create mode 100644 tex/context/base/math-acc.mkvi create mode 100644 tex/context/base/math-dir.lua create mode 100644 tex/context/base/math-rad.mkvi create mode 100644 tex/context/base/meta-fnt.lua create mode 100644 tex/context/base/meta-fnt.mkiv create mode 100644 tex/context/base/mtx-context-xml.tex create mode 100644 tex/context/base/node-ltp.lua create mode 100644 tex/context/base/node-met.lua create mode 100644 tex/context/base/scrp-tha.lua delete mode 100644 tex/context/base/spac-cha.mkiv create mode 100644 tex/context/base/type-imp-mathdigits.mkiv create mode 100644 tex/context/base/typo-dha.lua create mode 100644 tex/context/base/typo-drp.lua create mode 100644 tex/context/base/typo-drp.mkiv create mode 100644 tex/context/base/typo-dua.lua create mode 100644 tex/context/base/typo-dub.lua create mode 100644 tex/context/base/typo-fln.lua create mode 100644 tex/context/base/typo-fln.mkiv delete mode 100644 tex/context/base/typo-par.lua delete mode 100644 tex/context/base/typo-par.mkiv create mode 100644 tex/context/base/typo-tal.lua create mode 100644 tex/context/base/typo-tal.mkiv create mode 100644 tex/context/fonts/ebgaramond.lfg create mode 100644 tex/context/fonts/euler-math.lfg create mode 100644 tex/context/fonts/unifraktur.lfg create mode 100644 tex/context/patterns/word-xx.lua (limited to 'tex') diff --git a/tex/context/base/anch-bck.mkvi b/tex/context/base/anch-bck.mkvi index 79e42dc0a..cccf14ee4 100644 --- a/tex/context/base/anch-bck.mkvi +++ b/tex/context/base/anch-bck.mkvi @@ -154,22 +154,22 @@ \def\anch_backgrounds_text_preset_yes {\anch_backgrounds_text_preset_nop 
\csname\??textbackgroundlevel\textbackgroundparameter\c!location\endcsname - \edef\m_anch_backgrounds_text_frame {\textbackgroundparameter\c!frame}% - \edef\m_anch_backgrounds_text_corner {\textbackgroundparameter\c!corner}% - \edef\m_anch_backgrounds_text_background{\textbackgroundparameter\c!background}% - \ifx\m_anch_backgrounds_text_frame\v!on - \ifx\m_anch_backgrounds_text_corner\v!round - \let\m_anch_backgrounds_text_frame\!!plustwo + \edef\p_anch_backgrounds_text_frame {\textbackgroundparameter\c!frame}% + \edef\p_anch_backgrounds_text_corner {\textbackgroundparameter\c!corner}% + \edef\p_anch_backgrounds_text_background{\textbackgroundparameter\c!background}% + \ifx\p_anch_backgrounds_text_frame\v!on + \ifx\p_anch_backgrounds_text_corner\v!round + \let\p_anch_backgrounds_text_frame\!!plustwo \else - \let\m_anch_backgrounds_text_frame\!!plusone + \let\p_anch_backgrounds_text_frame\!!plusone \fi \else - \let\m_anch_backgrounds_text_frame\!!zerocount + \let\p_anch_backgrounds_text_frame\!!zerocount \fi - \ifx\m_anch_backgrounds_text_background\v!color - \let\m_anch_backgrounds_text_background\!!plusone + \ifx\p_anch_backgrounds_text_background\v!color + \let\p_anch_backgrounds_text_background\!!plusone \else - \let\m_anch_backgrounds_text_background\!!zerocount + \let\p_anch_backgrounds_text_background\!!zerocount \fi \startpositionoverlay{\textbackgroundoverlay{\textbackgroundparameter\c!level}}% \anch_backgrounds_text_meta_graphic % gets expanded directly @@ -184,8 +184,8 @@ {self=\v_anch_backgrounds_text_current, mp=\textbackgroundparameter\c!mp, gridtype=\textbackgroundparameter\c!alternative, - filltype=\m_anch_backgrounds_text_background, - linetype=\m_anch_backgrounds_text_frame, + filltype=\p_anch_backgrounds_text_background, + linetype=\p_anch_backgrounds_text_frame, dashtype=\textbackgroundparameter\c!dash, gridcolor=\textbackgroundparameter\c!framecolor, linecolor=\textbackgroundparameter\c!framecolor, diff --git a/tex/context/base/anch-pgr.lua b/tex/context/base/anch-pgr.lua index 278448e3a..c7f56a92b 100644 --- a/tex/context/base/anch-pgr.lua +++ b/tex/context/base/anch-pgr.lua @@ -681,12 +681,12 @@ end function commands.doifelserangeonpage(first,last,page) local collected = jobpositions.collected local f = collected[first] - if not f then + if not f or f.p == true then doifelse(false) return end local l = collected[last] - if not l then + if not l or l.p == true then doifelse(false) return end diff --git a/tex/context/base/anch-pgr.mkiv b/tex/context/base/anch-pgr.mkiv index 01ef25dc4..c18a1b669 100644 --- a/tex/context/base/anch-pgr.mkiv +++ b/tex/context/base/anch-pgr.mkiv @@ -254,7 +254,7 @@ {\vbox to \overlayheight {%\writestatus{!!!}{\currentpositionoverlay/\MPanchoridentifier/\MPanchornumber}% \edef\MPanchorid{\currentpositionoverlay::\MPanchoridentifier:\MPanchornumber}% realpageno -% \edef\MPanchor##1{\MPpos\MPanchorid}% + % \edef\MPanchor##1{\MPpos\MPanchorid}% \let\MPanchor\MPoverlayanchor % no need to fetch it already, seldom used \the\everyinsertpositionaction \copyposition{\currentpositionoverlay::\MPanchoridentifier}\MPanchorid @@ -444,6 +444,8 @@ \def\currentposition{#1}\MPpositiongraphic{#2}{#3}% \fi} +\let\anch_positions_meta_graphic_handle_indeed\relax + \appendtoks \let\anch_positions_meta_graphic_handle_indeed\anch_positions_meta_graphic_insert \to \everyinsertpositionaction diff --git a/tex/context/base/anch-pos.lua b/tex/context/base/anch-pos.lua index 2697cecf4..9cc9fb128 100644 --- a/tex/context/base/anch-pos.lua +++ 
b/tex/context/base/anch-pos.lua @@ -26,9 +26,14 @@ local rawget = rawget local lpegmatch = lpeg.match local insert, remove = table.insert, table.remove local allocate, mark = utilities.storage.allocate, utilities.storage.mark -local texsp, texcount, texbox, texdimen, texsetcount = tex.sp, tex.count, tex.box, tex.dimen, tex.setcount +local texsp = tex.sp ----- texsp = string.todimen -- because we cache this is much faster but no rounding +local texgetcount = tex.getcount +local texgetbox = tex.getbox +local texsetcount = tex.setcount +local texget = tex.get + local pdf = pdf -- h and v are variables local setmetatableindex = table.setmetatableindex @@ -170,7 +175,7 @@ local function setdim(name,w,h,d,extra) -- will be used when we move to sp allov if extra == "" then extra = nil end -- todo: sparse tobesaved[name] = { - p = texcount.realpageno, + p = texgetcount("realpageno"), x = x, y = y, w = w, @@ -217,7 +222,7 @@ local function enhance(data) data.y = pdf.v end if data.p == true then - data.p = texcount.realpageno + data.p = texgetcount("realpageno") end if data.c == true then data.c = column @@ -269,6 +274,8 @@ commands.setpos = setall -- will become private table (could also become attribute driven but too nasty -- as attributes can bleed e.g. in margin stuff) +-- not much gain in keeping stack (inc/dec instead of insert/remove) + function jobpositions.b_col(tag) tobesaved[tag] = { r = true, @@ -291,7 +298,7 @@ function jobpositions.e_col(tag) column = columns[#columns] end -function commands.bcolumn(tag,register) +function commands.bcolumn(tag,register) -- name will change insert(columns,tag) column = tag if register then @@ -299,7 +306,7 @@ function commands.bcolumn(tag,register) end end -function commands.ecolumn(register) +function commands.ecolumn(register) -- name will change if register then context(new_latelua(f_e_column())) end @@ -312,17 +319,17 @@ end function jobpositions.b_region(tag) local last = tobesaved[tag] last.x = pdf.h -last.y = pdf.v - last.p = texcount.realpageno + last.y = pdf.v + last.p = texgetcount("realpageno") insert(regions,tag) region = tag end function jobpositions.e_region(correct) local last = tobesaved[region] -if correct then - last.h = last.y - pdf.v -end + if correct then + last.h = last.y - pdf.v + end last.y = pdf.v remove(regions) region = regions[#regions] @@ -333,7 +340,7 @@ function jobpositions.markregionbox(n,tag,correct) nofregions = nofregions + 1 tag = f_region(nofregions) end - local box = texbox[n] + local box = texgetbox(n) local w = box.width local h = box.height local d = box.depth @@ -376,7 +383,7 @@ local nofparagraphs = 0 function commands.parpos() -- todo: relate to localpar (so this is an intermediate variant) nofparagraphs = nofparagraphs + 1 texsetcount("global","c_anch_positions_paragraph",nofparagraphs) - local strutbox = texbox.strutbox + local strutbox = texgetbox("strutbox") local t = { p = true, c = true, @@ -385,14 +392,14 @@ function commands.parpos() -- todo: relate to localpar (so this is an intermedia y = true, h = strutbox.height, d = strutbox.depth, - hs = tex.hsize, + hs = texget("hsize"), } - local leftskip = tex.leftskip.width - local rightskip = tex.rightskip.width - local hangindent = tex.hangindent - local hangafter = tex.hangafter - local parindent = tex.parindent - local parshape = tex.parshape + local leftskip = texget("leftskip").width + local rightskip = texget("rightskip").width + local hangindent = texget("hangindent") + local hangafter = texget("hangafter") + local parindent = texget("parindent") + 
local parshape = texget("parshape") if leftskip ~= 0 then t.ls = leftskip end @@ -460,7 +467,7 @@ function commands.posplus(name,w,h,d,extra) end function commands.posstrut(name,w,h,d) - local strutbox = texbox.strutbox + local strutbox = texgetbox("strutbox") tobesaved[name] = { p = true, c = column, @@ -476,7 +483,7 @@ end function jobpositions.getreserved(tag,n) if tag == v_column then - local fulltag = f_tag_three(tag,texcount.realpageno,n or 1) + local fulltag = f_tag_three(tag,texgetcount("realpageno"),n or 1) local data = collected[fulltag] if data then return data, fulltag @@ -484,7 +491,7 @@ function jobpositions.getreserved(tag,n) tag = v_text end if tag == v_text then - local fulltag = f_tag_two(tag,texcount.realpageno) + local fulltag = f_tag_two(tag,texgetcount("realpageno")) return collected[fulltag] or false, fulltag end return collected[tag] or false, tag @@ -1013,7 +1020,7 @@ function commands.doifpositionsonsamepageelse(list,page) end function commands.doifpositionsonthispageelse(list) - doifelse(onsamepage(list,tostring(tex.count.realpageno))) + doifelse(onsamepage(list,tostring(texgetcount("realpageno")))) end function commands.doifelsepositionsused() diff --git a/tex/context/base/attr-eff.lua b/tex/context/base/attr-eff.lua index 4dce5419a..b187b64c7 100644 --- a/tex/context/base/attr-eff.lua +++ b/tex/context/base/attr-eff.lua @@ -13,7 +13,7 @@ local tex = tex local states = attributes.states local tasks = nodes.tasks local nodeinjections = backends.nodeinjections -local settexattribute = tex.setattribute +local texsetattribute = tex.setattribute local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex local formatters = string.formatters @@ -107,5 +107,5 @@ function commands.triggereffect(specification) enable() enabled = true end - settexattribute(a_effect,register(specification)) + texsetattribute(a_effect,register(specification)) end diff --git a/tex/context/base/attr-ini.lua b/tex/context/base/attr-ini.lua index 206a86d79..ad4081681 100644 --- a/tex/context/base/attr-ini.lua +++ b/tex/context/base/attr-ini.lua @@ -20,6 +20,9 @@ local attributes = attributes local sharedstorage = storage.shared +local texgetcount = tex.getcount +local texsetattribute = tex.setattribute + attributes.names = attributes.names or { } attributes.numbers = attributes.numbers or { } attributes.list = attributes.list or { } @@ -64,7 +67,7 @@ sharedstorage.attributes_last_private = sharedstorage.attributes_last_private or -- setmetatable(private, { -- __index = function(t,name) -- local number = sharedstorage.attributes_last_private --- if number < 1023 then -- tex.count.minallocatedattribute - 1 +-- if number < 1023 then -- texgetcount("minallocatedattribute") - 1 -- number = number + 1 -- sharedstorage.attributes_last_private = number -- end @@ -81,7 +84,7 @@ function attributes.private(name) -- at the lua end (hidden from user) local number = numbers[name] if not number then local last = sharedstorage.attributes_last_private - if last < 1023 then -- tex.count.minallocatedattribute - 1 + if last < 1023 then -- texgetcount("minallocatedattribute") - 1 last = last + 1 sharedstorage.attributes_last_private = last else @@ -155,7 +158,7 @@ function commands.restorecurrentattributes(name) local font = t.font if attr then for k, v in next, attr do - tex.attribute[k] = v + texsetattribute(k,v) end end if font then diff --git a/tex/context/base/attr-lay.lua b/tex/context/base/attr-lay.lua index 4bcc70b0c..176af1a2c 100644 --- a/tex/context/base/attr-lay.lua 
+++ b/tex/context/base/attr-lay.lua @@ -133,8 +133,8 @@ attributes.viewerlayers.handler = nodes.installattributehandler { namespace = viewerlayers, initializer = initializer, finalizer = states.finalize, - -- processor = states.stacked, processor = states.stacker, + -- processor = states.stacked, } local stack, enabled, global = { }, false, false diff --git a/tex/context/base/attr-lay.mkiv b/tex/context/base/attr-lay.mkiv index 9c9c3318e..d4aae3060 100644 --- a/tex/context/base/attr-lay.mkiv +++ b/tex/context/base/attr-lay.mkiv @@ -98,8 +98,11 @@ \let\setlayoutcomponentattribute \attr_layoutcomponent_set \let\resetlayoutcomponentattribute\attr_layoutcomponent_reset} +\unexpanded\def\attr_layoutcomponent_cleanup + {\ctxcommand{cleanuplayers()}} + \appendtoks - \ctxcommand{cleanuplayers()}% + \attr_layoutcomponent_cleanup \to \everyshipout \protect \endinput diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua index 4d219a18b..18a339247 100644 --- a/tex/context/base/back-exp.lua +++ b/tex/context/base/back-exp.lua @@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['back-exp'] = { license = "see context related readme files" } +-- beware: we run out of the 200 local limit + -- language -> only mainlanguage, local languages should happen through start/stoplanguage -- tocs/registers -> maybe add a stripper (i.e. just don't flush entries in final tree) -- footnotes -> css 3 @@ -22,10 +24,10 @@ if not modules then modules = { } end modules ['back-exp'] = { -- todo: delay loading (apart from basic tag stuff) local next, type = next, type -local format, match, concat, rep, sub, gsub, gmatch, find = string.format, string.match, table.concat, string.rep, string.sub, string.gsub, string.gmatch, string.find +local format, concat, sub, gsub = string.format, table.concat, string.sub, string.gsub local validstring = string.valid local lpegmatch = lpeg.match -local utfchar, utfbyte, utfvalues = utf.char, utf.byte, utf.values +local utfchar, utfvalues = utf.char, utf.values local insert, remove = table.insert, table.remove local fromunicode16 = fonts.mappings.fromunicode16 local sortedhash = table.sortedhash @@ -81,6 +83,8 @@ local xspaceskip_code = skipcodes.xspaceskip local line_code = listcodes.line +local texgetcount = tex.getcount + local a_characters = attributes.private('characters') local a_exportstatus = attributes.private('exportstatus') @@ -94,9 +98,6 @@ local a_textblock = attributes.private("textblock") local traverse_id = node.traverse_id local traverse_nodes = node.traverse local slide_nodelist = node.slide -local texattribute = tex.attribute -local texdimen = tex.dimen -local texcount = tex.count local locate_node = nodes.locate local references = structures.references @@ -454,7 +455,7 @@ local function checkdocument(root) end function extras.document(result,element,detail,n,fulltag,di) - result[#result+1] = format(" language=%q",languagenames[tex.count.mainlanguagenumber]) + result[#result+1] = format(" language=%q",languagenames[texgetcount("mainlanguagenumber")]) if not less_state then result[#result+1] = format(" file=%q",tex.jobname) result[#result+1] = format(" date=%q",os.date()) @@ -2353,7 +2354,7 @@ local function stopexport(v) images = uniqueusedimages(), root = xhtmlfile, files = files, - language = languagenames[tex.count.mainlanguagenumber], + language = languagenames[texgetcount("mainlanguagenumber")], title = validstring(finetuning.title) or validstring(identity.title), subtitle = validstring(finetuning.subtitle) or 
validstring(identity.subtitle), author = validstring(finetuning.author) or validstring(identity.author), @@ -2377,13 +2378,13 @@ end local function startexport(v) if v and not exporting then report_export("enabling export to xml") --- not yet known in task-ini + -- not yet known in task-ini appendaction("shipouts","normalizers", "nodes.handlers.export") --- enableaction("shipouts","nodes.handlers.export") + -- enableaction("shipouts","nodes.handlers.export") enableaction("shipouts","nodes.handlers.accessibility") enableaction("math", "noads.handlers.tags") ---~ appendaction("finalizers","lists","builders.paragraphs.tag") ---~ enableaction("finalizers","builders.paragraphs.tag") + -- appendaction("finalizers","lists","builders.paragraphs.tag") + -- enableaction("finalizers","builders.paragraphs.tag") luatex.registerstopactions(function() stopexport(v) end) exporting = true end diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv index 9e65633d4..c7696d383 100644 --- a/tex/context/base/back-exp.mkiv +++ b/tex/context/base/back-exp.mkiv @@ -81,7 +81,7 @@ \to \everyenableelements \appendtoks % frozen and assumed global per highlight class - \unexpanded\def\dotaghighlight{\taggedctxcommand{settaghighlight("\currenthighlight","\highlightparameter\c!style",\number\attribute\colorattribute)}}% + \unexpanded\def\dotaghighlight{\taggedctxcommand{settaghighlight("\currenthighlight",\!!bs\highlightparameter\c!style\!!es,\number\attribute\colorattribute)}}% \to \everyenableelements \appendtoks % we can have differently scaled images diff --git a/tex/context/base/back-ini.lua b/tex/context/base/back-ini.lua index bdd931abd..6f58b3262 100644 --- a/tex/context/base/back-ini.lua +++ b/tex/context/base/back-ini.lua @@ -8,7 +8,6 @@ if not modules then modules = { } end modules ['back-ini'] = { local next, type = next, type local format = string.format -local sind, cosd = math.sind, math.cosd backends = backends or { } local backends = backends @@ -96,11 +95,3 @@ tables.vfspecials = allocate { startslant = comment, stopslant = comment, } - --- experimental code -- - -function commands.pdfrotation(a) -- somewhat weird here - local s, c = sind(a), cosd(a) - context("%0.6f %0.6f %0.6f %0.6f",c,s,-s,c) -end - diff --git a/tex/context/base/back-pdf.lua b/tex/context/base/back-pdf.lua index 63261aa3b..f8a5dab6f 100644 --- a/tex/context/base/back-pdf.lua +++ b/tex/context/base/back-pdf.lua @@ -18,3 +18,75 @@ function codeinjections.getoutputfilename() end backends.install("pdf") + +local context = context + +local sind, cosd = math.sind, math.cosd +local insert, remove = table.insert, table.remove + +local f_matrix = string.formatters["%0.8f %0.8f %0.8f %0.8f"] + +function commands.pdfrotation(a) + -- todo: check for 1 and 0 and flush sparse + local s, c = sind(a), cosd(a) + context(f_matrix(c,s,-s,c)) +end + +-- experimental code (somewhat weird here) .. todo: nodeinjections .. this will only work +-- out well if we also calculate the accumulated cm and wrap inclusions / annotations in +-- the accumulated ... 
it's a mess +-- +-- we could also do the save restore wrapping here + colorhack + +local pdfsetmatrix = nodes.pool.pdfsetmatrix +local stack = { } + +local function popmatrix() + local top = remove(stack) + if top then + context(pdfsetmatrix(unpack(top))) + end +end + +function commands.pdfstartrotation(a) + if a == 0 then + insert(stack,false) + else + local s, c = sind(a), cosd(a) + context(pdfsetmatrix(c,s,-s,c)) + insert(stack,{ c, -s, s, c }) + end +end + +function commands.pdfstartscaling(sx,sy) + if sx == 1 and sy == 1 then + insert(stack,false) + else + if sx == 0 then + sx = 0.0001 + end + if sy == 0 then + sy = 0.0001 + end + context(pdfsetmatrix(sx,0,0,sy)) + insert(stack,{ 1/sx, 0, 0, 1/sy }) + end +end + +function commands.pdfstartmirroring() + context(pdfsetmatrix(-1,0,0,1)) +end + +function commands.pdfstartmatrix(sx,rx,ry,sy) -- tx, ty + if sx == 1 and rx == 0 and ry == 0 and sy == 1 then + insert(stack,false) + else + context(pdfsetmatrix(sx,rx,ry,sy)) + insert(stack,{ -sx, -rx, -ry, -sy }) + end +end + +commands.pdfstoprotation = popmatrix +commands.pdfstopscaling = popmatrix +commands.pdfstopmirroring = commands.pdfstartmirroring +commands.pdfstopmatrix = popmatrix diff --git a/tex/context/base/back-pdf.mkiv b/tex/context/base/back-pdf.mkiv index 1cf7a3703..948a14138 100644 --- a/tex/context/base/back-pdf.mkiv +++ b/tex/context/base/back-pdf.mkiv @@ -123,45 +123,35 @@ \to \everylastbackendshipout %D Transformations. Some day we will use primitives (once they're fixed). - -% \def\dostartscaling#1#2% the test is needed because acrobat is bugged! -% {\forcecolorhack % maybe use signal instead -% \pdfliteral{q \ifdim#1\points=\zeropoint.0001\else#1\fi\space 0 0 -% \ifdim#2\points=\zeropoint.0001\else#2\fi\space 0 0 cm}} -% -% \def\dostopscaling -% {\pdfliteral{Q}} -% -% \def\dostartrotation#1% grouped -% {\forcecolorhack % maybe use signal instead -% \pdfliteral{q \cldcontext{lpdf.rotationcm(#1)}}} -% -% \def\dostoprotation -% {\pdfliteral{Q}} -% -% function lpdf.rotationcm(a) -% local s, c = sind(a), cosd(a) -% return format("%f %f %f %f 0 0 cm",c,s,-s,c) -% end -% -% \def\dostartmirroring{\pdfliteral{-1 0 0 1 0 0 cm}} -% \def\dostopmirroring {\pdfliteral{-1 0 0 1 0 0 cm}} - % todo: inject at the lua end cq. deal with #5 and #6 too +% % % rotation % % % + \unexpanded\def\dostartrotation#1% grouped - {\forcecolorhack % maybe use signal instead - \advance\backendtransformlevel\plusone + {\advance\backendtransformlevel\plusone + \forcecolorhack \pdfsave \pdfsetmatrix{\ctxcommand{pdfrotation(#1)}}} \unexpanded\def\dostoprotation {\pdfrestore + \forcecolorhack \advance\backendtransformlevel\minusone} +% \unexpanded\def\dostartrotation#1% grouped +% {\forcecolorhack +% \advance\backendtransformlevel\plusone +% \ctxcommand{pdfstartrotation(#1)}} + +% \unexpanded\def\dostoprotation +% {\ctxcommand{pdfstoprotation()}% +% \advance\backendtransformlevel\minusone} + +% % % scaling % % % + \unexpanded\def\dostartscaling#1#2% the test is needed because acrobat is bugged! - {\forcecolorhack % maybe use signal instead - \advance\backendtransformlevel\plusone + {\advance\backendtransformlevel\plusone + \forcecolorhack % maybe use signal instead \pdfsave \pdfsetmatrix {\ifdim#1\points=\zeropoint.0001\else#1\fi\space 0 0 @@ -169,24 +159,49 @@ \unexpanded\def\dostopscaling {\pdfrestore + \forcecolorhack \advance\backendtransformlevel\minusone} +% \unexpanded\def\dostartscaling#1#2% the test is needed because acrobat is bugged! 
+% {\forcecolorhack +% \advance\backendtransformlevel\plusone +% \ctxcommand{pdfstartscaling(#1,#2)}} + +% \unexpanded\def\dostopscaling +% {\ctxcommand{pdfstopscaling()}% +% \advance\backendtransformlevel\minusone} + +% % % mirroring % % % + \unexpanded\def\dostartmirroring {\advance\backendtransformlevel\plusone + \forcecolorhack \pdfsave \pdfsetmatrix{-1 0 0 1}} % 0 0 \unexpanded\def\dostopmirroring {\pdfrestore + \forcecolorhack \advance\backendtransformlevel\minusone} +% \unexpanded\def\dostartmirroring +% {\advance\backendtransformlevel\plusone +% \ctxcommand{pdfstartmirroring()}} + +% \unexpanded\def\dostopmirroring +% {\ctxcommand{pdfstopmirroring()}% +% \advance\backendtransformlevel\minusone} + +% % % transform % % % + \unexpanded\def\dotransformnextbox#1#2#3#4#5#6% sx rx ry sy tx ty (will change) / basepoints ! {\advance\backendtransformlevel\plusone % fixing ht/dp/wd should happen elsewhere \dowithnextbox{\dodotransformnextbox{#5}{#6}{#1 #2 #3 #4}}} \unexpanded\def\dodotransformnextbox#1#2#3% - {\hbox + {%\forcecolorhack + \hbox {\kern#1\onebasepoint \raise#2\onebasepoint\hbox {\pdfsave @@ -195,14 +210,31 @@ \pdfrestore \advance\backendtransformlevel\minusone}}} -\unexpanded\def\dostartclipping#1#2#3% +% \unexpanded\def\dotransformnextbox#1#2#3#4#5#6% sx rx ry sy tx ty (will change) / basepoints ! +% {\advance\backendtransformlevel\plusone +% % fixing ht/dp/wd should happen elsewhere +% \dowithnextbox{\dodotransformnextbox{#1}{#2}{#3}{#4}{#5}{#6}}} + +% \unexpanded\def\dodotransformnextbox#1#2#3#4#5#6% +% {\hbox +% {\kern #5\onebasepoint +% \raise#6\onebasepoint +% \hbox +% {\ctxcommand{pdfstartmatrix(#1,#2,#3,#4)}% +% \box\nextbox +% \ctxcommand{pdfstopmatrix()}% +% \advance\backendtransformlevel\minusone}}} + +% % % clipping % % % + +\unexpanded\def\dostartclipping#1#2#3% we can move this to lua and only set a box here {\PointsToBigPoints{#2}\width \PointsToBigPoints{#3}\height \meta_grab_clip_path{#1}\width\height{0 0 m \width\space 0 l \width \height l 0 \height l}% \pdfliteral{q 0 w \MPclippath\space W n}} \unexpanded\def\dostopclipping - {\pdfliteral{Q n}} + {\pdfliteral{Q}} %D The following will move to the backend \LUA\ code: @@ -230,6 +262,8 @@ {\back_object_stop \egroup} +\let\back_object_stop\relax + % attr {/Group << /S /Transparency /I false /K true >>} \def\back_object_register#1#2% diff --git a/tex/context/base/bibl-tra.lua b/tex/context/base/bibl-tra.lua index 6a7016023..39eae3bcd 100644 --- a/tex/context/base/bibl-tra.lua +++ b/tex/context/base/bibl-tra.lua @@ -6,26 +6,41 @@ if not modules then modules = { } end modules ['bibl-tra'] = { license = "see context related readme files" } +-- also see bibl-tra-new ! 
+ +local match, gmatch, format, concat, sort = string.match, string.gmatch, string.format, table.concat, table.sort + bibtex = bibtex or { } local bibtex = bibtex bibtex.hacks = bibtex.hacks or { } local hacks = bibtex.hacks -local match, gmatch, format, concat, sort = string.match, string.gmatch, string.format, table.concat, table.sort -local variables, constants = interfaces.variables, interfaces.constants - local trace_bibtex = false trackers.register("publications.bibtex", function(v) trace_bibtex = v end) local report_tex = logs.reporter("publications","tex") -local context, structures = context, structures +local context = context +local structures = structures -local references = structures.references -local sections = structures.sections +local references = structures.references +local sections = structures.sections -local list, done, alldone, used, registered, ordered = { }, { }, { }, { }, { }, { } -local mode = 0 +local variables = interfaces.variables + +local v_short = variables.short +local v_cite = variables.cite +local v_default = variables.default +local v_reference = variables.default + +local list = { } +local done = { } +local alldone = { } +local used = { } +local registered = { } +local ordered = { } +local shorts = { } +local mode = 0 local template = utilities.strings.striplong([[ \citation{*} @@ -54,12 +69,17 @@ function hacks.process(settings) end end -function hacks.register(str) +function hacks.register(tag,short) + if not short or short == "" then + short = tag + end if trace_bibtex then - report_tex("registering bibtex entry %a",str) + report_tex("registering bibtex entry %a with shortcut %a",tag,short) end - registered[#registered+1] = str - ordered[str] = #registered + local top = #registered + 1 + registered[top] = tag + ordered [tag] = top + shorts [tag] = short end function hacks.nofregistered() @@ -90,19 +110,38 @@ function hacks.add(str,listindex) end end -local function compare(a,b) -- quite some checking for non-nil - local aa, bb = a and a[1], b and b[1] - if aa and bb then - local oa, ob = ordered[aa], ordered[bb] - return oa and ob and oa < ob - end - return false -end - function hacks.flush(sortvariant) - if sortvariant == "" or sortvariant == variables.cite or sortvariant == "default" then + local compare -- quite some checking for non-nil + if sortvariant == "" or sortvariant == v_cite or sortvariant == v_default then -- order is cite order i.e. 
same as list + elseif sortvariant == v_short then + compare = function(a,b) + local aa, bb = a and a[1], b and b[1] + if aa and bb then + local oa, ob = shorts[aa], shorts[bb] + return oa and ob and oa < ob + end + return false + end + elseif sortvariant == v_reference then + compare = function(a,b) + local aa, bb = a and a[1], b and b[1] + if aa and bb then + return aa and bb and aa < bb + end + return false + end else + compare = function(a,b) + local aa, bb = a and a[1], b and b[1] + if aa and bb then + local oa, ob = ordered[aa], ordered[bb] + return oa and ob and oa < ob + end + return false + end + end + if compare then sort(list,compare) end for i=1,#list do @@ -235,6 +274,7 @@ function hacks.resolve(prefix,block,reference) -- maybe already feed it split if c[3] then context.dowithbibtexnumrefrange(#collected,i,prefix,c[1],c[2],c[3],c[4]) else +-- print(#collected,i,prefix,c[1],c[2]) context.dowithbibtexnumref(#collected,i,prefix,c[1],c[2]) end end diff --git a/tex/context/base/bibl-tra.mkiv b/tex/context/base/bibl-tra.mkiv index b142e8938..174bc8e25 100644 --- a/tex/context/base/bibl-tra.mkiv +++ b/tex/context/base/bibl-tra.mkiv @@ -9,6 +9,11 @@ %C %C Donated to the public domain. + +% % % % watch out ... bibl-tra-new.mkiv is work in progress % % % % + +% % % % mlbibtex also supports context and we can run that instead of bibtex % % % % + %D This module has been adapted to \MKIV\ by Hans Hagen so if things go wrong, %D he is to blame. The changes concern references and lists but teh rendering %D itself is unchanged. Future versions might provide variants as we have plans @@ -684,7 +689,7 @@ \doifassignmentelse{#1}% {\getparameters[\??pb][k=\s!unknown,t=article,n=,s=,a=,y=,o=,u=,#1]}% {\getparameters[\??pb][k=#1,t=article,n=,s=,a=,y=,o=,u=]}% - \ctxlua{bibtex.hacks.register("\@@pbk")}% + \ctxlua{bibtex.hacks.register("\@@pbk","\@@pbs")}% \catcode\commentasciicode\othercatcode \dodostartpublication} diff --git a/tex/context/base/blob-ini.lua b/tex/context/base/blob-ini.lua index 4debaf94c..32fac7662 100644 --- a/tex/context/base/blob-ini.lua +++ b/tex/context/base/blob-ini.lua @@ -31,30 +31,37 @@ if not modules then modules = { } end modules ['blob-ini'] = { local type, tostring = type, tostring local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns -local report_blobs = logs.reporter("blobs") +local report_blobs = logs.reporter("blobs") -local t_tonodes = typesetters.tonodes -local t_hpack = typesetters.hpack +local flush_node_list = node.flush_list +local hpack_node_list = node.hpack +local vpack_node_list = node.vpack +local write_node = node.write -local flush_node_list = node.flush_list -local hpack_node_list = node.hpack -local vpack_node_list = node.vpack -local write_node = node.write +local typesetters = nodes.typesetters +local tonodes = typesetters.tonodes +local tohpack = typesetters.tohpack +local tohpackfast = typesetters.tohpackfast +local tovpack = typesetters.tovpack +local tovpackfast = typesetters.tovpackfast blobs = blobs or { } -local newline = lpegpatterns.newline -local space = lpegpatterns.spacer -local spacing = newline * space^0 -local content = (space^1)/" " + (1-spacing) +-- provide copies here (nicer for manuals) -local ctxtextcapture = lpeg.Ct ( ( -- needs checking (see elsewhere) - space^0 * ( - newline^2 * space^0 * lpeg.Cc("") - + newline * space^0 * lpeg.Cc(" ") - + lpeg.Cs(content^1) - ) -)^0) +blobs.tonodes = tonodes +blobs.tohpack = tohpack +blobs.tohpackfast = tohpackfast +blobs.tovpack = tovpack +blobs.tovpackfast = tovpackfast + +-- end of 
helpers + +local newline = lpeg.patterns.newline +local space = lpeg.patterns.spacer +local newpar = (space^0*newline*space^0)^2 + +local ctxtextcapture = lpeg.Ct ( ( space^0 * ( newpar + lpeg.Cs(((space^1/" " + 1)-newpar)^1) ) )^0) function blobs.new() return { @@ -81,33 +88,12 @@ function blobs.append(t,str) -- compare concat and link str = tostring(str) typ = "string" end - local list = t.list if typ == "string" then local pars = lpegmatch(ctxtextcapture,str) - local noflist = #list + local list = t.list for p=1,#pars do - local str = pars[p] - if #str == 0 then - noflist = noflist + 1 - list[noflist] = { head = nil, tail = nil } - else - local l = list[noflist] - if not l then - l = { head = nil, tail = nil } - noflist = noflist + 1 - list[noflist] = l - end - local head, tail = t_tonodes(str,nil,nil) - if head then - if l.head then - l.tail.next = head - head.prev = l.tail - l.tail = tail - else - l.head, l.tail = head, tail - end - end - end + local head, tail = tonodes(pars[p],nil,nil) + list[#list+1] = { head = head, tail = tail } end end end @@ -121,7 +107,7 @@ function blobs.pack(t,how) end if how == "vertical" then -- we need to prepend a local par node - -- list[i].pack = node.vpack(list[i].head,"exactly") + -- list[i].pack = vpack_node_list(list[i].head,"exactly") report_blobs("vpack not yet supported") else list[i].pack = hpack_node_list(list[i].head,"exactly") @@ -176,12 +162,44 @@ end -- for the moment here: -function commands.widthofstring(str) - local l = t_hpack(str) - context(number.todimen(l.width)) +local function strwd(str) + local l = tohpack(str) + local w = l.width flush_node_list(l) + return w end +local function strht(str) + local l = tohpack(str) + local h = l.height + flush_node_list(l) + return h +end + +local function strdp(str) + local l = tohpack(str) + local d = l.depth + flush_node_list(l) + return d +end + +local function strhd(str) + local l = tohpack(str) + local s = l.height + l.depth + flush_node_list(l) + return s +end + +blobs.strwd = strwd +blobs.strht = strht +blobs.strdp = strdp +blobs.strhd = strhd + +function commands.strwd(str) context(strwd(str)) end +function commands.strht(str) context(strht(str)) end +function commands.strdp(str) context(strdp(str)) end +function commands.strhd(str) context(strhd(str)) end + -- less efficient: -- -- function commands.widthof(str) diff --git a/tex/context/base/blob-ini.mkiv b/tex/context/base/blob-ini.mkiv index 4fdb9e4b6..1dfb766f4 100644 --- a/tex/context/base/blob-ini.mkiv +++ b/tex/context/base/blob-ini.mkiv @@ -27,7 +27,19 @@ % this one takes simple (utf) strings -\def\widthofstring#1{\ctxcommand{widthofstring(\!!bs#1\!!es)}} +\def\wdofstring#1{\dimexpr\ctxcommand{strwd(\!!bs#1\!!es)}\scaledpoint\relax} +\def\htofstring#1{\dimexpr\ctxcommand{strht(\!!bs#1\!!es)}\scaledpoint\relax} +\def\dpofstring#1{\dimexpr\ctxcommand{strdp(\!!bs#1\!!es)}\scaledpoint\relax} +\def\hdofstring#1{\dimexpr\ctxcommand{strhd(\!!bs#1\!!es)}\scaledpoint\relax} + +\def\widthofstring {\the\wdofstring} +\def\heightofstring {\the\htofstring} +\def\depthofstring {\the\dpofstring} +\def\heightanddepthofstring{\the\hdofstring} + +\let\htdpofstring \hdofstring + +\let\hd\htdp % if yes then move this % this one takes anything that can be typeset diff --git a/tex/context/base/buff-imp-lua.lua b/tex/context/base/buff-imp-lua.lua index 1147666cc..04e79afba 100644 --- a/tex/context/base/buff-imp-lua.lua +++ b/tex/context/base/buff-imp-lua.lua @@ -6,7 +6,8 @@ if not modules then modules = { } end modules ['buff-imp-lua'] = { license = 
"see context related readme files" } --- borrowed from scite +-- borrowed from ctx scite lexers +-- add goto/label scanning -- -- depricated: -- @@ -26,9 +27,9 @@ local core = tohash { local base = tohash { "assert", "collectgarbage", "dofile", "error", "loadfile", - "loadstring", "print", "rawget", "rawset", "require", "tonumber", + "loadstring", "load", "print", "rawget", "rawset", "require", "tonumber", "tostring", "type", "_G", "getmetatable", "ipairs", "next", "pairs", - "pcall", "rawequal", "setmetatable", "xpcall", "module", "select", + "pcall", "rawequal", "setmetatable", "xpcall", "module", "select", "goto", } local libraries = { @@ -61,7 +62,7 @@ local libraries = { }, lpeg = tohash{ "print", "match", "locale", "type", "version", "setmaxstack", - "P", "R", "S", "C", "V", "Cs", "Ct", "Cs", "Cp", "Carg", + "P", "R", "S", "C", "V", "Cs", "Ct", "Cs", "Cc", "Cp", "Carg", "Cg", "Cb", "Cmt", "Cf", "B", }, -- bit diff --git a/tex/context/base/buff-imp-mp.lua b/tex/context/base/buff-imp-mp.lua index 34e3459c6..bcd18dd47 100644 --- a/tex/context/base/buff-imp-mp.lua +++ b/tex/context/base/buff-imp-mp.lua @@ -36,6 +36,8 @@ local MetapostSnippetConstructor = verbatim.MetapostSnippetConstructor local MetapostSnippetBoundary = verbatim.MetapostSnippetBoundary local MetapostSnippetSpecial = verbatim.MetapostSnippetSpecial local MetapostSnippetComment = verbatim.MetapostSnippetComment +local MetapostSnippetQuote = verbatim.MetapostSnippetQuote +local MetapostSnippetString = verbatim.MetapostSnippetString local MetapostSnippetNamePrimitive = verbatim.MetapostSnippetNamePrimitive local MetapostSnippetNamePlain = verbatim.MetapostSnippetNamePlain local MetapostSnippetNameMetafun = verbatim.MetapostSnippetNameMetafun diff --git a/tex/context/base/buff-imp-tex.lua b/tex/context/base/buff-imp-tex.lua index 29fd8c0c5..097dff212 100644 --- a/tex/context/base/buff-imp-tex.lua +++ b/tex/context/base/buff-imp-tex.lua @@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['buff-imp-tex'] = { license = "see context related readme files" } +-- needs an update, use mult-low + local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns local context = context @@ -39,7 +41,7 @@ local handler = visualizers.newhandler { -- todo: unicode letters in control sequences (slow as we need to test the nature) local comment = S("%") -local name = P("\\") * (patterns.letter + S("@!?"))^1 +local name = P("\\") * (patterns.letter + S("@!?_"))^1 local escape = P("\\") * (patterns.anything - patterns.newline)^-1 -- else we get \n local group = S("${}") local boundary = S('[]()<>#="') diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua index 475d23efe..a8c101b61 100644 --- a/tex/context/base/buff-ini.lua +++ b/tex/context/base/buff-ini.lua @@ -6,24 +6,28 @@ if not modules then modules = { } end modules ['buff-ini'] = { license = "see context related readme files" } -local trace_run = false trackers.register("buffers.run", function(v) trace_run = v end) -local trace_grab = false trackers.register("buffers.grab", function(v) trace_grab = v end) -local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end) - -local report_buffers = logs.reporter("buffers","usage") -local report_grabbing = logs.reporter("buffers","grabbing") - -local context, commands = context, commands - local concat = table.concat local type, next, load = type, next, load local sub, format = string.sub, string.format local splitlines, validstring = string.splitlines, string.valid 
local P, Cs, patterns, lpegmatch = lpeg.P, lpeg.Cs, lpeg.patterns, lpeg.match +local trace_run = false trackers.register("buffers.run", function(v) trace_run = v end) +local trace_grab = false trackers.register("buffers.grab", function(v) trace_grab = v end) +local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end) + +local report_buffers = logs.reporter("buffers","usage") +local report_typeset = logs.reporter("buffers","typeset") +local report_grabbing = logs.reporter("buffers","grabbing") + +local context = context +local commands = commands + local variables = interfaces.variables local settings_to_array = utilities.parsers.settings_to_array local formatters = string.formatters +local addsuffix = file.addsuffix +local replacesuffix = file.replacesuffix local v_yes = variables.yes @@ -42,18 +46,34 @@ local function erase(name) end local function assign(name,str,catcodes) - cache[name] = { data = str, catcodes = catcodes } + cache[name] = { + data = str, + catcodes = catcodes, + typeset = false, + } end -local function append(name,str) +local function combine(name,str,prepend) local buffer = cache[name] if buffer then - buffer.data = buffer.data .. str + buffer.data = prepend and (str .. buffer.data) or (buffer.data .. str) + buffer.typeset = false else - cache[name] = { data = str } + cache[name] = { + data = str, + typeset = false, + } end end +local function prepend(name,str) + combine(name,str,true) +end + +local function append(name,str) + combine(name,str) +end + local function exists(name) return cache[name] end @@ -68,10 +88,40 @@ local function getlines(name) return buffer and splitlines(buffer.data) end -local function collectcontent(names,separator) -- no print - if type(names) == "string" then - names = settings_to_array(names) +local function getnames(name) + if type(name) == "string" then + return settings_to_array(name) + else + return name end +end + +local function istypeset(name) + local names = getnames(name) + if #names == 0 then + return false + end + for i=1,#names do + local c = cache[names[i]] + if c and not c.typeset then + return false + end + end + return true +end + +local function markastypeset(name) + local names = getnames(name) + for i=1,#names do + local c = cache[names[i]] + if c then + c.typeset = true + end + end +end + +local function collectcontent(name,separator) -- no print + local names = getnames(name) local nnames = #names if nnames == 0 then return getcontent("") -- default buffer @@ -90,12 +140,10 @@ local function collectcontent(names,separator) -- no print end end -local function loadcontent(names) -- no print - if type(names) == "string" then - names = settings_to_array(names) - end +local function loadcontent(name) -- no print + local names = getnames(name) local nnames = #names - local ok = false + local ok = false if nnames == 0 then ok = load(getcontent("")) -- default buffer elseif nnames == 1 then @@ -127,10 +175,10 @@ local function loadcontent(names) -- no print end end - buffers.raw = getcontent buffers.erase = erase buffers.assign = assign +buffers.prepend = prepend buffers.append = append buffers.exists = exists buffers.getcontent = getcontent @@ -233,7 +281,7 @@ end function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes) -- maybe move \\ to call local dn = getcontent(name) if dn == "" then - nesting = 0 + nesting = 0 continue = false end if trace_grab then @@ -251,8 +299,8 @@ function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes) -- maybe nesting = 
nesting + lpegmatch(counter,bufferdata) local more = nesting > 0 if more then - dn = dn .. sub(bufferdata,2,-1) .. endtag - nesting = nesting - 1 + dn = dn .. sub(bufferdata,2,-1) .. endtag + nesting = nesting - 1 continue = true else if continue then @@ -274,10 +322,7 @@ function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes) -- maybe commands.doifelse(more) end --- The optional prefix hack is there for the typesetbuffer feature and --- in mkii we needed that (this hidden feature is used in a manual). - -local function prepared(name,list,prefix) -- list is optional +function commands.savebuffer(list,name,prefix) -- name is optional if not list or list == "" then list = name end @@ -288,42 +333,51 @@ local function prepared(name,list,prefix) -- list is optional if content == "" then content = "empty buffer" end - if prefix then - local name = file.addsuffix(name,"tmp") - return tex.jobname .. "-" .. name, content - else - return name, content + if prefix == v_yes then + name = addsuffix(tex.jobname .. "-" .. name,"tmp") end + io.savedata(name,content) end -local capsule = "\\starttext\n%s\n\\stoptext\n" -local command = "context %s" - -function commands.runbuffer(name,list,encapsulate) - local name, content = prepared(name,list) - if encapsulate then - content = format(capsule,content) +local files = { } +local last = 0 + +function commands.runbuffer(name,encapsulate) -- we used to compare the saved file with content + local names = getnames(name) + local filename = files[name] + local tobedone = not istypeset(names) + if tobedone or not filename then + last = last + 1 + filename = formatters["%s-typeset-buffer-%03i"](tex.jobname,last) + files[name] = filename end - local data = io.loaddata(name) - if data ~= content then + if tobedone then if trace_run then - report_buffers("changes in %a, processing forced",name) + report_typeset("changes in %a, processing forced",name) + end + local filename = addsuffix(filename,"tmp") + local content = collectcontent(names,nil) or "" + if content == "" then + content = "empty buffer" + end + if encapsulate then + content = formatters["\\starttext\n%s\n\\stoptext\n"](content) end - io.savedata(name,content) - os.execute(format(command,name)) + io.savedata(filename,content) + local command = formatters["context %s %s"](jit and "--jit" or "",filename) + report_typeset("running: %s\n",command) + os.execute(command) + markastypeset(names) elseif trace_run then - report_buffers("no changes in %a, not processed",name) + report_typeset("no changes in %a, not processed",name) end -end - -function commands.savebuffer(list,name,prefix) -- name is optional - local name, content = prepared(name,list,prefix==v_yes) - io.savedata(name,content) + context(replacesuffix(filename,"pdf")) end function commands.getbuffer(name) local str = getcontent(name) if str ~= "" then + -- characters.showstring(str) context.viafile(str,formatters["buffer.%s"](validstring(name,"noname"))) end end diff --git a/tex/context/base/buff-ini.mkiv b/tex/context/base/buff-ini.mkiv index 7616a1dee..4ea3042b6 100644 --- a/tex/context/base/buff-ini.mkiv +++ b/tex/context/base/buff-ini.mkiv @@ -61,6 +61,9 @@ \def\buff_grab_direct_indeed_a[#1][#2][#3][#4]{\buff_start_indeed {#1}{#2}{#3}{#4}} \def\buff_grab_direct_indeed_b[#1][#2][#3][#4]{\buff_start_indeed\empty{#1}{#2}{#3}} +\let\buff_finish\relax +\let\buff_gobble\relax + \unexpanded\def\buff_pickup#1#2#3#4#5% name, startsequence, stopsequence, before, after {\begingroup % (1) #4% @@ -140,6 +143,20 @@ 
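The reworked commands.runbuffer above keeps one generated file per buffer list and only reruns context on it when a member buffer changed since the last run (that is what the typeset flag maintained by istypeset and markastypeset encodes), after which the resulting pdf name is fed back to TeX. A compressed sketch of that control flow, with the bookkeeping reduced to a hypothetical changed flag and without error handling:

    local files, last = { }, 0

    local function runbuffer(name,content,changed)
        local filename = files[name]
        if changed or not filename then
            last = last + 1
            filename = string.format("%s-typeset-buffer-%03i",tex.jobname,last)
            files[name] = filename
            io.savedata(filename .. ".tmp","\\starttext\n" .. content .. "\n\\stoptext\n") -- io.savedata is the l-io helper
            os.execute("context " .. filename .. ".tmp")
        end
        return filename .. ".pdf" -- handed back to tex and included there
    end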
\unexpanded\def\buff_get_stored_indeed#1% {\ctxcommand{getbuffer("#1")}} +\unexpanded\def\getdefinedbuffer[#1]% + {\buff_get_stored{#1}{\thedefinedbuffer{#1}}}% + +\unexpanded\def\inlinebuffer + {\dosingleempty\buff_get_inline} + +\unexpanded\def\buff_get_inline[#1]% [name] + {\doifelsenothing{#1} + {\buff_get_stored_inline_indeed\empty} + {\processcommalist[#1]\buff_get_stored_inline_indeed}} + +\unexpanded\def\buff_get_stored_inline_indeed#1% + {\ignorespaces\ctxcommand{getbuffer("#1")}\removeunwantedspaces} + \definebuffer [\v!hiding] diff --git a/tex/context/base/buff-ver.lua b/tex/context/base/buff-ver.lua index e327a59dd..772008e39 100644 --- a/tex/context/base/buff-ver.lua +++ b/tex/context/base/buff-ver.lua @@ -10,16 +10,16 @@ if not modules then modules = { } end modules ['buff-ver'] = { -- supposed to use different names for their own variants. -- -- todo: skip=auto +-- +-- todo: update to match context scite lexing -local type, next, rawset, rawget, setmetatable, getmetatable = type, next, rawset, rawget, setmetatable, getmetatable +local type, next, rawset, rawget, setmetatable, getmetatable, tonumber = type, next, rawset, rawget, setmetatable, getmetatable, tonumber local format, lower, upper,match, find, sub = string.format, string.lower, string.upper, string.match, string.find, string.sub local splitlines = string.splitlines local concat = table.concat local C, P, R, S, V, Carg, Cc, Cs = lpeg.C, lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Carg, lpeg.Cc, lpeg.Cs local patterns, lpegmatch, is_lpeg = lpeg.patterns, lpeg.match, lpeg.is_lpeg -local context, commands = context, commands - local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end) local report_visualizers = logs.reporter("buffers","visualizers") @@ -29,6 +29,9 @@ visualizers = visualizers or { } local specifications = allocate() visualizers.specifications = specifications +local context = context +local commands = commands + local tabtospace = utilities.strings.tabtospace local variables = interfaces.variables local settings_to_array = utilities.parsers.settings_to_array @@ -38,6 +41,8 @@ local addsuffix = file.addsuffix local v_auto = variables.auto local v_yes = variables.yes +local v_last = variables.last +local v_all = variables.all -- beware, all macros have an argument: @@ -568,7 +573,7 @@ local function realign(lines,strip) -- "yes", if strip == v_yes then n = math.huge for i=1, #lines do - local spaces = find(lines[i],"%S") + local spaces = find(lines[i],"%S") -- can be lpeg if not spaces then -- empty line elseif spaces == 0 then @@ -592,11 +597,13 @@ local function realign(lines,strip) -- "yes", return lines end +local onlyspaces = S(" \t\f\n\r")^0 * P(-1) + local function getstrip(lines,first,last) local first, last = first or 1, last or #lines for i=first,last do local li = lines[i] - if #li == 0 or find(li,"^%s*$") then + if #li == 0 or lpegmatch(onlyspaces,li) then first = first + 1 else break @@ -604,7 +611,7 @@ local function getstrip(lines,first,last) end for i=last,first,-1 do local li = lines[i] - if #li == 0 or find(li,"^%s*$") then + if #li == 0 or lpegmatch(onlyspaces,li) then last = last - 1 else break @@ -730,14 +737,21 @@ end -- parser so we use lpeg. -- -- [[\text ]] [[\text{}]] [[\text \text ]] [[\text \\ \text ]] +-- +-- needed in e.g. 
tabulate (manuals) ------ strip = Cs((P(" ")^1 * P(-1)/"" + 1)^0) -local strip = Cs((P("\\") * ((1-S("\\ "))^1) * (P(" ")/"") + 1)^0) -- +local compact_all = Cs((P("\\") * ((1-S("\\ "))^1) * (P(" ")/"") * (P(-1) + S("[{")) + 1)^0) +local compact_last = Cs((P(" ")^1 * P(-1)/"" + 1)^0) function commands.typestring(settings) local content = settings.data if content and content ~= "" then - content = #content > 1 and lpegmatch(strip,content) or content -- can be an option, but needed in e.g. tabulate + local compact = settings.compact + if compact == v_all then + content = lpegmatch(compact_all,content) + elseif compact == v_last then + content = lpegmatch(compact_last,content) + end -- content = decodecomment(content) -- content = dotabs(content,settings) visualize(content,checkedsettings(settings,"inline")) diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv index 430834a30..dcea15496 100644 --- a/tex/context/base/buff-ver.mkiv +++ b/tex/context/base/buff-ver.mkiv @@ -45,8 +45,10 @@ \unexpanded\def\specialcontrolspace{\hskip\zeropoint\fastcontrolspace\hskip\zeropoint} -\setvalue{\??typinglines\v!yes }{\obeybreakpoints} -\setvalue{\??typinglines\v!hyphenated}{\obeyhyphens} +\setvalue{\??typinglines\v!no }{\buff_verbatim_ignore_hyphens} +\setvalue{\??typinglines\v!normal }{\buff_verbatim_ignore_hyphens} +\setvalue{\??typinglines\v!yes }{\buff_verbatim_obey_breakpoints} +\setvalue{\??typinglines\v!hyphenated}{\buff_verbatim_obey_hyphens} \setvalue{\??typingspace\v!on }{\let\obeyedspace\specialcontrolspace} \setvalue{\??typingspace\v!stretch }{\let\obeyedspace\specialstretchedspace} @@ -61,15 +63,36 @@ \setvalue{\??typingblank\v!line }{\baselineskip} \setvalue{\??typingblank\v!none }{\zeropoint} +\unexpanded\def\buff_verbatim_obey_hyphens + {} + +\unexpanded\def\buff_verbatim_obey_breakpoints + {\language\minusone % tricky as this affects the pagebuilder + \veryraggedright} + +\unexpanded\def\buff_verbatim_ignore_hyphens + {\language\minusone} % tricky as this affects the pagebuilder + +\def\buff_verbatim_initialize_breaks % order matters + {\spaceskip.5\emwidth\relax + \let\obeyedspace\specialobeyedspace + \let\controlspace\specialcontrolspace + \edef\p_buff_lines{\typeparameter\c!lines}% + \ifcsname\??typinglines\p_buff_lines\endcsname % sets \obeyedspace, \controlspace, + \csname\??typinglines\p_buff_lines\endcsname + \fi + \edef\p_buff_space{\typeparameter\c!space}% + \ifcsname\??typingspace\p_buff_space\endcsname % sets \obeyedspace + \csname\??typingspace\p_buff_space\endcsname + \fi} + \def\buff_verbatim_initialize_type_one {\let\obeylines\ignorelines - \ignorehyphens % default \usetypestyleandcolor\c!style\c!color \setcatcodetable\vrbcatcodes} \def\buff_verbatim_initialize_type_two - {\let\obeyedspace\specialobeyedspace - \csname\??typingspace\typeparameter\c!space\endcsname + {\buff_verbatim_initialize_breaks \relax\the\everyinitializeverbatim\relax} \unexpanded\def\doinitializeverbatim % for use elsewhere .. 
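The two substitutions above implement the new compact key of inline verbatim: all removes the space after a control word when a { or [ (or the end of the snippet) follows, last only strips trailing spaces; the first variant is what tabulate in the manuals needs. A standalone check with the patterns copied from the hunk:

    local P, S, Cs = lpeg.P, lpeg.S, lpeg.Cs
    local lpegmatch = lpeg.match

    local compact_all  = Cs((P("\\") * ((1-S("\\ "))^1) * (P(" ")/"") * (P(-1) + S("[{")) + 1)^0)
    local compact_last = Cs((P(" ")^1 * P(-1)/"" + 1)^0)

    print(lpegmatch(compact_all ,"\\bf {x} plus \\tt y")) -- \bf{x} plus \tt y
    print(lpegmatch(compact_last,"some text   "))         -- some text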
temp hack (see lxml-ini) @@ -98,13 +121,10 @@ {\switchtobodyfont[\typingparameter\c!bodyfont]% can be low level call \buff_verbatim_check_margins \usetypingstyleandcolor\c!style\c!color - \doifsomething{\typingparameter\c!align}{\setupalign[\typingparameter\c!align]}% - \ignorehyphens} % default + \doifsomething{\typingparameter\c!align}{\setupalign[\typingparameter\c!align]}} \def\buff_verbatim_initialize_typing_two - {\let\obeyedspace\specialobeyedspace - \csname\??typingspace\typingparameter\c!space\endcsname - \csname\??typinglines\typingparameter\c!lines\endcsname + {\buff_verbatim_initialize_breaks \relax\the\everyinitializeverbatim\relax} %D \macros @@ -255,7 +275,7 @@ {\dontleavehmode \bgroup \edef\currenttype{#1}% - \lettypeparameter\v!lines\v!hyphenated + \lettypeparameter\c!lines\v!hyphenated \let\specialobeyedspace\specialstretchedspace \doifnextoptionalelse\buff_verbatim_type_yes\buff_verbatim_type_nop} @@ -298,10 +318,11 @@ {\buff_verbatim_initialize_type_two \dostarttagged\t!verbatim\currenttype \ctxcommand{typestring{ - data = \!!bs\detokenize{#1}\!!es, - tab = "\typeparameter\c!tab", - method = "\typeparameter\c!option", - nature = "inline", + data = \!!bs\detokenize{#1}\!!es, + tab = "\typeparameter\c!tab", + method = "\typeparameter\c!option", + nature = "inline", + compact = "\typeparameter\c!compact", % none | all | last (all needed in tabulate etc for manuals) }}% \dostoptagged \buff_verbatim_right_of_type @@ -334,7 +355,7 @@ \unexpanded\def\specialfixedspace {\kern\interwordspace\relax} \unexpanded\def\specialobeyedspace {\hskip\interwordspace\relax} % better than spaceskip -\unexpanded\def\specialstretchedspace{\hskip.5\interwordspace\s!plus.125\interwordstretch\relax} % more but not less +\unexpanded\def\specialstretchedspace{\hskip.5\interwordspace\s!plus.125\interwordspace\relax} % \interwordstretch can be zero \unexpanded\def\specialcontrolspace {\hskip\zeropoint\hbox{\normalcontrolspace}\hskip\zeropoint\relax} \unexpanded\def\obeyhyphens @@ -350,7 +371,7 @@ {\language\minusone % extra bonus, the \null should do the job too \let\obeyedspace \specialobeyedspace \let\controlspace\specialcontrolspace - \spaceskip.5em\relax} + \spaceskip.5\emwidth\relax} %D \macros %D {tex,arg,mat,dis,astype} @@ -378,7 +399,9 @@ \catcode\leftbraceasciicode \begingroupcatcode \catcode\rightbraceasciicode\endgroupcatcode #1% + \bgroup % else wrong font for #2 \aftergroup#2% + \aftergroup\egroup \let\nexttoken} \unexpanded\def\tex{\buff_verbatim_special_type\texescape \relax} @@ -640,6 +663,7 @@ %\c!bodyfont=, %\c!color=, \c!space=\v!off, + \c!lines=\v!no, \c!page=\v!no, \c!tab=\v!yes, % what is this: \s!ascii ? 
\c!option=\v!none, @@ -652,7 +676,6 @@ \c!blank=\v!line, %\c!escape=, % yes | no | {START,STOP} | default when yes: {BTEX,ETEX} \c!numbering=\v!no, - %\c!lines=, %\c!range=, \c!start=1, %\c!stop=, @@ -669,11 +692,13 @@ \setuptype [\c!space=\v!off, + \c!lines=\v!no, %\c!color=, \c!style=\tt, %\c!option=\v!normal, \c!page=\v!no, - \c!tab=\v!yes] + \c!tab=\v!yes, + \c!compact=\v!all] %D Buffers @@ -685,6 +710,9 @@ \unexpanded\def\buff_verbatim_type_defined_buffer {\dotripleempty\buff_verbatim_type_defined_buffer_indeed} +\unexpanded\def\typedefinedbuffer[#1]% + {\buff_verbatim_type_defined_buffer[\v!buffer][\thedefinedbuffer{#1}]}% + \appendtoks \setuevalue{\e!type\currentbuffer}{\buff_verbatim_type_defined_buffer[\v!buffer][\currentdefinedbuffer]}% \to \everydefinebuffer diff --git a/tex/context/base/catc-def.mkiv b/tex/context/base/catc-def.mkiv index cfbaed171..bf794f045 100644 --- a/tex/context/base/catc-def.mkiv +++ b/tex/context/base/catc-def.mkiv @@ -142,4 +142,6 @@ \normalprotected\def\unprotect{\pushcatcodetable\setcatcodetable\prtcatcodes} \normalprotected\def\protect {\popcatcodetable} +% \prependtoks \catcodetable\ctxcatcodes \to \everyjob + \endinput diff --git a/tex/context/base/catc-sym.mkiv b/tex/context/base/catc-sym.mkiv index 82169d994..425a5393c 100644 --- a/tex/context/base/catc-sym.mkiv +++ b/tex/context/base/catc-sym.mkiv @@ -71,7 +71,8 @@ \normalprotected\def\uncatcodespacetokens {\catcode\spaceasciicode \spacecatcode - \catcode\formfeedasciicode \ignorecatcode + \catcode\tabasciicode \spacecatcode + \catcode\formfeedasciicode \endoflinecatcode \catcode\endoflineasciicode\endoflinecatcode \catcode\delasciicode \ignorecatcode} diff --git a/tex/context/base/char-cjk.lua b/tex/context/base/char-cjk.lua deleted file mode 100644 index 3d7de1423..000000000 --- a/tex/context/base/char-cjk.lua +++ /dev/null @@ -1,365 +0,0 @@ -if not modules then modules = { } end modules ['char-cjk'] = { - version = 1.001, - comment = "companion to char-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local setmetatable = setmetatable -local insert = table.insert -local floor = math.floor -local format = string.format -local utfchar = utf.char - -local ranges = characters.ranges -local allocate = utilities.storage.allocate - --- Hangul Syllable - --- The following conversion is taken from unicode.org/reports/tr15/tr15-23.html#Hangul --- but adapted to our needs. - --- local SBase = 0xAC00 --- --- local LBase, LCount = 0x1100, 19 --- local VBase, VCount = 0x1161, 21 --- local TBase, TCount = 0x11A7, 28 --- --- local NCount = VCount * TCount --- local SCount = LCount * NCount --- --- local function decomposed(unicode) --- local SIndex = unicode - SBase --- if SIndex >= 0 and SIndex < SCount then --- local lead_consonant = LBase + floor( SIndex / NCount) --- local medial_vowel = VBase + floor((SIndex % NCount) / TCount) --- local tail_consonant = TBase + SIndex % TCount --- if tail_consonant ~= TBase then --- return lead_consonant, medial_vowel, tail_consonant --- else --- return lead_consonant, medial_vowel --- end --- end --- end --- --- Lua will optimize the inline constants so the next variant is --- 10% faster. In practice this will go unnoticed, but it's also less --- code, so let's do it. Pushing the constant section into the --- function body saves 5%. 
- -local function decomposed(unicode) - local index = unicode - 0xAC00 - if index >= 0 and index < 19 * 21 * 28 then - local lead_consonant = 0x1100 + floor( index / (21 * 28)) - local medial_vowel = 0x1161 + floor((index % (21 * 28)) / 28) - local tail_consonant = 0x11A7 + index % 28 - if tail_consonant ~= 0x11A7 then - return lead_consonant, medial_vowel, tail_consonant - else - return lead_consonant, medial_vowel - end - end -end - -local lead_consonants = { [0] = - "G", "GG", "N", "D", "DD", "R", "M", "B", "BB", - "S", "SS", "", "J", "JJ", "C", "K", "T", "P", "H" -} - -local medial_vowels = { [0] = - "A", "AE", "YA", "YAE", "EO", "E", "YEO", "YE", "O", - "WA", "WAE", "OE", "YO", "U", "WEO", "WE", "WI", - "YU", "EU", "YI", "I" -} - -local tail_consonants = { [0] = - "", "G", "GG", "GS", "N", "NJ", "NH", "D", "L", "LG", "LM", - "LB", "LS", "LT", "LP", "LH", "M", "B", "BS", - "S", "SS", "NG", "J", "C", "K", "T", "P", "H" -} - --- local function description(unicode) --- local index = unicode - 0xAC00 --- if index >= 0 and index < 19 * 21 * 28 then --- local lead_consonant = floor( index / NCount) --- local medial_vowel = floor((index % NCount) / TCount) --- local tail_consonant = index % TCount --- return format( --- "HANGUL SYLLABLE %s%s%s", --- lead_consonants[lead_consonant], --- medial_vowels [medial_vowel ], --- tail_consonants[tail_consonant] --- ) --- end --- end - -local function description(unicode) - local index = unicode - 0xAC00 - if index >= 0 and index < 19 * 21 * 28 then - local lead_consonant = floor( index / (21 * 28)) - local medial_vowel = floor((index % (21 * 28)) / 28) - local tail_consonant = index % 28 - return format( - "HANGUL SYLLABLE %s%s%s", - lead_consonants[lead_consonant], - medial_vowels [medial_vowel ], - tail_consonants[tail_consonant] - ) - end -end - --- so far - --- We have a [lead consonant,medial vowel,tail consonant] where the last one --- is optional. For sort ranges we need the first one but some are collapsed. --- Beware, we map to modern so the font should support it. 
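As a concrete check of the decomposition that the removed function implements (the standard Hangul syllable algorithm with the constants inlined): the syllable 한 U+D55C splits into lead, vowel and tail jamo as follows, in plain Lua arithmetic:

    local floor = math.floor

    local index = 0xD55C - 0xAC00                           -- relative to the syllable base
    local lead  = 0x1100 + floor(index / (21 * 28))         -- 0x1112 HANGUL CHOSEONG HIEUH  (H)
    local vowel = 0x1161 + floor((index % (21 * 28)) / 28)  -- 0x1161 HANGUL JUNGSEONG A     (A)
    local tail  = 0x11A7 + index % 28                       -- 0x11AB HANGUL JONGSEONG NIEUN (N)

    print(string.format("U+%04X U+%04X U+%04X",lead,vowel,tail))
    -- U+1112 U+1161 U+11AB, which the description tables turn into "HANGUL SYLLABLE HAN"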
- -local function leadconsonant(unicode) - return - -- unicode < 0xAC00 and nil -- original - -- unicode > 0xD7AF and nil or -- original - unicode >= 0xD558 and 0x314E or -- 하 => ㅎ - unicode >= 0xD30C and 0x314D or -- 파 => ㅍ - unicode >= 0xD0C0 and 0x314C or -- 타 => ㅌ - unicode >= 0xCE74 and 0x314B or -- 카 => ㅋ - unicode >= 0xCC28 and 0x314A or -- 차 => ㅊ - unicode >= 0xC790 and 0x3148 or -- 자 => ㅈ - unicode >= 0xC544 and 0x3147 or -- 아 => ㅇ - unicode >= 0xC0AC and 0x3145 or -- 사 => ㅅ - unicode >= 0xBC14 and 0x3142 or -- 바 => ㅂ - unicode >= 0xB9C8 and 0x3141 or -- 마 => ㅁ - unicode >= 0xB77C and 0x3139 or -- 라 => ㄹ - unicode >= 0xB2E4 and 0x3137 or -- 다 => ㄷ - unicode >= 0xB098 and 0x3134 or -- 나 => ㄴ - unicode >= 0xAC00 and 0x3131 or -- 가 => ㄱ - nil -- can't happen -end - -local remapped = { -- this might be merged into char-def.lua - [0x1100] = 0x3131, -- G - [0x1101] = 0x3132, -- GG - [0x1102] = 0x3134, -- N - [0x1103] = 0x3137, -- D - [0x1104] = 0x3138, -- DD - [0x1105] = 0x3139, -- R - -- [0X111A] = 0x3140, -- LH used for last sound - [0x1106] = 0x3141, -- M - [0x1107] = 0x3142, -- B - [0x1108] = 0x3143, -- BB - -- [0x1121] = 0x3144, -- BS used for last sound - [0x1109] = 0x3145, -- S - [0x110A] = 0x3146, -- SS - [0x110B] = 0x3147, -- (IEUNG) no sound but has form - [0x110C] = 0x3148, -- J - [0x110D] = 0x3149, -- JJ - [0x110E] = 0x314A, -- C - [0x110F] = 0x314B, -- K - [0x1110] = 0x314C, -- T - [0x1111] = 0x314D, -- P - [0x1112] = 0x314E, -- H - - [0x1161] = 0x314F, -- A - [0x1162] = 0x3150, -- AE - [0x1163] = 0x3151, -- YA - [0x1164] = 0x3152, -- YAE - [0x1165] = 0x3153, -- EO - [0x1166] = 0x3154, -- E - [0x1167] = 0x3155, -- YEO - [0x1168] = 0x3156, -- YE - [0x1169] = 0x3157, -- O - [0x116A] = 0x3158, -- WA - [0x116B] = 0x3159, -- WAE - [0x116C] = 0x315A, -- OE - [0x116D] = 0x315B, -- YO - [0x116E] = 0x315C, -- U - [0x116F] = 0x315D, -- WEO - [0x1170] = 0x315E, -- WE - [0x1171] = 0x315F, -- WI - [0x1172] = 0x3160, -- YU - [0x1173] = 0x3161, -- EU - [0x1174] = 0x3162, -- YI - [0x1175] = 0x3163, -- I - - [0x11A8] = 0x3131, -- G - [0x11A9] = 0x3132, -- GG - [0x11AA] = 0x3133, -- GS - [0x11AB] = 0x3134, -- N - [0x11AC] = 0x3135, -- NJ - [0x11AD] = 0x3136, -- NH - [0x11AE] = 0x3137, -- D - [0x11AF] = 0x3139, -- L - [0x11B0] = 0x313A, -- LG - [0x11B1] = 0x313B, -- LM - [0x11B2] = 0x313C, -- LB - [0x11B3] = 0x313D, -- LS - [0x11B4] = 0x313E, -- LT - [0x11B5] = 0x313F, -- LP - [0x11B6] = 0x3140, -- LH - [0x11B7] = 0x3141, -- M - [0x11B8] = 0x3142, -- B - [0x11B9] = 0x3144, -- BS - [0x11BA] = 0x3145, -- S - [0x11BB] = 0x3146, -- SS - [0x11BC] = 0x3147, -- NG - [0x11BD] = 0x3148, -- J - [0x11BE] = 0x314A, -- C - [0x11BF] = 0x314B, -- K - [0x11C0] = 0x314C, -- T - [0x11C1] = 0x314D, -- P - [0x11C2] = 0x314E, -- H -} - -characters.hangul = allocate { - decomposed = decomposed, - description = description, - leadconsonant = leadconsonant, - remapped = remapped, -} - --- so far - -local hangul_syllable_basetable = { - category = "lo", - cjkwd = "w", - description = "", - direction = "l", - linebreak = "h2", -} - -local hangul_syllable_metatable = { - __index = function(t,k) - local u = t.unicodeslot - if k == "fscode" or k == "leadconsonant" then - return leadconsonant(u) - elseif k == "decomposed" then - return { decomposed(u) } - elseif k == "specials" then - return { "char", decomposed(u) } - elseif k == "description" then - return description(u) - else - return hangul_syllable_basetable[k] - end - end -} - -function characters.remap_hangul_syllabe(t) - local tt = type(t) - if tt == "number" 
then - return remapped[t] or t - elseif tt == "table" then - local r = { } - for i=1,#t do - local ti = t[i] - r[i] = remapped[ti] or ti - end - return r - else - return t - end -end - -local hangul_syllable_extender = function(k,v) - local t = { - unicodeslot = k, - } - setmetatable(t,hangul_syllable_metatable) - return t -end - -local hangul_syllable_range = { - first = 0xAC00, - last = 0xD7A3, - extender = hangul_syllable_extender, -} - -setmetatable(hangul_syllable_range, hangul_syllable_metatable) - --- CJK Ideograph - -local cjk_ideograph_metatable = { - __index = { - category = "lo", - cjkwd = "w", - description = "", - direction = "l", - linebreak = "id", - } -} - -local cjk_ideograph_extender = function(k,v) - local t = { - -- shcode = shcode, - unicodeslot = k, - } - setmetatable(t,cjk_ideograph_metatable) - return t -end - -local cjk_ideograph_range = { - first = 0x4E00, - last = 0x9FBB, - extender = cjk_ideograph_extender, -} - --- CJK Ideograph Extension A - -local cjk_ideograph_extension_a_metatable = { - __index = { - category = "lo", - cjkwd = "w", - description = "", - direction = "l", - linebreak = "id", - } -} - -local cjk_ideograph_extension_a_extender = function(k,v) - local t = { - -- shcode = shcode, - unicodeslot = k, - } - setmetatable(t,cjk_ideograph_extension_a_metatable) - return t -end - -local cjk_ideograph_extension_a_range = { - first = 0x3400, - last = 0x4DB5, - extender = cjk_ideograph_extension_a_extender, -} - --- CJK Ideograph Extension B - -local cjk_ideograph_extension_b_metatable = { - __index = { - category = "lo", - cjkwd = "w", - description = "", - direction = "l", - linebreak = "id", - } -} - -local cjk_ideograph_extension_b_extender = function(k,v) - local t = { - -- shcode = shcode, - unicodeslot = k, - } - setmetatable(t,cjk_ideograph_extension_b_metatable) - return t -end - -local cjk_ideograph_extension_b_range = { - first = 0x20000, - last = 0x2A6D6, - extender = cjk_ideograph_extension_b_extender, -} - --- Ranges - -insert(ranges, hangul_syllable_range) -insert(ranges, cjk_ideograph_range) -insert(ranges, cjk_ideograph_extension_a_range) -insert(ranges, cjk_ideograph_extension_b_range) diff --git a/tex/context/base/char-def.lua b/tex/context/base/char-def.lua index 7d2df5d41..1261bd45a 100644 --- a/tex/context/base/char-def.lua +++ b/tex/context/base/char-def.lua @@ -369,6 +369,7 @@ characters.data={ mathclass="open", mathname="lparent", mirror=0x0029, + textclass="open", unicodeslot=0x0028, }, { @@ -381,6 +382,7 @@ characters.data={ mathclass="close", mathname="rparent", mirror=0x0028, + textclass="close", unicodeslot=0x0029, }, { @@ -638,6 +640,7 @@ characters.data={ mathclass="relation", mathname="lt", mirror=0x003E, + textclass="open", unicodeslot=0x003C, }, { @@ -672,6 +675,7 @@ characters.data={ mathclass="relation", mathname="gt", mirror=0x003C, + textclass="close", unicodeslot=0x003E, }, { @@ -990,8 +994,9 @@ characters.data={ direction="on", linebreak="op", mathclass="open", - mathname="lbrack", + mathname="lbracket", mirror=0x005D, + textclass="open", unicodeslot=0x005B, }, { @@ -1014,8 +1019,9 @@ characters.data={ direction="on", linebreak="cp", mathclass="close", - mathname="rbrack", + mathname="rbracket", mirror=0x005B, + textclass="close", unicodeslot=0x005D, }, { @@ -1349,6 +1355,7 @@ characters.data={ mathclass="open", mathname="lbrace", mirror=0x007D, + textclass="open", unicodeslot=0x007B, }, { @@ -1381,6 +1388,10 @@ characters.data={ class="relation", name="mid", }, + { + class="delimiter", + name="singleverticalbar", + 
}, }, unicodeslot=0x007C, }, @@ -1395,6 +1406,7 @@ characters.data={ mathclass="close", mathname="rbrace", mirror=0x007B, + textclass="close", unicodeslot=0x007D, }, { @@ -1764,6 +1776,7 @@ characters.data={ direction="on", linebreak="qu", mirror=0x00BB, + textclass="open", unicodeslot=0x00AB, }, { @@ -1952,6 +1965,7 @@ characters.data={ direction="on", linebreak="qu", mirror=0x00AB, + textclass="close", unicodeslot=0x00BB, }, { @@ -57286,6 +57300,10 @@ characters.data={ class="close", name="rVert", }, + { + class="delimiter", + name="doubleverticalbar", + }, }, unicodeslot=0x2016, }, @@ -57597,6 +57615,8 @@ characters.data={ description="REVERSED PRIME", direction="on", linebreak="po", + mathclass="nothing", + mathname="reversedprime", unicodeslot=0x2035, }, [0x2036]={ @@ -57604,6 +57624,8 @@ characters.data={ description="REVERSED DOUBLE PRIME", direction="on", linebreak="po", + mathclass="nothing", + mathname="reverseddoubleprime", specials={ "compat", 0x2035, 0x2035 }, unicodeslot=0x2036, }, @@ -57612,6 +57634,8 @@ characters.data={ description="REVERSED TRIPLE PRIME", direction="on", linebreak="po", + mathclass="nothing", + mathname="reversedtripleprime", specials={ "compat", 0x2035, 0x2035, 0x2035 }, unicodeslot=0x2037, }, @@ -57630,6 +57654,7 @@ characters.data={ direction="on", linebreak="qu", mirror=0x203A, + textclass="open", unicodeslot=0x2039, }, [0x203A]={ @@ -57640,6 +57665,7 @@ characters.data={ direction="on", linebreak="qu", mirror=0x2039, + textclass="close", unicodeslot=0x203A, }, [0x203B]={ @@ -57675,19 +57701,23 @@ characters.data={ adobename="overline", category="po", cjkwd="a", + comment=[[mathspec={ { class="topaccent", name="overbar" }, { class="botaccent", name="underbar" } }"]], description="OVERLINE", direction="on", linebreak="al", + mathextensible="l", + mathfiller="barfill", mathspec={ { - class="topaccent", - name="overbar", + class="under", + name="underbar", }, { - class="botaccent", - name="underbar", + class="over", + name="overbar", }, }, + mathstretch="h", specials={ "compat", 0x0020, 0x0305 }, unicodeslot=0x203E, }, @@ -57886,6 +57916,8 @@ characters.data={ description="QUADRUPLE PRIME", direction="on", linebreak="al", + mathclass="nothing", + mathname="quadrupleprime", specials={ "compat", 0x2032, 0x2032, 0x2032, 0x2032 }, unicodeslot=0x2057, }, @@ -58135,6 +58167,7 @@ characters.data={ direction="on", linebreak="op", mirror=0x207E, + textclass="open", specials={ "super", 0x0028 }, unicodeslot=0x207D, }, @@ -58145,6 +58178,7 @@ characters.data={ direction="on", linebreak="cl", mirror=0x207D, + textclass="close", specials={ "super", 0x0029 }, unicodeslot=0x207E, }, @@ -58283,6 +58317,7 @@ characters.data={ direction="on", linebreak="op", mirror=0x208E, + textclass="open", specials={ "sub", 0x0028 }, unicodeslot=0x208D, }, @@ -58293,6 +58328,7 @@ characters.data={ direction="on", linebreak="cl", mirror=0x208D, + textclass="close", specials={ "sub", 0x0029 }, unicodeslot=0x208E, }, @@ -61516,6 +61552,8 @@ characters.data={ description="NABLA", direction="on", linebreak="ai", + mathclass="default", + mathname="nabla", unicodeslot=0x2207, }, [0x2208]={ @@ -61742,8 +61780,20 @@ characters.data={ description="SQUARE ROOT", direction="on", linebreak="ai", - mathclass="radical", - mathname="surd", + mathspec={ + { + class="root", + name="rootradical", + }, + { + class="radical", + name="surdradical", + }, + { + class="ordinary", + name="surd", + }, + }, unicodeslot=0x221A, }, [0x221B]={ @@ -64200,8 +64250,10 @@ characters.data={ description="DIAMETER SIGN", 
direction="on", linebreak="al", - mathclass="ordinary", - mathname="varnothing", + mathspec={ + { class="ord", name="varnothing" }, + { class="ord", name="diameter" }, + }, unicodeslot=0x2300, }, [0x2301]={ @@ -74057,16 +74109,20 @@ characters.data={ }, [0x27F6]={ category="sm", + comment="the parent hack is needed for mathml", description="LONG RIGHTWARDS ARROW", direction="on", linebreak="al", mathclass="relation", mathextensible="r", + mathfiller="rightarrowfill", mathname="longrightarrow", + mathparent=0x2192, unicodeslot=0x27F6, }, [0x27F7]={ category="sm", + comment="the parent hack is needed for mathml", description="LONG LEFT RIGHT ARROW", direction="on", linebreak="al", @@ -74074,6 +74130,7 @@ characters.data={ mathextensible="h", mathfiller="leftrightarrowfill", mathname="longleftrightarrow", + mathparent=0x2190, unicodeslot=0x27F7, }, [0x27F8]={ @@ -76935,6 +76992,12 @@ characters.data={ description="TRIPLE VERTICAL BAR DELIMITER", direction="on", linebreak="al", + mathspec={ + { + class="delimiter", + name="tripleverticalbar", + }, + }, unicodeslot=0x2980, }, [0x2981]={ @@ -127677,6 +127740,7 @@ characters.data={ direction="on", linebreak="op", mirror=0xFE5A, + textclass="open", specials={ "small", 0x0028 }, unicodeslot=0xFE59, }, @@ -127688,6 +127752,7 @@ characters.data={ direction="on", linebreak="cl", mirror=0xFE59, + textclass="close", specials={ "small", 0x0029 }, unicodeslot=0xFE5A, }, @@ -127699,6 +127764,7 @@ characters.data={ direction="on", linebreak="op", mirror=0xFE5C, + textclass="open", specials={ "small", 0x007B }, unicodeslot=0xFE5B, }, @@ -127710,6 +127776,7 @@ characters.data={ direction="on", linebreak="cl", mirror=0xFE5B, + textclass="close", specials={ "small", 0x007D }, unicodeslot=0xFE5C, }, @@ -127721,6 +127788,7 @@ characters.data={ direction="on", linebreak="op", mirror=0xFE5E, + textclass="open", specials={ "small", 0x3014 }, unicodeslot=0xFE5D, }, @@ -127732,6 +127800,7 @@ characters.data={ direction="on", linebreak="cl", mirror=0xFE5D, + textclass="close", specials={ "small", 0x3015 }, unicodeslot=0xFE5E, }, @@ -127792,6 +127861,7 @@ characters.data={ direction="on", linebreak="id", mirror=0xFE65, + textclass="open", specials={ "small", 0x003C }, unicodeslot=0xFE64, }, @@ -127803,6 +127873,7 @@ characters.data={ direction="on", linebreak="id", mirror=0xFE64, + textclass="close", specials={ "small", 0x003E }, unicodeslot=0xFE65, }, @@ -129162,6 +129233,7 @@ characters.data={ direction="on", linebreak="op", mirror=0xFF09, + textclass="open", specials={ "wide", 0x0028 }, unicodeslot=0xFF08, }, @@ -129173,6 +129245,7 @@ characters.data={ direction="on", linebreak="cl", mirror=0xFF08, + textclass="close", specials={ "wide", 0x0029 }, unicodeslot=0xFF09, }, @@ -129364,6 +129437,7 @@ characters.data={ direction="on", linebreak="id", mirror=0xFF1E, + textclass="open", specials={ "wide", 0x003C }, unicodeslot=0xFF1C, }, @@ -129385,6 +129459,7 @@ characters.data={ direction="on", linebreak="id", mirror=0xFF1C, + textclass="close", specials={ "wide", 0x003E }, unicodeslot=0xFF1E, }, @@ -129702,6 +129777,7 @@ characters.data={ direction="on", linebreak="op", mirror=0xFF3D, + textclass="open", specials={ "wide", 0x005B }, unicodeslot=0xFF3B, }, @@ -129723,6 +129799,7 @@ characters.data={ direction="on", linebreak="cl", mirror=0xFF3B, + textclass="close", specials={ "wide", 0x005D }, unicodeslot=0xFF3D, }, @@ -130050,6 +130127,7 @@ characters.data={ direction="on", linebreak="op", mirror=0xFF5D, + textclass="open", specials={ "wide", 0x007B }, 
unicodeslot=0xFF5B, }, @@ -130071,6 +130149,7 @@ characters.data={ direction="on", linebreak="cl", mirror=0xFF5B, + textclass="close", specials={ "wide", 0x007D }, unicodeslot=0xFF5D, }, @@ -130091,6 +130170,7 @@ characters.data={ direction="on", linebreak="op", mirror=0xFF60, + textclass="open", specials={ "wide", 0x2985 }, unicodeslot=0xFF5F, }, @@ -130101,6 +130181,7 @@ characters.data={ direction="on", linebreak="cl", mirror=0xFF5F, + textclass="close", specials={ "wide", 0x2986 }, unicodeslot=0xFF60, }, @@ -130122,6 +130203,7 @@ characters.data={ direction="on", linebreak="op", mirror=0xFF63, + textclass="open", specials={ "narrow", 0x300C }, unicodeslot=0xFF62, }, @@ -130133,6 +130215,7 @@ characters.data={ direction="on", linebreak="cl", mirror=0xFF62, + textclass="close", specials={ "narrow", 0x300D }, unicodeslot=0xFF63, }, @@ -173802,7 +173885,6 @@ characters.data={ }, [0x1D6FB]={ category="sm", - comment="mathname='nabla'", description="MATHEMATICAL ITALIC NABLA", direction="l", linebreak="al", diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua index b75f5eda7..a94aa6b5f 100644 --- a/tex/context/base/char-ini.lua +++ b/tex/context/base/char-ini.lua @@ -10,7 +10,7 @@ if not modules then modules = { } end modules ['char-ini'] = { -- we can remove the tag range starting at 0xE0000 (special applications) -local utfchar, utfbyte, utfvalues, ustring = utf.char, utf.byte, utf.values, utf.ustring +local utfchar, utfbyte, utfvalues, ustring, utotable = utf.char, utf.byte, utf.values, utf.ustring, utf.totable local concat, unpack, tohash = table.concat, table.unpack, table.tohash local next, tonumber, type, rawget, rawset = next, tonumber, type, rawget, rawset local format, lower, gsub, match, gmatch = string.format, string.lower, string.gsub, string.match, string.match, string.gmatch @@ -167,7 +167,7 @@ local blocks = allocate { ["cjkradicalssupplement"] = { first = 0x02E80, last = 0x02EFF, otf="hang", description = "CJK Radicals Supplement" }, ["cjkstrokes"] = { first = 0x031C0, last = 0x031EF, otf="hang", description = "CJK Strokes" }, ["cjksymbolsandpunctuation"] = { first = 0x03000, last = 0x0303F, otf="hang", description = "CJK Symbols and Punctuation" }, - ["cjkunifiedideographs"] = { first = 0x04E00, last = 0x09FFF, otf="hang", description = "CJK Unified Ideographs" }, + ["cjkunifiedideographs"] = { first = 0x04E00, last = 0x09FFF, otf="hang", description = "CJK Unified Ideographs", catcode = "letter" }, ["cjkunifiedideographsextensiona"] = { first = 0x03400, last = 0x04DBF, otf="hang", description = "CJK Unified Ideographs Extension A" }, ["cjkunifiedideographsextensionb"] = { first = 0x20000, last = 0x2A6DF, otf="hang", description = "CJK Unified Ideographs Extension B" }, ["combiningdiacriticalmarks"] = { first = 0x00300, last = 0x0036F, description = "Combining Diacritical Marks" }, @@ -483,6 +483,8 @@ setmetatableindex(characters.is_letter, mt) setmetatableindex(characters.is_command, mt) setmetatableindex(characters.is_spacing, mt) +-- todo: also define callers for the above + -- linebreak: todo: hash -- -- normative : BK CR LF CM SG GL CB SP ZW NL WJ JL JV JT H2 H3 @@ -557,6 +559,36 @@ setmetatableindex(characters.directions,function(t,k) return v end) +characters.mirrors = { } + +setmetatableindex(characters.mirrors,function(t,k) + local d = data[k] + if d then + local v = d.mirror + if v then + t[k] = v + return v + end + end + t[k] = false + return v +end) + +characters.textclasses = { } + +setmetatableindex(characters.textclasses,function(t,k) + 
local d = data[k] + if d then + local v = d.textclass + if v then + t[k] = v + return v + end + end + t[k] = false + return v +end) + --[[ldx--

Next comes a whole series of helper methods. These are (will be) part of the official .

@@ -916,8 +948,19 @@ end local tracedchars = utilities.strings.tracers tracedchars[0x00] = "[signal]" +tracedchars[0x0A] = "[linefeed]" +tracedchars[0x0B] = "[tab]" +tracedchars[0x0C] = "[formfeed]" +tracedchars[0x0D] = "[return]" tracedchars[0x20] = "[space]" +function characters.showstring(str) + local list = utotable(str) + for i=1,#list do + report_defining("split % 3i : %C",i,list[i]) + end +end + -- the following code will move to char-tex.lua -- tex @@ -1022,13 +1065,20 @@ function characters.define(tobelettered, tobeactivated) -- catcodetables end local range = chr.range if range then - for i=1,range.first,range.last do + for i=1,range.first,range.last do -- tricky as not all are letters texsetcatcode(i,11) end end end texsetcatcode(0x200C,11) -- non-joiner texsetcatcode(0x200D,11) -- joiner + for k, v in next, blocks do + if v.catcode == "letter" then + for i=v.first,v.last do + texsetcatcode(i,11) + end + end + end end tex.catcodetable = saved end @@ -1130,6 +1180,15 @@ directives.register("characters.spaceafteruppercase",function(v) end end) +-- tex + +function commands.chardescription(slot) + local d = data[slot] + if d then + context(d.description) + end +end + -- xml characters.activeoffset = 0x10000 -- there will be remapped in that byte range @@ -1155,4 +1214,3 @@ end -- entities.amp = utfchar(characters.activeoffset + utfbyte("&")) -- entities.gt = utfchar(characters.activeoffset + utfbyte(">")) -- end - diff --git a/tex/context/base/char-ini.mkiv b/tex/context/base/char-ini.mkiv index 0e2b773f0..113d26709 100644 --- a/tex/context/base/char-ini.mkiv +++ b/tex/context/base/char-ini.mkiv @@ -91,4 +91,6 @@ % catcodes.register("xmlcatcodes",\number\xmlcatcodes) } +\def\chardescription#1{\ctxcommand{chardescription(\number#1)}} + \protect \endinput diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua index d0e40e664..95ed48279 100644 --- a/tex/context/base/char-utf.lua +++ b/tex/context/base/char-utf.lua @@ -6,14 +6,19 @@ if not modules then modules = { } end modules ['char-utf'] = { license = "see context related readme files" } +-- todo: trackers +-- todo: no longer special characters (high) here, only needed in special cases and +-- these don't go through this file anyway +-- graphemes: basic symbols + --[[ldx-- -

When a sequence of characters enters the application, it may -be neccessary to collapse subsequences into their composed variant.

+

When a sequence of characters enters the application, it may be +neccessary to collapse subsequences into their composed variant.

This module implements methods for collapsing and expanding -sequences. We also provide means to deal with characters that are -special to TeX as well as 8-bit characters that need to end up -in special kinds of output (for instance PDF).

+sequences. We also provide means to deal with characters that are special to + as well as 8-bit characters that need to end up in special kinds +of output (for instance ).

We implement these manipulations as filters. One can run multiple filters over a string.
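Collapsing means that decomposed input such as e followed by U+0301 COMBINING ACUTE ACCENT comes out as the single character é. A self contained illustration of the idea, using a hypothetical two entry grapheme table instead of the real characters.graphemes data and leaving out the space and token collecting optimizations of utffilters.collapse:

    -- hypothetical sample data: first char -> second char -> collapsed char (utf-8 strings)
    local graphemes = {
        ["e"] = { ["\204\129"] = "\195\169" }, -- e + U+0301 -> é (U+00E9)
        ["a"] = { ["\204\129"] = "\195\161" }, -- a + U+0301 -> á (U+00E1)
    }

    local function collapse(str)
        local result, first = { }, nil
        for char in string.gmatch(str,"[\1-\127\194-\244][\128-\191]*") do -- utf-8 characters
            local cgf = first and graphemes[first]
            if cgf and cgf[char] then
                first = cgf[char]           -- merge the pair
            else
                result[#result+1] = first   -- flush the pending character
                first = char
            end
        end
        result[#result+1] = first
        return table.concat(result)
    end

    print(collapse("caf" .. "e" .. "\204\129")) -- café, with a single U+00E9 at the end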

@@ -26,9 +31,6 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns local charfromnumber = characters.fromnumber --- todo: trackers --- graphemes: basic symbols - characters = characters or { } local characters = characters @@ -53,8 +55,8 @@ local utffilters = characters.filters.utf -- is characters.combined cached? --[[ldx-- -

It only makes sense to collapse at runtime, since we don't expect -source code to depend on collapsing.

+

It only makes sense to collapse at runtime, since we don't expect source code +to depend on collapsing.

--ldx]]-- -- for the moment, will be entries in char-def.lua @@ -164,27 +166,21 @@ function utffilters.addgrapheme(result,first,second) -- can be U+ 0x string or u end --[[ldx-- -

In order to deal with 8-bit output, we need to find a way to -go from UTF to 8-bit. This is handled in the -LuaTeX engine itself.

- -

This leaves us problems with characters that are specific to -TeX like {}, $ and alike.

- -

We can remap some chars that tex input files are sensitive for to -a private area (while writing to a utility file) and revert then -to their original slot when we read in such a file. Instead of -reverting, we can (when we resolve characters to glyphs) map them -to their right glyph there.

- -

For this purpose we can use the private planes 0x0F0000 and -0x100000.

+

In order to deal with 8-bit output, we need to find a way to go from UTF to +8-bit. This is handled in the LuaTeX engine itself.

+ +

This leaves us problems with characters that are specific to TeX like +{}, $ and alike. We can remap some chars that tex input files +are sensitive for to a private area (while writing to a utility file) and revert then +to their original slot when we read in such a file. Instead of reverting, we can (when +we resolve characters to glyphs) map them to their right glyph there. For this purpose +we can use the private planes 0x0F0000 and 0x100000.

--ldx]]-- -local low = allocate({ }) -local high = allocate({ }) -local escapes = allocate({ }) -local special = "~#$%^&_{}\\|" +local low = allocate() +local high = allocate() +local escapes = allocate() +local special = "~#$%^&_{}\\|" -- "~#$%{}\\|" local private = { low = low, @@ -248,128 +244,21 @@ first snippet uses the relocated dollars.

The next variant has lazy token collecting, on a 140 page mk.tex this saves -about .25 seconds, which is understandable because we have no graphmes and +about .25 seconds, which is understandable because we have no graphemes and not collecting tokens is not only faster but also saves garbage collecting.

--ldx]]-- --- lpeg variant is not faster --- --- I might use the combined loop at some point for the filter --- some day. - --- function utffilters.collapse(str) -- not really tested (we could preallocate a table) --- if str and str ~= "" then --- local nstr = #str --- if nstr > 1 then --- if initialize then -- saves a call --- initialize() --- end --- local tokens, t, first, done, n = { }, 0, false, false, 0 --- for second in utfcharacters(str) do --- local dec = decomposed[second] --- if dec then --- if not done then --- if n > 0 then --- for s in utfcharacters(str) do --- if n == 1 then --- break --- else --- t = t + 1 --- tokens[t] = s --- n = n - 1 --- end --- end --- end --- done = true --- elseif first then --- t = t + 1 --- tokens[t] = first --- end --- t = t + 1 --- tokens[t] = dec --- first = false --- elseif done then --- local crs = high[second] --- if crs then --- if first then --- t = t + 1 --- tokens[t] = first --- end --- first = crs --- else --- local cgf = graphemes[first] --- if cgf and cgf[second] then --- first = cgf[second] --- elseif first then --- t = t + 1 --- tokens[t] = first --- first = second --- else --- first = second --- end --- end --- else --- local crs = high[second] --- if crs then --- for s in utfcharacters(str) do --- if n == 1 then --- break --- else --- t = t + 1 --- tokens[t] = s --- n = n - 1 --- end --- end --- if first then --- t = t + 1 --- tokens[t] = first --- end --- first = crs --- done = true --- else --- local cgf = graphemes[first] --- if cgf and cgf[second] then --- for s in utfcharacters(str) do --- if n == 1 then --- break --- else --- t = t + 1 --- tokens[t] = s --- n = n - 1 --- end --- end --- first = cgf[second] --- done = true --- else --- first = second --- n = n + 1 --- end --- end --- end --- end --- if done then --- if first then --- t = t + 1 --- tokens[t] = first --- end --- return concat(tokens) -- seldom called --- end --- elseif nstr > 0 then --- return high[str] or str --- end --- end --- return str --- end - local skippable = table.tohash { "mkiv", "mkvi" } local filesuffix = file.suffix --- we could reuse tokens but it's seldom populated anyway - -function utffilters.collapse(str,filename) -- not really tested (we could preallocate a table) +function utffilters.collapse(str,filename) -- we can make high a seperate pass (never needed with collapse) if skippable[filesuffix(filename)] then return str + -- elseif find(filename,"^virtual://") then + -- return str + -- else + -- -- print("\n"..filename) end if str and str ~= "" then local nstr = #str @@ -380,44 +269,60 @@ function utffilters.collapse(str,filename) -- not really tested (we could preall local tokens, t, first, done, n = { }, 0, false, false, 0 for second in utfcharacters(str) do if done then - local crs = high[second] - if crs then - if first then - t = t + 1 - tokens[t] = first - end - first = crs - else - local cgf = graphemes[first] - if cgf and cgf[second] then - first = cgf[second] - elseif first then + if first then + if second == " " then t = t + 1 tokens[t] = first first = second else - first = second + -- local crs = high[second] + -- if crs then + -- t = t + 1 + -- tokens[t] = first + -- first = crs + -- else + local cgf = graphemes[first] + if cgf and cgf[second] then + first = cgf[second] + else + t = t + 1 + tokens[t] = first + first = second + end + -- end end + elseif second == " " then + first = second + else + -- local crs = high[second] + -- if crs then + -- first = crs + -- else + first = second + -- end end + elseif second == " " then + 
first = nil + n = n + 1 else - local crs = high[second] - if crs then - for s in utfcharacters(str) do - if n == 1 then - break - else - t = t + 1 - tokens[t] = s - n = n - 1 - end - end - if first then - t = t + 1 - tokens[t] = first - end - first = crs - done = true - else + -- local crs = high[second] + -- if crs then + -- for s in utfcharacters(str) do + -- if n == 1 then + -- break + -- else + -- t = t + 1 + -- tokens[t] = s + -- n = n - 1 + -- end + -- end + -- if first then + -- t = t + 1 + -- tokens[t] = first + -- end + -- first = crs + -- done = true + -- else local cgf = graphemes[first] if cgf and cgf[second] then for s in utfcharacters(str) do @@ -435,7 +340,7 @@ function utffilters.collapse(str,filename) -- not really tested (we could preall first = second n = n + 1 end - end + -- end end end if done then @@ -520,34 +425,3 @@ if sequencers then end) end - ---[[ldx-- -

Next we implement some commands that are used in the user interface.

---ldx]]-- - --- commands = commands or { } --- --- function commands.uchar(first,second) --- context(utfchar(first*256+second)) --- end - ---[[ldx-- -

A few helpers (used to be luat-uni).

---ldx]]-- - --- obsolete: --- --- function utf.split(str) --- local t, n = { }, 0 --- for snippet in utfcharacters(str) do --- n = n + 1 --- t[n+1] = snippet --- end --- return t --- end --- --- function utf.each(str,fnc) --- for snippet in utfcharacters(str) do --- fnc(snippet) --- end --- end diff --git a/tex/context/base/chem-str.lua b/tex/context/base/chem-str.lua index dfcf0a3e1..347363345 100644 --- a/tex/context/base/chem-str.lua +++ b/tex/context/base/chem-str.lua @@ -37,24 +37,24 @@ local concat, insert, remove, unique, sorted = table.concat, table.insert, table local processor_tostring = typesetters and typesetters.processors.tostring local settings_to_array = utilities.parsers.settings_to_array local settings_to_array_with_repeat = utilities.parsers.settings_to_array_with_repeat -local formatters = string.formatters local lpegmatch = lpeg.match local P, R, S, C, Cs, Ct, Cc, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cmt -local variables = interfaces and interfaces.variables -local context = context -local formatters = string.formatters -local texcount = tex.count - -local v_default = variables.default -local v_small = variables.small -local v_medium = variables.medium -local v_big = variables.big -local v_normal = variables.normal -local v_fit = variables.fit -local v_on = variables.on -local v_none = variables.none +local variables = interfaces and interfaces.variables +local commands = commands +local context = context +local formatters = string.formatters +local texgetcount = tex.getcount + +local v_default = variables.default +local v_small = variables.small +local v_medium = variables.medium +local v_big = variables.big +local v_normal = variables.normal +local v_fit = variables.fit +local v_on = variables.on +local v_none = variables.none local mpnamedcolor = attributes.colors.mpnamedcolor local topoints = number.topoints @@ -716,7 +716,7 @@ function chemistry.start(settings) width, left, right, sp_width = calculated(width, left, right,factor,unit,scale) height, bottom, top, sp_height = calculated(height,bottom,top, factor,unit,scale) -- - if width ~= "true" and height ~= "true" and texcount["@@trialtypesetting"] ~= 0 then + if width ~= "true" and height ~= "true" and texgetcount("@@trialtypesetting") ~= 0 then if trace_structure then report_chemistry("skipping trial run") end diff --git a/tex/context/base/chem-str.mkiv b/tex/context/base/chem-str.mkiv index c28ea21d9..d9ec1842b 100644 --- a/tex/context/base/chem-str.mkiv +++ b/tex/context/base/chem-str.mkiv @@ -317,33 +317,50 @@ % special macros (probably needs some more work) +\let\chem_box_normal_yes\hbox +\let\chem_box_visual_yes\hbox +\let\chem_box_visual_nop\relax + +\installtextracker + {chemistry.boxes} + {\let\chem_box_visual_yes\ruledhbox \let\chem_box_visual_nop\ruledhbox} + {\let\chem_box_visual_yes\hbox \let\chem_box_visual_nop\relax } + \def\chem_top_construct#1#2#3#4% - {\begingroup - \setbox0\hbox{\setstrut\strut#3}% - \setbox2\hbox{\setstrut\strut\molecule{#4}}% - \setbox0\hbox{\raise\dimexpr\dp0+\ht2\relax\hbox to \wd2{#1\box0#2}}% - % no: \smashbox0 - \hbox{\box0\box2}% - \endgroup}% + {\hbox\bgroup + \setstrut + \setbox\scratchboxone\chem_box_visual_yes{\strut#3}% + \setbox\scratchboxtwo\chem_box_visual_yes{\strut\molecule{#4}}% + \setbox\scratchboxone\chem_box_normal_yes{\raise\dimexpr\dp\scratchboxone+\ht\scratchboxtwo\relax\hbox to \wd\scratchboxtwo{#1\box\scratchboxone#2}}% + \smashbox\scratchboxone + \box\scratchboxone + \box\scratchboxtwo + \egroup} 
\def\chem_bottom_construct#1#2#3#4% - {\begingroup - \setbox0\hbox{\setstrut\strut#3}% - \setbox2\hbox{\setstrut\strut#4}% - \setbox0\hbox{\lower\dimexpr\dp2+\ht0\relax\hbox to \wd2{#1\box0#2}}% - % no: \smashbox0 - \hbox{\box0\box2}% - \endgroup}% - -\unexpanded\def\chemicalleft#1#2% - {\begingroup - \hbox{\llap{\setstrut\strut#1}\setstrut\strut#2}% - \endgroup}% - -\unexpanded\def\chemicalright#1#2% - {\begingroup - \hbox{\setstrut\strut#2\rlap{\setstrut\strut#1}}% - \endgroup}% + {\hbox\bgroup + \setstrut + \setbox\scratchboxone\chem_box_visual_yes{\strut#3}% + \setbox\scratchboxtwo\chem_box_visual_yes{\strut\molecule{#4}}% + \setbox\scratchboxone\chem_box_normal_yes{\lower\dimexpr\dp\scratchboxtwo+\ht\scratchboxone\relax\hbox to \wd\scratchboxtwo{#1\box\scratchboxone#2}}% + \smashbox\scratchboxone + \box\scratchboxone + \box\scratchboxtwo + \egroup} + +\unexpanded\def\chemicalleft#1#2% redundant boxes thanks to visual + {\hbox\bgroup + \setstrut + \llap{\chem_box_visual_nop{\strut#1}}% + \chem_box_visual_nop{\strut#2}% + \egroup} + +\unexpanded\def\chemicalright#1#2% redundant boxes thanks to visual + {\hbox\bgroup + \setstrut + \chem_box_visual_yes{\strut#2}% + \rlap{\chem_box_visual_nop{\strut#1}}% + \egroup} \unexpanded\def\chemicaltop {\chem_top_construct \hss \hss } \unexpanded\def\chemicallefttop {\chem_top_construct \relax \hss } @@ -372,11 +389,16 @@ \expandafter\chem_aligned_text_text \fi} +\let\chemicaltighttext\relax % maybe smaller strut + \def\chem_aligned_text_text#1#2#3% {\dontleavehmode \begingroup \usechemicalstyleandcolor\c!style\c!color - \hbox to \fontcharwd\font`C{\setstrut\strut#1\molecule{#3}#2}% + \chem_box_visual_yes to \fontcharwd\font`C\bgroup + \setstrut\strut + #1\molecule{#3}#2% + \egroup \endgroup} \def\chem_aligned_text_math#1#2#3% @@ -384,7 +406,10 @@ \begingroup \scratchcounter\normalmathstyle \usechemicalstyleandcolor\c!style\c!color - \hbox to \fontcharwd\font`C{\setstrut\strut#1\mathematics{\tf\triggermathstyle\scratchcounter\molecule{#3}}#2}% + \chem_box_visual_yes to \fontcharwd\font`C\bgroup + \setstrut\strut + #1\mathematics{\tf\triggermathstyle\scratchcounter\molecule{#3}}#2% + \egroup \endgroup} \unexpanded\def\chemicalcentered {\chemicalalignedtext\hss \hss } @@ -582,6 +607,9 @@ \definechemicalsymbol[d:gives] [\rightarrowfill] % \chem_arrow_construct\xrightarrow \definechemicalsymbol[d:equilibrium] [\rightoverleftarrowfill] % \chem_arrow_construct\xrightoverleftarrow \definechemicalsymbol[d:mesomeric] [\leftarrowfill] % \chem_arrow_construct\xleftrightarrow +\definechemicalsymbol[d:single] [\chemicalbondrule] +\definechemicalsymbol[d:double] [\hbox{\lower.5ex\chemicalbondrule\hskip-1em\raise.5ex\chemicalbondrule}] +\definechemicalsymbol[d:triple] [\hbox{\chemicalbondrule\hskip-1em\lower.5ex\chemicalbondrule\hskip-1em\raise.5ex\chemicalbondrule}] \definechemicalsymbol[d:opencomplex] [\mathematics{\Bigg[}] % not yet ok \definechemicalsymbol[d:closecomplex][\mathematics{\Bigg]}] % not yet ok @@ -591,6 +619,9 @@ \definechemicalsymbol[d:GIVES] [{\chemicalsymbol[d:gives]}] \definechemicalsymbol[d:EQUILIBRIUM] [{\chemicalsymbol[d:equilibrium]}] \definechemicalsymbol[d:MESOMERIC] [{\chemicalsymbol[d:mesomeric]}] +\definechemicalsymbol[d:SINGLE] [{\chemicalsymbol[d:single]}] +\definechemicalsymbol[d:DOUBLE] [{\chemicalsymbol[d:double]}] +\definechemicalsymbol[d:TRIPLE] [{\chemicalsymbol[d:triple]}] \definechemicalsymbol[d:OPENCOMPLEX] [{\chemicalsymbol[d:opencomplex]}] \definechemicalsymbol[d:CLOSECOMPLEX][{\chemicalsymbol[d:closecomplex]}] diff --git 
a/tex/context/base/cldf-bas.lua b/tex/context/base/cldf-bas.lua index 6adeb2272..b982fc364 100644 --- a/tex/context/base/cldf-bas.lua +++ b/tex/context/base/cldf-bas.lua @@ -38,7 +38,8 @@ local new_rule = nodepool.rule local new_glyph = nodepool.glyph local current_font = font.current -local texcount = tex.count +local texgetcount = tex.getcount +local texsetcount = tex.setcount function context.char(k) -- used as escape too, so don't change to utf if type(k) == "table" then @@ -163,9 +164,9 @@ context.endhbox = context.egroup local function allocate(name,what,cmd) local a = format("c_syst_last_allocated_%s",what) - local n = texcount[a] + 1 - if n <= texcount.c_syst_max_allocated_register then - texcount[a] = n + local n = texgetcount(a) + 1 + if n <= texgetcount("c_syst_max_allocated_register") then + texsetcount(a,n) end context("\\global\\expandafter\\%sdef\\csname %s\\endcsname %s\\relax",cmd or what,name,n) return n diff --git a/tex/context/base/cldf-com.lua b/tex/context/base/cldf-com.lua index fa0dbed3e..bd357b712 100644 --- a/tex/context/base/cldf-com.lua +++ b/tex/context/base/cldf-com.lua @@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['cldf-com'] = { license = "see context related readme files" } +-- todo ... needs more thinking ... a special table toolkit + local tostring = tostring local context = context local generics = context.generics -- needs documentation diff --git a/tex/context/base/cldf-ini.lua b/tex/context/base/cldf-ini.lua index 4a7d9f025..b29db4090 100644 --- a/tex/context/base/cldf-ini.lua +++ b/tex/context/base/cldf-ini.lua @@ -28,17 +28,19 @@ local tex = tex context = context or { } local context = context -local format, gsub, validstring = string.format, string.gsub, string.valid +local format, gsub, validstring, stripstring = string.format, string.gsub, string.valid, string.strip local next, type, tostring, tonumber, setmetatable, unpack, select = next, type, tostring, tonumber, setmetatable, unpack, select local insert, remove, concat = table.insert, table.remove, table.concat -local lpegmatch, lpegC, lpegS, lpegP, lpegCc, patterns = lpeg.match, lpeg.C, lpeg.S, lpeg.P, lpeg.Cc, lpeg.patterns +local lpegmatch, lpegC, lpegS, lpegP, lpegV, lpegCc, lpegCs, patterns = lpeg.match, lpeg.C, lpeg.S, lpeg.P, lpeg.V, lpeg.Cc, lpeg.Cs, lpeg.patterns local formatters = string.formatters -- using formatteds is slower in this case +local loaddata = io.loaddata + local texsprint = tex.sprint local textprint = tex.tprint local texprint = tex.print local texwrite = tex.write -local texcount = tex.count +local texgetcount = tex.getcount local isnode = node.is_node -- after 0.65 just node.type local writenode = node.write @@ -86,13 +88,13 @@ local function _flush_f_(n) else local tn = type(sn) if tn == "function" then - if not sn() and texcount["@@trialtypesetting"] == 0 then -- @@trialtypesetting is private! + if not sn() and texgetcount("@@trialtypesetting") == 0 then -- @@trialtypesetting is private! _stack_f_[n] = nil else -- keep, beware, that way the stack can grow end else - if texcount["@@trialtypesetting"] == 0 then -- @@trialtypesetting is private! + if texgetcount("@@trialtypesetting") == 0 then -- @@trialtypesetting is private! writenode(sn) _stack_f_[n] = nil else @@ -107,7 +109,7 @@ local function _flush_n_(n) local sn = _stack_n_[n] if not sn then report_cld("data with id %a cannot be found on stack",n) - elseif texcount["@@trialtypesetting"] == 0 then -- @@trialtypesetting is private! 
+ elseif texgetcount("@@trialtypesetting") == 0 then -- @@trialtypesetting is private! writenode(sn) _stack_n_[n] = nil else @@ -799,6 +801,10 @@ function context.runfile(filename) end end +function context.loadfile(filename) + context(stripstring(loaddata(resolvers.findfile(filename)))) +end + -- some functions function context.direct(first,...) @@ -809,7 +815,7 @@ end -- context.delayed (todo: lines) -local delayed = { } context.delayed = delayed -- maybe also store them +local delayed = { } context.delayed = delayed -- creates function (maybe also store them) local function indexer(parent,k) local f = function(...) @@ -871,7 +877,7 @@ setmetatable(delayed, { __index = indexer, __call = caller } ) -- context.nested (todo: lines) -local nested = { } context.nested = nested +local nested = { } context.nested = nested -- creates strings local function indexer(parent,k) local f = function(...) @@ -903,28 +909,36 @@ setmetatable(nested, { __index = indexer, __call = caller } ) -- verbatim -local verbatim = { } context.verbatim = verbatim +function context.newindexer(catcodes) + local handler = { } -local function indexer(parent,k) - local command = context[k] - local f = function(...) + local function indexer(parent,k) + local command = context[k] + local f = function(...) + local savedcatcodes = contentcatcodes + contentcatcodes = catcodes + command(...) + contentcatcodes = savedcatcodes + end + parent[k] = f + return f + end + + local function caller(parent,...) local savedcatcodes = contentcatcodes - contentcatcodes = vrbcatcodes - command(...) + contentcatcodes = catcodes + defaultcaller(parent,...) contentcatcodes = savedcatcodes end - parent[k] = f - return f -end -local function caller(parent,...) - local savedcatcodes = contentcatcodes - contentcatcodes = vrbcatcodes - defaultcaller(parent,...) - contentcatcodes = savedcatcodes + setmetatable(handler, { __index = indexer, __call = caller } ) + + return handler end -setmetatable(verbatim, { __index = indexer, __call = caller } ) +context.verbatim = context.newindexer(vrbcatcodes) +context.puretext = context.newindexer(txtcatcodes) +-------.protected = context.newindexer(prtcatcodes) -- formatted @@ -1064,3 +1078,89 @@ setmetatable(delayed, { __index = indexer, __call = caller } ) function context.concat(...) 
context(concat(...)) end + +-- templates + +local single = lpegP("%") +local double = lpegP("%%") +local lquoted = lpegP("%[") +local rquoted = lpegP("]%") + +local start = [[ +local texescape = lpeg.patterns.texescape +local lpegmatch = lpeg.match +return function(variables) return +]] + +local stop = [[ +end +]] + +local replacer = lpegP { "parser", + parser = lpegCs(lpegCc(start) * lpegV("step") * (lpegCc("..") * lpegV("step"))^0 * lpegCc(stop)), + unquoted = (lquoted/'') * ((lpegC((1-rquoted)^1)) / "lpegmatch(texescape,variables['%0'] or '')" ) * (rquoted/''), + escape = double/'%%', + key = (single/'') * ((lpegC((1-single)^1)) / "(variables['%0'] or '')" ) * (single/''), + step = lpegV("unquoted") + + lpegV("escape") + + lpegV("key") + + lpegCc("\n[===[") * (1 - lpegV("unquoted") - lpegV("escape") - lpegV("key"))^1 * lpegCc("]===]\n"), +} + +local templates = { } + +local function indexer(parent,k) + local v = lpegmatch(replacer,k) + if not v then + v = "error: no valid template (1)" + else + v = loadstring(v) + if type(v) ~= "function" then + v = "error: no valid template (2)" + else + v = v() + if not v then + v = "error: no valid template (3)" + end + end + end + if type(v) == "function" then + local f = function(first,second) + if second then + pushcatcodes(first) + flushlines(v(second)) + popcatcodes() + else + flushlines(v(first)) + end + end + parent[k] = f + return f + else + return function() + flush(v) + end + end + +end + +local function caller(parent,k,...) + return parent[k](...) +end + +setmetatable(templates, { __index = indexer, __call = caller } ) + +function context.template(template,...) + context(templates[template](...)) +end + +context.templates = templates + +-- The above is a bit over the top as we could also stick to a simple context.replace +-- which is fast enough anyway, but the above fits in nicer, also with the catcodes. 
+-- +-- local replace = utilities.templates.replace +-- +-- function context.template(template,variables) +-- context(replace(template,variables)) +-- end diff --git a/tex/context/base/cldf-ini.mkiv b/tex/context/base/cldf-ini.mkiv index 77948e058..258409d7a 100644 --- a/tex/context/base/cldf-ini.mkiv +++ b/tex/context/base/cldf-ini.mkiv @@ -37,6 +37,7 @@ \def\cldn#1{\directlua{_cldn_(#1)}} % global (nodes) \normalprotected\def\cldprocessfile#1{\directlua{context.runfile("#1")}} + \def\cldloadfile #1{\directlua{context.loadfile("#1")}} \def\cldcontext #1{\directlua{context(#1)}} \def\cldcommand #1{\directlua{context.#1}} % \def\cldverbatim #1{\directlua{context.verbatim.#1}} % maybe make verbatim global diff --git a/tex/context/base/cldf-int.lua b/tex/context/base/cldf-int.lua index 6cbfd666f..2743e4924 100644 --- a/tex/context/base/cldf-int.lua +++ b/tex/context/base/cldf-int.lua @@ -19,9 +19,10 @@ local catcodenumbers = catcodes.numbers local ctxcatcodes = catcodenumbers.ctxcatcodes local vrbcatcodes = catcodenumbers.vrbcatcodes +local context = context local contextsprint = context.sprint -local trace_define = false trackers.register("context.define", function(v) trace_define = v end) +local trace_define = false trackers.register("context.define", function(v) trace_define = v end) interfaces = interfaces or { } diff --git a/tex/context/base/cldf-prs.lua b/tex/context/base/cldf-prs.lua index 9fbdba0c8..160d30b19 100644 --- a/tex/context/base/cldf-prs.lua +++ b/tex/context/base/cldf-prs.lua @@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['cldf-bas'] = { license = "see context related readme files" } +-- used in chem-ini.lua + local lpegmatch, patterns = lpeg.match, lpeg.patterns local P, R, V, Cc, Cs = lpeg.P, lpeg.R, lpeg.V, lpeg.Cc, lpeg.Cs local format = string.format diff --git a/tex/context/base/colo-ini.lua b/tex/context/base/colo-ini.lua index b9e8eb203..535ee71b8 100644 --- a/tex/context/base/colo-ini.lua +++ b/tex/context/base/colo-ini.lua @@ -287,6 +287,7 @@ local left = P("(") local right = P(")") local comma = P(",") local mixnumber = lpegpatterns.number / tonumber + + P("-") / function() return -1 end local mixname = C(P(1-left-right-comma)^1) ----- mixcolor = Cc("M") * mixnumber * left * mixname * (comma * mixname)^-1 * right * P(-1) local mixcolor = Cc("M") * mixnumber * left * mixname * (comma * mixname)^0 * right * P(-1) -- one is also ok @@ -509,7 +510,23 @@ colors.mpcolor = mpcolor colors.mpnamedcolor = mpnamedcolor colors.mpoptions = mpoptions -function colors.formatcolor(ca,separator) +-- local function formatcolor(ca,separator) +-- local cv = colorvalues[ca] +-- if cv then +-- local c, cn, f, t, model = { }, 0, 13, 13, cv[1] +-- if model == 2 then +-- return c[2] +-- elseif model == 3 then +-- return concat(c,separator,3,5) +-- elseif model == 4 then +-- return concat(c,separator,6,9) +-- end +-- else +-- return 0 +-- end +-- end + +local function formatcolor(ca,separator) local cv = colorvalues[ca] if cv then local c, cn, f, t, model = { }, 0, 13, 13, cv[1] @@ -530,41 +547,64 @@ function colors.formatcolor(ca,separator) end end -function colors.formatgray(ca,separator) +local function formatgray(ca,separator) local cv = colorvalues[ca] return format("%0.3f",(cv and cv[2]) or 0) end -function colors.colorcomponents(ca) -- return list +colors.formatcolor = formatcolor +colors.formatgray = formatgray + +local f_gray = formatters["s=%1.3f"] +local f_rgb = formatters["r=%1.3f%sg=%1.3f%sb=%1.3f"] +local f_cmyk = 
formatters["c=%1.3f%sm=%1.3f%sy=%1.3f%sk=%1.3f"] +local f_spot_name = formatters["p=%s"] +local f_spot_value = formatters["p=%1.3f"] +local f_transparency = formatters["a=%1.3f%st=%1.3f"] +local f_both = formatters["%s%s%s"] + +local function colorcomponents(ca,separator) -- return list local cv = colorvalues[ca] if cv then local model = cv[1] if model == 2 then - return format("s=%1.3f",cv[2]) + return f_gray(cv[2]) elseif model == 3 then - return format("r=%1.3f g=%1.3f b=%1.3f",cv[3],cv[4],cv[5]) + return f_rgb(cv[3],separator or " ",cv[4],separator or " ",cv[5]) elseif model == 4 then - return format("c=%1.3f m=%1.3f y=%1.3f k=%1.3f",cv[6],cv[7],cv[8],cv[9]) + return f_cmyk(cv[6],separator or " ",cv[7],separator or " ",cv[8],separator or " ",cv[9]) elseif type(cv[13]) == "string" then - return format("p=%s",cv[13]) + return f_spot_name(cv[13]) else - return format("p=%1.3f",cv[13]) + return f_spot_value(cv[13]) end else return "" end end -function colors.transparencycomponents(ta) +local function transparencycomponents(ta,separator) local tv = transparencyvalues[ta] if tv then - return format("a=%1.3f t=%1.3f",tv[1],tv[2]) + return f_transparency(tv[1],separator or " ",tv[2]) else return "" end end -function colors.spotcolorname(ca,default) +local function processcolorcomponents(ca,separator) + local cs = colorcomponents(ca,separator) + local ts = transparencycomponents(ca,separator) + if cs == "" then + return ts + elseif ts == "" then + return cs + else + return f_both(cs,separator or " ",ts) + end +end + +local function spotcolorname(ca,default) local cv, v = colorvalues[ca], "unknown" if cv and cv[1] == 5 then v = cv[10] @@ -572,7 +612,7 @@ function colors.spotcolorname(ca,default) return tostring(v) end -function colors.spotcolorparent(ca,default) +local function spotcolorparent(ca,default) local cv, v = colorvalues[ca], "unknown" if cv and cv[1] == 5 then v = cv[12] @@ -583,7 +623,7 @@ function colors.spotcolorparent(ca,default) return tostring(v) end -function colors.spotcolorvalue(ca,default) +local function spotcolorvalue(ca,default) local cv, v = colorvalues[ca], 0 if cv and cv[1] == 5 then v = cv[13] @@ -591,13 +631,20 @@ function colors.spotcolorvalue(ca,default) return tostring(v) end +colors.colorcomponents = colorcomponents +colors.transparencycomponents = transparencycomponents +colors.processcolorcomponents = processcolorcomponents +colors.spotcolorname = spotcolorname +colors.spotcolorparent = spotcolorparent +colors.spotcolorvalue = spotcolorvalue + -- experiment (a bit of a hack, as we need to get the attribute number) local min = math.min -- a[b,c] -> b+a*(c-b) -local function f(one,two,i,fraction) +local function inbetween(one,two,i,fraction) local o, t = one[i], two[i] local otf = o + fraction * (t - o) if otf > 1 then @@ -606,6 +653,22 @@ local function f(one,two,i,fraction) return otf end +local function justone(one,fraction,i) + local otf = fraction * one[i] + if otf > 1 then + otf = 1 + end + return otf +end + +local function complement(one,fraction,i) + local otf = - fraction * (1 - one[i]) + if otf > 1 then + otf = 1 + end + return otf +end + function colors.defineintermediatecolor(name,fraction,c_one,c_two,a_one,a_two,specs,global,freeze) fraction = tonumber(fraction) or 1 local one, two = colorvalues[c_one], colorvalues[c_two] @@ -617,37 +680,38 @@ function colors.defineintermediatecolor(name,fraction,c_one,c_two,a_one,a_two,sp -- problems with weighted gray conversions and work with original values local ca if csone == 2 then - ca = 
register_color(name,'gray',f(one,two,2,fraction)) + ca = register_color(name,'gray',inbetween(one,two,2,fraction)) elseif csone == 3 then - ca = register_color(name,'rgb', f(one,two,3,fraction), - f(one,two,4,fraction), - f(one,two,5,fraction)) + ca = register_color(name,'rgb', inbetween(one,two,3,fraction), + inbetween(one,two,4,fraction), + inbetween(one,two,5,fraction)) elseif csone == 4 then - ca = register_color(name,'cmyk',f(one,two,6,fraction), - f(one,two,7,fraction), - f(one,two,8,fraction), - f(one,two,9,fraction)) + ca = register_color(name,'cmyk',inbetween(one,two,6,fraction), + inbetween(one,two,7,fraction), + inbetween(one,two,8,fraction), + inbetween(one,two,9,fraction)) else - ca = register_color(name,'gray',f(one,two,2,fraction)) + ca = register_color(name,'gray',inbetween(one,two,2,fraction)) end definecolor(name,ca,global,freeze) -- end else + local inbetween = fraction < 0 and complement or justone local csone = one[1] local ca if csone == 2 then - ca = register_color(name,'gray',fraction*one[2]) + ca = register_color(name,'gray',inbetween(one,fraction,2)) elseif csone == 3 then - ca = register_color(name,'rgb', fraction*one[3], - fraction*one[4], - fraction*one[5]) + ca = register_color(name,'rgb', inbetween(one,fraction,3), + inbetween(one,fraction,4), + inbetween(one,fraction,5)) elseif csone == 4 then - ca = register_color(name,'cmyk',fraction*one[6], - fraction*one[7], - fraction*one[8], - fraction*one[9]) + ca = register_color(name,'cmyk',inbetween(one,fraction,6), + inbetween(one,fraction,7), + inbetween(one,fraction,8), + inbetween(one,fraction,9)) else - ca = register_color(name,'gray',fraction*one[2]) + ca = register_color(name,'gray',inbetween(one,fraction,2)) end definecolor(name,ca,global,freeze) end @@ -752,13 +816,14 @@ commands.definemultitonecolor = colors.definemultitonecolor commands.definetransparency = colors.definetransparency commands.defineintermediatecolor = colors.defineintermediatecolor -function commands.spotcolorname (a) context(colors.spotcolorname (a)) end -function commands.spotcolorparent (a) context(colors.spotcolorparent (a)) end -function commands.spotcolorvalue (a) context(colors.spotcolorvalue (a)) end -function commands.colorcomponents (a) context(colors.colorcomponents (a)) end -function commands.transparencycomponents(a) context(colors.transparencycomponents(a)) end -function commands.formatcolor (...) context(colors.formatcolor (...)) end -function commands.formatgray (...) context(colors.formatgray (...)) end +function commands.spotcolorname (a) context(spotcolorname (a)) end +function commands.spotcolorparent (a) context(spotcolorparent (a)) end +function commands.spotcolorvalue (a) context(spotcolorvalue (a)) end +function commands.colorcomponents (a,s) context(colorcomponents (a,s)) end +function commands.transparencycomponents(a,s) context(transparencycomponents(a,s)) end +function commands.processcolorcomponents(a,s) context(processcolorcomponents(a,s)) end +function commands.formatcolor (...) context(formatcolor (...)) end +function commands.formatgray (...) 
context(formatgray (...)) end function commands.mpcolor(model,ca,ta,default) context(mpcolor(model,ca,ta,default)) diff --git a/tex/context/base/colo-ini.mkiv b/tex/context/base/colo-ini.mkiv index 3a037691e..6aa51b218 100644 --- a/tex/context/base/colo-ini.mkiv +++ b/tex/context/base/colo-ini.mkiv @@ -70,7 +70,7 @@ %D {definecolor,defineglobalcolor,definenamedcolor,definespotcolor,definemultitonecolor, %D definetransparency} %D -%D \startbuffer +%D \starttyping %D \definecolor [blue] [c=1,m=.38,y=0,k=.64] % pantone pms 2965 uncoated m %D \definecolor [yellow] [c=0,m=.28,y=1,k=.06] % pantone pms 124 uncoated m %D @@ -79,6 +79,12 @@ %D %D \definemultitonecolor [pdftoolscolor] [blue=.12,yellow=.28] [c=.1,m=.1,y=.3,k=.1] %D +%D \defineprocesscolor[myred][r=.5] +%D \defineprocesscolor[myredish][red][a=1,t=.5] +%D +%D \blackrule[color=myred,width=\hsize,height=1cm] \par +%D \blackrule[color=myredish,width=\hsize,height=1cm] +%D %D \useexternalfigure[demofig][mill.png][object=no] %D %D \startcombination[4*1] @@ -87,12 +93,11 @@ %D {\externalfigure[demofig][color=blue-100]} {spot color} %D {\externalfigure[demofig][color=yellow-100]} {spot color} %D \stopcombination -%D \stopbuffer -%D -%D \getbuffer \typebuffer +%D \stoptyping \unexpanded\def\definecolor {\dodoubleargument\colo_basics_define} \unexpanded\def\defineglobalcolor {\dodoubleargument\colo_basics_define_global} +\unexpanded\def\defineprocesscolor {\dotripleargument\colo_basics_define_process} \unexpanded\def\definenamedcolor {\dodoubleargument\colo_basics_define_named} \unexpanded\def\definespotcolor {\dotripleargument\colo_basics_define_spot} \unexpanded\def\definemultitonecolor{\doquadrupleempty\colo_basics_define_multitone} @@ -116,6 +121,9 @@ % \testfeatureonce{100000}{\color[red]{}} % 1.046 => 0.541 +\let\g_color\empty +\let\g_style\empty + \unexpanded\def\switchtocolor[#1]{\csname#1\endcsname} \unexpanded\def\color [#1]{\bgroup @@ -597,7 +605,7 @@ \def\v_colo_dummy_name{d_u_m_m_y} -\letvalue{\??colorsetter-}\empty % used? +\letvalue{\??colorsetter -}\empty % used? \letvalue{\??transparencysetter-}\empty % used? 
% new: expandable (see tbl) @@ -688,7 +696,7 @@ {\ctxcommand{defineprocesscolor("#1","#2",true,\v_colo_freeze_state)}% \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}} -\def\colo_basics_define_named[#1][#2]% +\def\colo_basics_define_named[#1][#2]% currently same as define {\ctxcommand{defineprocesscolor("#1","#2",false,\v_colo_freeze_state)}% \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}} @@ -700,6 +708,21 @@ {\ctxcommand{defineprocesscolor("\v_colo_dummy_name","#1",false,false)}% \colo_helpers_activate_dummy} +\def\colo_basics_define_process + {\ifthirdargument + \expandafter\colo_basics_define_process_yes + \else + \expandafter\colo_basics_define_process_nop + \fi} + +\def\colo_basics_define_process_yes[#1][#2][#3]% + {\ctxcommand{defineprocesscolor("#1","\processcolorcomponents{#2},#3",false,\v_colo_freeze_state)}% + \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}} + +\def\colo_basics_define_process_nop[#1][#2][#3]% + {\ctxcommand{defineprocesscolor("#1","#2",false,\v_colo_freeze_state)}% + \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}} + % Spotcolors used setxvalue but that messes up currentcolor % and probably no global is needed either but they are global % at the lua end (true argument) so we keep that if only because @@ -728,6 +751,8 @@ % % \definecolor[mycolorc][.5(blue,red)] % \definecolor[mycolord][.5(blue)] +% \definecolor[mycolord][-.5(blue,red)] % complement +% \definecolor[mycolord][-(blue)] % complement % % \enabledirectives[colors.pgf] % \definecolor[mycolorx][magenta!50!yellow] @@ -1018,6 +1043,7 @@ \def\colorcomponents #1{\ctxcommand{colorcomponents(\thecolorattribute{#1})}} \def\transparencycomponents #1{\ctxcommand{transparencycomponents(\thetransparencyattribute{#1})}} +\def\processcolorcomponents #1{\ctxcommand{processcolorcomponents(\thecolorattribute{#1},",")}} \def\colorvalue #1{\ctxcommand{formatcolor(\thecolorattribute{#1},"\colorformatseparator")}} \def\grayvalue #1{\ctxcommand{formatgray (\thecolorattribute{#1},"\colorformatseparator")}} diff --git a/tex/context/base/cont-log.mkiv b/tex/context/base/cont-log.mkiv index 5d4133143..99e08450b 100644 --- a/tex/context/base/cont-log.mkiv +++ b/tex/context/base/cont-log.mkiv @@ -126,11 +126,16 @@ \setMFPfont META\syst_logos_meta_hyphen FONT% \endgroup} +% \unexpanded\def\MetaPost +% {\dontleavehmode +% \begingroup +% \setMFPfont META\syst_logos_meta_hyphen POST% +% \endgroup} +% +% As decided on the ConText Meeting 2013 the logo has been simplified: + \unexpanded\def\MetaPost - {\dontleavehmode - \begingroup - \setMFPfont META\syst_logos_meta_hyphen POST% - \endgroup} + {MetaPost} \unexpanded\def\MetaFun {MetaFun} diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii index 544e5e537..f0385c065 100644 --- a/tex/context/base/cont-new.mkii +++ b/tex/context/base/cont-new.mkii @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2013.04.09 10:38} +\newcontextversion{2013.06.07 17:34} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index 7f241e357..b625a0ff3 100644 --- a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. 
-\newcontextversion{2013.05.28 00:36} +\newcontextversion{2013.10.20 07:09} %D This file is loaded at runtime, thereby providing an excellent place for %D hacks, patches, extensions and new features. @@ -27,16 +27,6 @@ \unexpanded\def\tightvbox{\dowithnextbox{\dp\nextbox\zeropoint\box\nextbox}\vbox} \unexpanded\def\tightvtop{\dowithnextbox{\ht\nextbox\zeropoint\box\nextbox}\vtop} -%D Maybe: - -% \startluacode -% function context.loadfile(filename) -% context(string.strip(io.loaddata(resolvers.findfile(filename)))) -% end -% \stopluacode -% -% \edef\tufte{\cldcommand{loadfile("tufte.tex")}} - %D Needs some work: \unexpanded\def\startgridcorrection diff --git a/tex/context/base/cont-new.tmp b/tex/context/base/cont-new.tmp deleted file mode 100644 index 5d4fcd60a..000000000 --- a/tex/context/base/cont-new.tmp +++ /dev/null @@ -1,83 +0,0 @@ -%D \module -%D [ file=cont-new, -%D version=1995.10.10, -%D title=\CONTEXT\ Miscellaneous Macros, -%D subtitle=New Macros, -%D author=Hans Hagen, -%D date=\currentdate, -%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. - -\newcontextversion{2013.05.28 00:34} - -%D This file is loaded at runtime, thereby providing an excellent place for -%D hacks, patches, extensions and new features. - -\unprotect - -% \writestatus\m!system{beware: some patches loaded from cont-new.mkiv} - -% \attribute152\zerocount : marks ... lots of sweeps so best early in list - -%D Maybe: - -\unexpanded\def\tightvbox{\dowithnextbox{\dp\nextbox\zeropoint\box\nextbox}\vbox} -\unexpanded\def\tightvtop{\dowithnextbox{\ht\nextbox\zeropoint\box\nextbox}\vtop} - -%D Maybe: - -% \startluacode -% function context.loadfile(filename) -% context(string.strip(io.loaddata(resolvers.findfile(filename)))) -% end -% \stopluacode -% -% \edef\tufte{\cldcommand{loadfile("tufte.tex")}} - -%D Needs some work: - -\unexpanded\def\startgridcorrection - {\dosingleempty\spac_grid_correction_start} - -\def\spac_grid_correction_start[#1]% - {\ifgridsnapping - \snaptogrid[#1]\vbox\bgroup - \else - \startbaselinecorrection - \fi} - -\unexpanded\def\stopgridcorrection - {\ifgridsnapping - \egroup - \else - \stopbaselinecorrection - \fi} - -\unexpanded\def\checkgridsnapping - {\lineskip\ifgridsnapping\zeropoint\else\normallineskip\fi} - -%D Probably obsolete: - -\unexpanded\def\startcolumnmakeup % don't change - {\bgroup - \getrawnoflines\textheight % raw as we cna have topskip - \setbox\scratchbox\vbox to \dimexpr\noflines\lineheight-\lineheight+\topskip\relax - \bgroup - \forgetall} - -\unexpanded\def\stopcolumnmakeup - {\egroup - \dp\scratchbox\zeropoint - \wd\scratchbox\textwidth - \box\scratchbox - \egroup - \page_otr_command_synchronize_hsize} - -%D Till we fixed all styles: - -\let\\=\crlf - -\protect \endinput diff --git a/tex/context/base/context-help.lmx b/tex/context/base/context-help.lmx index 140493915..cf4f73a61 100644 --- a/tex/context/base/context-help.lmx +++ b/tex/context/base/context-help.lmx @@ -24,44 +24,41 @@ @@ -75,13 +72,13 @@
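
For reference, the templates mechanism added to cldf-ini.lua earlier in this patch can be exercised roughly as follows; this is only a sketch, and the template text, variable names and values are invented for illustration rather than taken from the ConTeXt sources:

    -- %key% inserts a variable as-is, %[key]% additionally passes it through
    -- lpeg.patterns.texescape, and plain text in between is copied verbatim
    local banner = context.templates["file %[filename]% loaded %count% times"]
    banner { filename = "foo_bar.tex", count = 2 } -- expands and flushes the result to TeX
    -- the compiled template is cached on the table; the same call can also be written in one go:
    -- context.templates("file %[filename]% loaded %count% times", { filename = "foo_bar.tex", count = 2 })
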
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf index 1c70a39fc..0909c6799 100644 Binary files a/tex/context/base/context-version.pdf and b/tex/context/base/context-version.pdf differ diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii index 97afd33df..990c8bd11 100644 --- a/tex/context/base/context.mkii +++ b/tex/context/base/context.mkii @@ -20,7 +20,7 @@ %D your styles an modules. \edef\contextformat {\jobname} -\edef\contextversion{2013.04.09 10:38} +\edef\contextversion{2013.06.07 17:34} %D For those who want to use this: diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index eaa431122..5f3866345 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -3,7 +3,7 @@ %D version=2008.28.10, % 1995.10.10, %D title=\CONTEXT, %D subtitle=\CONTEXT\ Format Generation, -%D author=Hans Hagen, +%D author=Hans Hagen, % ɦɑns ɦɑˈχən %D date=\currentdate, %D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] %C @@ -25,8 +25,8 @@ %D up and the dependencies are more consistent. \edef\contextformat {\jobname} -\edef\contextversion{2013.05.28 00:36} -\edef\contextkind {current} +\edef\contextversion{2013.10.20 07:09} +\edef\contextkind {beta} %D For those who want to use this: @@ -91,6 +91,8 @@ \loadmarkfile{cldf-ini} +% \tracecatcodetables + % From here on we have \unexpanded being \normalprotected, as we already had % \unexpanded long before etex came around. @@ -162,7 +164,7 @@ \loadmarkfile{supp-ran} \loadmarkfile{supp-mat} -\loadmarkfile{spac-cha} +%loadmarkfile{spac-cha} % obsolete %loadmarkfile{supp-num} % obsolete \loadmarkfile{typo-ini} @@ -324,19 +326,6 @@ \loadmarkfile{strc-bkm} % bookmarks -\loadmarkfile{tabl-com} -\loadmarkfile{tabl-pln} - -\loadmarkfile{tabl-tab} % thrd-tab stripped and merged - -\loadmarkfile{tabl-tbl} -\loadmarkfile{tabl-ntb} -\loadmarkfile{tabl-nte} -\loadmarkfile{tabl-ltb} -\loadmarkfile{tabl-tsp} -\loadmkvifile{tabl-xtb} -\loadmarkfile{tabl-mis} - \loadmarkfile{java-ini} \loadmkvifile{scrn-fld} @@ -362,6 +351,22 @@ \loadmkvifile{font-col} \loadmkvifile{font-gds} \loadmkvifile{font-aux} +\loadmkvifile{font-sel} + +\loadmarkfile{typo-tal} + +\loadmarkfile{tabl-com} +\loadmarkfile{tabl-pln} + +\loadmarkfile{tabl-tab} % thrd-tab stripped and merged + +\loadmarkfile{tabl-tbl} +\loadmarkfile{tabl-ntb} +\loadmarkfile{tabl-nte} +\loadmarkfile{tabl-ltb} +\loadmarkfile{tabl-tsp} +\loadmkvifile{tabl-xtb} +\loadmarkfile{tabl-mis} \loadmarkfile{typo-lan} @@ -384,7 +389,8 @@ \loadmarkfile{typo-dig} \loadmarkfile{typo-rep} \loadmkvifile{typo-txt} -\loadmarkfile{typo-par} +\loadmarkfile{typo-drp} +\loadmarkfile{typo-fln} \loadmkvifile{type-ini} \loadmarkfile{type-set} @@ -403,6 +409,7 @@ \loadmarkfile{meta-fun} \loadmarkfile{meta-pag} \loadmarkfile{meta-grd} +\loadmarkfile{meta-fnt} \loadmarkfile{page-mrk} % depends on mp @@ -420,7 +427,7 @@ \loadmarkfile{math-ini} \loadmarkfile{math-pln} \loadmarkfile{math-for} -\loadmarkfile{math-def} +\loadmarkfile{math-def} % also saves some meanings \loadmarkfile{math-ali} %loadmarkfile{math-arr} \loadmkvifile{math-stc} @@ -430,6 +437,8 @@ \loadmarkfile{math-int} \loadmarkfile{math-del} \loadmarkfile{math-fen} +\loadmkvifile{math-acc} +\loadmkvifile{math-rad} \loadmarkfile{math-inl} \loadmarkfile{math-dis} %loadmarkfile{math-lan} diff --git a/tex/context/base/context.tmp b/tex/context/base/context.tmp deleted file mode 100644 index 7976b4641..000000000 --- a/tex/context/base/context.tmp +++ /dev/null @@ -1,513 +0,0 @@ -%D \module 
-%D [ file=context, -%D version=2008.28.10, % 1995.10.10, -%D title=\CONTEXT, -%D subtitle=\CONTEXT\ Format Generation, -%D author=Hans Hagen, -%D date=\currentdate, -%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. - -\catcode`\{=1 \catcode`\}=2 \catcode`\#=6 - -%D From the next string (which is set by the script that assembles the -%D distribution) later on we will calculate a number that can be used -%D by use modules to identify the feature level. Starting with version -%D 2004.8.30 the low level interface is english. Watch out and adapt -%D your styles an modules. - -% \everypar{\writestatus{!!!!}{some spurious input in line \the\inputlineno}\wait} - -%D The order of loading will change when all modules have been cleaned -%D up and the dependencies are more consistent. - -\edef\contextformat {\jobname} -\edef\contextversion{2013.05.28 00:34} -\edef\contextkind {beta} - -%D For those who want to use this: - -\let\fmtname \contextformat -\let\fmtversion\contextversion - -%D Loading: - -\edef\mksuffix {mkiv} -\edef\contextmark{MKIV} - -\ifx\normalinput\undefined \let\normalinput\input \fi - -\def\loadcorefile#1{\normalinput#1\relax} -\def\loadmarkfile#1{\normalinput#1.\mksuffix\relax} -\def\loadmkiifile#1{} -\def\loadmkivfile#1{\normalinput#1.mkiv\relax} -\def\loadmkvifile#1{\normalinput#1.mkvi\relax} - -%D First we load the system modules. These implement a lot of -%D manipulation macros. We start with setting up some basic \TEX\ -%D machinery. - -\loadmarkfile{syst-ini} - -%D Some checking (more primitives are now defined): - -\ifdefined\defaultinterface \else \def\defaultinterface {english} \fi -%ifdefined\messageinterface \else \let\messageinterface \defaultinterface \fi -\ifdefined\defaultlanguagetag \else \def\defaultlanguagetag{en} \fi - -%D We just quit if new functionality is expected. - -\ifnum\luatexversion<70 % also change message - \writestatus{!!!!}{Your luatex binary is too old, you need at least version 0.70!} - \expandafter\end -\fi - -%D There is only this way to pass the version info to \LUA\ (currently). Hm, we could -%D now put it into the environment. - -\newtoks\contextversiontoks \contextversiontoks\expandafter{\contextversion} -\newtoks\contextkindtoks \contextkindtoks \expandafter{\contextkind} - -% \normaleverypar{\wait} % uncomment for test of funny injections - -%D Now the more fundamental code gets defined. - -\loadmarkfile{norm-ctx} -\loadmarkfile{syst-pln} -\loadmarkfile{syst-mes} - -\loadmarkfile{luat-cod} -\loadmarkfile{luat-bas} -\loadmarkfile{luat-lib} - -\loadmarkfile{catc-ini} -\loadmarkfile{catc-act} -\loadmarkfile{catc-def} -\loadmarkfile{catc-ctx} -\loadmarkfile{catc-sym} - -\loadmarkfile{cldf-ini} - -% From here on we have \unexpanded being \normalprotected, as we already had -% \unexpanded long before etex came around. 
- -\loadmarkfile{syst-aux} -\loadmarkfile{syst-lua} -\loadmarkfile{syst-con} - -\loadmarkfile{syst-fnt} -\loadmarkfile{syst-rtp} - -\loadmkvifile{file-ini} -\loadmkvifile{file-res} -\loadmkvifile{file-lib} - -\loadmarkfile{supp-dir} - -\loadmarkfile{char-ini} -\loadmarkfile{char-utf} -\loadmarkfile{char-act} - -\loadmarkfile{mult-ini} -\loadmarkfile{mult-sys} -\loadmarkfile{mult-aux} -\loadmarkfile{mult-def} -\loadmarkfile{mult-chk} -%loadmarkfile{mult-aux} % moved up -\loadmkvifile{mult-dim} - -\loadmarkfile{cldf-int} % interface - -\loadmarkfile{luat-ini} - -\loadmarkfile{toks-ini} - -\loadmarkfile{attr-ini} - -\loadmarkfile{core-ini} -\loadmarkfile{core-env} - -\loadmarkfile{layo-ini} - -\loadmarkfile{node-ini} - -\loadmarkfile{cldf-bas} % basics / depends on nodes - -\loadmarkfile{node-fin} -\loadmarkfile{node-mig} -\loadmarkfile{typo-bld} % par builders -%loadmarkfile{node-pag} - -\loadmarkfile{back-ini} - -\loadmarkfile{attr-col} -\loadmarkfile{attr-lay} -\loadmarkfile{attr-neg} -\loadmarkfile{attr-eff} -\loadmarkfile{attr-mkr} - -\loadmarkfile{trac-tex} -\loadmarkfile{trac-deb} % will move up -\loadmarkfile{trac-ctx} % maybe move up - -%loadmarkfile{blob-ini} % not to be used, we only use a helper - -\loadmarkfile{supp-box} - -%loadmarkfile{supp-vis} % replaced by trac-vis -%loadmarkfile{supp-fun} % mostly replaced - -\loadmarkfile{supp-ran} -\loadmarkfile{supp-mat} -\loadmarkfile{spac-cha} -%loadmarkfile{supp-num} % obsolete - -\loadmarkfile{typo-ini} - -\loadmkvifile{file-syn} -\loadmkvifile{file-mod} - -\loadmarkfile{core-con} - -\loadmarkfile{cont-fil} - -\loadmarkfile{regi-ini} -\loadmarkfile{enco-ini} -\loadmarkfile{hand-ini} - -\loadmarkfile{lang-ini} -\loadmarkfile{lang-lab} - -\loadmarkfile{unic-ini} - -\loadmarkfile{core-uti} -\loadmarkfile{core-two} -\loadmarkfile{core-dat} - -\loadmarkfile{colo-ini} -\loadmarkfile{colo-grp} % optional -\loadmarkfile{colo-ext} - -\loadmarkfile{node-bck} % overloads anch-pgr (experimental and undocumented) - -\loadmarkfile{pack-cut} % leftovers from trac-vis - -\loadmarkfile{lang-mis} -\loadmarkfile{lang-url} -\loadmarkfile{lang-def} - -\loadmkvifile{file-job} % why so late? 
- -\loadmarkfile{symb-ini} % brrr depends on fonts - -\loadmarkfile{sort-ini} - -\loadmkvifile{pack-mis} -\loadmarkfile{pack-rul} -\loadmarkfile{pack-mrl} -\loadmkvifile{pack-bck} -\loadmarkfile{pack-fen} - -\loadmarkfile{lxml-ini} -\loadmarkfile{lxml-sor} - -\loadmkvifile{typo-prc} - -\loadmkvifile{strc-ini} -\loadmarkfile{strc-tag} -\loadmarkfile{strc-doc} -\loadmarkfile{strc-num} -\loadmarkfile{strc-mar} -\loadmarkfile{strc-sbe} -\loadmkvifile{strc-lst} -\loadmarkfile{strc-sec} -\loadmarkfile{strc-pag} % hm, depends on core-num -\loadmarkfile{strc-ren} -\loadmarkfile{strc-xml} -\loadmarkfile{strc-def} % might happen later -\loadmkvifile{strc-ref} -\loadmarkfile{strc-reg} -\loadmkvifile{strc-lev} % experiment - -\loadmarkfile{spac-ali} -\loadmarkfile{spac-hor} -\loadmarkfile{spac-flr} -\loadmarkfile{spac-ver} -\loadmarkfile{spac-lin} -\loadmarkfile{spac-pag} -\loadmarkfile{spac-par} -%loadmarkfile{spac-adj} % no longer needed -\loadmarkfile{spac-def} -\loadmarkfile{spac-grd} - -\loadmarkfile{anch-pos} - -\loadmkvifile{scrn-ini} -\loadmkvifile{scrn-ref} - -\loadmarkfile{pack-obj} - -\loadmkvifile{strc-itm} - -\loadmkvifile{strc-con} -\loadmkvifile{strc-des} -\loadmkvifile{strc-enu} - -\loadmarkfile{strc-ind} -\loadmarkfile{strc-lab} -\loadmarkfile{strc-syn} - -\loadmarkfile{core-sys} - -\loadmarkfile{page-var} -\loadmkvifile{page-otr} -\loadmarkfile{page-ini} -\loadmarkfile{page-ins} -\loadmarkfile{page-fac} -\loadmarkfile{page-brk} -\loadmarkfile{page-col} -\loadmarkfile{page-inf} -\loadmarkfile{page-grd} -\loadmarkfile{page-flt} -\loadmarkfile{page-bck} -\loadmarkfile{page-not} -\loadmarkfile{page-one} -\loadmarkfile{page-lay} -\loadmkvifile{page-box} -\loadmkvifile{page-txt} -\loadmarkfile{page-sid} % when - -\loadmkvifile{strc-flt} - -\loadmarkfile{page-pst} -\loadmkvifile{page-mbk} -\loadmarkfile{page-mul} % partly overloaded -\loadmarkfile{page-mix} % new -\loadmarkfile{page-set} -\loadmarkfile{pack-lyr} -\loadmarkfile{pack-pos} -\loadmkvifile{page-mak} - -\loadmarkfile{page-lin} -\loadmarkfile{page-par} -\loadmarkfile{typo-pag} -\loadmarkfile{typo-mar} -\loadmarkfile{typo-itm} - -\loadmarkfile{buff-ini} -\loadmarkfile{buff-ver} -\loadmkvifile{buff-par} - -\loadmarkfile{buff-imp-tex} % optional as also runtime if not loaded -\loadmarkfile{buff-imp-mp} % optional as also runtime if not loaded -\loadmarkfile{buff-imp-lua} % optional as also runtime if not loaded -\loadmarkfile{buff-imp-xml} % optional as also runtime if not loaded - -\loadmarkfile{buff-imp-parsed-xml} % optional -%loadmarkfile{buff-imp-parsed-lua} % optional - -\loadmarkfile{strc-blk} - -\loadmarkfile{page-imp} -\loadmkvifile{page-sel} % optional -\loadmkvifile{page-inj} % optional - -\loadmkvifile{scrn-pag} -\loadmkvifile{scrn-wid} -\loadmkvifile{scrn-but} -\loadmkvifile{scrn-bar} - -\loadmarkfile{page-com} % optional (after scrn-pag) - -\loadmarkfile{strc-bkm} % bookmarks - -\loadmarkfile{tabl-com} -\loadmarkfile{tabl-pln} - -\loadmarkfile{tabl-tab} % thrd-tab stripped and merged - -\loadmarkfile{tabl-tbl} -\loadmarkfile{tabl-ntb} -\loadmarkfile{tabl-nte} -\loadmarkfile{tabl-ltb} -\loadmarkfile{tabl-tsp} -\loadmkvifile{tabl-xtb} -\loadmarkfile{tabl-mis} - -\loadmarkfile{java-ini} - -\loadmkvifile{scrn-fld} -\loadmkvifile{scrn-hlp} - -\loadmarkfile{char-enc} % will move up - -\loadmkvifile{font-lib} % way too late -\loadmkvifile{font-fil} -\loadmkvifile{font-var} -\loadmkvifile{font-fea} -\loadmkvifile{font-mat} -\loadmkvifile{font-ini} -\loadmkvifile{font-sym} -\loadmkvifile{font-sty} -\loadmkvifile{font-set} 
-\loadmkvifile{font-emp} -\loadmarkfile{font-pre} -\loadmarkfile{font-unk} -\loadmarkfile{font-tra} -\loadmarkfile{font-chk} -\loadmarkfile{font-uni} -\loadmkvifile{font-col} -\loadmkvifile{font-gds} -\loadmkvifile{font-aux} - -\loadmarkfile{typo-lan} - -\loadmarkfile{lxml-css} - -\loadmarkfile{spac-chr} % depends on fonts - -\loadmarkfile{blob-ini} % not to be used, we only use a helper - -\loadmarkfile{trac-vis} -\loadmarkfile{trac-jus} - -\loadmarkfile{typo-cln} -\loadmarkfile{typo-spa} -\loadmarkfile{typo-krn} -\loadmkvifile{typo-itc} -\loadmarkfile{typo-dir} -\loadmarkfile{typo-brk} -\loadmarkfile{typo-cap} -\loadmarkfile{typo-dig} -\loadmarkfile{typo-rep} -\loadmkvifile{typo-txt} -\loadmarkfile{typo-par} - -\loadmkvifile{type-ini} -\loadmarkfile{type-set} - -\loadmarkfile{scrp-ini} - -\loadmarkfile{lang-wrd} % can be optional (discussion with mm sideeffect) -%loadmarkfile{lang-rep} % can be optional (bt 2013 side effect) - -\loadmarkfile{prop-ini} % only for downward compatibility - -\loadmarkfile{mlib-ctx} - -\loadmarkfile{meta-ini} -\loadmarkfile{meta-tex} -\loadmarkfile{meta-fun} -\loadmarkfile{meta-pag} -\loadmarkfile{meta-grd} - -\loadmarkfile{page-mrk} % depends on mp - -\loadmarkfile{page-flw} -\loadmarkfile{page-spr} -\loadmarkfile{page-plg} -\loadmarkfile{page-str} - -\loadmarkfile{anch-pgr} % can be moved up (nicer for dependencies) -\loadmkvifile{anch-bck} -\loadmarkfile{anch-tab} % overloads tabl-tbl -\loadmarkfile{anch-bar} -%loadmarkfile{anch-snc} % when needed this one will be redone - -\loadmarkfile{math-ini} -\loadmarkfile{math-pln} -\loadmarkfile{math-for} -\loadmarkfile{math-def} -\loadmarkfile{math-ali} -%loadmarkfile{math-arr} -\loadmkvifile{math-stc} -\loadmarkfile{math-frc} -\loadmarkfile{math-mis} -\loadmarkfile{math-scr} -\loadmarkfile{math-int} -\loadmarkfile{math-del} -\loadmarkfile{math-fen} -\loadmarkfile{math-inl} -\loadmarkfile{math-dis} -%loadmarkfile{math-lan} - -\loadmarkfile{phys-dim} - -\loadmarkfile{strc-mat} - -\loadmarkfile{chem-ini} -\loadmarkfile{chem-str} - -\loadmarkfile{typo-scr} - -\loadmarkfile{node-rul} -\loadmkvifile{font-sol} % font solutions - -\loadmkvifile{strc-not} -\loadmkvifile{strc-lnt} - -\loadmarkfile{pack-com} -\loadmarkfile{typo-del} - -\loadmarkfile{grph-trf} -\loadmarkfile{grph-inc} -\loadmarkfile{grph-fig} -\loadmarkfile{grph-raw} - -\loadmarkfile{pack-box} -\loadmarkfile{pack-bar} -\loadmarkfile{page-app} -\loadmarkfile{meta-fig} - -\loadmarkfile{lang-spa} % will become obsolete - -\loadmarkfile{bibl-bib} -\loadmarkfile{bibl-tra} - -%loadmarkfile{x-xtag} % no longer preloaded - -\loadmarkfile{meta-xml} - -\loadmarkfile{cont-log} - -\loadmarkfile{task-ini} - -\loadmarkfile{cldf-ver} % verbatim, this can come late -\loadmarkfile{cldf-com} % commands, this can come late - -\loadmarkfile{core-ctx} % this order might change but we need to check depedencies / move to another namespace - -\loadmarkfile{core-def} - -%usemodule[x][res-04] % xml resource libraries -%usemodule[x][res-08] % rlx runtime conversion -%usemodule[x][res-12] % rli external indentification - -% now we hook in backend code (needs checking) - -\loadmarkfile{back-pdf} % actually, this one should load the next three using document.arguments.backend -\loadmarkfile{mlib-pdf} -\loadmarkfile{mlib-pps} -\loadmarkfile{meta-pdf} -\loadmarkfile{grph-epd} - -\loadmarkfile{back-exp} - -\setupcurrentlanguage[\defaultlanguagetag] - -\prependtoks - \ctxlua{statistics.starttiming(statistics)}% -\to \everyjob - -\appendtoks - \ctxlua{statistics.stoptiming(statistics)}% -\to 
\everyjob - -\appendtoks - \ctxlua{statistics.savefmtstatus("\jobname","\contextversion","context.mkiv","\contextkind")}% can become automatic -\to \everydump - -\errorstopmode \dump \endinput diff --git a/tex/context/base/core-con.lua b/tex/context/base/core-con.lua index cb284b9a7..315a34f39 100644 --- a/tex/context/base/core-con.lua +++ b/tex/context/base/core-con.lua @@ -216,27 +216,14 @@ function commands.Character (n) context(chr (n,upper_offset)) end function commands.characters(n) context(chrs(n,lower_offset)) end function commands.Characters(n) context(chrs(n,upper_offset)) end -local days = { - [false] = { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 }, - [true] = { 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 } -} - -local function weekday(day,month,year) - return date("%w",time{year=year,month=month,day=day}) + 1 -end - -local function isleapyear(year) - return (year % 400 == 0) or ((year % 100 ~= 0) and (year % 4 == 0)) -end +local weekday = os.weekday -- moved to l-os +local isleapyear = os.isleapyear -- moved to l-os +local nofdays = os.nofdays -- moved to l-os local function leapyear(year) return isleapyear(year) and 1 or 0 end -local function nofdays(year,month) - return days[isleapyear(year)][month] -end - local function textime() return tonumber(date("%H")) * 60 + tonumber(date("%M")) end @@ -254,7 +241,7 @@ converters.nofdays = nofdays converters.textime = textime function commands.weekday (day,month,year) context(weekday (day,month,year)) end -function commands.leapyear(year) context(leapyear(year)) end -- rather useless +function commands.leapyear(year) context(leapyear(year)) end -- rather useless, only for ifcase function commands.nofdays (year,month) context(nofdays (year,month)) end function commands.year () context(date("%Y")) end diff --git a/tex/context/base/core-con.mkiv b/tex/context/base/core-con.mkiv index 7febdcf39..375d77072 100644 --- a/tex/context/base/core-con.mkiv +++ b/tex/context/base/core-con.mkiv @@ -15,6 +15,8 @@ \registerctxluafile{core-con}{1.001} +% todo: iso date ranges (from/to) + \unprotect \ifdefined\currentlanguage \else \let\currentlanguage\empty \fi @@ -97,6 +99,9 @@ \def\koreannumeralsp #1{\ctxcommand{alphabetic(\number#1,"korean-parent")}} \def\koreannumeralsc #1{\ctxcommand{alphabetic(\number#1,"korean-circle")}} +\let\koreanparentnumerals\koreannumeralsp +\let\koreancirclenumerals\koreannumeralsc + \def\chinesenumerals #1{\ctxcommand{chinesenumerals (\number#1)}} \def\chinesecapnumerals#1{\ctxcommand{chinesecapnumerals(\number#1,"cap")}} \def\chineseallnumerals#1{\ctxcommand{chineseallnumerals(\number#1,"all")}} diff --git a/tex/context/base/core-dat.lua b/tex/context/base/core-dat.lua index 826d3a675..242d362d0 100644 --- a/tex/context/base/core-dat.lua +++ b/tex/context/base/core-dat.lua @@ -21,13 +21,17 @@ local trace_pagestates = false trackers.register("job.pagestates", function(v) local report_dataset = logs.reporter("dataset") local report_pagestate = logs.reporter("pagestate") -local allocate = utilities.storage.allocate +local allocate = utilities.storage.allocate local settings_to_hash = utilities.parsers.settings_to_hash -local texcount = tex.count -local formatters = string.formatters -local v_yes = interfaces.variables.yes -local new_latelua = nodes.pool.latelua +local texgetcount = tex.getcount +local texsetcount = tex.setcount + +local formatters = string.formatters + +local v_yes = interfaces.variables.yes + +local new_latelua = nodes.pool.latelua local collected = allocate() local tobesaved = allocate() @@ -86,7 
+90,7 @@ local function setdata(settings) set.index = index data.index = index data.order = index - data.realpage = texcount.realpageno + data.realpage = texgetcount("realpageno") if trace_datasets then report_dataset("action %a, name %a, tag %a, index %a","assign delayed",name,tag,index) end @@ -101,7 +105,7 @@ datasets.setdata = setdata function datasets.extend(name,tag) local set = sets[name] local order = set.order + 1 - local realpage = texcount.realpageno + local realpage = texgetcount("realpageno") set.order = order local t = tobesaved[name][tag] t.realpage = realpage @@ -207,7 +211,7 @@ local function setstate(settings) else tag = tonumber(tag) or tag -- autonumber saves keys end - local realpage = texcount.realpageno + local realpage = texgetcount("realpageno") local data = realpage list[tag] = data if trace_pagestates then @@ -219,7 +223,7 @@ end pagestates.setstate = setstate function pagestates.extend(name,tag) - local realpage = texcount.realpageno + local realpage = texgetcount("realpageno") if trace_pagestates then report_pagestate("action %a, name %a, tag %a, preset %a","synchronize",name,tag,realpage) end @@ -261,9 +265,5 @@ end function commands.setpagestaterealpageno(name,tag) local t = collected[name] t = t and (t[tag] or t[tonumber(tag)]) - if t then - texcount.realpagestateno = t - else - texcount.realpagestateno = texcount.realpageno - end + texsetcount("realpagestateno",t or texgetcount("realpageno")) end diff --git a/tex/context/base/core-def.mkiv b/tex/context/base/core-def.mkiv index dc2a30f4e..99bed6d34 100644 --- a/tex/context/base/core-def.mkiv +++ b/tex/context/base/core-def.mkiv @@ -45,6 +45,8 @@ \flushpostponednodedata \typo_delimited_repeat \insertparagraphintro + \typo_initial_handle + \typo_firstline_handle \to \everypar \appendtoks @@ -93,23 +95,26 @@ \font_preloads_at_stop_text \to \everystoptext +% We made \loadoptionfile obsolete: we pass options via the command line to +% luatex now and handle them directly instead of via a file. This also makes +% the next obsolete: +% +% \directsetup{*runtime:options} +% \directsetup{*runtime:modules} + \appendtoks \showcontextbanner \initializenewlinechar \calculatecurrenttime - \loadsystemfiles - % \loadoptionfile % obsolete, but nice to keep as reference of when/how + \syst_files_load \job_options_get_commandline % expands some commands \job_options_get_ctxfile % might expand some commands \job_options_set_filenames \font_preloads_at_every_job \settopskip % brrr \initializemainlanguage - \initializexmlprocessing % is this still needed? 
\initializepagebackgrounds \initializepagecounters - % \directsetup{*runtime:options}% % obsolete as the option file is replaced by a more direct mechanism - % \directsetup{*runtime:modules}% % obsolete as the option file is replaced by a more direct mechanism \job_options_set_modes \job_options_set_modules \job_options_set_environments diff --git a/tex/context/base/core-env.lua b/tex/context/base/core-env.lua index 025192d4b..a4d1fdd92 100644 --- a/tex/context/base/core-env.lua +++ b/tex/context/base/core-env.lua @@ -15,12 +15,14 @@ local P, C, S, Cc, lpegmatch, patterns = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc, lpeg.m local csname_id = token.csname_id local create = token.create -local texcount = tex.count +local texgetcount = tex.getcount local texsetcount = tex.setcount local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex +local context = context + local undefined = csname_id("*undefined*crap*") local iftrue = create("iftrue")[2] -- inefficient hack @@ -42,8 +44,8 @@ setmetatableindex(tex.modes, function(t,k) if csname_id(n) == undefined then return false else - modes[k] = function() return texcount[n] >= 1 end - return texcount[n] >= 1 + modes[k] = function() return texgetcount(n) >= 1 end + return texgetcount(n) >= 1 end end end) @@ -57,18 +59,18 @@ setmetatableindex(tex.systemmodes, function(t,k) if csname_id(n) == undefined then return false else - systemmodes[k] = function() return texcount[n] >= 1 end - return texcount[n] >= 1 + systemmodes[k] = function() return texgetcount(n) >= 1 end + return texgetcount(n) >= 1 end end end) setmetatableindex(tex.constants, function(t,k) - return csname_id(k) ~= undefined and texcount[k] or 0 + return csname_id(k) ~= undefined and texgetcount(k) or 0 end) setmetatableindex(tex.conditionals, function(t,k) -- 0 == true - return csname_id(k) ~= undefined and texcount[k] == 0 + return csname_id(k) ~= undefined and texgetcount(k) == 0 end) setmetatableindex(tex.ifs, function(t,k) @@ -84,7 +86,7 @@ end) -- if glob then -- texsetcount("global",name,0) -- else --- texcount[name] = 0 +-- texsetcount(name,0) -- end -- end -- @@ -92,7 +94,7 @@ end) -- if glob then -- texsetcount("global",name,1) -- else --- texcount[name] = 1 +-- texsetcount(name,1) -- end -- end diff --git a/tex/context/base/core-fil.mkii b/tex/context/base/core-fil.mkii index 6b0d8caf5..16daf7bf6 100644 --- a/tex/context/base/core-fil.mkii +++ b/tex/context/base/core-fil.mkii @@ -348,6 +348,10 @@ \setupexternalresources [url=] +%D Goodie: + +\unexpanded\def\continueifinputfile#1{\doifnotfile{#1}{\endinput}} + %D This module will be perfected / changed / weeded. \protect \endinput diff --git a/tex/context/base/core-uti.lua b/tex/context/base/core-uti.lua index 96ccdca48..cbbdeff23 100644 --- a/tex/context/base/core-uti.lua +++ b/tex/context/base/core-uti.lua @@ -20,7 +20,6 @@ saves much runtime but at the cost of more memory usage.

local format, match = string.format, string.match local next, type, tostring = next, type, tostring local concat = table.concat -local texcount = tex.count local definetable = utilities.tables.definetable local accesstable = utilities.tables.accesstable @@ -30,6 +29,8 @@ local packers = utilities.packers local allocate = utilities.storage.allocate local mark = utilities.storage.mark +local texgetcount = tex.getcount + local report_passes = logs.reporter("job","passes") job = job or { } @@ -95,6 +96,8 @@ job.register('job.variables.checksums', checksums) local rmethod, rvalue +local setxvalue = context.setxvalue + local function initializer() tobesaved = jobvariables.tobesaved collected = jobvariables.collected @@ -110,7 +113,7 @@ local function initializer() end tobesaved.randomseed = rvalue for cs, value in next, collected do - context.setxvalue(cs,value) + setxvalue(cs,value) end end @@ -138,7 +141,7 @@ local jobpacker = packers.new(packlist,job.packversion) -- jump number when chan job.pack = true -- job.pack = false -directives.register("job.pack",function(v) pack = v end) +directives.register("job.pack",function(v) job.pack = v end) local _save_, _load_, _others_ = { }, { }, { } -- registers timing @@ -147,7 +150,7 @@ function job.save(filename) -- we could return a table but it can get pretty lar local f = io.open(filename,'w') if f then f:write("local utilitydata = { }\n\n") - f:write(serialize(comment,"utilitydata.comment",true,true),"\n\n") + f:write(serialize(comment,"utilitydata.comment",true),"\n\n") for l=1,#savelist do local list = savelist[l] local target = format("utilitydata.%s",list[1]) @@ -160,11 +163,11 @@ function job.save(filename) -- we could return a table but it can get pretty lar packers.pack(data,jobpacker,true) end local definer, name = definetable(target,true,true) -- no first and no last - f:write(definer,"\n\n",serialize(data,name,true,true),"\n\n") + f:write(definer,"\n\n",serialize(data,name,true),"\n\n") end if job.pack then packers.strip(jobpacker) - f:write(serialize(jobpacker,"utilitydata.job.packed",true,true),"\n\n") + f:write(serialize(jobpacker,"utilitydata.job.packed",true),"\n\n") end f:write("return utilitydata") f:close() @@ -262,7 +265,7 @@ end) statistics.register("callbacks", function() local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0 - local pages = texcount['realpageno'] - 1 + local pages = texgetcount('realpageno') - 1 if pages > 1 then return format("direct: %s, indirect: %s, total: %s (%i per page)", total-indirect, indirect, total, total/pages) else @@ -276,17 +279,30 @@ statistics.register("randomizer", function() end end) +local kg_per_watt_per_second = 1 / 15000000 +local watts_per_core = 50 +local speedup_by_other_engine = 1.2 +local used_wood_factor = watts_per_core * kg_per_watt_per_second / speedup_by_other_engine +local used_wood_factor = (50 / 15000000) / 1.2 + function statistics.formatruntime(runtime) if not environment.initex then -- else error when testing as not counters yet - local shipped = texcount['nofshipouts'] - local pages = texcount['realpageno'] + local shipped = texgetcount('nofshipouts') + local pages = texgetcount('realpageno') if pages > shipped then pages = shipped end if shipped > 0 or pages > 0 then local persecond = shipped / runtime if pages == 0 then pages = shipped end +if jit then +local saved = watts_per_core * runtime * kg_per_watt_per_second / speedup_by_other_engine +local saved = used_wood_factor * runtime +-- return format("%s seconds, %i processed pages, %i shipped 
pages, %.3f pages/second, %f kg tree saved by using luajittex",runtime,pages,shipped,persecond,saved) + return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second, %f g tree saved by using luajittex",runtime,pages,shipped,persecond,saved*1000) +else return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second",runtime,pages,shipped,persecond) +end else return format("%s seconds",runtime) end diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua index 8a2fd0320..c67e97bb1 100644 --- a/tex/context/base/data-exp.lua +++ b/tex/context/base/data-exp.lua @@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['data-exp'] = { local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub local concat, sort = table.concat, table.sort local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns -local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S +local Ct, Cs, Cc, Carg, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.Carg, lpeg.P, lpeg.C, lpeg.S local type, next = type, next local ostype = os.type @@ -26,21 +26,21 @@ local resolvers = resolvers -- all, when working on the main resolver code, I don't want to scroll -- past this every time. See data-obs.lua for the gsub variant. -local function f_first(a,b) - local t, n = { }, 0 - for s in gmatch(b,"[^,]+") do - n = n + 1 ; t[n] = a .. s - end - return concat(t,",") -end - -local function f_second(a,b) - local t, n = { }, 0 - for s in gmatch(a,"[^,]+") do - n = n + 1 ; t[n] = s .. b - end - return concat(t,",") -end +-- local function f_first(a,b) +-- local t, n = { }, 0 +-- for s in gmatch(b,"[^,]+") do +-- n = n + 1 ; t[n] = a .. s +-- end +-- return concat(t,",") +-- end +-- +-- local function f_second(a,b) +-- local t, n = { }, 0 +-- for s in gmatch(a,"[^,]+") do +-- n = n + 1 ; t[n] = s .. 
b +-- end +-- return concat(t,",") +-- end -- kpsewhich --expand-braces '{a,b}{c,d}' -- ac:bc:ad:bd @@ -69,6 +69,21 @@ local function f_both(a,b) return concat(t,",") end +local comma = P(",") +local nocomma = (1-comma)^1 +local docomma = comma^1/"," +local before = Cs((nocomma * Carg(1) + docomma)^0) +local after = Cs((Carg(1) * nocomma + docomma)^0) +local both = Cs(((C(nocomma) * Carg(1))/function(a,b) return lpegmatch(before,b,1,a) end + docomma)^0) + +local function f_first (a,b) return lpegmatch(after, b,1,a) end +local function f_second(a,b) return lpegmatch(before,a,1,b) end +local function f_both (a,b) return lpegmatch(both, b,1,a) end + +-- print(f_first ("a", "x,y,z")) +-- print(f_second("a,b,c","x")) +-- print(f_both ("a,b,c","x,y,z")) + local left = P("{") local right = P("}") local var = P((1 - S("{}" ))^0) diff --git a/tex/context/base/data-met.lua b/tex/context/base/data-met.lua index 96da70bfd..ee9de3fd9 100644 --- a/tex/context/base/data-met.lua +++ b/tex/context/base/data-met.lua @@ -36,7 +36,7 @@ local function splitmethod(filename) -- todo: filetype in specification end filename = file.collapsepath(filename,".") -- hm, we should keep ./ in some cases --- filename = gsub(filename,"^%./",getcurrentdir().."/") -- we will merge dir.expandname and collapse some day + -- filename = gsub(filename,"^%./",getcurrentdir().."/") -- we will merge dir.expandname and collapse some day if not find(filename,"://") then return { scheme = "file", path = filename, original = filename, filename = filename } @@ -49,6 +49,16 @@ local function splitmethod(filename) -- todo: filetype in specification end end +-- local function splitmethod(filename) -- todo: filetype in specification +-- if not filename then +-- return { scheme = "unknown", original = filename } +-- end +-- if type(filename) == "table" then +-- return filename -- already split +-- end +-- return url.hashed(filename) +-- end + resolvers.splitmethod = splitmethod -- bad name but ok -- the second argument is always analyzed (saves time later on) and the original diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua index 532b6261f..f1ddb62aa 100644 --- a/tex/context/base/data-res.lua +++ b/tex/context/base/data-res.lua @@ -1303,7 +1303,7 @@ local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other end -- we could have a loop over the 6 functions but then we'd have to --- always analyze +-- always analyze .. todo: use url split collect_instance_files = function(filename,askedformat,allresults) -- uses nested askedformat = askedformat or "" diff --git a/tex/context/base/data-tmp.lua b/tex/context/base/data-tmp.lua index 5025a8a0a..3e109dcfe 100644 --- a/tex/context/base/data-tmp.lua +++ b/tex/context/base/data-tmp.lua @@ -250,6 +250,10 @@ end caches.getreadablepaths = getreadablepaths caches.getwritablepath = getwritablepath +-- this can be tricky as we can have a pre-generated format while at the same time +-- use e.g. a home path where we have updated file databases and so maybe we need +-- to check first if we do have a writable one + function caches.getfirstreadablefile(filename,...) local rd = getreadablepaths(...) for i=1,#rd do @@ -263,6 +267,28 @@ function caches.getfirstreadablefile(filename,...) return caches.setfirstwritablefile(filename,...) end +-- next time we have an issue, we can test this instead: + +function caches.getfirstreadablefile_TEST_ME_FIRST(filename,...) + -- check if we have already written once + local fullname, path = caches.setfirstwritablefile(filename,...) 
+ if is_readable(fullname) then + return fullname, path -- , true + end + -- otherwise search for pregenerated + local rd = getreadablepaths(...) + for i=1,#rd do + local path = rd[i] + local fullname = file.join(path,filename) + if is_readable(fullname) then + usedreadables[i] = true + return fullname, path -- , false + end + end + -- else assume new written + return fullname, path -- , true +end + function caches.setfirstwritablefile(filename,...) local wr = getwritablepath(...) local fullname = file.join(wr,filename) diff --git a/tex/context/base/data-vir.lua b/tex/context/base/data-vir.lua index e5bf35fa7..48fec54e0 100644 --- a/tex/context/base/data-vir.lua +++ b/tex/context/base/data-vir.lua @@ -58,6 +58,7 @@ function openers.virtual(specification) end data[original] = nil -- when we comment this we can have error messages -- With utf-8 we signal that no regime is to be applied! + -- characters.showstring(d) return openers.helpers.textopener("virtual",original,d,"utf-8") else if trace_virtual then diff --git a/tex/context/base/file-ini.lua b/tex/context/base/file-ini.lua index 1872ed3d3..2bc742a1f 100644 --- a/tex/context/base/file-ini.lua +++ b/tex/context/base/file-ini.lua @@ -13,13 +13,13 @@ if not modules then modules = { } end modules ['file-ini'] = { resolvers.jobs = resolvers.jobs or { } -local texcount = tex.count -local setvalue = context.setvalue +local texsetcount = tex.setcount +local setvalue = context.setvalue function commands.splitfilename(fullname) local t = file.nametotable(fullname) local path = t.path - texcount.splitoffkind = (path == "" and 0) or (path == '.' and 1) or 2 + texsetcount("splitoffkind",(path == "" and 0) or (path == '.' and 1) or 2) setvalue("splitofffull",fullname) setvalue("splitoffpath",path) setvalue("splitoffname",t.name) diff --git a/tex/context/base/file-job.mkvi b/tex/context/base/file-job.mkvi index 087f1a685..ce0d54ece 100644 --- a/tex/context/base/file-job.mkvi +++ b/tex/context/base/file-job.mkvi @@ -66,14 +66,16 @@ % \def\doloadsystemfile#1% only mkiv files % {\readfile{sys:///#1.\mksuffix}{\showmessage\m!system2{#1.\mksuffix}}\donothing} -\unexpanded\def\loadsystemfiles - {\syst_files_load\f!newfilename % new code, to be integrated at some point, plus fixes posted on the list - \syst_files_load\f!locfilename % new code, somewhat experimental, not distributed (outside the dev group) - \syst_files_load\f!expfilename % new code, very experimental, can be engine specific, mostly for me only - \syst_files_load\f!sysfilename} % local settings, but probably not that good an idea to use - -\def\syst_files_load#name% only mkiv files - {\readsysfile{#name.\mksuffix}{\showmessage\m!system2{#name.\mksuffix}}\donothing} +\unexpanded\def\syst_files_load + {\syst_files_load_indeed\f!newfilename % new code, to be integrated at some point, plus fixes posted on the list + \syst_files_load_indeed\f!locfilename % new code, somewhat experimental, not distributed (outside the dev group) + \syst_files_load_indeed\f!expfilename % new code, very experimental, can be engine specific, mostly for me only + \syst_files_load_indeed\f!sysfilename % local settings, but probably not that good an idea to use + %\syst_files_load_indeed\f!fntfilename % maybe some day, can load goodies and patches + } + +\def\syst_files_load_indeed#name% from now on we assume a suffix to be part of the name + {\readsysfile{#name}{\showmessage\m!system2{#name}}\donothing} % obsolete, but we keep it as reference of what happened % diff --git a/tex/context/base/file-mod.lua 
b/tex/context/base/file-mod.lua index 3659d3089..822f37c86 100644 --- a/tex/context/base/file-mod.lua +++ b/tex/context/base/file-mod.lua @@ -27,6 +27,8 @@ local report_modules = logs.reporter("resolvers","modules") commands = commands or { } local commands = commands +local context = context + local findbyscheme = resolvers.finders.byscheme -- use different one local iterator = utilities.parsers.iterator diff --git a/tex/context/base/file-mod.mkvi b/tex/context/base/file-mod.mkvi index 0dbd14bdb..00966a442 100644 --- a/tex/context/base/file-mod.mkvi +++ b/tex/context/base/file-mod.mkvi @@ -104,6 +104,8 @@ \let\currentmoduleparameters\empty \let\currentmodule \s!unknown +\newtoks\everysetupmodule + \unexpanded\def\startmodule {\doifnextoptionalelse\syst_modules_start_yes\syst_modules_start_nop} @@ -131,7 +133,8 @@ \getparameters[\??module\currentmodule:][#name]% internal (defaults) \normalexpanded{\getparameters[\??module\currentmodule:][\the\scratchtoks]}% loadtime (user) \fi - \let\currentmoduleparameters\empty} + \let\currentmoduleparameters\empty + \the\everysetupmodule} % to be tested: % @@ -145,7 +148,7 @@ % \fi % \let\currentmoduleparameters\empty} -\def\moduleparameter#name#parameter% +\def\moduleparameter#name#parameter% should have been \namedmoduleparameter {\csname\??module \ifcsname\??module#name:#parameter\endcsname#name:#parameter\fi \endcsname} diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua index cb0c2438f..adb4281b2 100644 --- a/tex/context/base/font-afm.lua +++ b/tex/context/base/font-afm.lua @@ -54,6 +54,8 @@ afm.addligatures = true -- best leave this set to true afm.addtexligatures = true -- best leave this set to true afm.addkerns = true -- best leave this set to true +local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes + local function setmode(tfmdata,value) if value then tfmdata.properties.mode = lower(value) @@ -309,7 +311,7 @@ local addkerns, addligatures, addtexligatures, unify, normalize -- we will imple function afm.load(filename) -- hm, for some reasons not resolved yet filename = resolvers.findfile(filename,'afm') or "" - if filename ~= "" then + if filename ~= "" and not fonts.names.ignoredfile(filename) then local name = file.removesuffix(file.basename(filename)) local data = containers.read(afm.cache,name) local attr = lfs.attributes(filename) @@ -360,6 +362,9 @@ function afm.load(filename) data = containers.write(afm.cache, name, data) data = containers.read(afm.cache,name) end + if applyruntimefixes and data then + applyruntimefixes(filename,data) + end end return data else @@ -637,10 +642,10 @@ local function copytotfm(data) parameters.x_height = 400 parameters.quad = 1000 -- - if italicangle then + if italicangle and italicangle ~= 0 then parameters.italicangle = italicangle parameters.italicfactor = math.cos(math.rad(90+italicangle)) - parameters.slant = - math.round(math.tan(italicangle*math.pi/180)) + parameters.slant = - math.tan(italicangle*math.pi/180) end if monospaced then parameters.space_stretch = 0 diff --git a/tex/context/base/font-chk.lua b/tex/context/base/font-chk.lua index 1b89366fd..6dc1667bb 100644 --- a/tex/context/base/font-chk.lua +++ b/tex/context/base/font-chk.lua @@ -9,8 +9,9 @@ if not modules then modules = { } end modules ['font-chk'] = { -- possible optimization: delayed initialization of vectors -- move to the nodes namespace -local format = string.format +local formatters = string.formatters local bpfactor = number.dimenfactors.bp +local fastcopy = table.fastcopy local 
report_fonts = logs.reporter("fonts","checking") @@ -23,9 +24,11 @@ local fonthashes = fonts.hashes local fontdata = fonthashes.identifiers local fontcharacters = fonthashes.characters -local addprivate = fonts.helpers.addprivate -local hasprivate = fonts.helpers.hasprivate -local getprivatenode = fonts.helpers.getprivatenode +local helpers = fonts.helpers + +local addprivate = helpers.addprivate +local hasprivate = helpers.hasprivate +local getprivatenode = helpers.getprivatenode local otffeatures = fonts.constructors.newfeatures("otf") local registerotffeature = otffeatures.register @@ -65,7 +68,7 @@ local function onetimemessage(font,char,message) -- char == false returns table if char == false then return table.sortedkeys(category) elseif not category[char] then - report_fonts("char %U in font %a with id %a: %s",char,tfmdata.properties.fullname,font,message) + report_fonts("char %C in font %a with id %a: %s",char,tfmdata.properties.fullname,font,message) category[char] = true end end @@ -147,7 +150,7 @@ local variants = { { tag = "yellow", r = .6, g = .6, b = 0 }, } -local package = "q %0.6f 0 0 %0.6f 0 0 cm %s %s %s rg %s %s %s RG 10 M 1 j 1 J 0.05 w %s Q" +local pdf_blob = "pdf: q %0.6f 0 0 %0.6f 0 0 cm %s %s %s rg %s %s %s RG 10 M 1 j 1 J 0.05 w %s Q" local cache = { } -- saves some tables but not that impressive @@ -162,9 +165,9 @@ local function addmissingsymbols(tfmdata) -- we can have an alternative with rul for i =1, #fakes do local fake = fakes[i] local name = fake.name - local privatename = format("placeholder %s %s",name,tag) + local privatename = formatters["placeholder %s %s"](name,tag) if not hasprivate(tfmdata,privatename) then - local hash = format("%s_%s_%s_%s_%s_%s",name,tag,r,g,b,size) + local hash = formatters["%s_%s_%s_%s_%s_%s"](name,tag,r,g,b,size) local char = cache[hash] if not char then char = { @@ -172,7 +175,7 @@ local function addmissingsymbols(tfmdata) -- we can have an alternative with rul height = size*fake.height, depth = size*fake.depth, -- bah .. low level pdf ... should be a rule or plugged in - commands = { { "special", "pdf: " .. 
format(package,scale,scale,r,g,b,r,g,b,fake.code) } } + commands = { { "special", formatters[pdf_blob](scale,scale,r,g,b,r,g,b,fake.code) } } } cache[hash] = char end @@ -197,7 +200,7 @@ fonts.loggers.category_to_placeholder = mapping function commands.getplaceholderchar(name) local id = font.current() addmissingsymbols(fontdata[id]) - context(fonts.helpers.getprivatenode(fontdata[id],name)) + context(helpers.getprivatenode(fontdata[id],name)) end function checkers.missing(head) @@ -207,6 +210,7 @@ function checkers.missing(head) local char = n.char if font ~= lastfont then characters = fontcharacters[font] + lastfont = font end if not characters[char] and is_character[chardata[char].category] then if action == "remove" then @@ -357,3 +361,35 @@ luatex.registerstopactions(function() end end end) + +-- for the moment here + +local function expandglyph(characters,index,done) + done = done or { } + if not done[index] then + local data = characters[index] + if data then + done[index] = true + local d = fastcopy(data) + local n = d.next + if n then + d.next = expandglyph(characters,n,done) + end + local h = d.horiz_variants + if h then + for i=1,#h do + h[i].glyph = expandglyph(characters,h[i].glyph,done) + end + end + local v = d.vert_variants + if v then + for i=1,#v do + v[i].glyph = expandglyph(characters,v[i].glyph,done) + end + end + return d + end + end +end + +helpers.expandglyph = expandglyph diff --git a/tex/context/base/font-col.lua b/tex/context/base/font-col.lua index 20c99c9b4..b15997cba 100644 --- a/tex/context/base/font-col.lua +++ b/tex/context/base/font-col.lua @@ -7,6 +7,7 @@ if not modules then modules = { } end modules ['font-col'] = { } -- possible optimization: delayed initialization of vectors +-- we should also share equal vectors (math) local context, commands, trackers, logs = context, commands, trackers, logs local node, nodes, fonts, characters = node, nodes, fonts, characters @@ -15,12 +16,12 @@ local file, lpeg, table, string = file, lpeg, table, string local type, next, toboolean = type, next, toboolean local gmatch = string.gmatch local fastcopy = table.fastcopy ------ P, Cc, lpegmatch = lpeg.P, lpeg.Cc, lpeg.match -local traverse_id = node.traverse_id +local traverse_id = nodes.traverse_id + local settings_to_hash = utilities.parsers.settings_to_hash -local trace_collecting = false trackers.register("fonts.collecting", function(v) trace_collecting = v end) +local trace_collecting = false trackers.register("fonts.collecting", function(v) trace_collecting = v end) local report_fonts = logs.reporter("fonts","collections") @@ -43,7 +44,22 @@ local list = { } local current = 0 local enabled = false --- maybe also a copy +local function checkenabled() + -- a bit ugly but nicer than a fuzzy state while defining math + if next(vectors) then + if not enabled then + nodes.tasks.enableaction("processors","fonts.collections.process") + enabled = true + end + else + if enabled then + nodes.tasks.disableaction("processors","fonts.collections.process") + enabled = false + end + end +end + +collections.checkenabled = checkenabled function collections.reset(name,font) if font and font ~= "" then @@ -86,8 +102,22 @@ function collections.define(name,font,ranges,details) end end end - details.font, details.start, details.stop = font, start, stop - d[#d+1] = fastcopy(details) + local offset = details.offset + if type(offset) == "string" then + local start = characters.getrange(offset) + offset = start or false + else + offset = tonumber(offset) or false + end + d[#d+1] = { + font = 
font, + start = start, + stop = stop, + offset = offset, + rscale = tonumber (details.rscale) or 1, + force = toboolean(details.force,true), + check = toboolean(details.check,true), + } end end end @@ -102,57 +132,62 @@ function collections.registermain(name) list[#list+1] = last end +-- check: when true, only set when present in font +-- force: when false, then not set when already set + function collections.clonevector(name) statistics.starttiming(fonts) - local d = definitions[name] - local t = { } if trace_collecting then report_fonts("processing collection %a",name) end - for i=1,#d do - local f = d[i] - local id = list[i] - local start, stop = f.start, f.stop + local definitions = definitions[name] + local vector = { } + vectors[current] = vector + for i=1,#definitions do + local definition = definitions[i] + local name = definition.font + local start = definition.start + local stop = definition.stop + local check = definition.check + local force = definition.force + local offset = definition.offset or start + local remap = definition.remap + local cloneid = list[i] + local oldchars = fontdata[current].characters + local newchars = fontdata[cloneid].characters if trace_collecting then - report_fonts("remapping font %a to %a for range %U - %U",current,id,start,stop) + report_fonts("remapping font %a to %a for range %U - %U",current,cloneid,start,stop) end - local check = toboolean(f.check or "false",true) - local force = toboolean(f.force or "true",true) - local remap = f.remap or nil - -- check: when true, only set when present in font - -- force: when false, then not set when already set - local oldchars = fontdata[current].characters - local newchars = fontdata[id].characters if check then - for i=start,stop do - if newchars[i] and (force or (not t[i] and not oldchars[i])) then + for unicode = start, stop do + local unic = unicode + offset - start + if not newchars[unicode] then + -- not in font + elseif force or (not vector[unic] and not oldchars[unic]) then if remap then - t[i] = { id, remap[i] } + vector[unic] = { cloneid, remap[unicode] } else - t[i] = id + vector[unic] = cloneid end end end else - for i=start,stop do - if force or (not t[i] and not oldchars[i]) then + for unicode = start, stop do + local unic = unicode + offset - start + if force or (not vector[unic] and not oldchars[unic]) then if remap then - t[i] = { id, remap[i] } + vector[unic] = { cloneid, remap[unicode] } else - t[i] = id + vector[unic] = cloneid end end end end end - vectors[current] = t if trace_collecting then report_fonts("activating collection %a for font %a",name,current) end - if not enabled then - nodes.tasks.enableaction("processors","fonts.collections.process") - enabled = true - end + checkenabled() statistics.stoptiming(fonts) end @@ -163,11 +198,14 @@ end -- -- if lpegmatch(okay,name) then -function collections.prepare(name) +function collections.prepare(name) -- we can do this in lua now current = currentfont() if vectors[current] then return end + if fontdata[current].mathparameters then + return + end local d = definitions[name] if d then if trace_collecting then @@ -213,12 +251,13 @@ function collections.process(head) -- this way we keep feature processing if type(id) == "table" then local newid, newchar = id[1], id[2] if trace_collecting then - report_fonts("remapping character %a in font %a to character %a in font %a",n.char,n.font,newchar,newid) + report_fonts("remapping character %C in font %a to character %C in font %a",getchar(n),getfont(n),newchar,newid) end - n.font, n.char = 
newid, newchar + n.font = newid + n.char = newchar else if trace_collecting then - report_fonts("remapping font %a to %a for character %a",n.font,id,n.char) + report_fonts("remapping font %a to %a for character %C",getfont(n),id,getchar(n)) end n.font = id end diff --git a/tex/context/base/font-col.mkvi b/tex/context/base/font-col.mkvi index 030e9aef7..34a1b04a8 100644 --- a/tex/context/base/font-col.mkvi +++ b/tex/context/base/font-col.mkvi @@ -15,7 +15,8 @@ % % actually we can now do more at the lua end -% todo: missing only, force always, multiple fallbacks with test, scale +% todo : missing only, force always, multiple fallbacks with test, scale +% beware : only english key/values % % \resetfontfallback [whatever] % diff --git a/tex/context/base/font-con.lua b/tex/context/base/font-con.lua index 790d4877a..24b03222c 100644 --- a/tex/context/base/font-con.lua +++ b/tex/context/base/font-con.lua @@ -307,6 +307,7 @@ function constructors.scale(tfmdata,specification) if tonumber(specification) then specification = { size = specification } end + target.specification = specification -- local scaledpoints = specification.size local relativeid = specification.relativeid @@ -379,13 +380,13 @@ function constructors.scale(tfmdata,specification) targetproperties.mode = properties.mode or "base" -- inherited -- local askedscaledpoints = scaledpoints - local scaledpoints, delta = constructors.calculatescale(tfmdata,scaledpoints) -- no shortcut, dan be redefined + local scaledpoints, delta = constructors.calculatescale(tfmdata,scaledpoints,nil,specification) -- no shortcut, dan be redefined -- local hdelta = delta local vdelta = delta -- - target.designsize = parameters.designsize -- not really needed so it muight become obsolete - target.units_per_em = units -- just a trigger for the backend (does luatex use this? 
if not it will go) + target.designsize = parameters.designsize -- not really needed so it might become obsolete + target.units_per_em = units -- just a trigger for the backend -- local direction = properties.direction or tfmdata.direction or 0 -- pointless, as we don't use omf fonts at all target.direction = direction @@ -474,13 +475,13 @@ function constructors.scale(tfmdata,specification) -- target.postprocessors = tfmdata.postprocessors -- - local targetslant = (parameters.slant or parameters[1] or 0) - local targetspace = (parameters.space or parameters[2] or 0)*hdelta - local targetspace_stretch = (parameters.space_stretch or parameters[3] or 0)*hdelta - local targetspace_shrink = (parameters.space_shrink or parameters[4] or 0)*hdelta - local targetx_height = (parameters.x_height or parameters[5] or 0)*vdelta - local targetquad = (parameters.quad or parameters[6] or 0)*hdelta - local targetextra_space = (parameters.extra_space or parameters[7] or 0)*hdelta + local targetslant = (parameters.slant or parameters[1] or 0) * factors.pt -- per point + local targetspace = (parameters.space or parameters[2] or 0) * hdelta + local targetspace_stretch = (parameters.space_stretch or parameters[3] or 0) * hdelta + local targetspace_shrink = (parameters.space_shrink or parameters[4] or 0) * hdelta + local targetx_height = (parameters.x_height or parameters[5] or 0) * vdelta + local targetquad = (parameters.quad or parameters[6] or 0) * hdelta + local targetextra_space = (parameters.extra_space or parameters[7] or 0) * hdelta -- targetparameters.slant = targetslant -- slantperpoint targetparameters.space = targetspace @@ -841,7 +842,7 @@ function constructors.finalize(tfmdata) end -- if not parameters.designsize then - parameters.designsize = tfmdata.designsize or 655360 + parameters.designsize = tfmdata.designsize or (factors.pt * 10) end -- if not parameters.units then @@ -999,21 +1000,11 @@ function constructors.hashinstance(specification,force) size = math.round(constructors.scaled(size,designsizes[hash])) specification.size = size end - -- local mathsize = specification.mathsize or 0 - -- if mathsize > 0 then - -- local textsize = specification.textsize - -- if fallbacks then - -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ] @ ' .. fallbacks - -- else - -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ]' - -- end - -- else - if fallbacks then - return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks - else - return hash .. ' @ ' .. tostring(size) - end - -- end + if fallbacks then + return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks + else + return hash .. ' @ ' .. 
tostring(size) + end end function constructors.setname(tfmdata,specification) -- todo: get specification from tfmdata @@ -1278,7 +1269,8 @@ function constructors.collectprocessors(what,tfmdata,features,trace,report) local whathandler = handlers[what] local whatfeatures = whathandler.features local whatprocessors = whatfeatures.processors - local processors = whatprocessors[properties.mode] + local mode = properties.mode + local processors = whatprocessors[mode] if processors then for i=1,#processors do local step = processors[i] @@ -1295,7 +1287,7 @@ function constructors.collectprocessors(what,tfmdata,features,trace,report) end end elseif trace then - report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname) + report("no feature processors for mode %a for font %a",mode,properties.fullname) end end return processes @@ -1309,7 +1301,8 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report) local whathandler = handlers[what] local whatfeatures = whathandler.features local whatmanipulators = whatfeatures.manipulators - local manipulators = whatmanipulators[properties.mode] + local mode = properties.mode + local manipulators = whatmanipulators[mode] if manipulators then for i=1,#manipulators do local step = manipulators[i] @@ -1318,7 +1311,7 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report) if value then local action = step.action if trace then - report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) + report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname) end if action then action(tfmdata,feature,value) diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua index 2583c6520..6c3402683 100644 --- a/tex/context/base/font-ctx.lua +++ b/tex/context/base/font-ctx.lua @@ -14,7 +14,6 @@ if not modules then modules = { } end modules ['font-ctx'] = { local context, commands = context, commands -local texcount, texsetcount = tex.count, tex.setcount local format, gmatch, match, find, lower, gsub, byte = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub, string.byte local concat, serialize, sort, fastcopy, mergedtable = table.concat, table.serialize, table.sort, table.fastcopy, table.merged local sortedhash, sortedkeys, sequenced = table.sortedhash, table.sortedkeys, table.sequenced @@ -35,6 +34,7 @@ local trace_designsize = false trackers.register("fonts.designsize", functio local trace_usage = false trackers.register("fonts.usage", function(v) trace_usage = v end) local trace_mapfiles = false trackers.register("fonts.mapfiles", function(v) trace_mapfiles = v end) local trace_automode = false trackers.register("fonts.automode", function(v) trace_automode = v end) +local trace_merge = false trackers.register("fonts.merge", function(v) trace_merge = v end) local report_features = logs.reporter("fonts","features") local report_cummulative = logs.reporter("fonts","cummulative") @@ -56,8 +56,15 @@ local fontgoodies = fonts.goodies local helpers = fonts.helpers local hashes = fonts.hashes local currentfont = font.current -local texattribute = tex.attribute -local texdimen = tex.dimen + +local texgetattribute = tex.getattribute +local texsetattribute = tex.setattribute +local texgetdimen = tex.getdimen +local texsetcount = tex.setcount +local texget = tex.get + +local texdefinefont = tex.definefont +local texsp = tex.sp local fontdata = hashes.identifiers local characters = 
hashes.chardata @@ -67,6 +74,8 @@ local resources = hashes.resources local csnames = hashes.csnames local marks = hashes.markdata local lastmathids = hashes.lastmathids +local exheights = hashes.exheights +local emwidths = hashes.emwidths local designsizefilename = fontgoodies.designsizes.filename @@ -114,7 +123,11 @@ end -- this will move elsewhere ... -utilities.strings.formatters.add(formatters,"font:name", [["'"..file.basename(%s.properties.name).."'"]]) +function fonts.helpers.name(tfmdata) + return file.basename(type(tfmdata) == "number" and properties[tfmdata].name or tfmdata.properties.name) +end + +utilities.strings.formatters.add(formatters,"font:name", [["'"..fonts.helpers.name(%s).."'"]]) utilities.strings.formatters.add(formatters,"font:features",[["'"..table.sequenced(%s," ",true).."'"]]) -- ... like font-sfm or so @@ -157,10 +170,24 @@ commands.resetnullfont = definers.resetnullfont -- so we never enter the loop then; we can store the defaults in the tma -- file (features.gpos.mkmk = 1 etc) -local needsnodemode = { - gpos_mark2mark = true, - gpos_mark2base = true, - gpos_mark2ligature = true, +local needsnodemode = { -- we will have node mode by default anyway + -- gsub_single = true, + gsub_multiple = true, + -- gsub_alternate = true, + -- gsub_ligature = true, + gsub_context = true, + gsub_contextchain = true, + gsub_reversecontextchain = true, + -- chainsub = true, + -- reversesub = true, + gpos_mark2base = true, + gpos_mark2ligature = true, + gpos_mark2mark = true, + gpos_cursive = true, + -- gpos_single = true, + -- gpos_pair = true, + gpos_context = true, + gpos_contextchain = true, } otftables.scripts.auto = "automatic fallback to latn when no dflt present" @@ -198,6 +225,8 @@ local function checkedscript(tfmdata,resources,features) return script end +-- basemode combined with dynamics is somewhat tricky + local function checkedmode(tfmdata,resources,features) local sequences = resources.sequences if sequences and #sequences > 0 then @@ -240,6 +269,9 @@ local function checkedmode(tfmdata,resources,features) end end end + if trace_automode then + report_defining("forcing mode base, font %!font:name!",tfmdata) + end features.mode = "base" -- new, or is this wrong? return "base" end @@ -540,6 +572,9 @@ local function mergecontextfeatures(currentname,extraname,how,mergedname) -- str for k, v in next, extra do mergedfeatures[k] = v end + if trace_merge then + report_features("merge %a, method %a, current %|T, extra %|T, result %|T",mergedname,"add",current or { },extra,mergedfeatures) + end elseif how == "-" then if current then for k, v in next, current do @@ -552,10 +587,16 @@ local function mergecontextfeatures(currentname,extraname,how,mergedname) -- str mergedfeatures[k] = false end end + if trace_merge then + report_features("merge %a, method %a, current %|T, extra %|T, result %|T",mergedname,"subtract",current or { },extra,mergedfeatures) + end else -- = for k, v in next, extra do mergedfeatures[k] = v end + if trace_merge then + report_features("merge %a, method %a, result %|T",mergedname,"replace",mergedfeatures) + end end local number = #numbers + 1 mergedfeatures.number = number @@ -617,19 +658,39 @@ specifiers.definecontext = definecontext -- we extend the hasher: +-- constructors.hashmethods.virtual = function(list) +-- local s = { } +-- local n = 0 +-- for k, v in next, list do +-- n = n + 1 +-- s[n] = k -- no checking on k +-- end +-- if n > 0 then +-- sort(s) +-- for i=1,n do +-- local k = s[i] +-- s[i] = k .. '=' .. 
tostring(list[k]) +-- end +-- return concat(s,"+") +-- end +-- end + constructors.hashmethods.virtual = function(list) local s = { } local n = 0 for k, v in next, list do n = n + 1 - s[n] = k -- no checking on k + -- if v == true then + -- s[n] = k .. '=true' + -- elseif v == false then + -- s[n] = k .. '=false' + -- else + -- s[n] = k .. "=" .. v + -- end + s[n] = k .. "=" .. tostring(v) end if n > 0 then sort(s) - for i=1,n do - local k = s[i] - s[i] = k .. '=' .. tostring(list[k]) - end return concat(s,"+") end end @@ -639,14 +700,14 @@ end -- local withcache = { } -- concat might be less efficient than nested tables -- -- local function withset(name,what) --- local zero = texattribute[0] +-- local zero = texgetattribute(0) -- local hash = zero .. "+" .. name .. "*" .. what -- local done = withcache[hash] -- if not done then -- done = mergecontext(zero,name,what) -- withcache[hash] = done -- end --- texattribute[0] = done +-- texsetattribute(0,done) -- end -- -- local function withfnt(name,what,font) @@ -657,7 +718,7 @@ end -- done = registercontext(font,name,what) -- withcache[hash] = done -- end --- texattribute[0] = done +-- texsetattribute(0,done) -- end function specifiers.showcontext(name) @@ -775,12 +836,16 @@ local value = C((leftparent * (1-rightparent)^0 * rightparent + (1-space) local dimension = C((space/"" + P(1))^1) local rest = C(P(1)^0) local scale_none = Cc(0) -local scale_at = P("at") * Cc(1) * spaces * dimension -- value -local scale_sa = P("sa") * Cc(2) * spaces * dimension -- value -local scale_mo = P("mo") * Cc(3) * spaces * dimension -- value -local scale_scaled = P("scaled") * Cc(4) * spaces * dimension -- value +local scale_at = P("at") * Cc(1) * spaces * dimension -- dimension +local scale_sa = P("sa") * Cc(2) * spaces * dimension -- number +local scale_mo = P("mo") * Cc(3) * spaces * dimension -- number +local scale_scaled = P("scaled") * Cc(4) * spaces * dimension -- number +local scale_ht = P("ht") * Cc(5) * spaces * dimension -- dimension +local scale_cp = P("cp") * Cc(6) * spaces * dimension -- dimension + +local specialscale = { [5] = "ht", [6] = "cp" } -local sizepattern = spaces * (scale_at + scale_sa + scale_mo + scale_scaled + scale_none) +local sizepattern = spaces * (scale_at + scale_sa + scale_mo + scale_ht + scale_cp + scale_scaled + scale_none) local splitpattern = spaces * value * spaces * rest function helpers.splitfontpattern(str) @@ -828,18 +893,18 @@ function commands.definefont_one(str) if size and size ~= "" then local mode, size = lpegmatch(sizepattern,size) if size and mode then - texcount.scaledfontmode = mode + texsetcount("scaledfontmode",mode) setsomefontsize(size) else - texcount.scaledfontmode = 0 + texsetcount("scaledfontmode",0) setemptyfontsize() end elseif true then -- so we don't need to check in tex - texcount.scaledfontmode = 2 + texsetcount("scaledfontmode",2) setemptyfontsize() else - texcount.scaledfontmode = 0 + texsetcount("scaledfontmode",0) setemptyfontsize() end specification = definers.makespecification(str,lookup,name,sub,method,detail,size) @@ -858,7 +923,7 @@ local function nice_cs(cs) end function commands.definefont_two(global,cs,str,size,inheritancemode,classfeatures,fontfeatures,classfallbacks,fontfallbacks, - mathsize,textsize,relativeid,classgoodies,goodies,classdesignsize,fontdesignsize) + mathsize,textsize,relativeid,classgoodies,goodies,classdesignsize,fontdesignsize,scaledfontmode) if trace_defining then report_defining("start stage two: %s (size %s)",str,size) end @@ -888,6 +953,7 @@ function 
commands.definefont_two(global,cs,str,size,inheritancemode,classfeature local id = tonumber(relativeid) or 0 specification.relativeid = id > 0 and id end + -- specification.name = name specification.size = size specification.sub = (sub and sub ~= "" and sub) or specification.sub @@ -896,6 +962,7 @@ function commands.definefont_two(global,cs,str,size,inheritancemode,classfeature specification.goodies = goodies specification.cs = cs specification.global = global + specification.scalemode = scaledfontmode -- context specific if detail and detail ~= "" then specification.method = method or "*" specification.detail = detail @@ -963,9 +1030,11 @@ function commands.definefont_two(global,cs,str,size,inheritancemode,classfeature name,tfmdata,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,classgoodies,goodies,classdesignsize,fontdesignsize) end csnames[tfmdata] = specification.cs - tex.definefont(global,cs,tfmdata) + texdefinefont(global,cs,tfmdata) -- resolved (when designsize is used): - setsomefontsize((fontdata[tfmdata].parameters.size or 0) .. "sp") + local size = fontdata[tfmdata].parameters.size or 0 + setsomefontsize(size .. "sp") + texsetcount("scaledfontsize",size) lastfontid = tfmdata else -- setting the extra characters will move elsewhere @@ -977,11 +1046,12 @@ function commands.definefont_two(global,cs,str,size,inheritancemode,classfeature -- characters[0x2007] = { width = characters[0x0030] and characters[0x0030].width or parameters.space } -- figure -- characters[0x2008] = { width = characters[0x002E] and characters[0x002E].width or parameters.space } -- period -- + constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference local id = font.define(tfmdata) csnames[id] = specification.cs tfmdata.properties.id = id definers.register(tfmdata,id) -- to be sure, normally already done - tex.definefont(global,cs,id) + texdefinefont(global,cs,id) constructors.cleanuptable(tfmdata) constructors.finalize(tfmdata) if trace_defining then @@ -989,7 +1059,9 @@ function commands.definefont_two(global,cs,str,size,inheritancemode,classfeature name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks) end -- resolved (when designsize is used): - setsomefontsize((tfmdata.parameters.size or 655360) .. "sp") + local size = tfmdata.parameters.size or 655360 + setsomefontsize(size .. 
"sp") + texsetcount("scaledfontsize",size) lastfontid = id end if trace_defining then @@ -1030,7 +1102,7 @@ function definers.define(specification) specification.detail = specification.detail or (detail ~= "" and detail) or "" -- if type(specification.size) == "string" then - specification.size = tex.sp(specification.size) or 655260 + specification.size = texsp(specification.size) or 655260 end -- specification.specification = "" -- not used @@ -1054,16 +1126,17 @@ function definers.define(specification) return -1, nil elseif type(tfmdata) == "number" then if cs then - tex.definefont(specification.global,cs,tfmdata) + texdefinefont(specification.global,cs,tfmdata) csnames[tfmdata] = cs end return tfmdata, fontdata[tfmdata] else + constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference local id = font.define(tfmdata) tfmdata.properties.id = id definers.register(tfmdata,id) if cs then - tex.definefont(specification.global,cs,id) + texdefinefont(specification.global,cs,id) csnames[id] = cs end constructors.cleanuptable(tfmdata) @@ -1083,7 +1156,7 @@ local n = 0 function definers.internal(specification,cs) specification = specification or { } local name = specification.name - local size = specification.size and number.todimen(specification.size) or texdimen.bodyfontsize + local size = specification.size and number.todimen(specification.size) or texgetdimen("bodyfontsize") local number = tonumber(specification.number) local id = nil if number then @@ -1121,9 +1194,25 @@ end) local calculatescale = constructors.calculatescale -function constructors.calculatescale(tfmdata,scaledpoints,relativeid) - local scaledpoints, delta = calculatescale(tfmdata,scaledpoints) - -- if enable_auto_r_scale and relativeid then -- for the moment this is rather context specific +function constructors.calculatescale(tfmdata,scaledpoints,relativeid,specification) + if specification then + local scalemode = specification.scalemode + local special = scalemode and specialscale[scalemode] + if special then + -- we also have available specification.textsize + local parameters = tfmdata.parameters + local designsize = parameters.designsize + if special == "ht" then + local height = parameters.ascender * designsize / parameters.units + scaledpoints = (scaledpoints/height) * designsize + elseif special == "cp" then + local height = (tfmdata.descriptions[utf.byte("X")].height or parameters.ascender) * designsize / parameters.units + scaledpoints = (scaledpoints/height) * designsize + end + end + end + scaledpoints, delta = calculatescale(tfmdata,scaledpoints) + -- if enable_auto_r_scale and relativeid then -- for the moment this is rather context specific (we need to hash rscale then) -- local relativedata = fontdata[relativeid] -- local rfmdata = relativedata and relativedata.unscaled and relativedata.unscaled -- local id_x_height = rfmdata and rfmdata.parameters and rfmdata.parameters.x_height @@ -1137,6 +1226,31 @@ function constructors.calculatescale(tfmdata,scaledpoints,relativeid) return scaledpoints, delta end +local designsizes = constructors.designsizes + +function constructors.hashinstance(specification,force) + local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks + if force or not hash then + hash = constructors.hashfeatures(specification) + specification.hash = hash + end + if size < 1000 and designsizes[hash] then + size = math.round(constructors.scaled(size,designsizes[hash])) + specification.size = size + end + if fallbacks 
then + return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks + else + local scalemode = specification.scalemode + local special = scalemode and specialscale[scalemode] + if special then + return hash .. ' @ ' .. tostring(size) .. ' @ ' .. special + else + return hash .. ' @ ' .. tostring(size) + end + end +end + -- We overload the (generic) resolver: local resolvers = definers.resolvers @@ -1403,11 +1517,11 @@ function commands.featureattribute(tag) end function commands.setfontfeature(tag) - texattribute[0] = contextnumber(tag) + texsetattribute(0,contextnumber(tag)) end function commands.resetfontfeature() - texattribute[0] = 0 + texsetattribute(0,0) end -- function commands.addfs(tag) withset(tag, 1) end @@ -1463,11 +1577,11 @@ end local dimenfactors = number.dimenfactors -function helpers.dimenfactor(unit,tfmdata) -- could be a method of a font instance +function helpers.dimenfactor(unit,id) if unit == "ex" then - return (tfmdata and tfmdata.parameters.x_height) or 655360 + return id and exheights[id] or 282460 -- lm 10pt elseif unit == "em" then - return (tfmdata and tfmdata.parameters.em_width) or 655360 + return id and emwidths [id] or 655360 -- lm 10pt else local du = dimenfactors[unit] return du and 1/du or tonumber(unit) or 1 @@ -1558,19 +1672,24 @@ end local quads = hashes.quads local xheights = hashes.xheights -setmetatableindex(number.dimenfactors, function(t,k) +setmetatableindex(dimenfactors, function(t,k) if k == "ex" then - return xheigths[currentfont()] + return 1/xheights[currentfont()] elseif k == "em" then - return quads[currentfont()] - elseif k == "%" then - return dimen.hsize/100 + return 1/quads[currentfont()] + elseif k == "pct" or k == "%" then + return 1/(texget("hsize")/100) else -- error("wrong dimension: " .. (s or "?")) -- better a message return false end end) +dimenfactors.ex = nil +dimenfactors.em = nil +dimenfactors["%"] = nil +dimenfactors.pct = nil + --[[ldx--

Before a font is passed to we scale it. Here we also need to scale virtual characters.

@@ -1582,8 +1701,11 @@ function constructors.checkvirtualids(tfmdata) local selfid = font.nextid() if fonts and #fonts > 0 then for i=1,#fonts do - if fonts[i][2] == 0 then - fonts[i][2] = selfid + local fi = fonts[i] + if fi[2] == 0 then + fi[2] = selfid + elseif fi.id == 0 then + fi.id = selfid end end else @@ -1646,19 +1768,19 @@ local hows = { ["="] = "replace", } -function commands.feature(how,parent,name,font) - if not how then - if trace_features and texattribute[0] ~= 0 then +function commands.feature(how,parent,name,font) -- 0/1 test temporary for testing + if not how or how == 0 then + if trace_features and texgetattribute(0) ~= 0 then report_cummulative("font %!font:name!, reset",fontdata[font or true]) end - texattribute[0] = 0 - elseif how == true then + texsetattribute(0,0) + elseif how == true or how == 1 then local hash = "feature > " .. parent local done = cache[hash] if trace_features and done then report_cummulative("font %!font:name!, revive %a : %!font:features!",fontdata[font or true],parent,setups[numbers[done]]) end - texattribute[0] = done or 0 + texsetattribute(0,done or 0) else local full = parent .. how .. name local hash = "feature > " .. full @@ -1676,7 +1798,7 @@ function commands.feature(how,parent,name,font) report_cummulative("font %!font:name!, %s %a : %!font:features!",fontdata[font or true],hows[how],full,setups[numbers[done]]) end end - texattribute[0] = done + texsetattribute(0,done) end end @@ -1803,7 +1925,7 @@ local function analyzeprocessor(head,font,attr) end registerotffeature { -- adapts - name = "analyze", + name = "analyze", processors = { node = analyzeprocessor, } diff --git a/tex/context/base/font-def.lua b/tex/context/base/font-def.lua index bee02e8dc..e5c5d990c 100644 --- a/tex/context/base/font-def.lua +++ b/tex/context/base/font-def.lua @@ -11,6 +11,7 @@ if not modules then modules = { } end modules ['font-def'] = { local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub local tostring, next = tostring, next local lpegmatch = lpeg.match +local suffixonly, removesuffix = file.suffix, file.removesuffix local allocate = utilities.storage.allocate @@ -112,7 +113,7 @@ addlookup("name") addlookup("spec") local function getspecification(str) - return lpegmatch(splitter,str) + return lpegmatch(splitter,str or "") -- weird catch end definers.getspecification = getspecification @@ -169,12 +170,13 @@ local resolvers = definers.resolvers function resolvers.file(specification) local name = resolvefile(specification.name) -- catch for renames - local suffix = file.suffix(name) + local suffix = lower(suffixonly(name)) if fonts.formats[suffix] then - specification.forced = suffix - specification.name = file.removesuffix(name) + specification.forced = suffix + specification.forcedname = name + specification.name = removesuffix(name) else - specification.name = name -- can be resolved + specification.name = name -- can be resolved end end @@ -185,12 +187,13 @@ function resolvers.name(specification) if resolved then specification.resolved = resolved specification.sub = sub - local suffix = file.suffix(resolved) + local suffix = lower(suffixonly(resolved)) if fonts.formats[suffix] then - specification.forced = suffix - specification.name = file.removesuffix(resolved) + specification.forced = suffix + specification.forcedname = resolved + specification.name = removesuffix(resolved) else - specification.name = resolved + specification.name = resolved end end else @@ -203,10 +206,11 
@@ function resolvers.spec(specification) if resolvespec then local resolved, sub = resolvespec(specification.name,specification.sub,specification) -- we pass specification for overloaded versions if resolved then - specification.resolved = resolved - specification.sub = sub - specification.forced = file.suffix(resolved) - specification.name = file.removesuffix(resolved) + specification.resolved = resolved + specification.sub = sub + specification.forced = lower(suffixonly(resolved)) + specification.forcedname = resolved + specification.name = removesuffix(resolved) end else resolvers.name(specification) @@ -221,9 +225,8 @@ function definers.resolve(specification) end end if specification.forced == "" then - specification.forced = nil - else - specification.forced = specification.forced + specification.forced = nil + specification.forcedname = nil end specification.hash = lower(specification.name .. ' @ ' .. constructors.hashfeatures(specification)) if specification.sub and specification.sub ~= "" then @@ -294,7 +297,7 @@ function definers.loadfont(specification) if not tfmdata then local forced = specification.forced or "" if forced ~= "" then - local reader = readers[lower(forced)] + local reader = readers[lower(forced)] -- normally forced is already lowered tfmdata = reader and reader(specification) if not tfmdata then report_defining("forced type %a of %a not found",forced,specification.name) diff --git a/tex/context/base/font-emp.mkvi b/tex/context/base/font-emp.mkvi index 1713eda70..34a1ea9cc 100644 --- a/tex/context/base/font-emp.mkvi +++ b/tex/context/base/font-emp.mkvi @@ -117,6 +117,8 @@ %D The next feature was not present in previous versions. It %D takes care of \type {\em \bf ...} situations. +\let\font_emphasis_saved_emphasis_boldface\relax + \def\font_emphasis_set_emphasis_boldface {\let\font_emphasis_saved_emphasis_boldface\bf \let\font_emphasis_set_emphasis_boldface\relax diff --git a/tex/context/base/font-ext.lua b/tex/context/base/font-ext.lua index 89d5927d4..ede2151d6 100644 --- a/tex/context/base/font-ext.lua +++ b/tex/context/base/font-ext.lua @@ -469,7 +469,7 @@ end registerotffeature { name = "protrusion", - description = "shift characters into the left and or right margin", + description = "l/r margin character protrusion", initializers = { base = initializeprotrusion, node = initializeprotrusion, @@ -728,6 +728,8 @@ registerotffeature { -- local gray = { "special", "pdf: /Tr1 gs .75 g" } -- local black = { "special", "pdf: /Tr0 gs 0 g" } +-- sort of obsolete as we now have \showglyphs + local push = { "push" } local pop = { "pop" } local gray = { "special", "pdf: .75 g" } diff --git a/tex/context/base/font-fea.mkvi b/tex/context/base/font-fea.mkvi index c1e051621..777c6e3ca 100644 --- a/tex/context/base/font-fea.mkvi +++ b/tex/context/base/font-fea.mkvi @@ -164,7 +164,7 @@ \let\doaddfeature \font_feature_add_nop % low level faster ones \let\dosubtractfeature \font_feature_subtract_nop \let\doreplacefeature \font_feature_replace_nop -\let\doreserandaddfeature\font_feature_reset_add_nop +\let\doresetandaddfeature\font_feature_reset_add_nop \unexpanded\def\font_feature_add {\ifnum\c_font_feature_state=\plusone @@ -330,4 +330,12 @@ \ctxcommand{registerlanguagefeatures()} +% also new + +\unexpanded\def\useaddfontfeatureparameter#1% faster local variant + {\edef\m_font_feature_asked{#1\c!features}% + \ifx\m_font_feature_asked\empty\else + \font_feature_add + \fi} + \protect \endinput diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua index 
6332f40b0..7f8bb91d1 100644 --- a/tex/context/base/font-gds.lua +++ b/tex/context/base/font-gds.lua @@ -18,6 +18,7 @@ local trace_goodies = false trackers.register("fonts.goodies", function(v) local report_goodies = logs.reporter("fonts","goodies") local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex local otf = fonts.handlers.otf local afm = fonts.handlers.afm @@ -43,6 +44,10 @@ local addotffeature = otf.enhancers.addfeature local findfile = resolvers.findfile +local glyph_code = nodes.nodecodes.glyph + +local traverse_id = nodes.traverse_id + function fontgoodies.report(what,trace,goodies) if trace_goodies or trace then local whatever = goodies[what] @@ -298,27 +303,104 @@ local function setcolorscheme(tfmdata,scheme) tfmdata.properties.colorscheme = false end -local fontdata = fonts.hashes.identifiers -local setnodecolor = nodes.tracers.colors.set -local traverse_id = node.traverse_id -local a_colorscheme = attributes.private('colorscheme') -local glyph = node.id("glyph") +local fontproperties = fonts.hashes.properties + +local a_colorscheme = attributes.private('colorscheme') +local setnodecolor = nodes.tracers.colors.set + +-- function colorschemes.coloring(head) +-- local lastfont, lastscheme +-- local done = false +-- for n in traverse_id(glyph_code,head) do +-- local a = n[a_colorscheme] +-- if a then +-- local f = n.font +-- if f ~= lastfont then +-- lastscheme = fontproperties[f].colorscheme +-- lastfont = f +-- end +-- if lastscheme then +-- local sc = lastscheme[n.char] +-- if sc then +-- done = true +-- setnodecolor(n,"colorscheme:"..a..":"..sc) -- slow +-- end +-- end +-- end +-- end +-- return head, done +-- end + +-- seldom used, mostly in manuals, so non critical .. anyhow, somewhat faster: + +-- function colorschemes.coloring(head) +-- local lastfont = nil +-- local lastattr = nil +-- local lastscheme = nil +-- local lastprefix = nil +-- local done = nil +-- for n in traverse_id(glyph_code,head) do +-- local a = n[a_colorscheme] +-- if a then +-- if a ~= lastattr then +-- lastattr = a +-- lastprefix = "colorscheme:" .. a .. ":" +-- end +-- local f = n.font +-- if f ~= lastfont then +-- lastfont = f +-- lastscheme = fontproperties[f].colorscheme +-- end +-- if lastscheme then +-- local sc = lastscheme[n.char] +-- if sc then +-- setnodecolor(n,lastprefix .. sc) -- slow +-- done = true +-- end +-- end +-- end +-- end +-- return head, done +-- end + +-- ok, in case we have hundreds of pages colored: + +local cache = { } -- this could be a weak table + +setmetatableindex(cache,function(t,a) + local v = { } + setmetatableindex(v,function(t,c) + local v = "colorscheme:" .. a .. ":" .. 
c + t[c] = v + return c + end) + t[a]= v + return v +end) function colorschemes.coloring(head) - local lastfont, lastscheme - local done = false - for n in traverse_id(glyph,head) do + local lastfont = nil + local lastattr = nil + local lastcache = nil + local lastscheme = nil + local done = nil + for n in traverse_id(glyph_code,head) do local a = n[a_colorscheme] if a then local f = n.font if f ~= lastfont then - lastscheme, lastfont = fontdata[f].properties.colorscheme, f + lastfont = f + lastscheme = fontproperties[f].colorscheme + end + if a ~= lastattr then + lastattr = a + lastcache = cache[a] end if lastscheme then local sc = lastscheme[n.char] if sc then + setnodecolor(n,lastcache[sc]) -- we could inline this one done = true - setnodecolor(n,"colorscheme:"..a..":"..sc) -- slow end end end @@ -691,7 +773,7 @@ function fontgoodies.designsizes.register(name,size,specification) d.default = specification else if type(size) == "string" then - size = texsp(size) + size = texsp(size) -- hm end local ranges = d.ranges ranges[#ranges+1] = { size, specification } @@ -750,3 +832,45 @@ registerotffeature { node = finalize, } } + +-- kern hackery: +-- +-- yes : use goodies table +-- auto : assume features to be set (often ccmp only) + +local function setkeepligatures(tfmdata,value) + if not tfmdata.properties.keptligatures then + local goodies = tfmdata.goodies + if goodies then + for i=1,#goodies do + local g = goodies[i] + local letterspacing = g.letterspacing + if letterspacing then + local keptligatures = letterspacing.keptligatures + if keptligatures then + local unicodes = tfmdata.resources.unicodes + local hash = { } + for k, v in next, keptligatures do + local u = unicodes[k] + if u then + hash[u] = true + else + -- error: unknown name + end + end + tfmdata.properties.keptligatures = hash + end + end + end + end + end +end + +registerotffeature { + name = "keepligatures", + description = "keep ligatures in letterspacing", + initializers = { + base = setkeepligatures, + node = setkeepligatures, + } +} diff --git a/tex/context/base/font-hsh.lua b/tex/context/base/font-hsh.lua index f5c80d705..773cc2b69 100644 --- a/tex/context/base/font-hsh.lua +++ b/tex/context/base/font-hsh.lua @@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['font-hsh'] = { license = "see context related readme files" } +local rawget = rawget + local setmetatableindex = table.setmetatableindex local currentfont = font.current local allocate = utilities.storage.allocate @@ -27,6 +29,7 @@ local spaces = hashes.spaces or allocate() local quads = hashes.quads or allocate() -- maybe also spacedata local xheights = hashes.xheights or allocate() local csnames = hashes.csnames or allocate() -- namedata +local features = hashes.features or allocate() local marks = hashes.marks or allocate() local italics = hashes.italics or allocate() local lastmathids = hashes.lastmathids or allocate() @@ -42,11 +45,15 @@ hashes.spaces = spaces hashes.quads = quads hashes.emwidths = quads hashes.xheights = xheights hashes.exheights = xheights hashes.csnames = csnames +hashes.features = features hashes.marks = marks hashes.italics = italics hashes.lastmathids = lastmathids hashes.dynamics = dynamics +local nodepool = nodes.pool +local dummyglyph = nodepool.register(nodepool.glyph()) + local nulldata = allocate { name = "nullfont", characters = { }, @@ -133,14 +140,14 @@ setmetatableindex(resources, function(t,k) end end) -setmetatableindex(quads, function(t,k) +setmetatableindex(features, function(t,k) if k == true then - return 
quads[currentfont()] + return features[currentfont()] else - local parameters = parameters[k] - local quad = parameters and parameters.quad or 0 - t[k] = quad - return quad + local shared = identifiers[k].shared + local features = shared and shared.features or { } + t[k] = features + return features end end) @@ -183,12 +190,43 @@ setmetatableindex(marks, function(t,k) end end) +setmetatableindex(quads, function(t,k) + if k == true then + return quads[currentfont()] + else + local parameters = rawget(parameters,k) + local quad + if parameters then + quad = parameters.quad + else + dummyglyph.font = k + dummyglyph.char = 0x2014 -- emdash + quad = dummyglyph.width -- dirty trick + end + if not quad or quad == 0 then + quad = 655360 -- lm 10pt + end + t[k] = quad + return quad + end +end) + setmetatableindex(xheights, function(t,k) if k == true then return xheights[currentfont()] else - local parameters = parameters[k] - local xheight = parameters and parameters.xheight or 0 + local parameters = rawget(parameters,k) + local xheight + if parameters then + xheight = parameters.xheight + else + dummyglyph.font = k + dummyglyph.char = 0x78 -- x + xheight = dummyglyph.height -- dirty trick + end + if not xheight or xheight == 0 then + xheight = 282460 -- lm 10pt + end t[k] = xheight return xheight end diff --git a/tex/context/base/font-ini.mkvi b/tex/context/base/font-ini.mkvi index 5f7aaa92f..521901e05 100644 --- a/tex/context/base/font-ini.mkvi +++ b/tex/context/base/font-ini.mkvi @@ -700,6 +700,7 @@ \let\somefontsize\zerocount \newcount\scaledfontmode % also used at the lua end +\newcount\scaledfontsize % also used at the lua end \newcount\lastfontid % also used at the lua end / tex end \newtoks \everydefinefont @@ -755,6 +756,9 @@ \or % scaled, don't use this one as it's unpredictable \d_font_scaled_font_size-\somefontsize\scaledpoint + \else % ht cp + % experiment, yet undocumented + \d_font_scaled_font_size\somefontsize \fi \relax \d_font_scaled_font_size\v_font_size_relative\d_font_scaled_font_size @@ -770,7 +774,7 @@ \else \d_font_scaled_text_face\textface \fi - \edef\somefontspec{at \number\d_font_scaled_font_size sp}% + \edef\somefontspec{at \number\d_font_scaled_font_size sp}% probably no longer used, needs checking \edef\somefontfile{\truefontname\somefontname}% \ifx\somefontfile\s!unknown \edef\somefontfile{\defaultfontfile}% @@ -797,9 +801,10 @@ "\m_font_class_goodies", % experiment (not yet used) "\m_font_goodies", "\m_font_class_designsize", - "\m_font_designsize" + "\m_font_designsize", + \number\scaledfontmode )}% - \edef\somefontspec{at \number\d_font_scaled_font_size sp}% we need the resolved designsize (for fallbacks) + \edef\somefontspec{at \number\scaledfontsize sp}% we need the resolved designsize (for fallbacks) \expandafter\let\expandafter\lastrawfontcall\csname#csname\endcsname \the\everydefinefont \c_font_feature_inheritance_mode\c_font_feature_inheritance_default} @@ -1519,7 +1524,7 @@ \unexpanded\def\font_basics_define_body_font_name_identifier_a#name#identifier#style% {%\writestatus\m!fonts{[#name:#style] => [##identifier:#style]}% - \ifcsname\csname\??fontdefinitions#name:#style\endcsname + \ifcsname\??fontdefinitions#name:#style\endcsname \expandafter\let\csname\??fontdefinitions#name:#style\expandafter\endcsname\csname\??fontdefinitions#identifier:#style\endcsname \else \expandafter\def\csname\??fontdefinitions#name:#style\endcsname{\csname\??fontdefinitions#identifier:#style\endcsname}% @@ -1674,7 +1679,7 @@ 
\unexpanded\def\font_helpers_set_font_set_font_option_body#method#body#message% {\normalizebodyfontsize\normalizedsetfont{#body}% redundant for some calls \ifcsname\??fontbodyknown\normalizedsetfont\endcsname \else - \font_helpers_define_unknown_font{#body}% + \font_helpers_define_unknown_font\normalizedsetfont \fi \ifcsname\??fontbodyknown\normalizedsetfont\endcsname \localbodyfontsize\normalizedsetfont @@ -1708,6 +1713,7 @@ \let\defaultfontstyle \s!rm \let\defaultfontalternative\s!tf \let\defaultfontsize \empty +\let\defaultfontface \!!zerocount %D So far for synchronisation. (We can inline the following macros.) @@ -2130,6 +2136,37 @@ %D $\cases{& \ccaron}$ $x=\hbox{\ccaron $x=\hbox{\ccaron}$}$ %D \stoptyping +%D \macros +%D {usebodyfont} +%D +%D This looks nicer then a switch in the preamble +%D +%D \starttyping +%D \usebodyfont[pagella,10pt] +%D \usebodyfont[termes,10pt] +%D \usebodyfont[dejavu,10pt] +%D +%D \setupbodyfont[dejavu] +%D +%D \starttext +%D test +%D \stoptext +%D \stoptyping + +% \unexpanded\def\usebodyfont[#1]% +% {\pushmacro\fontclass +% \switchtobodyfont[#1]% +% \popmacro\fontclass +% \ifx\fontclass\empty\else\setupbodyfont\relax\fi} + +\unexpanded\def\usebodyfont[#1]% + {\pushmacro\fontclass + \font_helpers_set_font\zerocount{#1}% + \popmacro\fontclass + \ifx\fontclass\empty \else + \font_basics_setupbodyfont_nop + \fi} + %D Handy for manuals: \unexpanded\def\fontchar#character% @@ -2228,6 +2265,7 @@ \unexpanded\def\fullrestoreglobalbodyfont {\let\fontsize\defaultfontsize \let\fontbody\defaultfontbody + \let\fontface\defaultfontface \currentxfontsize\zerocount \let\fontclass\globalfontclass \font_basics_switch_points\normalizedglobalbodyfontsize @@ -2241,6 +2279,7 @@ \unexpanded\def\partialrestoreglobalbodyfont {\let\fontsize\defaultfontsize \let\fontbody\defaultfontbody + \let\fontface\defaultfontface \currentxfontsize\zerocount \redoconvertfont \tf diff --git a/tex/context/base/font-lib.mkvi b/tex/context/base/font-lib.mkvi index 4bacef10b..a664d9b3a 100644 --- a/tex/context/base/font-lib.mkvi +++ b/tex/context/base/font-lib.mkvi @@ -32,6 +32,8 @@ \registerctxluafile{font-hsh}{1.001} % hashes used by context \registerctxluafile{font-nod}{1.001} +\registerctxluafile{font-trt}{1.001} + \registerctxluafile{font-oti}{1.001} % otf initialization \registerctxluafile{font-ott}{1.001} % otf tables (first) \registerctxluafile{font-otf}{1.001} % otf main @@ -56,7 +58,6 @@ \registerctxluafile{font-vf} {1.001} \registerctxluafile{font-enh}{1.001} -\registerctxluafile{font-trt}{1.001} \registerctxluafile{font-gds}{1.001} \registerctxluafile{font-def}{1.001} diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua index b3c8da0f6..ce724b973 100644 --- a/tex/context/base/font-map.lua +++ b/tex/context/base/font-map.lua @@ -261,38 +261,51 @@ function mappings.addtounicode(data,filename) end end -- a.whatever or a_b_c.whatever or a_b_c (no numbers) a.b_ + -- + -- It is not trivial to find a solution that suits all fonts. We tried several alternatives + -- and this one seems to work reasonable also with fonts that use less standardized naming + -- schemes. The extra private test is tested by KE and seems to work okay with non-typical + -- fonts as well. + -- + -- The next time I look into this, I'll add an extra analysis step to the otf loader (we can + -- resolve some tounicodes by looking into the gsub data tables that are bound to glyphs. 
+ -- if not unicode or unicode == "" then local split = lpegmatch(namesplitter,name) local nsplit = split and #split or 0 - if nsplit >= 2 then - local t, n = { }, 0 - for l=1,nsplit do - local base = split[l] - local u = unicodes[base] or unicodevector[base] - if not u then + local t, n = { }, 0 + unicode = true + for l=1,nsplit do + local base = split[l] + local u = unicodes[base] or unicodevector[base] + if not u then + break + elseif type(u) == "table" then + if u[1] >= private then + unicode = false break - elseif type(u) == "table" then - n = n + 1 - t[n] = u[1] - else - n = n + 1 - t[n] = u end - end - if n == 0 then -- done then - -- nothing - elseif n == 1 then - originals[index] = t[1] - tounicode[index] = tounicode16(t[1],name) + n = n + 1 + t[n] = u[1] else - originals[index] = t - tounicode[index] = tounicode16sequence(t) + if u >= private then + unicode = false + break + end + n = n + 1 + t[n] = u end - nl = nl + 1 - unicode = true + end + if n == 0 then -- done then + -- nothing + elseif n == 1 then + originals[index] = t[1] + tounicode[index] = tounicode16(t[1],name) else - -- skip: already checked and we don't want privates here + originals[index] = t + tounicode[index] = tounicode16sequence(t) end + nl = nl + 1 end -- last resort (we might need to catch private here as well) if not unicode or unicode == "" then diff --git a/tex/context/base/font-mat.mkvi b/tex/context/base/font-mat.mkvi index 4fe44a2da..c8ee2630b 100644 --- a/tex/context/base/font-mat.mkvi +++ b/tex/context/base/font-mat.mkvi @@ -66,8 +66,8 @@ \definesystemattribute[mathfamily][public] -\newconditional\c_font_bidirectional_math_strategy % can be default, not that much overhead: \settrue\c_font_bidirectional_math_strategy -\newconditional\c_font_complete_bold_math_strategy \settrue\c_font_complete_bold_math_strategy +\newconditional\c_font_bidirectional_mathstrategy % can be default, not that much overhead: \settrue\c_font_bidirectional_mathstrategy +\newconditional\c_font_complete_bold_mathstrategy \settrue\c_font_complete_bold_mathstrategy \def\mathtextsuffix {-text} \def\mathscriptsuffix {-script} @@ -224,9 +224,9 @@ \edef\m_font_class_direction{\ifcsname\??fontclass\fontclass\s!mm\s!direction\endcsname\csname\??fontclass\fontclass\s!mm\s!direction\endcsname\fi}% % ... 
\ifx\m_font_class_direction\v!both - \settrue\c_font_bidirectional_math_strategy + \settrue\c_font_bidirectional_mathstrategy \else - \setfalse\c_font_bidirectional_math_strategy + \setfalse\c_font_bidirectional_mathstrategy \fi \to \t_font_math_strategies @@ -257,32 +257,32 @@ \scriptscriptfont\c_font_fam_mr_lr\scriptscriptfont\c_font_fam_mr} \appendtoks - \ifconditional\c_font_bidirectional_math_strategy + \ifconditional\c_font_bidirectional_mathstrategy \font_helpers_bidirectional_mathstrategy_yes \else \font_helpers_bidirectional_mathstrategy_nop \fi \to \t_font_math_strategies -\def\font_helpers_complete_bold_math_strategy_yes_bidi +\def\font_helpers_complete_bold_mathstrategy_yes_bidi {\font_helpers_set_math_family_bold\c_font_fam_mb_lr\s!mblr\c_font_fam_mr_lr \font_helpers_set_math_family_bold\c_font_fam_mb_rl\s!mbrl\c_font_fam_mr_rl \ifnum\fontid\textfont\c_font_fam_mb=\fontid\textfont\c_font_fam_mb_lr\else - \font_helpers_complete_bold_math_strategy_yes_bidi_changed + \font_helpers_complete_bold_mathstrategy_yes_bidi_changed \fi} -\def\font_helpers_complete_bold_math_strategy_yes_bidi_changed +\def\font_helpers_complete_bold_mathstrategy_yes_bidi_changed {\textfont \c_font_fam_mb\textfont \c_font_fam_mb_lr \scriptfont \c_font_fam_mb\scriptfont \c_font_fam_mb_lr \scriptscriptfont\c_font_fam_mb\scriptscriptfont\c_font_fam_mb_lr} -\def\font_helpers_complete_bold_math_strategy_yes +\def\font_helpers_complete_bold_mathstrategy_yes {\font_helpers_set_math_family_bold\c_font_fam_mb\s!mb\c_font_fam_mr\relax \ifnum\fontid\textfont\c_font_fam_mb_rl=\fontid\textfont\c_font_fam_mb\else - \font_helpers_complete_bold_math_strategy_yes_changed + \font_helpers_complete_bold_mathstrategy_yes_changed \fi} -\def\font_helpers_complete_bold_math_strategy_yes_changed +\def\font_helpers_complete_bold_mathstrategy_yes_changed {\textfont \c_font_fam_mb_rl\textfont \c_font_fam_mb \scriptfont \c_font_fam_mb_rl\scriptfont \c_font_fam_mb \scriptscriptfont\c_font_fam_mb_rl\scriptscriptfont\c_font_fam_mb @@ -290,12 +290,12 @@ \scriptfont \c_font_fam_mb_lr\scriptfont \c_font_fam_mb \scriptscriptfont\c_font_fam_mb_lr\scriptscriptfont\c_font_fam_mb} -\def\font_helpers_complete_bold_math_strategy_nop +\def\font_helpers_complete_bold_mathstrategy_nop {\ifnum\fontid\textfont\c_font_fam_mb=\fontid\textfont\c_font_fam_mr\else - \font_helpers_complete_bold_math_strategy_nop_changed + \font_helpers_complete_bold_mathstrategy_nop_changed \fi} -\def\font_helpers_complete_bold_math_strategy_nop_changed +\def\font_helpers_complete_bold_mathstrategy_nop_changed {\textfont \c_font_fam_mb \textfont \c_font_fam_mr \scriptfont \c_font_fam_mb \scriptfont \c_font_fam_mr \scriptscriptfont\c_font_fam_mb \scriptscriptfont\c_font_fam_mr @@ -306,19 +306,19 @@ \scriptfont \c_font_fam_mb_lr\scriptfont \c_font_fam_mr_lr \scriptscriptfont\c_font_fam_mb_lr\scriptscriptfont\c_font_fam_mr_lr} -\def\font_helpers_apply_complete_bold_math_strategy - {\ifconditional\c_font_complete_bold_math_strategy - \ifconditional\c_font_bidirectional_math_strategy - \font_helpers_complete_bold_math_strategy_yes_bidi +\def\font_helpers_apply_complete_bold_mathstrategy + {\ifconditional\c_font_complete_bold_mathstrategy + \ifconditional\c_font_bidirectional_mathstrategy + \font_helpers_complete_bold_mathstrategy_yes_bidi \else - \font_helpers_complete_bold_math_strategy_yes + \font_helpers_complete_bold_mathstrategy_yes \fi \else - \font_helpers_complete_bold_math_strategynop + \font_helpers_complete_bold_mathstrategy_nop \fi} \appendtoks - 
\font_helpers_apply_complete_bold_math_strategy + \font_helpers_apply_complete_bold_mathstrategy \to \t_font_math_strategies \ifdefined\defaultmathfamily \else @@ -330,7 +330,7 @@ \to \everymathematics \unexpanded\def\font_helpers_synchronize_math_family_mr - {\attribute\mathfamilyattribute\ifconditional\c_font_bidirectional_math_strategy + {\attribute\mathfamilyattribute\ifconditional\c_font_bidirectional_mathstrategy \ifconditional\c_math_right_to_left \plustwo \else @@ -341,7 +341,7 @@ \fi} \unexpanded\def\font_helpers_synchronize_math_family_mb - {\attribute\mathfamilyattribute\ifconditional\c_font_bidirectional_math_strategy + {\attribute\mathfamilyattribute\ifconditional\c_font_bidirectional_mathstrategy \ifconditional\c_math_right_to_left \ifconditional\c_font_pseudo_bold_math_state\pluseight\else\plusfive\fi \else diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua index 83df65341..0796356c4 100644 --- a/tex/context/base/font-mis.lua +++ b/tex/context/base/font-mis.lua @@ -22,7 +22,7 @@ local handlers = fonts.handlers handlers.otf = handlers.otf or { } local otf = handlers.otf -otf.version = otf.version or 2.743 +otf.version = otf.version or 2.745 otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true) function otf.loadcached(filename,format,sub) diff --git a/tex/context/base/font-nod.lua b/tex/context/base/font-nod.lua index f99130279..7fa3297d4 100644 --- a/tex/context/base/font-nod.lua +++ b/tex/context/base/font-nod.lua @@ -41,10 +41,12 @@ tracers.characters = char_tracers local step_tracers = tracers.steppers or { } tracers.steppers = step_tracers -local copy_node_list = node.copy_list -local hpack_node_list = node.hpack -local free_node_list = node.flush_list -local traverse_nodes = node.traverse +local texsetbox = tex.setbox + +local copy_node_list = nodes.copy_list +local hpack_node_list = nodes.hpack +local free_node_list = nodes.flush_list +local traverse_nodes = nodes.traverse local nodecodes = nodes.nodecodes local whatcodes = nodes.whatcodes @@ -232,7 +234,8 @@ end function step_tracers.glyphs(n,i) local c = collection[i] if c then - tex.box[n] = hpack_node_list(copy_node_list(c)) + local b = hpack_node_list(copy_node_list(c)) -- multiple arguments + texsetbox(n,b) end end diff --git a/tex/context/base/font-odv.lua b/tex/context/base/font-odv.lua index b518fb79c..69f74dfa5 100644 --- a/tex/context/base/font-odv.lua +++ b/tex/context/base/font-odv.lua @@ -58,7 +58,7 @@ if not modules then modules = { } end modules ['font-odv'] = { -- local function ms_matra(c) -- local prebase, abovebase, belowbase, postbase = true, true, true, true -- local n = c.next --- while n and n.id == glyph_code and n.subtype<256 and n.font == font do +-- while n and n.id == glyph_code and n.subtype < 256 and n.font == font do -- local char = n.char -- if not dependent_vowel[char] then -- break @@ -83,7 +83,6 @@ if not modules then modules = { } end modules ['font-odv'] = { local insert, imerge = table.insert, table.imerge local next = next -local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end) local report_devanagari = logs.reporter("otf","devanagari") fonts = fonts or { } @@ -101,13 +100,11 @@ local methods = fonts.analyzers.methods local otffeatures = fonts.constructors.newfeatures("otf") local registerotffeature = otffeatures.register -local processcharacters = nodes.handlers.characters - -local insert_node_after = node.insert_after -local copy_node = node.copy -local free_node = node.free -local 
remove_node = node.remove -local flush_list = node.flush_list +local insert_node_after = nodes.insert_after +local copy_node = nodes.copy +local free_node = nodes.free +local remove_node = nodes.remove +local flush_list = nodes.flush_list local unsetvalue = attributes.unsetvalue @@ -126,11 +123,41 @@ local s_pref = states.pref local s_blwf = states.blwf local s_pstf = states.pstf +local replace_all_nbsp = nil + +replace_all_nbsp = function(head) -- delayed definition + replace_all_nbsp = typesetters and typesetters.characters and typesetters.characters.replacenbspaces or function(head) + return head + end + return replace_all_nbsp(head) +end + +local fontprocesses = fonts.hashes.processes +local xprocesscharacters = nil + +xprocesscharacters = function(head,font) + xprocesscharacters = nodes.handlers.characters + return xprocesscharacters(head,font) +end + +local function processcharacters(head,font) + return xprocesscharacters(head) +end + +-- function processcharacters(head,font) +-- local processors = fontprocesses[font] +-- for i=1,#processors do +-- head = processors[i](head,font,0) +-- end +-- return head, true +-- end + -- In due time there will be entries here for scripts like Bengali, Gujarati, -- Gurmukhi, Kannada, Malayalam, Oriya, Tamil, Telugu. Feel free to provide the -- code points. local consonant = { + -- devanagari [0x0915] = true, [0x0916] = true, [0x0917] = true, [0x0918] = true, [0x0919] = true, [0x091A] = true, [0x091B] = true, [0x091C] = true, [0x091D] = true, [0x091E] = true, [0x091F] = true, [0x0920] = true, @@ -143,9 +170,22 @@ local consonant = { [0x0939] = true, [0x0958] = true, [0x0959] = true, [0x095A] = true, [0x095B] = true, [0x095C] = true, [0x095D] = true, [0x095E] = true, [0x095F] = true, [0x0979] = true, [0x097A] = true, + -- kannada + [0x0C95] = true, [0x0C96] = true, [0x0C97] = true, [0x0C98] = true, + [0x0C99] = true, [0x0C9A] = true, [0x0C9B] = true, [0x0C9C] = true, + [0x0C9D] = true, [0x0C9E] = true, [0x0C9F] = true, [0x0CA0] = true, + [0x0CA1] = true, [0x0CA2] = true, [0x0CA3] = true, [0x0CA4] = true, + [0x0CA5] = true, [0x0CA6] = true, [0x0CA7] = true, [0x0CA8] = true, + [0x0CA9] = true, [0x0CAA] = true, [0x0CAB] = true, [0x0CAC] = true, + [0x0CAD] = true, [0x0CAE] = true, [0x0CAF] = true, [0x0CB0] = true, + [0x0CB1] = true, [0x0CB2] = true, [0x0CB3] = true, [0x0CB4] = true, + [0x0CB5] = true, [0x0CB6] = true, [0x0CB7] = true, [0x0CB8] = true, + [0x0CB9] = true, + [0x0CDE] = true, -- obsolete } local independent_vowel = { + -- devanagari [0x0904] = true, [0x0905] = true, [0x0906] = true, [0x0907] = true, [0x0908] = true, [0x0909] = true, [0x090A] = true, [0x090B] = true, [0x090C] = true, [0x090D] = true, [0x090E] = true, [0x090F] = true, @@ -153,20 +193,32 @@ local independent_vowel = { [0x0914] = true, [0x0960] = true, [0x0961] = true, [0x0972] = true, [0x0973] = true, [0x0974] = true, [0x0975] = true, [0x0976] = true, [0x0977] = true, + -- kannada + [0x0C85] = true, [0x0C86] = true, [0x0C87] = true, [0x0C88] = true, + [0x0C89] = true, [0x0C8A] = true, [0x0C8B] = true, [0x0C8C] = true, + [0x0C8D] = true, [0x0C8E] = true, [0x0C8F] = true, [0x0C90] = true, + [0x0C91] = true, [0x0C92] = true, [0x0C93] = true, [0x0C94] = true, } local dependent_vowel = { -- matra + -- devanagari [0x093A] = true, [0x093B] = true, [0x093E] = true, [0x093F] = true, [0x0940] = true, [0x0941] = true, [0x0942] = true, [0x0943] = true, [0x0944] = true, [0x0945] = true, [0x0946] = true, [0x0947] = true, [0x0948] = true, [0x0949] = true, [0x094A] = true, [0x094B] = true, 
[0x094C] = true, [0x094E] = true, [0x094F] = true, [0x0955] = true, [0x0956] = true, [0x0957] = true, [0x0962] = true, [0x0963] = true, + -- kannada + [0x0CBE] = true, [0x0CBF] = true, [0x0CC0] = true, [0x0CC1] = true, + [0x0CC2] = true, [0x0CC3] = true, [0x0CC4] = true, [0x0CC5] = true, + [0x0CC6] = true, [0x0CC7] = true, [0x0CC8] = true, [0x0CC9] = true, + [0x0CCA] = true, [0x0CCB] = true, [0x0CCC] = true, } local vowel_modifier = { + -- devanagari [0x0900] = true, [0x0901] = true, [0x0902] = true, [0x0903] = true, - -- A8E0 - A8F1 are cantillation marks for the Samaveda and may not belong here. + -- A8E0 - A8F1 are cantillation marks for the Samaveda and may not belong here. [0xA8E0] = true, [0xA8E1] = true, [0xA8E2] = true, [0xA8E3] = true, [0xA8E4] = true, [0xA8E5] = true, [0xA8E6] = true, [0xA8E7] = true, [0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true, @@ -178,9 +230,21 @@ local stress_tone_mark = { [0x0951] = true, [0x0952] = true, [0x0953] = true, [0x0954] = true, } -local c_nukta = 0x093C -- used to be tables -local c_halant = 0x094D -- used to be tables -local c_ra = 0x0930 -- used to be tables +local nukta = { + -- devanagari + [0x093C] = true, + -- kannada: + [0x0CBC] = true, +} + +local halant = { + -- devanagari + [0x094D] = true, + -- kannada + [0x0CCD] = true, +} + +local c_ra = 0x0930 -- used to be tables (also used as constant) local c_anudatta = 0x0952 -- used to be tables local c_nbsp = 0x00A0 -- used to be tables local c_zwnj = 0x200C -- used to be tables @@ -191,6 +255,19 @@ local zw_char = { -- could also be inlined [0x200D] = true, } +-- 0C82 anusvara +-- 0C83 visarga +-- 0CBD avagraha +-- 0CD5 length mark +-- 0CD6 ai length mark +-- 0CE0 letter ll +-- 0CE1 letter rr +-- 0CE2 vowel sign l +-- 0CE2 vowel sign ll +-- 0CF1 sign +-- 0CF2 sign +-- OCE6 - OCEF digits + local pre_mark = { [0x093F] = true, [0x094E] = true, } @@ -232,10 +309,11 @@ for k, v in next, below_mark do mark_above_below_post[k] = below_mark end for k, v in next, post_mark do mark_above_below_post[k] = post_mark end -- Again, this table can be extended for other scripts than devanagari. Actually, --- for ConTeXt this kind of dat is kept elsewhere so eventually we might move +-- for ConTeXt this kind of data is kept elsewhere so eventually we might move -- tables to someplace else. 
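-- The nukta and halant tables above already went that route: what used to be the
-- single constants c_nukta and c_halant are now small class tables, so a test like
-- halant[char] covers every listed script at once. A minimal sketch of the idea
-- (illustration only, the code points are just the two halants listed above):
--
local halant_sketch = {
    [0x094D] = true, -- devanagari virama
    [0x0CCD] = true, -- kannada virama
}

local function is_halant(char)
    return halant_sketch[char] == true
end

-- one hashed lookup instead of: char == 0x094D or char == 0x0CCD or ...
-- so adding a script means adding table entries, not touching the logic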
local reorder_class = { + -- devanagari [0x0930] = "before postscript", [0x093F] = "before half", [0x0940] = "after subscript", @@ -254,6 +332,20 @@ local reorder_class = { [0x0962] = "after subscript", [0x0963] = "after subscript", [0x093E] = "after subscript", + -- kannada: + [0x0CB0] = "after postscript", -- todo in code below + [0x0CBF] = "before subscript", -- todo in code below + [0x0CC6] = "before subscript", -- todo in code below + [0x0CCC] = "before subscript", -- todo in code below + [0x0CBE] = "before subscript", -- todo in code below + [0x0CE2] = "before subscript", -- todo in code below + [0x0CE3] = "before subscript", -- todo in code below + [0x0CC1] = "before subscript", -- todo in code below + [0x0CC2] = "before subscript", -- todo in code below + [0x0CC3] = "after subscript", + [0x0CC4] = "after subscript", + [0x0CD5] = "after subscript", + [0x0CD6] = "after subscript", } -- We use some pseudo features as we need to manipulate the nodelist based @@ -457,22 +549,22 @@ local function deva_initialize(font,attr) end -local function deva_reorder(head,start,stop,font,attr) +local function deva_reorder(head,start,stop,font,attr,nbspaces) local lookuphash, reph, vattu, blwfcache = deva_initialize(font,attr) -- could be inlines but ugly - local current = start - local n = start.next - local base = nil + local current = start + local n = start.next + local base = nil local firstcons = nil - local lastcons = nil + local lastcons = nil local basefound = false - if start.char == c_ra and n.char == c_halant and reph then + if start.char == c_ra and halant[n.char] and reph then -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph -- from candidates for base consonants if n == stop then - return head, stop + return head, stop, nbspaces end if n.next.char == c_zwj then current = start @@ -488,28 +580,31 @@ local function deva_reorder(head,start,stop,font,attr) stop = stop.prev head = remove_node(head,current) free_node(current) - return head, stop + return head, stop, nbspaces else - base, firstcons, lastcons = current, current, current - current = current.next + nbspaces = nbspaces + 1 + base = current + firstcons = current + lastcons = current + current = current.next if current ~= stop then - if current.char == c_nukta then + if nukta[current.char] then current = current.next end if current.char == c_zwj then if current ~= stop then local next = current.next - if next ~= stop and next.char == c_halant then + if next ~= stop and halant[next.char] then current = next next = current.next - local tmp = next.next + local tmp = next and next.next or nil -- needs checking local changestop = next == stop local tempcurrent = copy_node(next) local nextcurrent = copy_node(current) tempcurrent.next = nextcurrent nextcurrent.prev = tempcurrent tempcurrent[a_state] = s_blwf - tempcurrent = processcharacters(tempcurrent) + tempcurrent = processcharacters(tempcurrent,font) tempcurrent[a_state] = unsetvalue if next.char == tempcurrent.char then flush_list(tempcurrent) @@ -520,7 +615,9 @@ local function deva_reorder(head,start,stop,font,attr) current.char = tempcurrent.char -- (assumes that result of blwf consists of one node) local freenode = current.next current.next = tmp - tmp.prev = current + if tmp then + tmp.prev = current + end free_node(freenode) flush_list(tempcurrent) if changestop then @@ -559,14 +656,14 @@ local function deva_reorder(head,start,stop,font,attr) -- if base consonant is not last one then move halant from base consonant to last one local np = base local n = 
base.next - if n.char == c_nukta then + if nukta[n.char] then np = n n = n.next end - if n.char == c_halant then + if halant[n.char] then if lastcons ~= stop then local ln = lastcons.next - if ln.char == c_nukta then + if nukta[ln.char] then lastcons = ln end end @@ -588,7 +685,8 @@ local function deva_reorder(head,start,stop,font,attr) end n = start.next - if start.char == c_ra and n.char == c_halant and not (n ~= stop and zw_char[n.next.char]) then + -- if start.char == c_ra and halant[n.char] and not (n ~= stop and zw_char[n.next.char]) then + if n ~= stop and start.char == c_ra and halant[n.char] and not zw_char[n.next.char] then -- if syllable starts with Ra + H then move this combination so that it follows either: -- the post-base 'matra' (if any) or the base consonant local matra = base @@ -625,7 +723,7 @@ local function deva_reorder(head,start,stop,font,attr) local current = start while current ~= stop do local next = current.next - if next ~= stop and next.char == c_halant and next.next.char == c_zwnj then + if next ~= stop and halant[next.char] and next.next.char == c_zwnj then current[a_state] = unsetvalue end current = next @@ -633,7 +731,7 @@ local function deva_reorder(head,start,stop,font,attr) if base ~= stop and base[a_state] then local next = base.next - if next.char == c_halant and not (next ~= stop and next.next.char == c_zwj) then + if halant[next.char] and not (next ~= stop and next.next.char == c_zwj) then base[a_state] = unsetvalue end end @@ -645,7 +743,7 @@ local function deva_reorder(head,start,stop,font,attr) local current, allreordered, moved = start, false, { [base] = true } local a, b, p, bn = base, base, base, base.next - if base ~= stop and bn.char == c_nukta then + if base ~= stop and nukta[bn.char] then a, b, p = bn, bn, bn end while not allreordered do @@ -654,12 +752,12 @@ local function deva_reorder(head,start,stop,font,attr) local n = current.next local l = nil -- used ? 
if c ~= stop then - if n.char == c_nukta then + if nukta[n.char] then c = n n = n.next end if c ~= stop then - if n.char == c_halant then + if halant[n.char] then c = n n = n.next end @@ -719,7 +817,7 @@ local function deva_reorder(head,start,stop,font,attr) while current ~= stop do local c = current local n = current.next - if current.char == c_ra and n.char == c_halant then + if current.char == c_ra and halant[n.char] then c = n n = n.next local b, bn = base, base @@ -782,10 +880,17 @@ local function deva_reorder(head,start,stop,font,attr) end else local char = current.char - if consonant[char] or char == c_nbsp then -- maybe combined hash + if consonant[char] then cns = current local next = cns.next - if next.char == c_halant then + if halant[next.char] then + cns = next + end + elseif char == c_nbsp then + nbspaces = nbspaces + 1 + cns = current + local next = cns.next + if halant[next.char] then cns = next end end @@ -795,11 +900,12 @@ local function deva_reorder(head,start,stop,font,attr) end if base.char == c_nbsp then + nbspaces = nbspaces - 1 head = remove_node(head,base) free_node(base) end - return head, stop + return head, stop, nbspaces end -- If a pre-base matra character had been reordered before applying basic features, @@ -818,7 +924,7 @@ function handlers.devanagari_reorder_matras(head,start,kind,lookupname,replaceme -- can be fast loop while current and current.id == glyph_code and current.subtype<256 and current.font == font and current[a_syllabe] == startattr do local next = current.next - if current.char == c_halant and not current[a_state] then + if halant[current.char] and not current[a_state] then if next and next.id == glyph_code and next.subtype<256 and next.font == font and next[a_syllabe] == startattr and zw_char[next.char] then current = next end @@ -872,7 +978,7 @@ function handlers.devanagari_reorder_reph(head,start,kind,lookupname,replacement local startfont = start.font local startattr = start[a_syllabe] while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do --step 2 - if current.char == c_halant and not current[a_state] then + if halant[current.char] and not current[a_state] then local next = current.next if next and next.id == glyph_code and next.subtype<256 and next.font == startfont and next[a_syllabe] == startattr and zw_char[next.char] then current = next @@ -985,7 +1091,7 @@ function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start,k -- can be fast for loop + caching state while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do local next = current.next - if current.char == c_halant and not current[a_state] then + if halant[current.char] and not current[a_state] then if next and next.id == glyph_code and next.subtype<256 and next.font == font and next[a_syllabe] == startattr then local char = next.char if char == c_zwnj or char == c_zwj then @@ -1046,6 +1152,9 @@ function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replaceme if prev then prev.next = stop end + if head == start then + head = stop + end flush_list(start) return head, stop, true end @@ -1126,13 +1235,18 @@ end -- this one will be merged into the caller: it saves a call, but we will then make function -- of the actions -local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over (determine stop in sweep) +local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- 
maybe do a pass over (determine stop in sweep) local lookuphash, seqsubset = dev2_initialize(font,attr) - local reph, pre_base_reordering_consonants = false, { } -- was nil ... probably went unnoticed because never assigned - local halfpos, basepos, subpos, postpos = nil, nil, nil, nil - local locl = { } + local pre_base_reordering_consonants = { } -- was nil ... probably went unnoticed because never assigned + + local reph = false -- was nil ... probably went unnoticed because never assigned + local halfpos = nil + local basepos = nil + local subpos = nil + local postpos = nil + local locl = { } for i=1,#seqsubset do @@ -1142,8 +1256,15 @@ local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over ( local kind = subset[1] local lookupcache = subset[2] if kind == "rphf" then - if subset[3] then - reph = true + -- todo: rphf might be result of other handler/chainproc + -- todo: rphf actualy acts on consonant + halant. + -- todo: the consonant might not necesseraly be 0x0930 ... (but for devanagari it is) + local lookup = lookupcache[0x0930] + if lookup then + local hit = lookup[0x094D] + if hit then + reph = hit["ligature"] + end end local current = start local last = stop.next @@ -1173,6 +1294,7 @@ local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over ( elseif kind == "pref" then -- why not global? pretty ineffient this way -- this will move to the initializer and we will store the hash in dataset + -- todo: reph might also be result of chain for k, v in lookupcache[0x094D], next do pre_base_reordering_consonants[k] = v and v["ligature"] --ToDo: reph might also be result of chain end @@ -1206,15 +1328,15 @@ local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over ( local next = current.next local n = locl[next] or next.char if found[n] then - if next ~= stop and next.next.char == c_zwnj then --ZWNJ prevent creation of half - current = current.next + if next ~= stop and next.next.char == c_zwnj then -- zwnj prevent creation of half + current = next else current[a_state] = s_half if not halfpos then halfpos = current end end - current = next + current = current.next end end end @@ -1275,38 +1397,40 @@ local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over ( current = start.next.next end - if current ~= stop.next and current.char == c_nbsp then - -- Stand Alone cluster + local function stand_alone(is_nbsp) if current == stop then stop = stop.prev head = remove_node(head,current) free_node(current) - return head, stop + return head, stop, nbspaces else - base = current + if is_nbsp then + nbspaces = nbspaces + 1 + end + base = current current = current.next if current ~= stop then local char = current.char - if char == c_nukta then + if nukta[char] then current = current.next char = current.char end if char == c_zwj then local next = current.next - if current ~= stop and next ~= stop and next.char == c_halant then + if current ~= stop and next ~= stop and halant[next.char] then current = next next = current.next local tmp = next.next local changestop = next == stop next.next = nil current[a_state] = s_pref - current = processcharacters(current) + current = processcharacters(current,font) current[a_state] = s_blwf - current = processcharacters(current) + current = processcharacters(current,font) current[a_state] = s_pstf - current = processcharacters(current) + current = processcharacters(current,font) current[a_state] = unsetvalue - if current.char == c_halant then + if halant[current.char] then 
current.next.next = tmp local nc = copy_node(current) current.char = dotted_circle @@ -1321,18 +1445,26 @@ local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over ( end end end + end + + if current ~= stop.next then + -- Stand Alone cluster + stand_alone() + elseif current.char == c_nbsp then + -- Stand Alone cluster + stand_alone(true) else -- not Stand Alone cluster local last = stop.next while current ~= last do -- find base consonant local next = current.next if consonant[current.char] then - if not (current ~= stop and next ~= stop and next.char == c_halant and next.next.char == c_zwj) then + if not (current ~= stop and next ~= stop and halant[next.char] and next.next.char == c_zwj) then if not firstcons then firstcons = current end -- check whether consonant has below-base or post-base form or is pre-base reordering Ra local a = current[a_state] - if not (a == s_pref or a == s_blwf or a == pstf) then + if not (a == s_pref or a == s_blwf or a == s_pstf) then base = current end end @@ -1348,7 +1480,7 @@ local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over ( if start[a_state] == s_rphf then start[a_state] = unsetvalue end - return head, stop + return head, stop, nbspaces else if base[a_state] then base[a_state] = unsetvalue @@ -1450,7 +1582,7 @@ local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over ( local current, c = start, nil while current ~= stop do local char = current.char - if char == c_halant or stress_tone_mark[char] then + if halant[char] or stress_tone_mark[char] then if not c then c = current end @@ -1458,7 +1590,7 @@ local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over ( c = nil end local next = current.next - if c and next.char == c_nukta then + if c and nukta[next.char] then if head == c then head = next end @@ -1484,11 +1616,12 @@ local function dev2_reorder(head,start,stop,font,attr) -- maybe do a pass over ( end if base.char == c_nbsp then + nbspaces = nbspaces - 1 head = remove_node(head, base) free_node(base) end - return head, stop + return head, stop, nbspaces end -- cleaned up and optimized ... needs checking (local, check order, fixes, extra hash, etc) @@ -1501,8 +1634,8 @@ imerge(separator,dependent_vowel) imerge(separator,vowel_modifier) imerge(separator,stress_tone_mark) -separator[0x093C] = true -- nukta -separator[0x094D] = true -- halant +for k, v in next, nukta do separator[k] = true end +for k, v in next, halant do separator[k] = true end local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowel -- why two variants ... 
the comment suggests that it's the same ruleset @@ -1512,7 +1645,7 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe end if variant == 1 then local v = n.id == glyph_code and n.subtype<256 and n.font == font - if v and n.char == c_nukta then + if v and nukta[n.char] then n = n.next if n then v = n.id == glyph_code and n.subtype<256 and n.font == font @@ -1527,7 +1660,7 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe local nnnc = nnn.char if nnc == c_zwj and consonant[nnnc] then c = nnn - elseif (nnc == c_zwnj or nnc == c_zwj) and nnnc == c_halant then + elseif (nnc == c_zwnj or nnc == c_zwj) and halant[nnnc] then local nnnn = nnn.next if nnnn and nnnn.id == glyph_code and consonant[nnnn.char] and nnnn.subtype<256 and nnnn.font == font then c = nnnn @@ -1537,7 +1670,7 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe end end elseif variant == 2 then - if n.id == glyph_code and n.char == c_nukta and n.subtype<256 and n.font == font then + if n.id == glyph_code and nukta[n.char] and n.subtype<256 and n.font == font then c = n end n = c.next @@ -1548,9 +1681,9 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe if nv and zw_char[n.char] then n = nn nn = nn.next - nv = nn.id == glyph_code and nn.subtype<256 and nn.font == font + nv = nn and nn.id == glyph_code and nn.subtype<256 and nn.font == font end - if nn and nv and n.char == c_halant and consonant[nn.char] then + if nv and halant[n.char] and consonant[nn.char] then c = nn end end @@ -1578,7 +1711,7 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe end char = n.char end - if char == c_nukta then + if nukta[char] then c = c.next n = c.next if not n then @@ -1590,7 +1723,7 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe end char = n.char end - if char == c_halant then + if halant[char] then c = c.next n = c.next if not n then @@ -1638,7 +1771,7 @@ local function analyze_next_chars_two(c,font) if not n then return c end - if n.id == glyph_code and n.char == c_nukta and n.subtype<256 and n.font == font then + if n.id == glyph_code and nukta[n.char] and n.subtype<256 and n.font == font then c = n end n = c @@ -1646,7 +1779,7 @@ local function analyze_next_chars_two(c,font) local nn = n.next if nn and nn.id == glyph_code and nn.subtype<256 and nn.font == font then local char = nn.char - if char == c_halant then + if halant[char] then n = nn local nnn = nn.next if nnn and nnn.id == glyph_code and zw_char[nnn.char] and nnn.subtype<256 and nnn.font == font then @@ -1655,7 +1788,7 @@ local function analyze_next_chars_two(c,font) elseif char == c_zwnj or char == c_zwj then -- n = nn -- not here (?) 
local nnn = nn.next - if nnn and nnn.id == glyph_code and nnn.char == c_halant and nnn.subtype<256 and nnn.font == font then + if nnn and nnn.id == glyph_code and halant[nnn.char] and nnn.subtype<256 and nnn.font == font then n = nnn end else @@ -1665,7 +1798,7 @@ local function analyze_next_chars_two(c,font) if nn and nn.id == glyph_code and consonant[nn.char] and nn.subtype<256 and nn.font == font then n = nn local nnn = nn.next - if nnn and nnn.id == glyph_code and nnn.char == c_nukta and nnn.subtype<256 and nnn.font == font then + if nnn and nnn.id == glyph_code and nukta[nnn.char] and nnn.subtype<256 and nnn.font == font then n = nnn end c = n @@ -1702,7 +1835,7 @@ local function analyze_next_chars_two(c,font) end char = n.char end - if char == c_halant then + if halant[char] then c = c.next n = c.next if not n then @@ -1740,7 +1873,7 @@ local function analyze_next_chars_two(c,font) end char = n.char end - if char == c_nukta then + if nukta[char] then c = c.next n = c.next if not n then @@ -1752,7 +1885,7 @@ local function analyze_next_chars_two(c,font) end char = n.char end - if char == c_halant then + if halant[char] then c = c.next n = c.next if not n then @@ -1811,7 +1944,10 @@ end -- a lot. Common code has been synced. function methods.deva(head,font,attr) - local current, start, done = head, true, false + local current = head + local start = true + local done = false + local nbspaces = 0 while current do if current.id == glyph_code and current.subtype<256 and current.font == font then done = true @@ -1819,7 +1955,7 @@ function methods.deva(head,font,attr) local syllableend = nil local c = current local n = c.next - if n and c.char == c_ra and n.id == glyph_code and n.char == c_halant and n.subtype<256 and n.font == font then + if n and c.char == c_ra and n.id == glyph_code and halant[n.char] and n.subtype<256 and n.font == font then local n = n.next if n and n.id == glyph_code and n.subtype<256 and n.font == font then c = n @@ -1840,9 +1976,10 @@ function methods.deva(head,font,attr) end if standalone then -- stand alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)] - local syllabeend, current = analyze_next_chars_one(c,font,2) -- watch out, here we set current to next + local syllableend = analyze_next_chars_one(c,font,2) + current = syllableend.next if syllablestart ~= syllableend then - head, current = deva_reorder(head,syllablestart,syllableend,font,attr) + head, current, nbspaces = deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces) current = current.next end else @@ -1864,7 +2001,7 @@ function methods.deva(head,font,attr) break end local c = n.char - if c == c_nukta then + if nukta[c] then n = n.next if not n then break @@ -1875,7 +2012,7 @@ function methods.deva(head,font,attr) end c = n.char end - if c == c_halant then + if halant[c] then n = n.next if not n then break @@ -1903,7 +2040,7 @@ function methods.deva(head,font,attr) end end local n = current.next - if n and n.id == glyph_code and n.char == c_nukta and n.subtype<256 and n.font == font then + if n and n.id == glyph_code and nukta[n.char] and n.subtype<256 and n.font == font then -- nukta (not specified in Microsft Devanagari OpenType specification) current = n n = current.next @@ -1913,7 +2050,7 @@ function methods.deva(head,font,attr) if current then local v = current.id == glyph_code and current.subtype<256 and current.font == font if v then - if current.char == c_halant then + if halant[current.char] then -- syllable containing consonant without 
vowels: {C + [Nukta] + H} + C + H local n = current.next if n and n.id == glyph_code and zw_char[n.char] and n.subtype<256 and n.font == font then @@ -1951,7 +2088,7 @@ function methods.deva(head,font,attr) end end if syllablestart ~= syllableend then - head, current = deva_reorder(head,syllablestart,syllableend,font,attr) + head, current, nbspaces = deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces) current = current.next end elseif independent_vowel[char] then @@ -1990,6 +2127,12 @@ function methods.deva(head,font,attr) start = false end + if nbspaces > 0 then + head = replace_all_nbsp(head) + end + + head = typesetters.characters.handler(head) + return head, done end @@ -1999,10 +2142,11 @@ end -- handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1) function methods.dev2(head,font,attr) - local current = head - local start = true - local done = false - local syllabe = 0 + local current = head + local start = true + local done = false + local syllabe = 0 + local nbspaces = 0 while current do local syllablestart, syllableend = nil, nil if current.id == glyph_code and current.subtype<256 and current.font == font then @@ -2010,7 +2154,7 @@ function methods.dev2(head,font,attr) syllablestart = current local c = current local n = current.next - if n and c.char == c_ra and n.id == glyph_code and n.char == c_halant and n.subtype<256 and n.font == font then + if n and c.char == c_ra and n.id == glyph_code and halant[n.char] and n.subtype<256 and n.font == font then local n = n.next if n and n.id == glyph_code and n.subtype<256 and n.font == font then c = n @@ -2024,6 +2168,7 @@ function methods.dev2(head,font,attr) else local standalone = char == c_nbsp if standalone then + nbspaces = nbspaces + 1 local p = current.prev if not p then -- begin of paragraph or box @@ -2058,7 +2203,7 @@ function methods.dev2(head,font,attr) end end if syllableend and syllablestart ~= syllableend then - head, current = dev2_reorder(head,syllablestart,syllableend,font,attr) + head, current, nbspaces = dev2_reorder(head,syllablestart,syllableend,font,attr,nbspaces) end if not syllableend and current.id == glyph_code and current.subtype<256 and current.font == font and not current[a_state] then local mark = mark_four[current.char] @@ -2070,5 +2215,9 @@ function methods.dev2(head,font,attr) current = current.next end + if nbspaces > 0 then + head = replace_all_nbsp(head) + end + return head, done end diff --git a/tex/context/base/font-ota.lua b/tex/context/base/font-ota.lua index 79fcf3fa2..9af5a3347 100644 --- a/tex/context/base/font-ota.lua +++ b/tex/context/base/font-ota.lua @@ -32,6 +32,7 @@ local a_state = attributes.private('state') local nodecodes = nodes.nodecodes local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc local math_code = nodecodes.math local traverse_id = node.traverse_id @@ -78,6 +79,12 @@ local features = { fina = s_fina, isol = s_isol, -- mark = s_mark, + -- rest = s_rest, + rphf = s_rphf, + half = s_half, + pref = s_pref, + blwf = s_blwf, + pstf = s_pstf, } analyzers.states = states @@ -118,7 +125,7 @@ function analyzers.setstate(head,font) end elseif id == disc_code then -- always in the middle - current[a_state] = s_midi + current[a_state] = s_medi last = current else -- finish if first and first == last then @@ -179,7 +186,7 @@ end registerotffeature { name = "analyze", - description = "analysis of (for instance) character classes", + description = "analysis of character classes", default = true, initializers = { node = analyzeinitializer, diff 
--git a/tex/context/base/font-otc.lua b/tex/context/base/font-otc.lua index a87dcadf8..3006e47ca 100644 --- a/tex/context/base/font-otc.lua +++ b/tex/context/base/font-otc.lua @@ -188,9 +188,11 @@ otf.enhancers.register("check extra features",enhance) -- tlig -- -local tlig = { - endash = "hyphen hyphen", - emdash = "hyphen hyphen hyphen", +local tlig = { -- we need numbers for some fonts so ... + -- endash = "hyphen hyphen", + -- emdash = "hyphen hyphen hyphen", + [0x2013] = { 0x002D, 0x002D }, + [0x2014] = { 0x002D, 0x002D, 0x002D }, -- quotedblleft = "quoteleft quoteleft", -- quotedblright = "quoteright quoteright", -- quotedblleft = "grave grave", diff --git a/tex/context/base/font-otd.lua b/tex/context/base/font-otd.lua index a9d093d6d..919da2379 100644 --- a/tex/context/base/font-otd.lua +++ b/tex/context/base/font-otd.lua @@ -242,19 +242,19 @@ function otf.dataset(tfmdata,font,attr) -- attr only when explicit (as in specia } rl[attr] = ra local sequences = tfmdata.resources.sequences --- setmetatableindex(ra, function(t,k) --- if type(k) == "number" then --- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic) --- t[k] = v or false --- return v --- end --- end) -for s=1,#sequences do - local v = initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic) - if v then - ra[#ra+1] = v - end -end + -- setmetatableindex(ra, function(t,k) + -- if type(k) == "number" then + -- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic) + -- t[k] = v or false + -- return v + -- end + -- end) + for s=1,#sequences do + local v = initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic) + if v then + ra[#ra+1] = v + end + end end return ra diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua index c1f2f14fc..7598a9c35 100644 --- a/tex/context/base/font-otf.lua +++ b/tex/context/base/font-otf.lua @@ -48,7 +48,7 @@ local otf = fonts.handlers.otf otf.glists = { "gsub", "gpos" } -otf.version = 2.743 -- beware: also sync font-mis.lua +otf.version = 2.745 -- beware: also sync font-mis.lua otf.cache = containers.define("fonts", "otf", otf.version, true) local fontdata = fonts.hashes.identifiers @@ -73,6 +73,9 @@ local packdata = true local syncspace = true local forcenotdef = false local includesubfonts = false +local overloadkerns = false -- experiment + +local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes local wildcard = "*" local default = "dflt" @@ -81,12 +84,24 @@ local fontloaderfields = fontloader.fields local mainfields = nil local glyphfields = nil -- not used yet +local formats = fonts.formats + +formats.otf = "opentype" +formats.ttf = "truetype" +formats.ttc = "truetype" +formats.dfont = "truetype" + registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end) registerdirective("fonts.otf.loader.force", function(v) forceload = v end) registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables = v end) registerdirective("fonts.otf.loader.pack", function(v) packdata = v end) registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end) registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end) +registerdirective("fonts.otf.loader.overloadkerns", function(v) overloadkerns = v end) + +local function otf_format(filename) + return formats[lower(file.suffix(filename))] +end local function load_featurefile(raw,featurefile) if featurefile and 
featurefile ~= "" then @@ -321,7 +336,7 @@ function enhancers.register(what,action) -- only already registered can be overl actions[what] = action end -function otf.load(filename,format,sub,featurefile) +function otf.load(filename,sub,featurefile) -- second argument (format) is gone ! local base = file.basename(file.removesuffix(filename)) local name = file.removesuffix(base) local attr = lfs.attributes(filename) @@ -420,7 +435,7 @@ function otf.load(filename,format,sub,featurefile) data = { size = size, time = time, - format = format, + format = otf_format(filename), featuredata = featurefiles, resources = { filename = resolvers.unresolve(filename), -- no shortcut @@ -494,6 +509,9 @@ function otf.load(filename,format,sub,featurefile) report_otf("loading from cache using hash %a",hash) end enhance("unpack",data,filename,nil,false) + if applyruntimefixes then + applyruntimefixes(filename,data) + end enhance("add dimensions",data,filename,nil,false) if trace_sequences then showfeatureorder(data,filename) @@ -1534,6 +1552,118 @@ actions["reorganize glyph kerns"] = function(data,filename,raw) end end +-- actions["merge kern classes"] = function(data,filename,raw) +-- local gposlist = raw.gpos +-- if gposlist then +-- local descriptions = data.descriptions +-- local resources = data.resources +-- local unicodes = resources.unicodes +-- local splitter = data.helpers.tounicodetable +-- local ignored = 0 +-- for gp=1,#gposlist do +-- local gpos = gposlist[gp] +-- local subtables = gpos.subtables +-- if subtables then +-- for s=1,#subtables do +-- local subtable = subtables[s] +-- local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes +-- if kernclass then -- the next one is quite slow +-- local split = { } -- saves time +-- for k=1,#kernclass do +-- local kcl = kernclass[k] +-- local firsts = kcl.firsts +-- local seconds = kcl.seconds +-- local offsets = kcl.offsets +-- local lookups = kcl.lookup -- singular +-- if type(lookups) ~= "table" then +-- lookups = { lookups } +-- end +-- -- if offsets[1] == nil then +-- -- offsets[1] = "" -- defaults ? +-- -- end +-- -- we can check the max in the loop +-- -- local maxseconds = getn(seconds) +-- for n, s in next, firsts do +-- split[s] = split[s] or lpegmatch(splitter,s) +-- end +-- local maxseconds = 0 +-- for n, s in next, seconds do +-- if n > maxseconds then +-- maxseconds = n +-- end +-- split[s] = split[s] or lpegmatch(splitter,s) +-- end +-- for l=1,#lookups do +-- local lookup = lookups[l] +-- for fk=1,#firsts do -- maxfirsts ? +-- local fv = firsts[fk] +-- local splt = split[fv] +-- if splt then +-- local extrakerns = { } +-- local baseoffset = (fk-1) * maxseconds +-- for sk=2,maxseconds do -- will become 1 based in future luatex +-- local sv = seconds[sk] +-- -- for sk, sv in next, seconds do +-- local splt = split[sv] +-- if splt then -- redundant test +-- local offset = offsets[baseoffset + sk] +-- if offset then +-- for i=1,#splt do +-- extrakerns[splt[i]] = offset +-- end +-- end +-- end +-- end +-- for i=1,#splt do +-- local first_unicode = splt[i] +-- local description = descriptions[first_unicode] +-- if description then +-- local kerns = description.kerns +-- if not kerns then +-- kerns = { } -- unicode indexed ! 
+-- description.kerns = kerns +-- end +-- local lookupkerns = kerns[lookup] +-- if not lookupkerns then +-- lookupkerns = { } +-- kerns[lookup] = lookupkerns +-- end +-- if overloadkerns then +-- for second_unicode, kern in next, extrakerns do +-- lookupkerns[second_unicode] = kern +-- end +-- else +-- for second_unicode, kern in next, extrakerns do +-- local k = lookupkerns[second_unicode] +-- if not k then +-- lookupkerns[second_unicode] = kern +-- elseif k ~= kern then +-- if trace_loading then +-- report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern) +-- end +-- ignored = ignored + 1 +-- end +-- end +-- end +-- elseif trace_loading then +-- report_otf("no glyph data for %U", first_unicode) +-- end +-- end +-- end +-- end +-- end +-- end +-- subtable.kernclass = { } +-- end +-- end +-- end +-- end +-- if ignored > 0 then +-- report_otf("%s kern overloads ignored") +-- end +-- end +-- end + actions["merge kern classes"] = function(data,filename,raw) local gposlist = raw.gpos if gposlist then @@ -1541,80 +1671,99 @@ actions["merge kern classes"] = function(data,filename,raw) local resources = data.resources local unicodes = resources.unicodes local splitter = data.helpers.tounicodetable + local ignored = 0 + local blocked = 0 for gp=1,#gposlist do local gpos = gposlist[gp] local subtables = gpos.subtables if subtables then + local first_done = { } -- could become an option so that we can deal with buggy fonts that don't get fixed + local split = { } -- saves time .. although probably not that much any more in the fixed luatex kernclass table for s=1,#subtables do local subtable = subtables[s] local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes + local lookup = subtable.lookup or subtable.name if kernclass then -- the next one is quite slow - local split = { } -- saves time - for k=1,#kernclass do - local kcl = kernclass[k] - local firsts = kcl.firsts - local seconds = kcl.seconds - local offsets = kcl.offsets - local lookups = kcl.lookup -- singular - if type(lookups) ~= "table" then - lookups = { lookups } - end - -- if offsets[1] == nil then - -- offsets[1] = "" - -- end - -- we can check the max in the loop - -- local maxseconds = getn(seconds) - for n, s in next, firsts do - split[s] = split[s] or lpegmatch(splitter,s) - end - local maxseconds = 0 - for n, s in next, seconds do - if n > maxseconds then - maxseconds = n - end - split[s] = split[s] or lpegmatch(splitter,s) + if #kernclass > 0 then + kernclass = kernclass[1] + lookup = type(kernclass.lookup) == "string" and kernclass.lookup or lookup + report_otf("fixing kernclass table of lookup %a",lookup) + end + local firsts = kernclass.firsts + local seconds = kernclass.seconds + local offsets = kernclass.offsets + -- if offsets[1] == nil then + -- offsets[1] = "" -- defaults ? + -- end + -- we can check the max in the loop + -- local maxseconds = getn(seconds) + for n, s in next, firsts do + split[s] = split[s] or lpegmatch(splitter,s) + end + local maxseconds = 0 + for n, s in next, seconds do + if n > maxseconds then + maxseconds = n end - for l=1,#lookups do - local lookup = lookups[l] - for fk=1,#firsts do -- maxfirsts ? 
- local fv = firsts[fk] - local splt = split[fv] - if splt then - local extrakerns = { } - local baseoffset = (fk-1) * maxseconds - for sk=2,maxseconds do -- will become 1 based in future luatex - local sv = seconds[sk] - -- for sk, sv in next, seconds do - local splt = split[sv] - if splt then -- redundant test - local offset = offsets[baseoffset + sk] - if offset then - for i=1,#splt do - extrakerns[splt[i]] = offset - end - end + split[s] = split[s] or lpegmatch(splitter,s) + end + for fk=1,#firsts do -- maxfirsts ? + local fv = firsts[fk] + local splt = split[fv] + if splt then + local extrakerns = { } + local baseoffset = (fk-1) * maxseconds + for sk=2,maxseconds do -- will become 1 based in future luatex + local sv = seconds[sk] + -- for sk, sv in next, seconds do + local splt = split[sv] + if splt then -- redundant test + local offset = offsets[baseoffset + sk] + if offset then + for i=1,#splt do + extrakerns[splt[i]] = offset end end - for i=1,#splt do - local first_unicode = splt[i] - local description = descriptions[first_unicode] - if description then - local kerns = description.kerns - if not kerns then - kerns = { } -- unicode indexed ! - description.kerns = kerns - end - local lookupkerns = kerns[lookup] - if not lookupkerns then - lookupkerns = { } - kerns[lookup] = lookupkerns - end + end + end + for i=1,#splt do + local first_unicode = splt[i] + if first_done[first_unicode] then + report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode) + blocked = blocked + 1 + else + first_done[first_unicode] = true + local description = descriptions[first_unicode] + if description then + local kerns = description.kerns + if not kerns then + kerns = { } -- unicode indexed ! + description.kerns = kerns + end + local lookupkerns = kerns[lookup] + if not lookupkerns then + lookupkerns = { } + kerns[lookup] = lookupkerns + end + if overloadkerns then for second_unicode, kern in next, extrakerns do lookupkerns[second_unicode] = kern end - elseif trace_loading then - report_otf("no glyph data for %U", first_unicode) + else + for second_unicode, kern in next, extrakerns do + local k = lookupkerns[second_unicode] + if not k then + lookupkerns[second_unicode] = kern + elseif k ~= kern then + if trace_loading then + report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern) + end + ignored = ignored + 1 + end + end end + elseif trace_loading then + report_otf("no glyph data for %U", first_unicode) end end end @@ -1625,6 +1774,12 @@ actions["merge kern classes"] = function(data,filename,raw) end end end + if ignored > 0 then + report_otf("%s kern overloads ignored",ignored) + end + if blocked > 0 then + report_otf("%s succesive kerns blocked",blocked) + end end end @@ -1894,10 +2049,24 @@ local function copytotfm(data,cache_id) end end -- end math + -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?) 
+ local filename = constructors.checkedfilename(resources) + local fontname = metadata.fontname + local fullname = metadata.fullname or fontname + local units = metadata.units_per_em or 1000 + -- + if units == 0 then -- catch bugs in fonts + units = 1000 -- maybe 2000 when ttf + metadata.units_per_em = 1000 + report_otf("changing %a units to %a",0,units) + end + -- local monospaced = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced") local charwidth = pfminfo.avgwidth -- or unset - local italicangle = metadata.italicangle local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight +-- charwidth = charwidth * units/1000 +-- charxheight = charxheight * units/1000 + local italicangle = metadata.italicangle properties.monospaced = monospaced parameters.italicangle = italicangle parameters.charwidth = charwidth @@ -1927,16 +2096,6 @@ local function copytotfm(data,cache_id) end end spaceunits = tonumber(spaceunits) or 500 -- brrr - -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?) - local filename = constructors.checkedfilename(resources) - local fontname = metadata.fontname - local fullname = metadata.fullname or fontname - local units = metadata.units_per_em or 1000 - -- - if units == 0 then -- catch bugs in fonts - units = 1000 - metadata.units_per_em = 1000 - end -- parameters.slant = 0 parameters.space = spaceunits -- 3.333 (cmr10) @@ -1947,10 +2106,10 @@ local function copytotfm(data,cache_id) if spaceunits < 2*units/5 then -- todo: warning end - if italicangle then + if italicangle and italicangle ~= 0 then parameters.italicangle = italicangle parameters.italicfactor = math.cos(math.rad(90+italicangle)) - parameters.slant = - math.round(math.tan(italicangle*math.pi/180)) + parameters.slant = - math.tan(italicangle*math.pi/180) end if monospaced then parameters.space_stretch = 0 @@ -1979,7 +2138,7 @@ local function copytotfm(data,cache_id) -- properties.space = spacer properties.encodingbytes = 2 - properties.format = data.format or fonts.formats[filename] or "opentype" + properties.format = data.format or otf_format(filename) or formats.otf properties.noglyphnames = true properties.filename = filename properties.fontname = fontname @@ -2008,9 +2167,9 @@ local function otftotfm(specification) local name = specification.name local sub = specification.sub local filename = specification.filename - local format = specification.format + -- local format = specification.format local features = specification.features.normal - local rawdata = otf.load(filename,format,sub,features and features.featurefile) + local rawdata = otf.load(filename,sub,features and features.featurefile) if rawdata and next(rawdata) then rawdata.lookuphash = { } tfmdata = copytotfm(rawdata,cache_id) @@ -2104,48 +2263,38 @@ function otf.collectlookups(rawdata,kind,script,language) return nil, nil end --- readers +-- readers (a bit messy, this forced so I might redo that bit: foo.ttf FOO.ttf foo.TTF FOO.TTF) -local function check_otf(forced,specification,suffix,what) +local function check_otf(forced,specification,suffix) local name = specification.name if forced then - name = file.addsuffix(name,suffix,true) + name = specification.forcedname -- messy end local fullname = findbinfile(name,suffix) or "" if fullname == "" then fullname = fonts.names.getfilename(name,suffix) or "" end - if fullname ~= "" then + if fullname ~= "" and not fonts.names.ignoredfile(fullname) then specification.filename = 
fullname - specification.format = what return read_from_otf(specification) end end -local function opentypereader(specification,suffix,what) +local function opentypereader(specification,suffix) local forced = specification.forced or "" - if forced == "otf" then - return check_otf(true,specification,forced,"opentype") - elseif forced == "ttf" or forced == "ttc" or forced == "dfont" then - return check_otf(true,specification,forced,"truetype") + if formats[forced] then + return check_otf(true,specification,forced) else - return check_otf(false,specification,suffix,what) + return check_otf(false,specification,suffix) end end -readers.opentype = opentypereader - -local formats = fonts.formats - -formats.otf = "opentype" -formats.ttf = "truetype" -formats.ttc = "truetype" -formats.dfont = "truetype" +readers.opentype = opentypereader -- kind of useless and obsolete -function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end -function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end -function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end -function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end +function readers.otf (specification) return opentypereader(specification,"otf") end +function readers.ttf (specification) return opentypereader(specification,"ttf") end +function readers.ttc (specification) return opentypereader(specification,"ttf") end +function readers.dfont(specification) return opentypereader(specification,"ttf") end -- this will be overloaded diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua index 5da0cf798..3733d51c2 100644 --- a/tex/context/base/font-otn.lua +++ b/tex/context/base/font-otn.lua @@ -20,7 +20,7 @@ if not modules then modules = { } end modules ['font-otn'] = { -- todo: -- --- kerning is probably not yet ok for latin around dics nodes +-- kerning is probably not yet ok for latin around dics nodes (interesting challenge) -- extension infrastructure (for usage out of context) -- sorting features according to vendors/renderers -- alternative loop quitters @@ -31,7 +31,8 @@ if not modules then modules = { } end modules ['font-otn'] = { -- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere) -- mark (to mark) code is still not what it should be (too messy but we need some more extreem husayni tests) -- remove some optimizations (when I have a faster machine) - +-- +-- maybe redo the lot some way (more context specific) --[[ldx--

This module is a bit more split up that I'd like but since we also want to test @@ -187,6 +188,7 @@ local default = "dflt" local nodecodes = nodes.nodecodes local whatcodes = nodes.whatcodes local glyphcodes = nodes.glyphcodes +local disccodes = nodes.disccodes local glyph_code = nodecodes.glyph local glue_code = nodecodes.glue @@ -197,6 +199,8 @@ local math_code = nodecodes.math local dir_code = whatcodes.dir local localpar_code = whatcodes.localpar +local discretionary_code = disccodes.discretionary + local ligature_code = glyphcodes.ligature local privateattribute = attributes.private @@ -502,13 +506,18 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives end end -local function multiple_glyphs(head,start,multiple) -- marks ? +local function multiple_glyphs(head,start,multiple,ignoremarks) local nofmultiples = #multiple if nofmultiples > 0 then start.char = multiple[1] if nofmultiples > 1 then local sn = start.next for k=2,nofmultiples do -- todo: use insert_node +-- untested: +-- +-- while ignoremarks and marks[sn.char] then +-- local sn = sn.next +-- end local n = copy_node(start) -- ignore components n.char = multiple[k] n.next = sn @@ -530,7 +539,7 @@ local function multiple_glyphs(head,start,multiple) -- marks ? end function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) - local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue + local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives) if choice then if trace_alternatives then @@ -545,11 +554,11 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence return head, start, true end -function handlers.gsub_multiple(head,start,kind,lookupname,multiple) +function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) if trace_multiples then logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple)) end - return multiple_glyphs(head,start,multiple) + return multiple_glyphs(head,start,multiple,sequence.flags[1]) end function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) @@ -648,7 +657,7 @@ function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence if marks[basechar] then while true do base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then basechar = base.char if not marks[basechar] then break @@ -796,7 +805,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence if al[anchor] then local ma = markanchors[anchor] if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -905,7 +914,6 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) prev = snext snext = snext.next else - local krn = kerns[nextchar] if not krn then -- skip elseif type(krn) == "table" then @@ -1026,35 +1034,35 @@ single lookup case. The efficiency of the replacements can be improved by deleti as less as needed but that would also make the code even more messy.

--ldx]]-- -local function delete_till_stop(start,stop,ignoremarks) -- keeps start - local n = 1 - if start == stop then - -- done - elseif ignoremarks then - repeat -- start x x m x x stop => start m - local next = start.next - if not marks[next.char] then - local components = next.components - if components then -- probably not needed - flush_node_list(components) - end - delete_node(start,next) - end - n = n + 1 - until next == stop - else -- start x x x stop => start - repeat - local next = start.next - local components = next.components - if components then -- probably not needed - flush_node_list(components) - end - delete_node(start,next) - n = n + 1 - until next == stop - end - return n -end +-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start +-- local n = 1 +-- if start == stop then +-- -- done +-- elseif ignoremarks then +-- repeat -- start x x m x x stop => start m +-- local next = start.next +-- if not marks[next.char] then +-- local components = next.components +-- if components then -- probably not needed +-- flush_node_list(components) +-- end +-- head = delete_node(head,next) +-- end +-- n = n + 1 +-- until next == stop +-- else -- start x x x stop => start +-- repeat +-- local next = start.next +-- local components = next.components +-- if components then -- probably not needed +-- flush_node_list(components) +-- end +-- head = delete_node(head,next) +-- n = n + 1 +-- until next == stop +-- end +-- return head, n +-- end --[[ldx--

Here we replace start by a single variant. First we delete the rest of the @@ -1108,7 +1116,7 @@ the match.
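A related functional change in this area: both the direct and the contextual multiple-substitution handlers now pass the lookup's first flag (the ignore-marks flag) on to multiple_glyphs, which may eventually use it to skip marks (that loop is still commented out as untested). A condensed sketch of the forwarding, with the same names as in the chain handler that follows:

    -- sketch: the ignoremarks flag travels along with the replacement list
    return multiple_glyphs(head, start, replacements, currentlookup.flags[1])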

--ldx]]-- function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - delete_till_stop(start,stop) -- we could pass ignoremarks as #3 .. + -- local head, n = delete_till_stop(head,start,stop) local startchar = start.char local subtables = currentlookup.subtables local lookupname = subtables[1] @@ -1127,7 +1135,7 @@ function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext, if trace_multiples then logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) end - return multiple_glyphs(head,start,replacements) + return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) end end return head, start, false @@ -1438,7 +1446,7 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext if al[anchor] then local ma = markanchors[anchor] if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -1550,10 +1558,11 @@ function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lo return head, start, false end +chainmores.gpos_single = chainprocs.gpos_single -- okay? + -- when machines become faster i will make a shared function function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) --- logwarning("%s: gpos_pair not yet supported",cref(kind,chainname,chainlookupname)) local snext = start.next if snext then local startchar = start.char @@ -1623,6 +1632,8 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look return head, start, false end +chainmores.gpos_pair = chainprocs.gpos_pair -- okay? + -- what pointer to return, spec says stop -- to be discussed ... is bidi changer a space? 
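For reference, this is the protocol the contextual driver below now relies on: each chainproc (and chainmore) returns the possibly updated head, the node to continue at, and a success flag, and the caller folds that flag into its own done state instead of overwriting it. A condensed sketch, using the same names as normal_handle_contextchain:

    -- sketch: how a chain handler's result is folded into 'done'
    local cp = chainprocs[chainlookup.type]
    if cp then
        local ok
        head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
        if ok then
            done = true -- one successful step is enough to mark the whole run
        end
    end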
-- elseif char == zwnj and sequence[n][32] then -- brrr @@ -1865,7 +1876,11 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq if chainlookup then local cp = chainprocs[chainlookup.type] if cp then - head, start, done = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) + local ok + head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) + if ok then + done = true + end else logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) end @@ -1892,22 +1907,28 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end end local chainlookupname = chainlookups[i] - local chainlookup = lookuptable[chainlookupname] -- can be false (n matches, 254 +-- +-- attr = attr or false +-- +-- local a = getattr(start,0) +-- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then +-- -- the action +-- end + local function featuresprocessor(head,font,attr) local lookuphash = lookuphashes[font] -- we can also check sequences here @@ -2093,24 +2123,24 @@ local function featuresprocessor(head,font,attr) checkstep(head) end - tfmdata = fontdata[font] - descriptions = tfmdata.descriptions - characters = tfmdata.characters - resources = tfmdata.resources + tfmdata = fontdata[font] + descriptions = tfmdata.descriptions + characters = tfmdata.characters + resources = tfmdata.resources - marks = resources.marks - anchorlookups = resources.lookup_to_anchor - lookuptable = resources.lookups - lookuptypes = resources.lookuptypes + marks = resources.marks + anchorlookups = resources.lookup_to_anchor + lookuptable = resources.lookups + lookuptypes = resources.lookuptypes - currentfont = font - rlmode = 0 + currentfont = font + rlmode = 0 - local sequences = resources.sequences - local done = false - local datasets = otf.dataset(tfmdata,font,attr) + local sequences = resources.sequences + local done = false + local datasets = otf.dataset(tfmdata,font,attr) - local dirstack = { } -- could move outside function + local dirstack = { } -- could move outside function -- We could work on sub start-stop ranges instead but I wonder if there is that -- much speed gain (experiments showed that it made not much sense) and we need @@ -2120,245 +2150,360 @@ local function featuresprocessor(head,font,attr) -- Keeping track of the headnode is needed for devanagari (I generalized it a bit -- so that multiple cases are also covered.) --- for s=1,#sequences do --- local dataset = datasets[s] --- if dataset then --- featurevalue = dataset[1] -- todo: pass to function instead of using a global --- if featurevalue then -- never false - -for s=1,#datasets do - local dataset = datasets[s] - featurevalue = dataset[1] -- todo: pass to function instead of using a global - - local sequence = dataset[5] -- sequences[s] -- also dataset[5] - local rlparmode = 0 - local topstack = 0 - local success = false - local attribute = dataset[2] - local chain = dataset[3] -- sequence.chain or 0 - local typ = sequence.type - local subtables = sequence.subtables - if chain < 0 then - -- this is a limited case, no special treatments like 'init' etc - local handler = handlers[typ] - -- we need to get rid of this slide! 
probably no longer needed in latest luatex - local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo + for s=1,#datasets do + local dataset = datasets[s] + featurevalue = dataset[1] -- todo: pass to function instead of using a global + + local sequence = dataset[5] -- sequences[s] -- also dataset[5] + local rlparmode = 0 + local topstack = 0 + local success = false + local attribute = dataset[2] + local chain = dataset[3] -- sequence.chain or 0 + local typ = sequence.type + local subtables = sequence.subtables + if chain < 0 then + -- this is a limited case, no special treatments like 'init' etc + local handler = handlers[typ] + -- we need to get rid of this slide! probably no longer needed in latest luatex + local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo + while start do + local id = start.id + if id == glyph_code then + if start.font == font and start.subtype<256 then + local a = start[0] + if a then + a = a == attr + else + a = true + end + if a then + for i=1,#subtables do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if success then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start = start.prev end + else + start = start.prev + end + else + start = start.prev + end + else + start = start.prev + end + end + else + local handler = handlers[typ] + local ns = #subtables + local start = head -- local ? + rlmode = 0 -- to be checked ? + if ns == 1 then -- happens often + local lookupname = subtables[1] + local lookupcache = lookuphash[lookupname] + if not lookupcache then -- also check for empty cache + report_missing_cache(typ,lookupname) + else + + local function subrun(start) + -- mostly for gsub, gpos would demand a more clever approach + local head = start + local done = false + while start do + local id = start.id + if id == glyph_code and start.font == font and start.subtype <256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- sequence kan weg + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done = true + end + end + if start then start = start.next end + else + start = start.next + end + else + start = start.next + end + end + if done then + success = true + return head + end + end + while start do local id = start.id if id == glyph_code then if start.font == font and start.subtype<256 then local a = start[0] if a then - a = a == attr + a = (a == attr) and (not attribute or start[a_state] == attribute) else - a = true + a = not attribute or start[a_state] == attribute end if a then - for i=1,#subtables do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if success then - break - end - end - else - report_missing_cache(typ,lookupname) + local lookupmatch = lookupcache[start.char] + if lookupmatch then + 
-- sequence kan weg + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + success = true end end - if start then start = start.prev end + if start then start = start.next end else - start = start.prev + start = start.next end else - start = start.prev + start = start.next + end + elseif id == disc_code then + -- mostly for gsub + if start.subtype == discretionary_code then + local pre = start.pre + if pre then + local new = subrun(pre) + if new then start.pre = new end + end + local post = start.post + if post then + local new = subrun(post) + if new then start.post = new end + end + local replace = start.replace + if replace then + local new = subrun(replace) + if new then start.replace = new end + end + end + start = start.next + elseif id == whatsit_code then -- will be function + local subtype = start.subtype + if subtype == dir_code then + local dir = start.dir + if dir == "+TRT" or dir == "+TLT" then + topstack = topstack + 1 + dirstack[topstack] = dir + elseif dir == "-TRT" or dir == "-TLT" then + topstack = topstack - 1 + end + local newdir = dirstack[topstack] + if newdir == "+TRT" then + rlmode = -1 + elseif newdir == "+TLT" then + rlmode = 1 + else + rlmode = rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype == localpar_code then + local dir = start.dir + if dir == "TRT" then + rlparmode = -1 + elseif dir == "TLT" then + rlparmode = 1 + else + rlparmode = 0 + end + -- one might wonder if the par dir should be looked at, so we might as well drop the next line + rlmode = rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end end + start = start.next + elseif id == math_code then + start = end_of_math(start).next else - start = start.prev + start = start.next end end - else - local handler = handlers[typ] - local ns = #subtables - local start = head -- local ? - rlmode = 0 -- to be checked ? 
- if ns == 1 then -- happens often - local lookupname = subtables[1] - local lookupcache = lookuphash[lookupname] - if not lookupcache then -- also check for empty cache - report_missing_cache(typ,lookupname) - else - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- sequence kan weg - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - success = true - end + end + else + + local function subrun(start) + -- mostly for gsub, gpos would demand a more clever approach + local head = start + local done = false + while start do + local id = start.id + if id == glyph_code and start.id == font and start.subtype <256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- we could move all code inline but that makes things even more unreadable + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done = true + break + elseif not start then + -- don't ask why ... shouldn't happen + break end - if start then start = start.next end - else - start = start.next end - elseif id == math_code then - start = end_of_math(start).next else - start = start.next - end - elseif id == whatsit_code then -- will be function - local subtype = start.subtype - if subtype == dir_code then - local dir = start.dir - if dir == "+TRT" or dir == "+TLT" then - topstack = topstack + 1 - dirstack[topstack] = dir - elseif dir == "-TRT" or dir == "-TLT" then - topstack = topstack - 1 - end - local newdir = dirstack[topstack] - if newdir == "+TRT" then - rlmode = -1 - elseif newdir == "+TLT" then - rlmode = 1 - else - rlmode = rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype == localpar_code then - local dir = start.dir - if dir == "TRT" then - rlparmode = -1 - elseif dir == "TLT" then - rlparmode = 1 - else - rlparmode = 0 - end - rlmode = rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end + report_missing_cache(typ,lookupname) end - start = start.next - elseif id == math_code then - start = end_of_math(start).next - else - start = start.next end + if start then start = start.next end + else + start = start.next end + else + start = start.next end - else - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - for i=1,ns do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- we could move all 
code inline but that makes things even more unreadable - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - success = true - break - elseif not start then - -- don't ask why ... shouldn't happen - break - end - end - else - report_missing_cache(typ,lookupname) + end + if done then + success = true + return head + end + end + + while start do + local id = start.id + if id == glyph_code then + if start.font == font and start.subtype<256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- we could move all code inline but that makes things even more unreadable + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + success = true + break + elseif not start then + -- don't ask why ... shouldn't happen + break end end - if start then start = start.next end else - start = start.next + report_missing_cache(typ,lookupname) end - else - start = start.next end - elseif id == whatsit_code then - local subtype = start.subtype - if subtype == dir_code then - local dir = start.dir - if dir == "+TRT" or dir == "+TLT" then - topstack = topstack + 1 - dirstack[topstack] = dir - elseif dir == "-TRT" or dir == "-TLT" then - topstack = topstack - 1 - end - local newdir = dirstack[topstack] - if newdir == "+TRT" then - rlmode = -1 - elseif newdir == "+TLT" then - rlmode = 1 - else - rlmode = rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype == localpar_code then - local dir = start.dir - if dir == "TRT" then - rlparmode = -1 - elseif dir == "TLT" then - rlparmode = 1 - else - rlparmode = 0 - end - rlmode = rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start = start.next - elseif id == math_code then - start = end_of_math(start).next + if start then start = start.next end else start = start.next end + else + start = start.next end + elseif id == disc_code then + -- mostly for gsub + if start.subtype == discretionary_code then + local pre = start.pre + if pre then + local new = subrun(pre) + if new then start.pre = new end + end + local post = start.post + if post then + local new = subrun(post) + if new then start.post = new end + end + local replace = start.replace + if replace then + local new = subrun(replace) + if new then start.replace = new end + end + end + start = start.next + elseif id == whatsit_code then + local subtype = start.subtype + if subtype == dir_code then + local dir = start.dir + if dir == "+TRT" or dir == "+TLT" then + topstack = topstack + 1 + dirstack[topstack] = dir + elseif dir == "-TRT" or dir == "-TLT" then + topstack = topstack - 1 + end + local newdir = dirstack[topstack] + if newdir == "+TRT" then + rlmode = -1 + elseif newdir == "+TLT" then + rlmode = 1 + else + rlmode = rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype == localpar_code then + 
local dir = start.dir + if dir == "TRT" then + rlparmode = -1 + elseif dir == "TLT" then + rlparmode = 1 + else + rlparmode = 0 + end + rlmode = rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start = start.next + elseif id == math_code then + start = end_of_math(start).next + else + start = start.next end end - if success then - done = true - end - if trace_steps then -- ? - registerstep(head) - end - --- end --- else --- -- report_process("warning, no dataset %a",s) --- end - + end + end + if success then + done = true + end + if trace_steps then -- ? + registerstep(head) + end end return head, done end diff --git a/tex/context/base/font-otx.lua b/tex/context/base/font-otx.lua index 5c41ad66f..f39045223 100644 --- a/tex/context/base/font-otx.lua +++ b/tex/context/base/font-otx.lua @@ -32,6 +32,7 @@ local a_state = attributes.private('state') local nodecodes = nodes.nodecodes local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc local math_code = nodecodes.math local traverse_id = node.traverse_id @@ -79,6 +80,12 @@ local features = { fina = s_fina, isol = s_isol, -- mark = s_mark, + -- rest = s_rest, + rphf = s_rphf, + half = s_half, + pref = s_pref, + blwf = s_blwf, + pstf = s_pstf, } analyzers.states = states @@ -123,7 +130,7 @@ function analyzers.setstate(head,font) end elseif id == disc_code then -- always in the middle - current[a_state] = s_midi + current[a_state] = s_medi last = current else -- finish if first and first == last then @@ -184,7 +191,7 @@ end registerotffeature { name = "analyze", - description = "analysis of (for instance) character classes", + description = "analysis of character classes", default = true, initializers = { node = analyzeinitializer, diff --git a/tex/context/base/font-pat.lua b/tex/context/base/font-pat.lua index 3ad37641c..9733c9ada 100644 --- a/tex/context/base/font-pat.lua +++ b/tex/context/base/font-pat.lua @@ -20,25 +20,25 @@ local patches = otf.enhancers.patches local register = patches.register local report = patches.report -local function patch(data,filename) - if data.design_size == 0 then - local ds = match(file.basename(lower(filename)),"(%d+)") - if ds then - report("font %a has design size %a",filename,ds) - data.design_size = tonumber(ds) * 10 - end - end -end - -register("after","migrate metadata","^lmroman", patch) -register("after","migrate metadata","^lmsans", patch) -register("after","migrate metadata","^lmtypewriter",patch) +-- local function patch(data,filename) +-- if not metadata.design_size or metadata.design_size == 0 then +-- local ds = match(file.basename(lower(filename)),"(%d+)") +-- if ds then +-- report("font %a has design size %a",filename,ds) +-- metadata.design_size = tonumber(ds) * 10 +-- end +-- end +-- end +-- +-- register("after","migrate metadata","^lmroman", patch) +-- register("after","migrate metadata","^lmsans", patch) +-- register("after","migrate metadata","^lmtypewriter",patch) -- For some reason (either it's a bug in the font, or it's a problem in the -- library) the palatino arabic fonts don't have the mkmk features properly -- set up. 
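The patch below is registered through the same interface as the (now disabled) design-size patches above: the first argument appears to select running before or after the named enhancer step, the second names that step, the third is a Lua pattern selecting which fonts the patch applies to, and the handler receives the raw font data plus the filename. A purely hypothetical sketch of the pattern (names invented, not part of these sources):

    -- sketch only: registering a hypothetical repair step for some font family
    -- register("after", "check metadata", "^somefontname", function(data, filename)
    --     -- inspect or adjust data.gpos / data.gsub / data.metadata here
    -- end)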
-local function patch(data,filename) +register("after","rehash features","^palatino.*arabic", function patch(data,filename) local gpos = data.gpos if gpos then for k=1,#gpos do @@ -61,9 +61,7 @@ local function patch(data,filename) end end end -end - -register("after","rehash features","palatino.*arabic",patch) +end) -- -- this code is now in lm-math.lfg -- diff --git a/tex/context/base/font-pre.mkiv b/tex/context/base/font-pre.mkiv index b03abed7d..75f42f8f5 100644 --- a/tex/context/base/font-pre.mkiv +++ b/tex/context/base/font-pre.mkiv @@ -17,11 +17,15 @@ %D A basic set of features is defined here. +% beware, base mode + dynamics can give weird effects + +% rlig ccmp + \definefontfeature [always] - [mode=auto, - script=auto, - kern=yes, + [mode=node, % we had 'auto', but let's try 'node' for a while and see what the impact is + script=auto, % on speed; 'base' just doesn't play well with dynamics; some day we can even + kern=yes, % consider skipping the base passes when no base mode is used mark=yes, mkmk=yes, curs=yes] @@ -59,6 +63,15 @@ tlig=yes, trep=yes] +\definefontfeature + [letterspacing] + [liga=no, + rlig=no, + clig=no, + dlig=no, + ccmp=yes, + keepligatures=auto] + \definefontfeature % can be used for type1 fonts [complete] [always] @@ -72,28 +85,38 @@ [mode=none, features=no] -\definefontfeature % might move - [arabic] - [mode=node,language=dflt,script=arab,ccmp=yes, +\definefontfeature + [semetic-complete] + [mode=node,analyze=yes,language=dflt,ccmp=yes, init=yes,medi=yes,fina=yes,isol=yes, - liga=yes,dlig=yes,rlig=yes,clig=yes,calt=yes, - mark=yes,mkmk=yes,kern=yes,curs=yes] + mark=yes,mkmk=yes,kern=yes,curs=yes, + liga=yes,dlig=yes,rlig=yes,clig=yes,calt=yes] + +\definefontfeature + [semetic-simple] + [mode=node,analyze=yes,language=dflt,ccmp=yes, + init=yes,medi=yes,fina=yes,isol=yes, + mark=yes,mkmk=yes,kern=yes,curs=yes, + rlig=yes,calt=yes] \definefontfeature - [hebrew] [arabic] + [semetic-complete] + [script=arab] + +\definefontfeature + [hebrew] + [semetic-complete] [script=hebr] -\definefontfeature % might move +\definefontfeature [simplearabic] - [mode=node,language=dflt,script=arab, - init=yes,medi=yes,fina=yes, - rlig=yes,calt=yes, - mark=yes,mkmk=yes,curs=yes] + [semetic-simple] + [script=arab] \definefontfeature [simplehebrew] - [simplearabic] + [semetic-simple] [script=hebr] % \definefont [DevaOne] [file:chandas.ttf*devanagari-one at 12pt] @@ -151,6 +174,7 @@ trep=yes, mathalternates=yes, mathitalics=yes, + % mathgaps=yes, % nomathitalics=yes, % don't pass to tex, might become default language=dflt, script=math] @@ -195,6 +219,21 @@ [missing] [missing=yes] +%D Nice to have too: + +\definefontfeature + [quality] + [expansion=quality, + protrusion=quality] + +\definefontfeature + [slanted] + [slant=.2] + +\definefontfeature + [boldened] + [extend=1.2] + %D We define some colors that are used in tracing (for instance \OPENTYPE\ %D features). We cannot yet inherit because no colors are predefined. 
@@ -497,6 +536,7 @@ \definealternativestyle [\v!Words] [{\setcharactercasing[\v!Words ]}] [{\setcharactercasing[\v!Words ]}] \definealternativestyle [\v!capital] [{\setcharactercasing[\v!capital]}] [{\setcharactercasing[\v!capital]}] \definealternativestyle [\v!Capital] [{\setcharactercasing[\v!Capital]}] [{\setcharactercasing[\v!Capital]}] +\definealternativestyle [\v!mixed] [{\setcharactercasing[\v!mixed ]}] [{\setcharactercasing[\v!mixed ]}] \definealternativestyle [\v!cap] [{\setcharactercasing[\v!cap ]}] [{\setcharactercasing[\v!cap ]}] \definealternativestyle [\v!Cap] [{\setcharactercasing[\v!Cap ]}] [{\setcharactercasing[\v!Cap ]}] @@ -523,7 +563,7 @@ %D %D For tracing purposes we define: -\definefont[tinyfont][Mono at 1ex] +\definefont[tinyfont][dejavusansmono at 1ex] %D \macros %D {infofont} @@ -535,7 +575,7 @@ \let\infofont\relax % satisfy dep checker -\definefont [infofont] [Mono at 6pt] % todo \the\everybodyfont +\definefont[infofont][dejavusansmono at 6pt] % todo \the\everybodyfont \protect \endinput diff --git a/tex/context/base/font-sel.lua b/tex/context/base/font-sel.lua new file mode 100644 index 000000000..47268ade8 --- /dev/null +++ b/tex/context/base/font-sel.lua @@ -0,0 +1,675 @@ +if not modules then modules = { } end modules ['font-sel'] = { + version = 1.000, + comment = "companion to font-sel.mkvi", + author = "Wolfgang Schuster", + copyright = "Wolfgang Schuster", + license = "GNU General Public License" +} + +local context = context +local cleanname = fonts.names.cleanname +local gsub, splitup, find = string.gsub, string.splitup, string.find +local formatters = string.formatters +local settings_to_array = utilities.parsers.settings_to_array + +local v_yes = interfaces.variables.yes +local v_simplefonts = interfaces.variables.simplefonts +local v_selectfont = interfaces.variables.selectfont +local v_default = interfaces.variables.default + +local selectfont = fonts.select or { } +fonts.select = selectfont + +local data = selectfont.data or { } +selectfont.data = data + +local fallbacks = selectfont.fallbacks or { } +selectfont.fallbacks = fallbacks + +local methods = selectfont.methods or { } +selectfont.methods = methods + +local getlookups = fonts.names.getlookups +local registerdesignsizes = fonts.goodies.designsizes.register + +local alternatives = { + ["tf"] = "regular", + ["it"] = "italic", + ["sl"] = "slanted", + ["bf"] = "bold", + ["bi"] = "bolditalic", + ["bs"] = "boldslanted", + ["sc"] = "smallcaps", +} + +local styles = { + ["rm"] = "serif", + ["ss"] = "sans", + ["tt"] = "mono", + ["hw"] = "handwriting", + ["cg"] = "calligraphy", + ["mm"] = "math", +} + +local sizes = { + ["default"] = { + { 40, "4pt" }, + { 50, "5pt" }, + { 60, "6pt" }, + { 70, "7pt" }, + { 80, "8pt" }, + { 90, "9pt" }, + { 100, "10pt" }, + { 110, "11pt" }, + { 120, "12pt" }, + { 144, "14.4pt" }, + { 173, "17.3pt" }, + }, + ["dtp"] = { + { 50, "5pt" }, + { 60, "6pt" }, + { 70, "7pt" }, + { 80, "8pt" }, + { 90, "9pt" }, + { 100, "10pt" }, + { 110, "11pt" }, + { 120, "12pt" }, + { 130, "13pt" }, + { 140, "14pt" }, + { 160, "16pt" }, + { 180, "18pt" }, + { 220, "22pt" }, + { 280, "28pt" }, + } +} + +local synonyms = { + ["rm"] = { + ["tf"] = "Serif", + ["it"] = "SerifItalic", + ["sl"] = "SerifSlanted", + ["bf"] = "SerifBold", + ["bi"] = "SerifBoldItalic", + ["bs"] = "SerifBoldSlanted", + ["sc"] = "SerifCaps", + }, + ["ss"] = { + ["tf"] = "Sans", + ["it"] = "SansItalic", + ["sl"] = "SansSlanted", + ["bf"] = "SansBold", + ["bi"] = "SansBoldItalic", + ["bs"] = "SansBoldSlanted", + ["sc"] = 
"SansCaps", + }, + ["tt"] = { + ["tf"] = "Mono", + ["it"] = "MonoItalic", + ["sl"] = "MonoSlanted", + ["bf"] = "MonoBold", + ["bi"] = "MonoBoldItalic", + ["bs"] = "MonoBoldSlanted", + ["sc"] = "MonoCaps", + }, + ["hw"] = { + ["tf"] = "Handwriting", + }, + ["cg"] = { + ["tf"] = "Calligraphy", + }, + ["mm"] = { + ["tf"] = "MathRoman", + ["bf"] = "MathBold", + } +} + +local replacement = { + ["style"] = { + ["it"] = "tf", + ["sl"] = "it", + ["bf"] = "tf", + ["bi"] = "bf", + ["bs"] = "bi", + ["sc"] = "tf", + }, + ["weight"] = { + ["it"] = "tf", + ["sl"] = "tf", + ["bf"] = "tf", + ["bi"] = "bf", + ["bs"] = "bf", + ["sc"] = "tf", + }, +} + +local names = { + ["selectfont"] = { -- weight, style, width, variant, italic + ["regular"] = { weight = "normal", style = "normal", width = "normal", variant = "normal", italic = false }, + ["italic"] = { weight = "normal", style = "italic", width = "normal", variant = "normal", italic = true }, + ["slanted"] = { weight = "normal", style = "slanted", width = "normal", variant = "normal", italic = true }, + ["medium"] = { weight = "medium", style = "normal", width = "normal", variant = "normal", italic = false }, + ["mediumitalic"] = { weight = "medium", style = "italic", width = "normal", variant = "normal", italic = true }, + ["mediumcaps"] = { weight = "medium", style = "normal", width = "normal", variant = "smallcaps", italic = true }, + ["bold"] = { weight = "bold", style = "normal", width = "normal", variant = "normal", italic = false }, + ["bolditalic"] = { weight = "bold", style = "italic", width = "normal", variant = "normal", italic = true }, + ["boldslanted"] = { weight = "bold", style = "slanted", width = "normal", variant = "normal", italic = true }, + ["smallcaps"] = { weight = "normal", style = "normal", width = "normal", variant = "smallcaps", italic = false }, + }, + ["simplefonts"] = { + ["light"] = { "lightregular", "light" }, + ["lightitalic"] = { "lightitalic", "lightit", "lightoblique" }, + ["lightcaps"] = { "smallcapslight" }, + ["regular"] = { "roman", "regular", "book", "" }, + ["italic"] = { "italic", "it", "oblique", "kursiv", "bookitalic", "bookit" }, + ["medium"] = { "mediumregular", "medregular", "medium" }, + ["mediumitalic"] = { "mediumitalic", "meditalic" }, + ["mediumcaps"] = { "mediumcaps" }, + ["bold"] = { "bold", "bd", "kraeftig", "mediumregular", "semibold", "demi" }, + ["bolditalic"] = { "bolditalic", "boldit", "bdit", "boldoblique", "mediumitalic", "semibolditalic", "demiitalic" }, + ["smallcaps"] = { "smallcaps", "capitals", "sc" }, + ["heavy"] = { "heavyregular", "heavy" }, + ["heavyitalic"] = { "heavyitalic" }, + }, + ["default"] = { -- weight, width, italic + ["thin"] = { weight = { 100, 200, 300, 400, 500 }, width = 5, italic = false }, + ["extralight"] = { weight = { 200, 100, 300, 400, 500 }, width = 5, italic = false }, + ["light"] = { weight = { 300, 200, 100, 400, 500 }, width = 5, italic = false }, + ["regular"] = { weight = { 400, 500, 300, 200, 100 }, width = 5, italic = false }, + ["italic"] = { weight = { 400, 500, 300, 200, 100 }, width = 5, italic = true }, + ["medium"] = { weight = { 500, 400, 300, 200, 100 }, width = 5, italic = false }, + ["demibold"] = { weight = { 600, 700, 800, 900 }, width = 5, italic = false }, + ["bold"] = { weight = { 700, 600, 800, 900 }, width = 5, italic = false }, + ["bolditalic"] = { weight = { 700, 600, 800, 900 }, width = 5, italic = true }, + ["smallcaps"] = { weight = { 400, 500, 300, 200, 100 }, width = 5, italic = false }, + ["heavy"] = { weight = { 800, 900, 700, 
600 }, width = 5, italic = false }, + ["black"] = { weight = { 900, 800, 700, 600 }, width = 5, italic = false }, + } +} + +names.simplefonts.slanted = names.simplefonts.italic +names.simplefonts.boldslanted = names.simplefonts.bolditalic + +names.default.normal = names.default.regular +names.default.slanted = names.default.italic +names.default.semibold = names.default.demibold +names.default.boldslanted = names.default.bolditalic + +local mathsettings = { + ["asanamath"] = { + extras = "asana-math", + goodies = { + ["tf"] = "anana-math", + }, + features = { + ["tf"] = "math\\mathsizesuffix", + }, + }, + ["cambriamath"] = { + extras = "cambria-math", + goodies = { + ["tf"] = "cambria-math", + }, + features = { + ["tf"] = "math\\mathsizesuffix", + }, + }, + ["neoeuler"] = { + extras = "euler-math", + features = { + ["tf"] = "math\\mathsizesuffix", + }, + }, + ["latinmodernmath"] = { + extras = "lm,lm-math", + goodies = { + ["tf"] = "lm", + }, + features = { + ["tf"] = "math\\mathsizesuffix,lm-math", + }, + }, + ["lucidabrightmathot"] = { + extras = "lucida-opentype-math", + goodies = { + ["tf"] = "lucida-opentype-math", + }, + features = { + ["tf"] = "math\\mathsizesuffix", + }, + }, + ["texgyrepagellamath"] = { + extras = "texgyre", + features = { + ["tf"] = "math\\mathsizesuffix", + }, + }, + ["texgyrebonummath"] = { + extras = "texgyre", + features = { + ["tf"] = "math\\mathsizesuffix", + }, + }, + ["texgyretermesmath"] = { + extras = "texgyre", + features = { + ["tf"] = "math\\mathsizesuffix", + }, + }, + ["xitsmath"] = { + extras = "xits-math", + goodies = { + ["tf"] = "xits-math", + }, + features = { + ["tf"] = "math\\mathsizesuffix", + }, + }, +} + +function commands.defineselectfont(settings) + local index = #data + 1 + data[index] = settings + selectfont.searchfiles(index) + selectfont.filterinput(index) + context(index) +end + +local function savefont(data,alternative,entries) + local f = data.fonts + if not f then + f = { } + data.fonts = f + end + f[alternative] = entries +end + +local function savefeatures(data,alternative,entries) + local f = data.features + if not f then + f = { } + data.features = f + end + f[alternative] = entries +end + +local function savegoodies(data,alternative,entries) + local g = data.goodies + if not f then + g = { } + data.goodies = g + end + g[alternative] = entries +end + +methods[v_simplefonts] = function(data,alternative,style) + local family = data.metadata.family + local names = names["simplefonts"][style] or names["simplefonts"]["regular"] + for _, name in next, names do + local filename = cleanname(formatters["%s%s"](family,name)) + local fullname = getlookups{ fullname = filename } + local fontname = getlookups{ fontname = filename } + local cleanfilename = getlookups{ cleanfilename = filename } + if #fullname > 0 then + savefont(data,alternative,fullname) + break + elseif #fontname > 0 then + savefont(data,alternative,fontname) + break + elseif #cleanfilename > 0 then + savefont(data,alternative,cleanfilename) + break + end + end +end + +methods[v_default] = function(data,alternative,style) + local family = data.metadata.family + local spec = names["default"][style] or names["default"]["regular"] + local weights = spec["weight"] + for _, weight in next, weights do + local pattern = getlookups{ + familyname = cleanname(family), + pfmweight = weight, + pfmwidth = spec["width"], + } + if #pattern > 0 then + local fontfiles = { } + for _, fontfile in next, pattern do + if (fontfile["angle"] and spec["italic"] == true) or (not 
fontfile["angle"] and spec["italic"] == false) then + fontfiles[#fontfiles + 1] = fontfile + end + end + savefont(data,alternative,fontfiles) + break + end + end +end + +methods[v_selectfont] = function(data,alternative,style) + local family = data.metadata.family + local spec = names["selectfont"][style] or names["selectfont"]["regular"] + local pattern = getlookups{ + familyname = cleanname(family), + weight = spec["weight"], + style = spec["style"], + width = spec["width"], + variant = spec["variant"] + } + if #pattern > 0 then + local fontfiles = { } + for _, fontfile in next, pattern do + if (fontfile["angle"] and spec["italic"] == true) or (not fontfile["angle"] and spec["italic"] == false) then + fontfiles[#fontfiles + 1] = fontfile + end + end + savefont(data,alternative,fontfiles) + end +end + +methods["name"] = function(data,alternative,filename) + local data = data + local family = data.metadata.family + local filename = cleanname(gsub(filename,"*",family)) + local fullname = getlookups{ fullname = filename } + local fontname = getlookups{ fontname = filename } + if #fullname > 0 then + savefont(data,alternative,fullname) + elseif #fontname > 0 then + savefont(data,alternative,fontname) + end +end + +methods["file"] = function(data,alternative,filename) + local data = data + local family = data.metadata.family + local filename = gsub(file.removesuffix(filename),"*",family) + local filename = getlookups{ cleanfilename = cleanname(filename) } + if #filename > 0 then + savefont(data,alternative,filename) + end +end + +methods["spec"] = function(data,alternative,filename) + local family = data.metadata.family + local weight, style, width, variant = splitup(filename,"-") + local pattern = getlookups{ + familyname = cleanname(family), + weight = weight or "normal", + style = style or "normal", + width = width or "normal", + variant = variant or "normal", + } + if #pattern > 0 then + savefont(data,alternative,pattern) + end +end + +methods["style"] = function(data,alternative,style) + local method = data.options.alternative or nil + (methods[method] or methods[v_default])(data,alternative,style) +end + +methods["features"] = function(data,alternative,features) + savefeatures(data,alternative,features) +end + +methods["goodies"] = function(data,alternative,goodies) + savegoodies(data,alternative,goodies) +end + +function selectfont.searchfiles(index) + local data = data[index] + for alternative, _ in next, alternatives do + local filename = data.files[alternative] + local method = data.options.alternative + local family = data.metadata.family + local style = alternatives[alternative] + if filename == "" then + local pattern = getlookups{ familyname = cleanname(family) } + if #pattern == 1 and alternative == "tf" then -- needs to be improved + savefont(data,alternative,pattern) + else + (methods[method] or methods[v_default])(data,alternative,style) + end + else + method, filename = splitup(filename,":") + if not filename then + filename = method + method = "name" + end + (methods[method] or methods["name"])(data,alternative,filename) + end + end +end + +function selectfont.filterinput(index) + local data = data[index] + for alternative, _ in next, alternatives do + local list = settings_to_array(data.alternatives[alternative]) + for _, entry in next, list do + method, entries = splitup(entry,":") + if not entries then + entries = method + method = "name" + end + (methods[method] or methods["name"])(data,alternative,entries) + end + end +end + +local function 
definefontsynonym(data,alternative,index,fallback) + local fontdata = data.fonts and data.fonts[alternative] + local style = data.metadata.style + local typeface = data.metadata.typeface + local mathsettings = mathsettings[cleanname(data.metadata.family)] + local features = mathsettings and mathsettings["features"] and (mathsettings["features"][alternative] or mathsettings["features"]["tf"]) or data.features and data.features[alternative] or "" + local goodies = mathsettings and mathsettings["goodies"] and (mathsettings["goodies"] [alternative] or mathsettings["goodies"] ["tf"]) or data.goodies and data.goodies [alternative] or "" + local parent = replacement["style"][alternative] or "" + local fontname, fontfile, fontparent + if fallback then + fontname = formatters["%s-%s-%s-fallback-%s"](typeface, style, alternative, index) + fontfile = formatters["%s-%s-%s-%s"] (typeface, style, alternative, index) + fontparent = formatters["%s-%s-%s-fallback-%s"](typeface, style, parent, index) + else + fontname = synonyms[style][alternative] + fontfile = formatters["%s-%s-%s"](typeface, style, alternative) + fontparent = formatters["%s-%s-%s"](typeface, style, parent) + end + if fontdata and #fontdata > 0 then + for _, size in next, sizes["default"] do + for _, entry in next, fontdata do + if entry["minsize"] and entry["maxsize"] then + if size[1] > entry["minsize"] and size[1] <= entry["maxsize"] then + registerdesignsizes( fontfile, size[2], entry["filename"] ) + end + end + end + end + for _, entry in next, fontdata do + local filename = entry["filename"] + local designsize = entry["designsize"] or 100 + if designsize == 100 or designsize == 120 or designsize == 0 then + registerdesignsizes( fontfile, "default", filename ) + break + end + end + if fallback then + context.definefontsynonym( { fontname }, { fontfile }, { features = features } ) + else + context.definefontsynonym( { fontname }, { fontfile }, { features = features, fallbacks = fontfile, goodies = goodies } ) + end + else + if fallback then + context.definefontsynonym( { fontname }, { fontparent }, { features = features } ) + else + context.definefontsynonym( { fontname }, { fontparent }, { features = features, fallbacks = fontfile, goodies = goodies } ) + end + end +end + +local function definetypescript(index) + local data = data[index] + local entry = data.fonts + local mathsettings = mathsettings[cleanname(data.metadata.family)] + local goodies = mathsettings and mathsettings.extras or data.options.goodies + local typeface = data.metadata.typeface + local style = data.metadata.style + if entry and entry["tf"] then + context.startfontclass( { typeface } ) + if goodies ~= "" then + goodies = utilities.parsers.settings_to_array(goodies) + for _, goodie in next, goodies do + context.loadfontgoodies( { goodie } ) + end + end + for alternative, _ in next, alternatives do + if synonyms[style][alternative] then -- prevent unnecessary synonyms for handwriting, calligraphy and math + definefontsynonym(data,alternative) + end + end + context.stopfontclass() + else + -- regular style not available, loading aborted + end +end + +function selectfont.registerfallback(typeface,style,index) + local t = fallbacks[typeface] + if not t then + fallbacks[typeface] = { [style] = { index } } + else + local s = t[style] + if not s then + fallbacks[typeface][style] = { index } + else + fallbacks[typeface][style][#s+1] = index + end + end +end + +local function definetextfontfallback(data,alternative,index) + local typeface = data.metadata.typeface + local 
style = data.metadata.style + local features = data.features[alternative] + local range = data.options.range + local rscale = data.options.scale ~= "" and data.options.scale or 1 + local check = data.options.check ~= "" and data.options.check or "yes" + local force = data.options.force ~= "" and data.options.force or "yes" + local synonym = formatters["%s-%s-%s-fallback-%s"](typeface, style, alternative, index) + local fallback = formatters["%s-%s-%s"] (typeface, style, alternative) + if index == 1 then + context.resetfontfallback( { fallback } ) + end + context.definefontfallback( { fallback }, { synonym }, { range }, { rscale = rscale, check = check, force = force } ) +end + +local function definetextfallback(entry,index) + local data = data[index] + local typeface = data.metadata.typeface + context.startfontclass( { typeface } ) + for alternative, _ in next, alternatives do + definefontsynonym (data,alternative,entry,true) + definetextfontfallback(data,alternative,entry) + end + context.stopfontclass() + -- inspect(data) +end + +local function definemathfontfallback(data,alternative,index) + local typeface = data.metadata.typeface + local style = data.metadata.style + local range = data.options.range + local rscale = data.options.scale ~= "" and data.options.scale or 1 + local check = data.options.check ~= "" and data.options.check or "yes" + local force = data.options.force ~= "" and data.options.force or "yes" + local offset = data.options.offset + local features = data.features[alternative] + local fontdata = data.fonts and data.fonts[alternative] + local fallback = formatters["%s-%s-%s"](typeface, style, alternative) + if index == 1 then + context.resetfontfallback( { fallback } ) + end + if fontdata and #fontdata > 0 then + for _, entry in next, fontdata do + local filename = entry["filename"] + local designsize = entry["designsize"] or 100 + if designsize == 100 or designsize == 120 or designsize == 0 then + context.definefontfallback( { fallback }, { formatters["file:%s*%s"](filename,features) }, { range }, { rscale = rscale, check = check, force = force, offset = offset } ) + break + end + end + end +end + +local function definemathfallback(entry,index) + local data = data[index] + local typeface = data.metadata.typeface + local style = data.metadata.style + context.startfontclass( { typeface } ) + for alternative, _ in next, alternatives do + if synonyms[style][alternative] then + definemathfontfallback(data,alternative,entry) + end + end + context.stopfontclass() + -- inspect(data) +end + +local function definefallbackfont(index) + local data = data[index] + local f = fallbacks[data.metadata.typeface] + if f then + local s = f[data.metadata.style] + if s then + for entry, fallback in next, s do + if data.metadata.style == "mm" then + definemathfallback(entry,fallback) + else + definetextfallback(entry,fallback) + end + end + end + end +end + +local function definetextfont(index) + local data = data[index] + local fontclass = data.metadata.typeface + local shortstyle = data.metadata.style + local style = styles[data.metadata.style] + local designsize = data.options.opticals == v_yes and "auto" or "default" + local scale = data.options.scale ~= "" and data.options.scale or 1 + context.definetypeface( { fontclass }, { shortstyle }, { style }, { "" }, { "default" }, { designsize = designsize, rscale = scale } ) +end + +local function definemathfont(index) + local data = data[index] + local fontclass = data.metadata.typeface + local shortstyle = data.metadata.style + local style = 
styles[data.metadata.style] + local scale = data.options.scale ~= "" and data.options.scale or 1 + local typescript = cleanname(data.metadata.family) + local entries = data.fonts + if entries then + context.definetypeface( { fontclass }, { shortstyle }, { style }, { "" }, { "default" }, { rscale = scale } ) + else + context.definetypeface( { fontclass }, { shortstyle }, { style }, { typescript }, { "default" }, { rscale = scale } ) + end +end + +function selectfont.definetypeface(index) + local data = data[index] + if data.metadata.style == "mm" then + definefallbackfont(index) + definetypescript (index) + definemathfont (index) + else + definefallbackfont(index) + definetypescript (index) + definetextfont (index) + end + -- inspect(data) +end + +commands.definefontfamily = selectfont.definetypeface +commands.definefallbackfamily = selectfont.registerfallback diff --git a/tex/context/base/font-sel.mkvi b/tex/context/base/font-sel.mkvi new file mode 100644 index 000000000..2c022473b --- /dev/null +++ b/tex/context/base/font-sel.mkvi @@ -0,0 +1,367 @@ +%D \module +%D [ file=font-sel, +%D version=2013.10.19, +%D title=\CONTEXT\ User Module, +%D subtitle=Selectfont, +%D author=Wolfgang Schuster, +%D date=\currentdate, +%D copyright=Wolfgang Schuster, +%D license=GNU General Public License] + +\writestatus{loading}{ConTeXt User Module / Selectfont} + +\registerctxluafile{font-sel}{1.000} + +\unprotect + +\installcorenamespace {selectfont} +\installsimplecommandhandler \??selectfont {selectfont} + +\unexpanded\def\selectfont_setparameters[#settings]% + {\begingroup + \setupcurrentselectfont[#settings]% + \edef\p_selectfont_preset{\selectfontparameter\c!preset}% + \ifx\p_selectfont_preset\empty \else + \processcommacommand[\p_selectfont_preset]\selectfont_preset_process + \setupcurrentselectfont[#settings]% + \fi + \setexpandedselectfontparameter\c!style {\expandnamespaceparameter\??selectfontstyle \selectfontparameter\c!style \s!rm }% + \setexpandedselectfontparameter\c!alternative{\expandnamespaceparameter\??selectfontalternative\selectfontparameter\c!alternative\v!default}% + \xdef\selectfont_index{\ctxcommand{ + defineselectfont { + metadata = { + typeface = "\selectfontparameter\c!name", + style = "\selectfontparameter\c!style", + family = "\selectfontparameter\c!family", + }, + options = { + opticals = "\selectfontparameter\c!opticalsize", + scale = "\selectfontparameter\c!scale", + goodies = "\selectfontparameter\c!goodies", + alternative = "\selectfontparameter\c!alternative", + range = "\selectfontparameter\c!range", % fallback only + offset = "\selectfontparameter\c!offset", % fallback only + check = "\selectfontparameter\c!check", % fallback only + force = "\selectfontparameter\c!force", % fallback only + }, + alternatives = { + ["tf"] = "\selectfontparameter\s!tf", + ["bf"] = "\selectfontparameter\s!bf", + ["it"] = "\selectfontparameter\s!it", + ["sl"] = "\selectfontparameter\s!sl", + ["bi"] = "\selectfontparameter\s!bi", + ["bs"] = "\selectfontparameter\s!bs", + ["sc"] = "\selectfontparameter\s!sc", + }, + files = { + ["tf"] = "\selectfontparameter\c!regularfont", + ["bf"] = "\selectfontparameter\c!boldfont", + ["it"] = "\selectfontparameter\c!italicfont", + ["sl"] = "\selectfontparameter\c!slantedfont", + ["bi"] = "\selectfontparameter\c!bolditalicfont", + ["bs"] = "\selectfontparameter\c!boldslantedfont", + ["sc"] = "\selectfontparameter\c!smallcapsfont", + }, + features = { + ["tf"] = "\selectfontparameter\c!regularfeatures", + ["bf"] = "\selectfontparameter\c!boldfeatures", + 
["it"] = "\selectfontparameter\c!italicfeatures", + ["sl"] = "\selectfontparameter\c!slantedfeatures", + ["bi"] = "\selectfontparameter\c!bolditalicfeatures", + ["bs"] = "\selectfontparameter\c!boldslantedfeatures", + ["sc"] = "\selectfontparameter\c!smallcapsfeatures", + } + }}}% + \endgroup} + +%D \macros +%D {defineselectfontstyle} + +\installcorenamespace {selectfontstyle} + +\unexpanded\def\defineselectfontstyle + {\dodoubleargument\selectfont_style_define} + +\def\selectfont_style_define[#styles][#shortstyle]% + {\processcommalist[#styles]{\selectfont_style_define_indeed{#shortstyle}}} + +\def\selectfont_style_define_indeed#shortstyle#style% + {\setvalue{\??selectfontstyle#style}{#shortstyle}} + +\defineselectfontstyle [\s!rm,\s!serif] [\s!rm] +\defineselectfontstyle [\s!ss,\s!sans] [\s!ss] +\defineselectfontstyle [\s!tt,\s!mono] [\s!tt] +\defineselectfontstyle [\s!hw,\s!handwriting] [\s!hw] +\defineselectfontstyle [\s!cg,\s!calligraphy] [\s!cg] +\defineselectfontstyle [\s!mm,\s!math] [\s!mm] + +%D \macros +%D {definefontfamilypreset} + +\installcorenamespace {selectfontpreset} + +\unexpanded\def\definefontfamilypreset + {\dodoubleargument\selectfont_preset_define} + +\def\selectfont_preset_define[#name][#settings]% + {\doifassignmentelse{#settings} + {\setvalue{\??selectfontpreset#name}{\setupcurrentselectfont[#settings]}} + {\setvalue{\??selectfontpreset#name}{\csname\??selectfontpreset#settings\endcsname}}} + +\def\selectfont_preset_process#name% + {\ifcsname\??selectfontpreset#name\endcsname + \csname\??selectfontpreset#name\endcsname + \else + % unknown preset + \fi} + +\definefontfamilypreset [range:chinese] [\c!range={cjkcompatibilityforms,cjkcompatibilityideographs,cjkcompatibilityideographssupplement,cjkradicalssupplement,cjkstrokes,cjksymbolsandpunctuation,cjkunifiedideographs,cjkunifiedideographsextensiona,cjkunifiedideographsextensionb,halfwidthandfullwidthforms,verticalforms,bopomofo,bopomofoextended}] +\definefontfamilypreset [range:japanese] [\c!range={cjkcompatibilityforms,cjkcompatibilityideographs,cjkcompatibilityideographssupplement,cjkradicalssupplement,cjkstrokes,cjksymbolsandpunctuation,cjkunifiedideographs,cjkunifiedideographsextensiona,cjkunifiedideographsextensionb,halfwidthandfullwidthforms,verticalforms,hiragana,katakana}] +\definefontfamilypreset [range:korean] [\c!range={cjkcompatibilityforms,cjkcompatibilityideographs,cjkcompatibilityideographssupplement,cjkradicalssupplement,cjkstrokes,cjksymbolsandpunctuation,cjkunifiedideographs,cjkunifiedideographsextensiona,cjkunifiedideographsextensionb,halfwidthandfullwidthforms,verticalforms,hangulcompatibilityjamo,hanguljamo,hanguljamoextendeda,hanguljamoextendedb,hangulsyllables}] +\definefontfamilypreset [range:cyrillic] [\c!range={cyrillic,cyrillicextendeda,cyrillicextendedb,cyrillicsupplement}] +\definefontfamilypreset [range:greek] [\c!range={greekandcoptic,greekextended,ancientgreeknumbers}] + +\definefontfamilypreset [math:digitsnormal] [\c!range=digitsnormal] +\definefontfamilypreset [math:digitsbold] [\c!range=digitsnormal,\c!offset=digitsbold,\s!tf=style:bold] + +\definefontfamilypreset [math:uppercasenormal] [\c!range=uppercasenormal] +\definefontfamilypreset [math:uppercaseitalic] [\c!range=uppercasenormal,\c!offset=uppercaseitalic, \s!tf=style:italic] +\definefontfamilypreset [math:uppercasebold] [\c!range=uppercasenormal,\c!offset=uppercasebold, \s!tf=style:bold] +\definefontfamilypreset [math:uppercasebolditalic] [\c!range=uppercasenormal,\c!offset=uppercasebolditalic,\s!tf=style:bolditalic] + 
+\definefontfamilypreset [math:lowercasenormal] [\c!range=lowercasenormal] +\definefontfamilypreset [math:lowercaseitalic] [\c!range=lowercasenormal,\c!offset=lowercaseitalic, \s!tf=style:italic] +\definefontfamilypreset [math:lowercasebold] [\c!range=lowercasenormal,\c!offset=lowercasebold, \s!tf=style:bold] +\definefontfamilypreset [math:lowercasebolditalic] [\c!range=lowercasenormal,\c!offset=lowercasebolditalic,\s!tf=style:bolditalic] + +\definefontfamilypreset [math:mathematicaloperators] [\c!range=mathematicaloperators] + +\definefontfamilypreset [math:lowercasegreeknormal] [\c!range=lowercasegreeknormal] +\definefontfamilypreset [math:lowercasegreekitalic] [\c!range=lowercasegreeknormal,\c!offset=lowercasegreekitalic, \s!tf=style:italic] +\definefontfamilypreset [math:lowercasegreekbold] [\c!range=lowercasegreeknormal,\c!offset=lowercasegreekbold, \s!tf=style:bold] +\definefontfamilypreset [math:lowercasegreekbolditalic] [\c!range=lowercasegreeknormal,\c!offset=lowercasegreekbolditalic,\s!tf=style:bolditalic] + +\definefontfamilypreset [math:uppercasegreeknormal] [\c!range=uppercasegreeknormal] +\definefontfamilypreset [math:uppercasegreekitalic] [\c!range=uppercasegreeknormal,\c!offset=uppercasegreekitalic, \s!tf=style:italic] +\definefontfamilypreset [math:uppercasegreekbold] [\c!range=uppercasegreeknormal,\c!offset=uppercasegreekbold, \s!tf=style:bold] +\definefontfamilypreset [math:uppercasegreekbolditalic] [\c!range=uppercasegreeknormal,\c!offset=uppercasegreekbolditalic,\s!tf=style:bolditalic] + + +%D \macros +%D {defineselectfontalternative} +%D +%D The results between the old {\em simplefonts} and the new {\em selectfont} +%D can be different because simplefonts the name entries in the database to find +%D the styles for a font while selectfont the newer spec-method to the find the +%D files for each style. +%D +%D The used method depends on the command one uses to load a font but it is +%D also possible to switch between them with the {\em alternative} key, possible +%D values are: +%D +%D \startitemize[packed] +%D \startitem selectfont and \stopitem +%D \startitem simplefonts. \stopitem +%D \stopitemize + +\installcorenamespace {selectfontalternative} + +\unexpanded\def\defineselectfontalternative + {\dodoubleargument\selectfont_alternative_define} + +\def\selectfont_alternative_define[#name][#alternative]% + {\setvalue{\??selectfontalternative#name}{#alternative}} + +\defineselectfontalternative [\v!selectfont ] [\v!selectfont ] +\defineselectfontalternative [\v!simplefonts] [\v!simplefonts] +\defineselectfontalternative [\v!default ] [\v!default ] + +%D \macros +%D {definefontfamily,definefallbackfamily} +%D +%D The \tex{definefontfamily} creates like \tex{definetypeface} a collection of font +%D with different styles which can be later called with the \tex{setupbodyfont} command. +%D +%D The command takes three mendatory commands which are (a) the name of the fontclass, +%D (b) the styles of the font and (c) the name of the font. 
+%D +%D \starttyping +%D \definefontfamily [dejavu] [serif] [DejaVu Serif] +%D \definefontfamily [dejavu] [sans] [DejaVu Sans] +%D \definefontfamily [dejavu] [mono] [DejaVu Sans Mono] +%D \definefontfamily [dejavu] [math] [XITS Math] [scale=1.1] +%D +%D \definefontfamily [office] [serif] [Times New Roman] +%D \definefontfamily [office] [sans] [Arial] [scale=0.9] +%D \definefontfamily [office] [mono] [Courier] +%D \definefontfamily [office] [math] [TeX Gyre Termes Math] +%D +%D \definefontfamily [linux] [serif] [Linux Libertine O] +%D \definefontfamily [linux] [sans] [Linux Biolinum O] +%D \definefontfamily [linux] [mono] [Latin Modern Mono] +%D \definefontfamily [linux] [math] [TeX Gyre Pagella Math] [scale=0.9] +%D +%D \setupbodyfont[dejavu] +%D +%D \starttext +%D +%D \rm Serif \ss Sans \tt Mono \m{1+2=3} +%D +%D \switchtobodyfont[office] +%D +%D \rm Serif 123 \ss Sans \tt Mono \m{1+2=3} +%D +%D \switchtobodyfont[linux] +%D +%D \rm Serif 123 \ss Sans \tt Mono \m{1+2=3} +%D +%D \stoptext +%D \stoptyping +%D +%D When a document contains different languages and the global font lacks some characters +%D for one language, one could set a different font where these charcters are taken from. +%D This fallback font (there can be more than one for a certain style) could be set with +%D the \tex{definefallbackfamily} command which takes the same argument as +%D the \tex{definefontfamily} command. +%D +%D \starttyping +%D \definefallbackfamily [mainface] [serif] [DejaVu Serif] [range=cyrillic,force=yes] +%D \definefontfamily [mainface] [serif] [TeX Gyre Pagella] +%D +%D \setupbodyfont[mainface] +%D +%D \setuplanguage[en][patterns={us,ru}] +%D +%D \starttext +%D +%D \input knuth +%D +%D Традиционная систематика лишайников оказывается во многом условна и +%D +%D \stoptext +%D \stoptyping +%D +%D Another feature of the module is the \type{opticalsize} key which allows one to enable +%D optical sizes when they are a feature of the requested font. 
+%D +%D \starttyping +%D \definefontfamily[mainface][serif][Latin Modern Roman][opticalsize=yes] +%D +%D \setupbodyfont[mainface] +%D +%D \starttext +%D \scale[width=\textwidth]{\switchtobodyfont [6pt]\tf Regular, \it Italic \bf Bold and \bi BoldItalic} +%D \scale[width=\textwidth]{\switchtobodyfont [8pt]\tf Regular, \it Italic \bf Bold and \bi BoldItalic} +%D \scale[width=\textwidth]{\switchtobodyfont [10pt]\tf Regular, \it Italic \bf Bold and \bi BoldItalic} +%D \scale[width=\textwidth]{\switchtobodyfont [12pt]\tf Regular, \it Italic \bf Bold and \bi BoldItalic} +%D \scale[width=\textwidth]{\switchtobodyfont[17.3pt]\tf Regular, \it Italic \bf Bold and \bi BoldItalic} +%D \stoptext +%D \stoptyping + +% regularfont = … | * … | name:… | name:* … | file:… | file:* … | spec:…-…-… | style:medium + +\unexpanded\def\definefontfamily + {\doquadrupleempty\selectfont_family_define} + +\def\selectfont_family_define[#typeface][#style][#family][#settings]% + {\doifassignmentelse{#settings} + {\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},#settings]} + {\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},\c!preset={#settings}]}% + \ctxcommand{definefontfamily(\selectfont_index)}} + +\unexpanded\def\definefallbackfamily + {\doquadrupleempty\selectfont_fallback_define} + +\def\selectfont_fallback_define[#typeface][#style][#family][#settings]% + {\doifassignmentelse{#settings} + {\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},#settings]} + {\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},\c!preset={#settings}]}% + \edef\p_selectfont_style{\expandnamespacevalue\??selectfontstyle{#style}\s!rm}% + \ctxcommand{definefallbackfamily("#typeface","\p_selectfont_style",\selectfont_index)}} + +%D \macros +%D {setupfontfamily,setupfallbackfamily} +%D +%D For simple documents which don’t need complex font settings one could use +%D the \tex{setupfontfamily} command where the requested font is enabled immediately +%D without the need to load it with \tex{setupbodyfont}. The downside of this method +%D is that processing of the document takes longer with each additional font which +%D is set with \tex{setupfontfamily}. 
+%D +%D \starttyping +%D \setupfontfamily [serif] [DejaVu Serif] +%D \setupfontfamily [sans] [DejaVu Sans] +%D \setupfontfamily [mono] [DejaVu Sans Mono] +%D \setupfontfamily [math] [XITS Math] [scale=1.1] +%D +%D \starttext +%D +%D \rm Serif 123 \ss Sans \tt Mono \m{1+2=3} +%D +%D \stoptext +%D \stoptyping + +\newcount\c_selectfont_family +\newtoks \t_selectfont_fallback +\newtoks \t_selectfont_styles + +\unexpanded\def\setupfontfamily + {\dotripleempty\selectfont_family_setup} + +\def\selectfont_family_setup + {\ifsecondargument + \expandafter\selectfont_family_setup_yes + \else + \expandafter\selectfont_family_setup_nop + \fi} + +\def\selectfont_family_setup_yes[#style][#family][#settings]% + {\normalexpanded{\t_selectfont_styles{\selectfont_set_font_family[#style][#family][#settings]\the\t_selectfont_styles}}% + \selectfont_set_font_indeed} + +\def\selectfont_family_setup_nop[#settings][#dummya][#dummyb]% + {\setupselectfont[#settings]} + +\unexpanded\def\selectfont_set_default + {\selectfont_set_font_family[\v!serif][Latin Modern Roman][\c!opticalsize=\v!yes]% + \selectfont_set_font_family[\v!sans] [Latin Modern Sans] [\c!opticalsize=\v!yes]% + \selectfont_set_font_family[\v!mono] [Latin Modern Mono] [\c!opticalsize=\v!yes,\c!features=\s!none]} + +\unexpanded\def\setupfallbackfamily + {\dotripleempty\selectfont_fallback_setup} + +\def\selectfont_fallback_setup[#style][#family][#settings]% + {\normalexpanded{\t_selectfont_fallback{\the\t_selectfont_fallback\selectfont_set_font_fallback[#style][#family][#settings]}}} + +\def\selectfont_set_font_indeed + {\global\advance\c_selectfont_family\plusone + \edef\m_selectfont_typeface{\v!selectfont-\number\c_selectfont_family}% + \the\t_selectfont_fallback + \the\t_selectfont_styles + \selectfont_set_default + \setupbodyfont[\m_selectfont_typeface,\rootselectfontparameter\c!style]} + +\unexpanded\def\selectfont_set_font_family[#style]#dummy[#family]#dummy[#settings]% + {\ifcsname\m_selectfont_typeface#style\endcsname \else + \expandafter\let\csname\m_selectfont_typeface#style\endcsname\relax + \selectfont_family_define[\m_selectfont_typeface][#style][#family][#settings]% + \fi} + +\unexpanded\def\selectfont_set_font_fallback[#style]#dummy[#family]#dummy[#settings]% + {\selectfont_fallback_define[\m_selectfont_typeface][#style][#family][#settings]} + +%D You can apply a different feature set to each style of a font but if nothing +%D is set the global features are used. 
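On the Lua side all of the settings above are collected into one plain specification table that the defineselectfont call hands to font-sel.lua, and the returned index is what definefontfamily(index) later uses to choose between the text and math branches. A minimal, abridged sketch of that table's shape (the keys mirror the call above, the values here are made up):

local specification = {
    metadata = {
        typeface = "dejavu",         -- fontclass name
        style    = "rm",             -- normalized style: rm, ss, tt, hw, cg or mm
        family   = "DejaVu Serif",   -- family name as given by the user
    },
    options = {                      -- abridged; opticals, goodies, offset,
        scale       = "",            -- check and force are passed the same way
        alternative = "default",     -- selectfont, simplefonts or default lookup
        range       = "",            -- fallback definitions only
    },
    alternatives = { tf = "", bf = "", it = "", sl = "", bi = "", bs = "", sc = "" },
    files        = { tf = "", bf = "", it = "", sl = "", bi = "", bs = "", sc = "" },
    features     = {                 -- per-style features; empty entries fall
        tf = "default",              -- back to the global features key
        bf = "default",
        sc = "smallcaps",
    },
}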
+ +\setupselectfont + [ \c!features=\s!default, + \c!regularfeatures=\selectfontparameter\c!features, + \c!boldfeatures=\selectfontparameter\c!features, + \c!italicfeatures=\selectfontparameter\c!features, + \c!slantedfeatures=\selectfontparameter\c!features, + \c!bolditalicfeatures=\selectfontparameter\c!features, + \c!boldslantedfeatures=\selectfontparameter\c!features, + \c!smallcapsfeatures=\s!smallcaps, + \c!style=\s!rm] + +\protect \ No newline at end of file diff --git a/tex/context/base/font-sol.lua b/tex/context/base/font-sol.lua index db2dd24c2..9ccfd0588 100644 --- a/tex/context/base/font-sol.lua +++ b/tex/context/base/font-sol.lua @@ -571,7 +571,7 @@ local function doit(word,list,best,width,badness,line,set,listdir) local font = found.font local setdynamics = setfontdynamics[font] if setdynamics then - local processes = setdynamics(font,featurenumber) + local processes = setdynamics[featurenumber] for i=1,#processes do -- often more than 1 first = processes[i](first,font,featurenumber) end diff --git a/tex/context/base/font-sty.mkvi b/tex/context/base/font-sty.mkvi index 3caa94488..03fa598c2 100644 --- a/tex/context/base/font-sty.mkvi +++ b/tex/context/base/font-sty.mkvi @@ -172,6 +172,8 @@ %D The new one: +\setfalse\fontattributeisset + \unexpanded\def\dousestyleparameter#value% {\edef\currentstyleparameter{#value}% \ifx\currentstyleparameter\empty\else @@ -342,6 +344,9 @@ \font_styles_define_style_collection_a\s!default \fi} +\let\font_styles_define_style_collection_a\relax +\let\font_styles_define_style_collection_b\relax + \unexpanded\def\definestyleinstance {\doquadrupleargument\font_styles_define_style_instance} @@ -404,5 +409,4 @@ \let\dostopattributes\endgroup - \protect \endinput diff --git a/tex/context/base/font-sym.mkvi b/tex/context/base/font-sym.mkvi index e1d5332c4..c8ca49f74 100644 --- a/tex/context/base/font-sym.mkvi +++ b/tex/context/base/font-sym.mkvi @@ -52,6 +52,10 @@ \let\v_font_string_d\s!Serif % default fontstyle (will be redefined in type-ini) +\definefontsynonym + [CurrentFont] + [\noexpand\v_font_string_a\noexpand\v_font_string_c] + % potential generalization: % % \letvalue{\??fontfile:t:\s!rm}\s!Serif diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua index 27176dade..18da4f2e1 100644 --- a/tex/context/base/font-syn.lua +++ b/tex/context/base/font-syn.lua @@ -12,13 +12,14 @@ local next, tonumber, type, tostring = next, tonumber, type, tostring local sub, gsub, lower, match, find, lower, upper = string.sub, string.gsub, string.lower, string.match, string.find, string.lower, string.upper local find, gmatch = string.find, string.gmatch local concat, sort, format = table.concat, table.sort, string.format -local serialize = table.serialize +local serialize, sortedhash = table.serialize, table.sortedhash local lpegmatch = lpeg.match local unpack = unpack or table.unpack -local formatters = string.formatters +local formatters, topattern = string.formatters, string.topattern local allocate = utilities.storage.allocate local sparse = utilities.storage.sparse +local setmetatableindex = table.setmetatableindex local removesuffix = file.removesuffix local splitbase = file.splitbase @@ -34,11 +35,13 @@ local findfile = resolvers.findfile local cleanpath = resolvers.cleanpath local resolveresolved = resolvers.resolve +local settings_to_hash = utilities.parsers.settings_to_hash_tolerant + local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end) local trace_warnings = false 
trackers.register("fonts.warnings", function(v) trace_warnings = v end) local trace_specifications = false trackers.register("fonts.specifications", function(v) trace_specifications = v end) -local report_names = logs.reporter("fonts","names") +local report_names = logs.reporter("fonts","names") --[[ldx--

This module implements a name to filename resolver. Names are resolved @@ -53,9 +56,12 @@ fonts.names = names local filters = names.filters or { } names.filters = filters +local treatments = names.treatments or { } +names.treatments = treatments + names.data = names.data or allocate { } -names.version = 1.110 +names.version = 1.123 names.basename = "names" names.saved = false names.loaded = false @@ -90,7 +96,8 @@ local weights = Cs ( -- not extra + P("heavy") + P("ultra") + P("black") - + P("bol") -- / "bold" +--+ P("bol") / "bold" -- blocks + + P("bol") + P("regular") / "normal" ) @@ -105,8 +112,8 @@ local styles = Cs ( + P("oblique") / "italic" + P("slanted") + P("roman") / "normal" - + P("ital") / "italic" - + P("ita") / "italic" + + P("ital") / "italic" -- might be tricky + + P("ita") / "italic" -- might be tricky ) local normalized_styles = sparse { @@ -178,6 +185,28 @@ names.knownvariants = { "smallcaps", } +local remappedweights = { + [""] = "normal", + ["bol"] = "bold", +} + +local remappedstyles = { + [""] = "normal", +} + +local remappedwidths = { + [""] = "normal", +} + +local remappedvariants = { + [""] = "normal", +} + +names.remappedweights = remappedweights setmetatableindex(remappedweights ,"self") +names.remappedstyles = remappedstyles setmetatableindex(remappedstyles ,"self") +names.remappedwidths = remappedwidths setmetatableindex(remappedwidths ,"self") +names.remappedvariants = remappedvariants setmetatableindex(remappedvariants,"self") + local any = P(1) local analyzed_table @@ -245,6 +274,7 @@ function fontloader.fullinfo(...) -- check with taco what we get / could get end filters.otf = fontloader.fullinfo +filters.ttf = fontloader.fullinfo function filters.afm(name) -- we could parse the afm file as well, and then report an error but @@ -257,7 +287,7 @@ function filters.afm(name) local f = io.open(name) if f then local hash = { } - for line in f:lines() do + for line in f:lines() do -- slow local key, value = match(line,"^(.+)%s+(.+)%s*$") if key and #key > 0 then hash[lower(key)] = value @@ -420,15 +450,17 @@ local function check_name(data,result,filename,modification,suffix,subfont) -- prepare local names = check_names(result) -- fetch - local familyname = names and names.preffamilyname or result.familyname - local fullname = names and names.fullname or result.fullname - local fontname = result.fontname - local subfamily = names and names.subfamily - local modifiers = names and names.prefmodifiers - local weight = names and names.weight or result.weight - local italicangle = tonumber(result.italicangle) - local subfont = subfont or nil - local rawname = fullname or fontname or familyname + local familyname = names and names.preffamilyname or result.familyname + local fullname = names and names.fullname or result.fullname + local fontname = result.fontname + local subfamily = names and names.subfamily + local modifiers = names and names.prefmodifiers + local weight = names and names.weight or result.weight + local italicangle = tonumber(result.italicangle) + local subfont = subfont or nil + local rawname = fullname or fontname or familyname + local filebase = removesuffix(basename(filename)) + local cleanfilename = cleanname(filebase) -- for WS -- normalize familyname = familyname and cleanname(familyname) fullname = fullname and cleanname(fullname) @@ -458,27 +490,42 @@ local function check_name(data,result,filename,modification,suffix,subfont) if not familyname then familyname = a_name end - fontname = fontname or fullname or familyname or basename(filename) + 
fontname = fontname or fullname or familyname or filebase -- maybe cleanfilename fullname = fullname or fontname familyname = familyname or fontname + -- we do these sparse + local units = result.units_per_em or 1000 + local minsize = result.design_range_bottom or 0 + local maxsize = result.design_range_top or 0 + local designsize = result.design_size or 0 + local angle = result.italicangle or 0 + local pfminfo = result.pfminfo + local pfmwidth = pfminfo and pfminfo.width or 0 + local pfmweight = pfminfo and pfminfo.weight or 0 + -- specifications[#specifications + 1] = { - filename = filename, -- unresolved - format = lower(suffix), - subfont = subfont, - rawname = rawname, - familyname = familyname, - fullname = fullname, - fontname = fontname, - subfamily = subfamily, - modifiers = modifiers, - weight = weight, - style = style, - width = width, - variant = variant, - minsize = result.design_range_bottom or 0, - maxsize = result.design_range_top or 0, - designsize = result.design_size or 0, - modification = modification or 0, + filename = filename, -- unresolved + cleanfilename = cleanfilename, + format = lower(suffix), + subfont = subfont, + rawname = rawname, + familyname = familyname, + fullname = fullname, + fontname = fontname, + subfamily = subfamily, + modifiers = modifiers, + weight = weight, + style = style, + width = width, + variant = variant, + units = units ~= 1000 and unit or nil, + pfmwidth = pfmwidth ~= 0 and pfmwidth or nil, + pfmweight = pfmweight ~= 0 and pfmweight or nil, + angle = angle ~= 0 and angle or nil, + minsize = minsize ~= 0 and minsize or nil, + maxsize = maxsize ~= 0 and maxsize or nil, + designsize = designsize ~= 0 and designsize or nil, + modification = modification ~= 0 and modification or nil, } end @@ -502,10 +549,10 @@ local function cleanupkeywords() local style = b_style or c_style or d_style or e_style or f_style or "normal" local width = b_width or c_width or d_width or e_width or f_width or "normal" local variant = b_variant or c_variant or d_variant or e_variant or f_variant or "normal" - if not weight or weight == "" then weight = "normal" end - if not style or style == "" then style = "normal" end - if not width or width == "" then width = "normal" end - if not variant or variant == "" then variant = "normal" end + weight = remappedweights [weight or ""] + style = remappedstyles [style or ""] + width = remappedwidths [width or ""] + variant = remappedvariants[variant or ""] weights [weight ] = (weights [weight ] or 0) + 1 styles [style ] = (styles [style ] or 0) + 1 widths [width ] = (widths [width ] or 0) + 1 @@ -524,12 +571,22 @@ local function collectstatistics() local data = names.data local specifications = data.specifications if specifications then - local weights = { } - local styles = { } - local widths = { } - local variants = { } + local f_w = formatters["%i"] + local f_a = formatters["%0.2f"] + -- normal stuff + local weights = { } + local styles = { } + local widths = { } + local variants = { } + -- weird stuff + local angles = { } + -- extra stuff + local pfmweights = { } setmetatableindex(pfmweights,"table") + local pfmwidths = { } setmetatableindex(pfmwidths, "table") + -- main loop for i=1,#specifications do - local s = specifications[i] + local s = specifications[i] + -- normal stuff local weight = s.weight local style = s.style local width = s.width @@ -538,13 +595,64 @@ local function collectstatistics() if style then styles [style ] = (styles [style ] or 0) + 1 end if width then widths [width ] = (widths [width ] or 0) + 
1 end if variant then variants[variant] = (variants[variant] or 0) + 1 end + -- weird stuff + local angle = f_a(tonumber(s.angle) or 0) + angles[angle] = (angles[angles] or 0) + 1 + -- extra stuff + local pfmweight = f_w(s.pfmweight or 0) + local pfmwidth = f_w(s.pfmwidth or 0) + local tweights = pfmweights[pfmweight] + local twidths = pfmwidths [pfmwidth] + tweights[pfmweight] = (tweights[pfmweight] or 0) + 1 + twidths[pfmwidth] = (twidths [pfmwidth] or 0) + 1 + end + -- + local stats = data.statistics + stats.weights = weights + stats.styles = styles + stats.widths = widths + stats.variants = variants + stats.angles = angles + stats.pfmweights = pfmweights + stats.pfmwidths = pfmwidths + stats.fonts = #specifications + -- + setmetatableindex(pfmweights,nil) + setmetatableindex(pfmwidths, nil) + -- + report_names("") + report_names("weights") + report_names("") + report_names(formatters[" %T"](weights)) + report_names("") + report_names("styles") + report_names("") + report_names(formatters[" %T"](styles)) + report_names("") + report_names("widths") + report_names("") + report_names(formatters[" %T"](widths)) + report_names("") + report_names("variants") + report_names("") + report_names(formatters[" %T"](variants)) + report_names("") + report_names("angles") + report_names("") + report_names(formatters[" %T"](angles)) + report_names("") + report_names("pfmweights") + report_names("") + for k, v in sortedhash(pfmweights) do + report_names(formatters[" %-10s: %T"](k,v)) + end + report_names("") + report_names("pfmwidths") + report_names("") + for k, v in sortedhash(pfmwidths) do + report_names(formatters[" %-10s: %T"](k,v)) end - local stats = data.statistics - stats.weights = weights - stats.styles = styles - stats.widths = widths - stats.variants = variants - stats.fonts = #specifications + report_names("") end end @@ -608,8 +716,11 @@ local function checkduplicate(where) -- fails on "Romantik" but that's a border local specifications = data.specifications local loaded = { } if specifications and mapping then - for _, m in next, mapping do - for k, v in next, m do + -- was: for _, m in sortedhash(mapping) do + local order = filters.list + for i=1,#order do + local m = mapping[order[i]] + for k, v in sortedhash(m) do local s = specifications[v] local hash = formatters["%s-%s-%s-%s-%s"](s.familyname,s.weight or "*",s.style or "*",s.width or "*",s.variant or "*") local h = loaded[hash] @@ -633,7 +744,7 @@ local function checkduplicate(where) -- fails on "Romantik" but that's a border end end local n = 0 - for k, v in table.sortedhash(loaded) do + for k, v in sortedhash(loaded) do local nv = #v if nv > 1 then if trace_warnings then @@ -720,7 +831,7 @@ local function analyzefiles(olddata) local oldindices = olddata and olddata.indices or { } local oldspecifications = olddata and olddata.specifications or { } local oldrejected = olddata and olddata.rejected or { } - local treatmentdata = fonts.treatments.data + local treatmentdata = treatments.data or { } -- when used outside context local function identify(completename,name,suffix,storedname) local pathpart, basepart = splitbase(completename) nofread = nofread + 1 @@ -954,12 +1065,13 @@ function names.identify(force) analyzefiles(not force and names.readdata(names.basename)) rejectclashes() collectfamilies() - collectstatistics() + -- collectstatistics() cleanupkeywords() collecthashes() checkduplicates() addfilenames() -- sorthashes() -- will be resorted when saved + collectstatistics() report_names("total scan time %0.3f 
seconds",os.gettimeofday()-starttime) end @@ -1571,46 +1683,131 @@ end local lastlookups, lastpattern = { }, "" -function names.lookup(pattern,name,reload) -- todo: find - if lastpattern ~= pattern then - names.load(reload) - local specifications = names.data.specifications - local families = names.data.families - local lookups = specifications - if name then - lookups = families[name] - elseif not find(pattern,"=") then - lookups = families[pattern] +-- function names.lookup(pattern,name,reload) -- todo: find +-- if lastpattern ~= pattern then +-- names.load(reload) +-- local specifications = names.data.specifications +-- local families = names.data.families +-- local lookups = specifications +-- if name then +-- lookups = families[name] +-- elseif not find(pattern,"=") then +-- lookups = families[pattern] +-- end +-- if trace_names then +-- report_names("starting with %s lookups for %a",#lookups,pattern) +-- end +-- if lookups then +-- for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do +-- local t, n = { }, 0 +-- if find(value,"*") then +-- value = topattern(value) +-- for i=1,#lookups do +-- local s = lookups[i] +-- if find(s[key],value) then +-- n = n + 1 +-- t[n] = lookups[i] +-- end +-- end +-- else +-- for i=1,#lookups do +-- local s = lookups[i] +-- if s[key] == value then +-- n = n + 1 +-- t[n] = lookups[i] +-- end +-- end +-- end +-- if trace_names then +-- report_names("%s matches for key %a with value %a",#t,key,value) +-- end +-- lookups = t +-- end +-- end +-- lastpattern = pattern +-- lastlookups = lookups or { } +-- end +-- return #lastlookups +-- end + +local function look_them_up(lookups,specification) + for key, value in next, specification do + local t, n = { }, 0 + if find(value,"*") then + value = topattern(value) + for i=1,#lookups do + local s = lookups[i] + if find(s[key],value) then + n = n + 1 + t[n] = lookups[i] + end + end + else + for i=1,#lookups do + local s = lookups[i] + if s[key] == value then + n = n + 1 + t[n] = lookups[i] + end + end end if trace_names then - report_names("starting with %s lookups for %a",#lookups,pattern) + report_names("%s matches for key %a with value %a",#t,key,value) + end + lookups = t + end + return lookups +end + +local function first_look(name,reload) + names.load(reload) + local data = names.data + local specifications = data.specifications + local families = data.families + if name then + return families[name] + else + return specifications + end +end + +function names.lookup(pattern,name,reload) -- todo: find + names.load(reload) + local data = names.data + local specifications = data.specifications + local families = data.families + local lookups = specifications + if name then + name = cleanname(name) + end + if type(pattern) == "table" then + local familyname = pattern.familyname + if familyname then + familyname = cleanname(familyname) + pattern.familyname = familyname end + local lookups = first_look(name or familyname,reload) if lookups then - for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do - local t, n = { }, 0 - if find(value,"*") then - value = string.topattern(value) - for i=1,#lookups do - local s = lookups[i] - if find(s[key],value) then - n = n + 1 - t[n] = lookups[i] - end - end - else - for i=1,#lookups do - local s = lookups[i] - if s[key] == value then - n = n + 1 - t[n] = lookups[i] - end - end - end - if trace_names then - report_names("%s matches for key %a with value %a",#t,key,value) - end - lookups = t + if trace_names then + report_names("starting with %s lookups for 
'%T'",#lookups,pattern) end + lookups = look_them_up(lookups,pattern) + end + lastpattern = false + lastlookups = lookups or { } + elseif lastpattern ~= pattern then + local lookups = first_look(name or (not find(pattern,"=") and pattern),reload) + if lookups then + if trace_names then + report_names("starting with %s lookups for %a",#lookups,pattern) + end + local specification = settings_to_hash(pattern) + local familyname = specification.familyname + if familyname then + familyname = cleanname(familyname) + specification.familyname = familyname + end + lookups = look_them_up(lookups,specification) end lastpattern = pattern lastlookups = lookups or { } @@ -1722,3 +1919,49 @@ function names.resolvespec(askedname,sub) -- overloads previous definition report_names("unresolved: %s",askedname) end end + +-- We could generate typescripts with designsize info from the name database but +-- it's not worth the trouble as font names remain a mess: for instance how do we +-- idenfity a font? Names, families, subfamilies or whatever snippet can contain +-- a number related to the design size and so we end up with fuzzy logic again. So, +-- instead it's easier to make a few goody files. +-- +-- local hash = { } +-- +-- for i=1,#specifications do +-- local s = specifications[i] +-- local min = s.minsize or 0 +-- local max = s.maxsize or 0 +-- if min ~= 0 or max ~= 0 then +-- -- the usual name mess: +-- -- antykwa has modifiers so we need to take these into account, otherwise we get weird combinations +-- -- ebgaramond has modifiers with the size encoded, so we need to strip this in order to recognized similar styles +-- -- lm has 'slanted appended in some names so how to choose that one +-- -- +-- local modifier = string.gsub(s.modifiers or "normal","%d","") +-- -- print funny modifier +-- local instance = string.formatters["%s-%s-%s-%s-%s-%s"](s.familyname,s.width,s.style,s.weight,s.variant,modifier) +-- local h = hash[instance] +-- if not h then +-- h = { } +-- hash[instance] = h +-- end +-- size = string.formatters["%0.1fpt"]((min)/10) +-- h[size] = s.filename +-- end +-- end +-- +-- local newhash = { } +-- +-- for k, v in next, hash do +-- if next(v,next(v)) then +-- -- local instance = string.match(k,"(.+)%-.+%-.+%-.+$") +-- local instance = string.match(k,"(.+)%-.+%-.+$") +-- local instance = string.gsub(instance,"%-normal$","") +-- if not newhash[instance] then +-- newhash[instance] = v +-- end +-- end +-- end +-- +-- inspect(newhash) diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua index 316b947a3..827d70586 100644 --- a/tex/context/base/font-tfm.lua +++ b/tex/context/base/font-tfm.lua @@ -110,7 +110,7 @@ local function read_from_tfm(specification) constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) if not features.encoding then local encoding, filename = match(properties.filename,"^(.-)%-(.*)$") -- context: encoding-name.* - if filename and encoding and encodings.known[encoding] then + if filename and encoding and encodings.known and encodings.known[encoding] then features.encoding = encoding end end diff --git a/tex/context/base/font-tra.mkiv b/tex/context/base/font-tra.mkiv index 1877c4904..45d8a7280 100644 --- a/tex/context/base/font-tra.mkiv +++ b/tex/context/base/font-tra.mkiv @@ -186,6 +186,7 @@ \forgetparindent \forgeteverypar \tt + \lefttoright \hbox to \hsize \bgroup \hbox to 6\emwidth{\bf font\hss}% \vtop \bgroup diff --git a/tex/context/base/font-trt.lua b/tex/context/base/font-trt.lua index 6fc8028d1..abc92ba52 
100644 --- a/tex/context/base/font-trt.lua +++ b/tex/context/base/font-trt.lua @@ -6,7 +6,11 @@ if not modules then modules = { } end modules ['font-trt'] = { license = "see context related readme files" } -local rawget, dofile, next = rawget, dofile, next +local rawget, dofile, next, type = rawget, dofile, next, type + +local cleanfilename = fonts.names.cleanfilename +local splitbase = file.splitbase +local lower = string.lower --[[ldx--

We provide a simple treatment mechanism (mostly because I want to demonstrate @@ -14,11 +18,24 @@ something in a manual). It's one of the few places where an lfg file gets loaded outside the goodies manager.
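The treatments.applyfixes and treatments.ignoredfile functions added further down look an entry up by (cleaned) file name and then either run its fixes or skip the file altogether. A rough sketch of what such an entry could look like; only the fixes, ignored and comment fields follow the code in this patch, the surrounding goodie (lfg) layout and the file names are assumptions:

-- hypothetical treatments.lfg entry, for illustration only
return {
    name       = "treatments",
    comment    = "example font patches",
    treatments = {
        ["somebrokenfont.ttf"] = {
            comment = "wrong italic angle in the header",
            fixes   = function(data)    -- gets the raw font table to patch
                data.italicangle = 0
            end,
        },
        ["someobsoletefont.pfb"] = {
            comment = "clashes with the opentype version",
            ignored = true,             -- makes the name database skip it
        },
    },
}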

--ldx]]-- -local treatments = utilities.storage.allocate() -fonts.treatments = treatments -local treatmentdata = { } -treatments.data = treatmentdata -treatments.filename = "treatments.lfg" +local treatments = fonts.treatments or { } +fonts.treatments = treatments + +local treatmentdata = treatments.data or utilities.storage.allocate() +treatments.data = treatmentdata + +treatments.filename = "treatments.lfg" + +local trace_treatments = false trackers.register("fonts.treatments", function(v) trace_treatments = v end) +local report_treatment = logs.reporter("fonts","treatment") + +treatments.report = report_treatment + +function treatments.trace(...) + if trace_treatments then + report_treatment(...) + end +end -- function treatments.load(name) -- local filename = resolvers.findfile(name) @@ -55,3 +72,45 @@ table.setmetatableindex(treatmentdata,function(t,k) table.setmetatableindex(treatmentdata,nil) return treatmentdata[k] end) + +local function applyfix(fix,filename,data,n) + if type(fix) == "function" then + -- we assume that when needed the fix reports something + -- if trace_treatments then + -- report_treatment("applying treatment %a to file %a",n,filename) + -- end + fix(data) + elseif trace_treatments then + report_treatment("invalid treatment %a for file %a",n,filename) + end +end + +function treatments.applyfixes(filename,data) + local filename = cleanfilename(filename) + local pathpart, basepart = splitbase(filename) + local treatment = treatmentdata[filename] or treatmentdata[basepart] + if treatment then + local fixes = treatment.fixes + if not fixes then + -- nothing to fix + elseif type(fixes) == "table" then + for i=1,#fixes do + applyfix(fixes[i],filename,data,i) + end + else + applyfix(fixes,filename,data,1) + end + end +end + +function treatments.ignoredfile(fullname) + local treatmentdata = treatments.data or { } -- when used outside context + local _, basepart = splitbase(fullname) + local treatment = treatmentdata[basepart] or treatmentdata[lower(basepart)] + if treatment and treatment.ignored then + report_treatment("font file %a resolved as %a is ignored, reason %a",basepart,fullname,treatment.comment or "unknown") + return true + end +end + +fonts.names.ignoredfile = treatments.ignoredfile diff --git a/tex/context/base/grph-fig.mkiv b/tex/context/base/grph-fig.mkiv index 80b8e35d9..9b9333fa9 100644 --- a/tex/context/base/grph-fig.mkiv +++ b/tex/context/base/grph-fig.mkiv @@ -25,12 +25,21 @@ % \appendtoks \setbuffer[typeset-b]\endbuffer\to \everystarttext % \appendtoks \setbuffer[typeset-a]\endbuffer\to \everystarttext -\newcount\c_grph_buffers_n +% we could use \typesetbuffer[*] to access the last one -\let\m_grph_buffers_filename\empty +\newconstant\c_grph_buffers_mode + +\let\lasttypesetbuffer\empty \unexpanded\def\typesetbuffer - {\dodoubleempty\grph_buffers_typeset} + {\bgroup + \setconstant\c_grph_buffers_mode\plusone + \dodoubleempty\grph_buffers_typeset} + +\unexpanded\def\typesetbufferonly + {\bgroup + \setconstant\c_grph_buffers_mode\zerocount + \dodoubleempty\grph_buffers_typeset} \def\grph_buffers_typeset[#1][#2]% beware: this will mix up the mp graphics {\ifsecondargument @@ -44,11 +53,12 @@ \fi\fi} \def\grph_buffers_typeset_indeed[#1][#2]% we could use the via files - {\bgroup - \global\advance\c_grph_buffers_n\plusone - \edef\m_grph_buffers_filename{\jobname-buffer-\the\c_grph_buffers_n}% - \ctxcommand{runbuffer("\m_grph_buffers_filename.tmp","#1",true)}% - \externalfigure[\m_grph_buffers_filename.pdf][#2]% + 
{\doifnot{#1}{*}{\xdef\lasttypesetbuffer{\ctxcommand{runbuffer("#1",true)}}}% + \ifcase\c_grph_buffers_mode + % typesetonly + \or + \externalfigure[\lasttypesetbuffer][#2]% + \fi \egroup} % For manuals and such: diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua index 9603419ae..392aa58b1 100644 --- a/tex/context/base/grph-inc.lua +++ b/tex/context/base/grph-inc.lua @@ -38,7 +38,6 @@ run TeX code from within Lua. Some more functionality will move to Lua. ]]-- local format, lower, find, match, gsub, gmatch = string.format, string.lower, string.find, string.match, string.gsub, string.gmatch -local texbox = tex.box local contains = table.contains local concat, insert, remove = table.concat, table.insert, table.remove local todimen = string.todimen @@ -55,6 +54,15 @@ local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex local replacetemplate = utilities.templates.replace +local images = img + +local texgetbox = tex.getbox +local texsetbox = tex.setbox + +local hpack = node.hpack + +local context = context + local variables = interfaces.variables local codeinjections = backends.codeinjections local nodeinjections = backends.nodeinjections @@ -67,8 +75,6 @@ local trace_inclusion = false trackers.register("graphics.inclusion", functi local report_inclusion = logs.reporter("graphics","inclusion") -local context, img = context, img - local f_hash_part = formatters["%s->%s->%s"] local f_hash_full = formatters["%s->%s->%s->%s->%s->%s->%s"] @@ -82,7 +88,7 @@ local v_default = variables.default local maxdimen = 2^30-1 -function img.check(figure) +function images.check(figure) if figure then local width = figure.width local height = figure.height @@ -103,36 +109,38 @@ end --- some extra img functions --- can become luat-img.lua -local imgkeys = img.keys() +local allimagekeys = images.keys() -function img.totable(imgtable) +local function imagetotable(imgtable) local result = { } - for k=1,#imgkeys do - local key = imgkeys[k] + for k=1,#allimagekeys do + local key = allimagekeys[k] result[key] = imgtable[key] end return result end -function img.serialize(i,...) - return table.serialize(img.totable(i),...) +images.totable = imagetotable + +function images.serialize(i,...) + return table.serialize(imagetotable(i),...) end -function img.print(i,...) - return table.print(img.totable(i),...) +function images.print(i,...) + return table.print(imagetotable(i),...) 
end -function img.clone(i,data) +function images.clone(i,data) i.width = data.width or i.width i.height = data.height or i.height -- attr etc return i end -local validsizes = table.tohash(img.boxes()) -local validtypes = table.tohash(img.types()) +local validsizes = table.tohash(images.boxes()) +local validtypes = table.tohash(images.types()) -function img.checksize(size) +function images.checksize(size) if size then size = gsub(size,"box","") return validsizes[size] and size or "crop" @@ -143,7 +151,7 @@ end local indexed = { } -function img.ofindex(n) +function images.ofindex(n) return indexed[n] end @@ -430,7 +438,7 @@ function figures.initialize(request) request.height = h > 0 and h or nil -- request.page = math.max(tonumber(request.page) or 1,1) - request.size = img.checksize(request.size) + request.size = images.checksize(request.size) request.object = request.object == v_yes request["repeat"] = request["repeat"] == v_yes request.preview = request.preview == v_yes @@ -734,6 +742,9 @@ local function locate(request) -- name, format, cache local pattern = figures_patterns[i] if find(askedformat,pattern[1]) then format = pattern[2] + if trace_figures then + report_inclusion("asked format %a matches %a",askedformat,pattern[1]) + end break end end @@ -753,6 +764,7 @@ local function locate(request) -- name, format, cache elseif quitscanning then return register(askedname) end + askedformat = format -- new per 2013-08-05 elseif trace_figures then report_inclusion("unknown format %a",askedformat) end @@ -971,7 +983,7 @@ function figures.done(data) figures.nofprocessed = figures.nofprocessed + 1 data = data or callstack[#callstack] or lastfiguredata local dr, du, ds, nr = data.request, data.used, data.status, figures.boxnumber - local box = texbox[nr] + local box = texgetbox(nr) ds.width = box.width ds.height = box.height ds.xscale = ds.width /(du.width or 1) @@ -983,7 +995,7 @@ end function figures.dummy(data) data = data or callstack[#callstack] or lastfiguredata local dr, du, nr = data.request, data.used, figures.boxnumber - local box = node.hpack(node.new("hlist")) -- we need to set the dir (luatex 0.60 buglet) + local box = hpack(node.new("hlist")) -- we need to set the dir (luatex 0.60 buglet) du.width = du.width or figures.defaultwidth du.height = du.height or figures.defaultheight du.depth = du.depth or figures.defaultdepth @@ -991,7 +1003,7 @@ function figures.dummy(data) box.width = du.width box.height = du.height box.depth = du.depth - texbox[nr] = box -- hm, should be global (to be checked for consistency) + texsetbox(nr,box) -- hm, should be global (to be checked for consistency) end -- -- -- generic -- -- -- @@ -1033,7 +1045,7 @@ function checkers.generic(data) local hash = f_hash_full(name,page,size,color,conversion,resolution,mask) local figure = figures_loaded[hash] if figure == nil then - figure = img.new { + figure = images.new { filename = name, page = page, pagebox = dr.size, @@ -1041,7 +1053,7 @@ function checkers.generic(data) } codeinjections.setfigurecolorspace(data,figure) codeinjections.setfiguremask(data,figure) - figure = figure and img.check(img.scan(figure)) or false + figure = figure and images.check(images.scan(figure)) or false local f, d = codeinjections.setfigurealternative(data,figure) figure, data = f or figure, d or data figures_loaded[hash] = figure @@ -1084,18 +1096,18 @@ function includers.generic(data) if figure == nil then figure = ds.private if figure then - figure = img.copy(figure) - figure = figure and img.clone(figure,data.request) or false + 
figure = images.copy(figure) + figure = figure and images.clone(figure,data.request) or false end figures_used[hash] = figure end if figure then local nr = figures.boxnumber -- it looks like we have a leak in attributes here .. todo - local box = node.hpack(img.node(figure)) -- img.node(figure) not longer valid + local box = hpack(images.node(figure)) -- images.node(figure) not longer valid indexed[figure.index] = figure box.width, box.height, box.depth = figure.width, figure.height, 0 -- new, hm, tricky, we need to do that in tex (yet) - texbox[nr] = box + texsetbox(nr,box) ds.objectnumber = figure.objnum context.relocateexternalfigure() end diff --git a/tex/context/base/grph-inc.mkiv b/tex/context/base/grph-inc.mkiv index 8557bbb0b..5fb87a8b2 100644 --- a/tex/context/base/grph-inc.mkiv +++ b/tex/context/base/grph-inc.mkiv @@ -297,11 +297,12 @@ % \edef\p_width {\externalfigureparameter\c!width}% \edef\p_height{\externalfigureparameter\c!height}% + \edef\p_label {\externalfigureparameter\c!label}% % \dostarttagged\t!image\empty \ctxlua{figures.push { name = "\p_grph_include_name", - label = "\p_grph_include_label", + label = "\ifx\p_label\empty\p_grph_include_label\else\p_label\fi", page = "\externalfigureparameter\c!page", size = "\externalfigureparameter\c!size", object = "\externalfigureparameter\c!object", @@ -610,7 +611,9 @@ \letexternalfigureparameter\c!offset\v!overlay \letexternalfigureparameter\c!width \figurewidth \letexternalfigureparameter\c!height\figureheight - \inheritedexternalfigureframed{\vfilll\box\foundexternalfigure}% +% \letexternalfigureparameter\c!align \v!middle +% \letexternalfigureparameter\c!autowidth\v!no + \inheritedexternalfigureframed{\box\foundexternalfigure}% \fi \fi\fi \fi diff --git a/tex/context/base/java-ini.lua b/tex/context/base/java-ini.lua index 321e4e24d..673379494 100644 --- a/tex/context/base/java-ini.lua +++ b/tex/context/base/java-ini.lua @@ -6,30 +6,36 @@ if not modules then modules = { } end modules ['java-ini'] = { license = "see context related readme files" } +-- todo: don't flush scripts if no JS key + local format = string.format local concat = table.concat local lpegmatch, P, S, C, Carg, Cc = lpeg.match, lpeg.P, lpeg.S, lpeg.C, lpeg.Carg, lpeg.Cc -local allocate = utilities.storage.allocate -local settings_to_array = utilities.parsers.settings_to_array -local variables = interfaces.variables -local formatters = string.formatters +local allocate = utilities.storage.allocate +local settings_to_array = utilities.parsers.settings_to_array --- todo: don't flush scripts if no JS key +local variables = interfaces.variables +local formatters = string.formatters + +local context = context +local commands = commands -local trace_javascript = false trackers.register("backends.javascript", function(v) trace_javascript = v end) +local trace_javascript = false trackers.register("backends.javascript", function(v) trace_javascript = v end) local report_javascripts = logs.reporter ("interactions","javascripts") local status_javascripts = logs.messenger("interactions","javascripts") -interactions.javascripts = interactions.javascripts or { } -local javascripts = interactions.javascripts +local javascripts = interactions.javascripts or { } +interactions.javascripts = javascripts -javascripts.codes = allocate() -javascripts.preambles = allocate() -javascripts.functions = allocate() +local codes = allocate() +local preambles = allocate() +local functions = allocate() -local codes, preambles, functions = javascripts.codes, javascripts.preambles, 
javascripts.functions +javascripts.codes = codes +javascripts.preambles = preambles +javascripts.functions = functions local preambled = { } diff --git a/tex/context/base/l-boolean.lua b/tex/context/base/l-boolean.lua index f087f1a4c..8d11080e7 100644 --- a/tex/context/base/l-boolean.lua +++ b/tex/context/base/l-boolean.lua @@ -59,9 +59,9 @@ end function string.is_boolean(str,default) if type(str) == "string" then - if str == "true" or str == "yes" or str == "on" or str == "t" then + if str == "true" or str == "yes" or str == "on" or str == "t" or str == "1" then return true - elseif str == "false" or str == "no" or str == "off" or str == "f" then + elseif str == "false" or str == "no" or str == "off" or str == "f" or str == "0" then return false end end diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua index 3d0576eeb..40081cc3b 100644 --- a/tex/context/base/l-dir.lua +++ b/tex/context/base/l-dir.lua @@ -26,6 +26,8 @@ local isfile = lfs.isfile local currentdir = lfs.currentdir local chdir = lfs.chdir +local onwindows = os.type == "windows" or find(os.getenv("PATH"),";") + -- in case we load outside luatex if not isdir then @@ -136,11 +138,33 @@ end dir.collectpattern = collectpattern -local pattern = Ct { - [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3), - [2] = C(((1-S("*?/"))^0 * P("/"))^0), - [3] = C(P(1)^0) -} +local separator + +if onwindows then -- we could sanitize here + +-- pattern = Ct { +-- [1] = (C(P(".") + S("/\\")^1) + C(R("az","AZ") * P(":") * S("/\\")^0) + Cc("./")) * V(2) * V(3), +-- [2] = C(((1-S("*?/\\"))^0 * S("/\\"))^0), +-- [3] = C(P(1)^0) +-- } + + local slash = S("/\\") / "/" + + pattern = Ct { + [1] = (Cs(P(".") + slash^1) + Cs(R("az","AZ") * P(":") * slash^0) + Cc("./")) * V(2) * V(3), + [2] = Cs(((1-S("*?/\\"))^0 * slash)^0), + [3] = Cs(P(1)^0) + } + +else -- assume unix + + pattern = Ct { + [1] = (C(P(".") + P("/")^1) + Cc("./")) * V(2) * V(3), + [2] = C(((1-S("*?/"))^0 * P("/"))^0), + [3] = C(P(1)^0) + } + +end local filter = Cs ( ( P("**") / ".*" + @@ -257,8 +281,6 @@ end local make_indeed = true -- false -local onwindows = os.type == "windows" or find(os.getenv("PATH"),";") - if onwindows then function dir.mkdirs(...) @@ -273,9 +295,8 @@ if onwindows then str = str .. "/" .. s end end - local first, middle, last local drive = false - first, middle, last = match(str,"^(//)(//*)(.*)$") + local first, middle, last = match(str,"^(//)(//*)(.*)$") if first then -- empty network path == local path else diff --git a/tex/context/base/l-file.lua b/tex/context/base/l-file.lua index a64ee8656..ebb2b39f4 100644 --- a/tex/context/base/l-file.lua +++ b/tex/context/base/l-file.lua @@ -368,11 +368,14 @@ function file.joinpath(tab,separator) -- table return tab and concat(tab,separator or io.pathseparator) -- can have trailing // end +local someslash = S("\\/") local stripper = Cs(P(fwslash)^0/"" * reslasher) -local isnetwork = fwslash * fwslash * (1-fwslash) + (1-fwslash-colon)^1 * colon +local isnetwork = someslash * someslash * (1-someslash) + + (1-fwslash-colon)^1 * colon local isroot = fwslash^1 * -1 local hasroot = fwslash^1 +local reslasher = lpeg.replacer(S("\\/"),"/") local deslasher = lpeg.replacer(S("\\/")^1,"/") -- If we have a network or prefix then there is a change that we end up with two @@ -386,8 +389,13 @@ function file.join(...) local lst = { ... 
} local one = lst[1] if lpegmatch(isnetwork,one) then + local one = lpegmatch(reslasher,one) local two = lpegmatch(deslasher,concat(lst,"/",2)) - return one .. "/" .. two + if lpegmatch(hasroot,two) then + return one .. two + else + return one .. "/" .. two + end elseif lpegmatch(isroot,one) then local two = lpegmatch(deslasher,concat(lst,"/",2)) if lpegmatch(hasroot,two) then @@ -412,6 +420,8 @@ end -- print(file.join("http://a","/y")) -- print(file.join("http:///a","/y")) -- print(file.join("//nas-1","/y")) +-- print(file.join("//nas-1/a/b/c","/y")) +-- print(file.join("\\\\nas-1\\a\\b\\c","\\y")) -- The previous one fails on "a.b/c" so Taco came up with a split based -- variant. After some skyping we got it sort of compatible with the old @@ -421,9 +431,14 @@ end -- finds were replaced by lpegs. local drivespec = R("az","AZ")^1 * colon -local anchors = fwslash + drivespec -local untouched = periods + (1-period)^1 * P(-1) -local splitstarter = (Cs(drivespec * (bwslash/"/" + fwslash)^0) + Cc(false)) * Ct(lpeg.splitat(S("/\\")^1)) +local anchors = fwslash + + drivespec +local untouched = periods + + (1-period)^1 * P(-1) +local mswindrive = Cs(drivespec * (bwslash/"/" + fwslash)^0) +local mswinuncpath = (bwslash + fwslash) * (bwslash + fwslash) * Cc("//") +local splitstarter = (mswindrive + mswinuncpath + Cc(false)) + * Ct(lpeg.splitat(S("/\\")^1)) local absolute = fwslash function file.collapsepath(str,anchor) -- anchor: false|nil, true, "." @@ -490,6 +505,7 @@ end -- test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..") -- test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..") -- test("./a") +-- test([[\\a.b.c\d\e]]) local validchars = R("az","09","AZ","--","..") local pattern_a = lpeg.replacer(1-validchars) diff --git a/tex/context/base/l-io.lua b/tex/context/base/l-io.lua index 06e1fb5ef..52f166af9 100644 --- a/tex/context/base/l-io.lua +++ b/tex/context/base/l-io.lua @@ -35,6 +35,7 @@ local function readall(f) return f:read('*all') else local done = f:seek("set",0) + local step if size < 1024*1024 then step = 1024 * 1024 elseif size > 16*1024*1024 then @@ -59,7 +60,7 @@ io.readall = readall function io.loaddata(filename,textmode) -- return nil if empty local f = io.open(filename,(textmode and 'r') or 'rb') if f then --- local data = f:read('*all') + -- local data = f:read('*all') local data = readall(f) f:close() if #data > 0 then diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua index 323c73b69..399b3ad65 100644 --- a/tex/context/base/l-lpeg.lua +++ b/tex/context/base/l-lpeg.lua @@ -13,6 +13,19 @@ if not modules then modules = { } end modules ['l-lpeg'] = { lpeg = require("lpeg") +-- The latest lpeg doesn't have print any more, and even the new ones are not +-- available by default (only when debug mode is enabled), which is a pitty as +-- as it helps bailign down bottlenecks. Performance seems comparable, although +-- +-- local p = lpeg.C(lpeg.P(1)^0 * lpeg.P(-1)) +-- local a = string.rep("123",10) +-- lpeg.match(p,a) +-- +-- is nearly 20% slower and also still suboptimal (i.e. a match that runs from +-- begin to end, one of the cases where string matchers win). + +if not lpeg.print then function lpeg.print(...) 
print(lpeg.pcode(...)) end end + -- tracing (only used when we encounter a problem in integration of lpeg in luatex) -- some code will move to unicode and string @@ -69,7 +82,6 @@ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) lpeg.patterns = lpeg.patterns or { } -- so that we can share local patterns = lpeg.patterns - local anything = P(1) local endofstring = P(-1) local alwaysmatched = P(true) @@ -79,37 +91,59 @@ patterns.endofstring = endofstring patterns.beginofstring = alwaysmatched patterns.alwaysmatched = alwaysmatched -local digit, sign = R('09'), S('+-') +local sign = S('+-') +local zero = P('0') +local digit = R('09') +local octdigit = R("07") +local lowercase = R("az") +local uppercase = R("AZ") +local underscore = P("_") +local hexdigit = digit + lowercase + uppercase local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") local newline = crlf + S("\r\n") -- cr + lf local escaped = P("\\") * anything local squote = P("'") local dquote = P('"') local space = P(" ") - -local utfbom_32_be = P('\000\000\254\255') -local utfbom_32_le = P('\255\254\000\000') -local utfbom_16_be = P('\255\254') -local utfbom_16_le = P('\254\255') -local utfbom_8 = P('\239\187\191') +local period = P(".") +local comma = P(",") + +local utfbom_32_be = P('\000\000\254\255') -- 00 00 FE FF +local utfbom_32_le = P('\255\254\000\000') -- FF FE 00 00 +local utfbom_16_be = P('\254\255') -- FE FF +local utfbom_16_le = P('\255\254') -- FF FE +local utfbom_8 = P('\239\187\191') -- EF BB BF local utfbom = utfbom_32_be + utfbom_32_le + utfbom_16_be + utfbom_16_le + utfbom_8 local utftype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le") + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le") + utfbom_8 * Cc("utf-8") + alwaysmatched * Cc("utf-8") -- assume utf8 +local utfstricttype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le") + + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le") + + utfbom_8 * Cc("utf-8") local utfoffset = utfbom_32_be * Cc(4) + utfbom_32_le * Cc(4) + utfbom_16_be * Cc(2) + utfbom_16_le * Cc(2) + utfbom_8 * Cc(3) + Cc(0) local utf8next = R("\128\191") +patterns.utfbom_32_be = utfbom_32_be +patterns.utfbom_32_le = utfbom_32_le +patterns.utfbom_16_be = utfbom_16_be +patterns.utfbom_16_le = utfbom_16_le +patterns.utfbom_8 = utfbom_8 + +patterns.utf_16_be_nl = P("\000\r\000\n") + P("\000\r") + P("\000\n") +patterns.utf_16_le_nl = P("\r\000\n\000") + P("\r\000") + P("\n\000") + patterns.utf8one = R("\000\127") patterns.utf8two = R("\194\223") * utf8next patterns.utf8three = R("\224\239") * utf8next * utf8next patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next patterns.utfbom = utfbom patterns.utftype = utftype +patterns.utfstricttype = utfstricttype patterns.utfoffset = utfoffset local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four @@ -137,29 +171,14 @@ patterns.nonwhitespace = nonwhitespace local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto ------ collapser = Cs(spacer^0/"" * ((spacer^1 * P(-1) / "") + (spacer^1/" ") + P(1))^0) +----- collapser = Cs(spacer^0/"" * ((spacer^1 * endofstring / "") + (spacer^1/" ") + P(1))^0) local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0)) patterns.stripper = stripper patterns.collapser = collapser -patterns.digit = digit -patterns.sign = sign -patterns.cardinal = sign^0 * digit^1 -patterns.integer = sign^0 * digit^1 -patterns.unsigned = digit^0 * P('.') * digit^1 
-patterns.float = sign^0 * patterns.unsigned -patterns.cunsigned = digit^0 * P(',') * digit^1 -patterns.cfloat = sign^0 * patterns.cunsigned -patterns.number = patterns.float + patterns.integer -patterns.cnumber = patterns.cfloat + patterns.integer -patterns.oct = P("0") * R("07")^1 -patterns.octal = patterns.oct -patterns.HEX = P("0x") * R("09","AF")^1 -patterns.hex = P("0x") * R("09","af")^1 -patterns.hexadecimal = P("0x") * R("09","AF","af")^1 -patterns.lowercase = R("az") -patterns.uppercase = R("AZ") +patterns.lowercase = lowercase +patterns.uppercase = uppercase patterns.letter = patterns.lowercase + patterns.uppercase patterns.space = space patterns.tab = P("\t") @@ -167,12 +186,12 @@ patterns.spaceortab = patterns.space + patterns.tab patterns.newline = newline patterns.emptyline = newline^1 patterns.equal = P("=") -patterns.comma = P(",") -patterns.commaspacer = P(",") * spacer^0 -patterns.period = P(".") +patterns.comma = comma +patterns.commaspacer = comma * spacer^0 +patterns.period = period patterns.colon = P(":") patterns.semicolon = P(";") -patterns.underscore = P("_") +patterns.underscore = underscore patterns.escaped = escaped patterns.squote = squote patterns.dquote = dquote @@ -187,12 +206,38 @@ patterns.singlequoted = squote * patterns.nosquote * squote patterns.doublequoted = dquote * patterns.nodquote * dquote patterns.quoted = patterns.doublequoted + patterns.singlequoted -patterns.propername = R("AZ","az","__") * R("09","AZ","az", "__")^0 * P(-1) +patterns.digit = digit +patterns.octdigit = octdigit +patterns.hexdigit = hexdigit +patterns.sign = sign +patterns.cardinal = digit^1 +patterns.integer = sign^-1 * digit^1 +patterns.unsigned = digit^0 * period * digit^1 +patterns.float = sign^-1 * patterns.unsigned +patterns.cunsigned = digit^0 * comma * digit^1 +patterns.cfloat = sign^-1 * patterns.cunsigned +patterns.number = patterns.float + patterns.integer +patterns.cnumber = patterns.cfloat + patterns.integer +patterns.oct = zero * octdigit^1 +patterns.octal = patterns.oct +patterns.HEX = zero * P("X") * (digit+uppercase)^1 +patterns.hex = zero * P("x") * (digit+lowercase)^1 +patterns.hexadecimal = zero * S("xX") * hexdigit^1 + +patterns.hexafloat = sign^-1 + * zero * S("xX") + * (hexdigit^0 * period * hexdigit^1 + hexdigit^1 * period * hexdigit^0 + hexdigit^1) + * (S("pP") * sign^-1 * hexdigit^1)^-1 +patterns.decafloat = sign^-1 + * (digit^0 * period * digit^1 + digit^1 * period * digit^0 + digit^1) + * S("eE") * sign^-1 * digit^1 + +patterns.propername = (uppercase + lowercase + underscore) * (uppercase + lowercase + underscore + digit)^0 * endofstring patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1 patterns.beginline = #(1-newline) -patterns.longtostring = Cs(whitespace^0/"" * nonwhitespace^0 * ((whitespace^0/" " * (patterns.quoted + nonwhitespace)^1)^0)) +patterns.longtostring = Cs(whitespace^0/"" * ((patterns.quoted + nonwhitespace^1 + whitespace^1/"" * (P(-1) + Cc(" ")))^0)) local function anywhere(pattern) --slightly adapted from website return P { P(pattern) + 1 * V(1) } @@ -421,7 +466,10 @@ function lpeg.replacer(one,two,makefunction,isutf) -- in principle we should sor end end -function lpeg.finder(lst,makefunction) +-- local pattern1 = P(1-P(pattern))^0 * P(pattern) : test for not nil +-- local pattern2 = (P(pattern) * Cc(true) + P(1))^0 : test for true (could be faster, but not much) + +function lpeg.finder(lst,makefunction) -- beware: slower than find with 'patternless finds' local pattern if type(lst) == "table" 
then pattern = P(false) @@ -456,8 +504,8 @@ local splitters_f, splitters_s = { }, { } function lpeg.firstofsplit(separator) -- always return value local splitter = splitters_f[separator] if not splitter then - separator = P(separator) - splitter = C((1 - separator)^0) + local pattern = P(separator) + splitter = C((1 - pattern)^0) splitters_f[separator] = splitter end return splitter @@ -466,13 +514,35 @@ end function lpeg.secondofsplit(separator) -- nil if not split local splitter = splitters_s[separator] if not splitter then - separator = P(separator) - splitter = (1 - separator)^0 * separator * C(anything^0) + local pattern = P(separator) + splitter = (1 - pattern)^0 * pattern * C(anything^0) + splitters_s[separator] = splitter + end + return splitter +end + +local splitters_s, splitters_p = { }, { } + +function lpeg.beforesuffix(separator) -- nil if nothing but empty is ok + local splitter = splitters_s[separator] + if not splitter then + local pattern = P(separator) + splitter = C((1 - pattern)^0) * pattern * endofstring splitters_s[separator] = splitter end return splitter end +function lpeg.afterprefix(separator) -- nil if nothing but empty is ok + local splitter = splitters_p[separator] + if not splitter then + local pattern = P(separator) + splitter = pattern * C(anything^0) + splitters_p[separator] = splitter + end + return splitter +end + function lpeg.balancer(left,right) left, right = P(left), P(right) return P { left * ((1 - left - right) + V(1))^0 * right } @@ -832,9 +902,9 @@ end -- moved here (before util-str) -local digit = R("09") -local period = P(".") -local zero = P("0") +----- digit = R("09") +----- period = P(".") +----- zero = P("0") local trailingzeros = zero^0 * -digit -- suggested by Roberto R local case_1 = period * trailingzeros / "" local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") diff --git a/tex/context/base/l-os.lua b/tex/context/base/l-os.lua index 05ca0acdc..7f3fd7caf 100644 --- a/tex/context/base/l-os.lua +++ b/tex/context/base/l-os.lua @@ -127,7 +127,13 @@ function io.popen (...) ioflush() return iopopen(...) end function os.resultof(command) local handle = io.popen(command,"r") - return handle and handle:read("*all") or "" + if handle then + local result = handle:read("*all") or "" + handle:close() + return result + else + return "" + end end if not io.fileseparator then @@ -172,19 +178,21 @@ if not os.times then -- ? 
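As an aside on the os.resultof change a little earlier in this file: the point of the rewrite is that the popen handle is now closed explicitly, so the child process is reaped and no file descriptor leaks when the helper is called repeatedly. A standalone sketch of the same pattern (not tied to ConTeXt, and the echo command is only an example):

-- minimal sketch (not from the patch itself): capture a command's output
-- and always close the pipe, returning "" when popen fails
local function resultof(command)
    local handle = io.popen(command,"r")
    if not handle then
        return ""
    end
    local result = handle:read("*all") or ""
    handle:close() -- reap the child and free the descriptor
    return result
end

print(resultof("echo hello"))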
end end -os.gettimeofday = os.gettimeofday or os.clock -local startuptime = os.gettimeofday() +local gettimeofday = os.gettimeofday or os.clock +os.gettimeofday = gettimeofday + +local startuptime = gettimeofday() function os.runtime() - return os.gettimeofday() - startuptime + return gettimeofday() - startuptime end ---~ print(os.gettimeofday()-os.time()) ---~ os.sleep(1.234) ---~ print (">>",os.runtime()) ---~ print(os.date("%H:%M:%S",os.gettimeofday())) ---~ print(os.date("%H:%M:%S",os.time())) +-- print(os.gettimeofday()-os.time()) +-- os.sleep(1.234) +-- print (">>",os.runtime()) +-- print(os.date("%H:%M:%S",os.gettimeofday())) +-- print(os.date("%H:%M:%S",os.time())) -- no need for function anymore as we have more clever code and helpers now -- this metatable trickery might as well disappear @@ -380,31 +388,43 @@ end local timeformat = format("%%s%s",os.timezone(true)) local dateformat = "!%Y-%m-%d %H:%M:%S" +local lasttime = nil +local lastdate = nil function os.fulltime(t,default) - t = tonumber(t) or 0 + t = t and tonumber(t) or 0 if t > 0 then -- valid time elseif default then return default else - t = nil + t = time() + end + if t ~= lasttime then + lasttime = t + lastdate = format(timeformat,date(dateformat)) end - return format(timeformat,date(dateformat,t)) + return lastdate end local dateformat = "%Y-%m-%d %H:%M:%S" +local lasttime = nil +local lastdate = nil function os.localtime(t,default) - t = tonumber(t) or 0 + t = t and tonumber(t) or 0 if t > 0 then -- valid time elseif default then return default else - t = nil + t = time() end - return date(dateformat,t) + if t ~= lasttime then + lasttime = t + lastdate = date(dateformat,t) + end + return lastdate end function os.converttime(t,default) @@ -472,3 +492,60 @@ end -- print(os.which("inkscape")) -- print(os.which("gs.exe")) -- print(os.which("ps2pdf")) + +-- These are moved from core-con.lua (as I needed them elsewhere). 
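The os.fulltime and os.localtime rewrites above add a one-slot cache: the formatted string is only rebuilt when the timestamp (in whole seconds) actually changes, which pays off when these helpers are called many times per second, as in logging. A minimal standalone sketch of that caching idea; the local names below are mine, not the patch's:

-- one-slot memoization of a formatted timestamp (sketch, not patch code)
local date, time = os.date, os.time

local lasttime = nil
local lastdate = nil

local function localtime(t)
    t = t and tonumber(t) or time()   -- default to "now"
    if t ~= lasttime then             -- only reformat when the second changed
        lasttime = t
        lastdate = date("%Y-%m-%d %H:%M:%S",t)
    end
    return lastdate
end

for i=1,3 do
    print(localtime())                -- same string, formatted only once
end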
+ +local function isleapyear(year) + return (year % 400 == 0) or ((year % 100 ~= 0) and (year % 4 == 0)) +end + +os.isleapyear = isleapyear + +-- nicer: +-- +-- local days = { +-- [false] = { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 }, +-- [true] = { 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 } +-- } +-- +-- local function nofdays(year,month) +-- return days[isleapyear(year)][month] +-- return month == 2 and isleapyear(year) and 29 or days[month] +-- end +-- +-- more efficient: + +local days = { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 } + +local function nofdays(year,month) + if not month then + return isleapyear(year) and 365 or 364 + else + return month == 2 and isleapyear(year) and 29 or days[month] + end +end + +os.nofdays = nofdays + +function os.weekday(day,month,year) + return date("%w",time { year = year, month = month, day = day }) + 1 +end + +function os.validdate(year,month,day) + -- we assume that all three values are set + -- year is always ok, even if lua has a 1970 time limit + if month < 1 then + month = 1 + elseif month > 12 then + month = 12 + end + if day < 1 then + day = 1 + else + local max = nofdays(year,month) + if day > max then + day = max + end + end + return year, month, day +end diff --git a/tex/context/base/l-package.lua b/tex/context/base/l-package.lua index 579fd3941..0dbff7c57 100644 --- a/tex/context/base/l-package.lua +++ b/tex/context/base/l-package.lua @@ -17,7 +17,7 @@ if not modules then modules = { } end modules ['l-package'] = { -- -- local mysql = require("luasql.mysql") local type = type -local gsub, format = string.gsub, string.format +local gsub, format, find = string.gsub, string.format, string.find local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match @@ -68,6 +68,7 @@ local helpers = package.helpers or { sequence = { "already loaded", "preload table", + "qualified path", -- beware, lua itself doesn't handle qualified paths (prepends ./) "lua extra list", "lib extra list", "path specification", @@ -243,6 +244,23 @@ end helpers.loadedbypath = loadedbypath +local function loadedbyname(name,rawname) + if find(name,"^/") or find(name,"^[a-zA-Z]:/") then + local trace=helpers.trace + if trace then + helpers.report("qualified name, identifying '%s'",what,name) + end + if isreadable(name) then + if trace then + helpers.report("qualified name, '%s' found",what,name) + end + return loadfile(name) + end + end +end + +helpers.loadedbyname = loadedbyname + methods["already loaded"] = function(name) return package.loaded[name] end @@ -251,6 +269,10 @@ methods["preload table"] = function(name) return builtin["preload table"](name) end +methods["qualified path"]=function(name) + return loadedbyname(addsuffix(lualibfile(name),"lua"),name) +end + methods["lua extra list"] = function(name) return loadedbypath(addsuffix(lualibfile(name),"lua" ),name,getextraluapaths(),false,"lua") end diff --git a/tex/context/base/l-pdfview.lua b/tex/context/base/l-pdfview.lua index 6e0259299..6302fd6f6 100644 --- a/tex/context/base/l-pdfview.lua +++ b/tex/context/base/l-pdfview.lua @@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['l-pdfview'] = { license = "see context related readme files" } +-- Todo: add options in cnf file + -- Todo: figure out pdfopen/pdfclose on linux. Calling e.g. okular directly -- doesn't work in linux when issued from scite as it blocks the editor (no -- & possible or so). 
Unfortunately pdfopen keeps changing with not keeping @@ -15,62 +17,107 @@ if not modules then modules = { } end modules ['l-pdfview'] = { local format, concat = string.format, table.concat +local report = logs.reporter("pdfview") +local replace = utilities.templates.replace + pdfview = pdfview or { } -local opencalls, closecalls, allcalls, runner +local opencalls -- a table with templates that open a given pdf document +local closecalls -- a table with templates that close a given pdf document +local allcalls -- a table with templates that close all open pdf documents +local runner -- runner function +local expander -- filename cleanup function --- this might become template based +-- maybe spawn/execute spec in calls if os.type == "windows" then + -- os.setenv("path",os.getenv("path") .. ";" .. "c:/data/system/pdf-xchange") + -- os.setenv("path",os.getenv("path") .. ";" .. "c:/data/system/sumatrapdf") + + -- start is more flexible as it locates binaries in more places and doesn't lock + opencalls = { - ['default'] = "pdfopen --rxi --file", - ['acrobat'] = "pdfopen --rxi --file", - ['fullacrobat'] = "pdfopen --axi --file", - ['okular'] = 'start "test" "c:/data/system/kde/bin/okular.exe" --unique', -- todo! - ['sumatra'] = 'start "test" "c:/data/system/sumatrapdf/sumatrapdf.exe" -reuse-instance', - ['okular'] = 'start "test" "okular.exe" --unique', - ['sumatra'] = 'start "test" "sumatrapdf.exe" -reuse-instance -bg-color 0xCCCCCC', + ['default'] = [[pdfopen --rxi --file "%filename%"]], + ['acrobat'] = [[pdfopen --rxi --file "%filename%"]], + ['fullacrobat'] = [[pdfopen --axi --file "%filename%"]], + ['okular'] = [[start "test" okular.exe --unique "%filename%"]], + ['pdfxcview'] = [[start "test" pdfxcview.exe /A "nolock=yes=OpenParameters" "%filename%"]], + ['sumatra'] = [[start "test" sumatrapdf.exe -reuse-instance -bg-color 0xCCCCCC "%filename%"]], + ['auto'] = [[start "%filename%"]], } closecalls= { - ['default'] = "pdfclose --file", - ['acrobat'] = "pdfclose --file", - ['okular'] = false, - ['sumatra'] = false, + ['default'] = [[pdfclose --file "%filename%"]], + ['acrobat'] = [[pdfclose --file "%filename%"]], + ['okular'] = false, + ['pdfxcview'] = false, -- [[pdfxcview.exe /close:discard "%filename%"]], + ['sumatra'] = false, + ['auto'] = false, } allcalls = { - ['default'] = "pdfclose --all", - ['acrobat'] = "pdfclose --all", - ['okular'] = false, - ['sumatra'] = false, + ['default'] = [[pdfclose --all]], + ['acrobat'] = [[pdfclose --all]], + ['okular'] = false, + ['pdfxcview'] = false, + ['sumatra'] = false, + ['auto'] = false, } - pdfview.method = "acrobat" -- no longer usefull due to green pop up line and clasing reader/full + pdfview.method = "acrobat" -- no longer useful due to green pop up line and clashing reader/full + -- pdfview.method = "pdfxcview" pdfview.method = "sumatra" - runner = function(cmd) - os.execute(cmd) -- .. " > /null" + runner = function(template,variables) + local cmd = replace(template,variables) + -- cmd = cmd .. " > /null" + report("command: %s",cmd) + os.execute(cmd) + end + + expander = function(name) + -- We need to avoid issues with chdir to UNC paths and therefore expand + -- the path when we're current. 
(We could use one of the helpers instead) + if file.pathpart(name) == "" then + return file.collapsepath(file.join(lfs.currentdir(),name)) + else + return name + end end else opencalls = { - ['default'] = "pdfopen", -- we could pass the default here - ['okular'] = 'okular --unique' + ['default'] = [[pdfopen "%filename%"]], + ['okular'] = [[okular --unique "%filename%"]], + ['sumatra'] = [[wine "sumatrapdf.exe" -reuse-instance -bg-color 0xCCCCCC "%filename%"]], + ['pdfxcview'] = [[wine "pdfxcview.exe" /A "nolock=yes=OpenParameters" "%filename%"]], + ['auto'] = [[open "%filename%"]], } closecalls= { - ['default'] = "pdfclose --file", - ['okular'] = false, + ['default'] = [[pdfclose --file "%filename%"]], + ['okular'] = false, + ['sumatra'] = false, + ['auto'] = false, } allcalls = { - ['default'] = "pdfclose --all", - ['okular'] = false, + ['default'] = [[pdfclose --all]], + ['okular'] = false, + ['sumatra'] = false, + ['auto'] = false, } pdfview.method = "okular" + pdfview.method = "sumatra" -- faster and more complete - runner = function(cmd) - os.execute(cmd .. " 1>/dev/null 2>/dev/null &") + runner = function(template,variables) + local cmd = replace(template,variables) + cmd = cmd .. " 1>/dev/null 2>/dev/null &" + report("command: %s",cmd) + os.execute(cmd) + end + + expander = function(name) + return name end end @@ -93,8 +140,6 @@ function pdfview.status() return format("pdfview methods: %s, current method: %s (directives_pdfview_method)",pdfview.methods(),tostring(pdfview.method)) end --- local openedfiles = { } - local function fullname(name) return file.addsuffix(name,"pdf") end @@ -104,10 +149,9 @@ function pdfview.open(...) if opencall then local t = { ... } for i=1,#t do - local name = fullname(t[i]) + local name = expander(fullname(t[i])) if io.exists(name) then - runner(format('%s "%s"', opencall, name)) - -- openedfiles[name] = true + runner(opencall,{ filename = name }) end end end @@ -118,14 +162,10 @@ function pdfview.close(...) if closecall then local t = { ... } for i=1,#t do - local name = fullname(t[i]) - -- if openedfiles[name] then - runner(format('%s "%s"', closecall, name)) - -- openedfiles[name] = nil - -- else - -- pdfview.closeall() - -- break - -- end + local name = expander(fullname(t[i])) + if io.exists(name) then + replace(closecall,{ filename = name }) + end end end end @@ -133,13 +173,8 @@ end function pdfview.closeall() local allcall = allcalls[pdfview.method] if allcall then - runner(format('%s', allcall)) + runner(allcall) end - -- openedfiles = { } end ---~ pdfview.open("t:/document/show-exa.pdf") ---~ os.sleep(3) ---~ pdfview.close("t:/document/show-exa.pdf") - return pdfview diff --git a/tex/context/base/l-string.lua b/tex/context/base/l-string.lua index 77c076cc5..9b079b00a 100644 --- a/tex/context/base/l-string.lua +++ b/tex/context/base/l-string.lua @@ -91,6 +91,8 @@ end local pattern = P(" ")^0 * P(-1) +-- patterns.onlyspaces = pattern + function string.is_empty(str) if str == "" then return true diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua index 9a1b97fff..11cb66bef 100644 --- a/tex/context/base/l-table.lua +++ b/tex/context/base/l-table.lua @@ -16,6 +16,9 @@ local lpegmatch, patterns = lpeg.match, lpeg.patterns local floor = math.floor -- extra functions, some might go (when not used) +-- +-- we could serialize using %a but that won't work well is in the code we mostly use +-- floats and as such we get unequality e.g. 
in version comparisons local stripper = patterns.stripper @@ -343,6 +346,7 @@ local noquotes, hexify, handle, reduce, compact, inline, functions local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if', 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while', + 'NaN', 'goto', } local function simple_table(t) @@ -363,12 +367,12 @@ local function simple_table(t) else tt[nt] = tostring(v) -- tostring not needed end - elseif tv == "boolean" then - nt = nt + 1 - tt[nt] = tostring(v) elseif tv == "string" then nt = nt + 1 tt[nt] = format("%q",v) + elseif tv == "boolean" then + nt = nt + 1 + tt[nt] = v and "true" or "false" else tt = nil break @@ -394,7 +398,8 @@ end -- todo: %g faster on numbers than %s --- we can speed this up with repeaters and formatters (is indeed faster) +-- we can speed this up with repeaters and formatters but we haven't defined them +-- yet local propername = patterns.propername -- was find(name,"^%a[%w%_]*$") @@ -420,7 +425,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s[%q]={",depth,name)) end elseif tn == "boolean" then - handle(format("%s[%s]={",depth,tostring(name))) + handle(format("%s[%s]={",depth,name and "true" or "false")) else handle(format("%s{",depth)) end @@ -456,21 +461,21 @@ local function do_serialize(root,name,depth,level,indexed) --~ if v == root then -- circular --~ else - local t, tk = type(v), type(k) + local tv, tk = type(v), type(k) if compact and first and tk == "number" and k >= first and k <= last then - if t == "number" then + if tv == "number" then if hexify then handle(format("%s 0x%04X,",depth,v)) else handle(format("%s %s,",depth,v)) -- %.99g end - elseif t == "string" then + elseif tv == "string" then if reduce and tonumber(v) then handle(format("%s %s,",depth,v)) else handle(format("%s %q,",depth,v)) end - elseif t == "table" then + elseif tv == "table" then if not next(v) then handle(format("%s {},",depth)) elseif inline then -- and #t > 0 @@ -483,11 +488,11 @@ local function do_serialize(root,name,depth,level,indexed) else do_serialize(v,k,depth,level+1,true) end - elseif t == "boolean" then - handle(format("%s %s,",depth,tostring(v))) - elseif t == "function" then + elseif tv == "boolean" then + handle(format("%s %s,",depth,v and "true" or "false")) + elseif tv == "function" then if functions then - handle(format('%s load(%q),',depth,dump(v))) + handle(format('%s load(%q),',depth,dump(v))) -- maybe strip else handle(format('%s "function",',depth)) end @@ -498,7 +503,7 @@ local function do_serialize(root,name,depth,level,indexed) if false then handle(format("%s __p__=nil,",depth)) end - elseif t == "number" then + elseif tv == "number" then if tk == "number" then if hexify then handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) @@ -507,9 +512,9 @@ local function do_serialize(root,name,depth,level,indexed) end elseif tk == "boolean" then if hexify then - handle(format("%s [%s]=0x%04X,",depth,tostring(k),v)) + handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v)) else - handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) -- %.99g end elseif noquotes and not reserved[k] and lpegmatch(propername,k) then if hexify then @@ -524,7 +529,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g end end - elseif t == "string" then 
+ elseif tv == "string" then if reduce and tonumber(v) then if tk == "number" then if hexify then @@ -533,7 +538,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]=%s,",depth,k,v)) end elseif tk == "boolean" then - handle(format("%s [%s]=%s,",depth,tostring(k),v)) + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=%s,",depth,k,v)) else @@ -547,14 +552,14 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]=%q,",depth,k,v)) end elseif tk == "boolean" then - handle(format("%s [%s]=%q,",depth,tostring(k),v)) + handle(format("%s [%s]=%q,",depth,k and "true" or "false",v)) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=%q,",depth,k,v)) else handle(format("%s [%q]=%q,",depth,k,v)) end end - elseif t == "table" then + elseif tv == "table" then if not next(v) then if tk == "number" then if hexify then @@ -563,7 +568,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]={},",depth,k)) end elseif tk == "boolean" then - handle(format("%s [%s]={},",depth,tostring(k))) + handle(format("%s [%s]={},",depth,k and "true" or "false")) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s={},",depth,k)) else @@ -579,7 +584,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) end elseif tk == "boolean" then - handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", "))) + handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", "))) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s={ %s },",depth,k,concat(st,", "))) else @@ -591,24 +596,24 @@ local function do_serialize(root,name,depth,level,indexed) else do_serialize(v,k,depth,level+1) end - elseif t == "boolean" then + elseif tv == "boolean" then if tk == "number" then if hexify then - handle(format("%s [0x%04X]=%s,",depth,k,tostring(v))) + handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false")) else - handle(format("%s [%s]=%s,",depth,k,tostring(v))) + handle(format("%s [%s]=%s,",depth,k,v and "true" or "false")) end elseif tk == "boolean" then - handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v))) + handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false")) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,tostring(v))) + handle(format("%s %s=%s,",depth,k,v and "true" or "false")) else - handle(format("%s [%q]=%s,",depth,k,tostring(v))) + handle(format("%s [%q]=%s,",depth,k,v and "true" or "false")) end - elseif t == "function" then + elseif tv == "function" then if functions then - local f = getinfo(v).what == "C" and dump(dummy) or dump(v) - -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v) + local f = getinfo(v).what == "C" and dump(dummy) or dump(v) -- maybe strip + -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) 
end) or dump(v) -- maybe strip if tk == "number" then if hexify then handle(format("%s [0x%04X]=load(%q),",depth,k,f)) @@ -616,7 +621,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]=load(%q),",depth,k,f)) end elseif tk == "boolean" then - handle(format("%s [%s]=load(%q),",depth,tostring(k),f)) + handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f)) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=load(%q),",depth,k,f)) else @@ -631,7 +636,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]=%q,",depth,k,tostring(v))) end elseif tk == "boolean" then - handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v))) + handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v))) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=%q,",depth,k,tostring(v))) else @@ -713,321 +718,10 @@ local function serialize(_handle,root,name,specification) -- handle wins handle("}") end --- -- This is some 20% faster than using format (because formatters are much faster) but --- -- of course, inlining the format using .. is then again faster .. anyway, as we do --- -- some pretty printing as well there is not that much to gain unless we make a 'fast' --- -- ugly variant as well. But, we would have to move the formatter to l-string then. - --- local formatters = string.formatters - --- local function do_serialize(root,name,level,indexed) --- if level > 0 then --- if indexed then --- handle(formatters["%w{"](level)) --- else --- local tn = type(name) --- if tn == "number" then --- if hexify then --- handle(formatters["%w[%04H]={"](level,name)) --- else --- handle(formatters["%w[%s]={"](level,name)) --- end --- elseif tn == "string" then --- if noquotes and not reserved[name] and lpegmatch(propername,name) then --- handle(formatters["%w%s={"](level,name)) --- else --- handle(formatters["%w[%q]={"](level,name)) --- end --- elseif tn == "boolean" then --- handle(formatters["%w[%S]={"](level,name)) --- else --- handle(formatters["%w{"](level)) --- end --- end --- end --- -- we could check for k (index) being number (cardinal) --- if root and next(root) then --- -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) --- -- if compact then --- -- -- NOT: for k=1,#root do (we need to quit at nil) --- -- for k,v in ipairs(root) do -- can we use next? 
--- -- if not first then first = k end --- -- last = last + 1 --- -- end --- -- end --- local first, last = nil, 0 --- if compact then --- last = #root --- for k=1,last do --- if root[k] == nil then --- last = k - 1 --- break --- end --- end --- if last > 0 then --- first = 1 --- end --- end --- local sk = sortedkeys(root) --- for i=1,#sk do --- local k = sk[i] --- local v = root[k] --- --~ if v == root then --- -- circular --- --~ else --- local t, tk = type(v), type(k) --- if compact and first and tk == "number" and k >= first and k <= last then --- if t == "number" then --- if hexify then --- handle(formatters["%w %04H,"](level,v)) --- else --- handle(formatters["%w %s,"](level,v)) -- %.99g --- end --- elseif t == "string" then --- if reduce and tonumber(v) then --- handle(formatters["%w %s,"](level,v)) --- else --- handle(formatters["%w %q,"](level,v)) --- end --- elseif t == "table" then --- if not next(v) then --- handle(formatters["%w {},"](level)) --- elseif inline then -- and #t > 0 --- local st = simple_table(v) --- if st then --- handle(formatters["%w { %, t },"](level,st)) --- else --- do_serialize(v,k,level+1,true) --- end --- else --- do_serialize(v,k,level+1,true) --- end --- elseif t == "boolean" then --- handle(formatters["%w %S,"](level,v)) --- elseif t == "function" then --- if functions then --- handle(formatters['%w load(%q),'](level,dump(v))) --- else --- handle(formatters['%w "function",'](level)) --- end --- else --- handle(formatters["%w %Q,"](level,v)) --- end --- elseif k == "__p__" then -- parent --- if false then --- handle(formatters["%w __p__=nil,"](level)) --- end --- elseif t == "number" then --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=%04H,"](level,k,v)) --- else --- handle(formatters["%w [%s]=%s,"](level,k,v)) -- %.99g --- end --- elseif tk == "boolean" then --- if hexify then --- handle(formatters["%w [%S]=%04H,"](level,k,v)) --- else --- handle(formatters["%w [%S]=%s,"](level,k,v)) -- %.99g --- end --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- if hexify then --- handle(formatters["%w %s=%04H,"](level,k,v)) --- else --- handle(formatters["%w %s=%s,"](level,k,v)) -- %.99g --- end --- else --- if hexify then --- handle(formatters["%w [%q]=%04H,"](level,k,v)) --- else --- handle(formatters["%w [%q]=%s,"](level,k,v)) -- %.99g --- end --- end --- elseif t == "string" then --- if reduce and tonumber(v) then --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=%s,"](level,k,v)) --- else --- handle(formatters["%w [%s]=%s,"](level,k,v)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]=%s,"](level,k,v)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s=%s,"](level,k,v)) --- else --- handle(formatters["%w [%q]=%s,"](level,k,v)) --- end --- else --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=%q,"](level,k,v)) --- else --- handle(formatters["%w [%s]=%q,"](level,k,v)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]=%q,"](level,k,v)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s=%q,"](level,k,v)) --- else --- handle(formatters["%w [%q]=%q,"](level,k,v)) --- end --- end --- elseif t == "table" then --- if not next(v) then --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]={},"](level,k)) --- else --- handle(formatters["%w [%s]={},"](level,k)) --- end --- elseif tk == 
"boolean" then --- handle(formatters["%w [%S]={},"](level,k)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s={},"](level,k)) --- else --- handle(formatters["%w [%q]={},"](level,k)) --- end --- elseif inline then --- local st = simple_table(v) --- if st then --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]={ %, t },"](level,k,st)) --- else --- handle(formatters["%w [%s]={ %, t },"](level,k,st)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]={ %, t },"](level,k,st)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s={ %, t },"](level,k,st)) --- else --- handle(formatters["%w [%q]={ %, t },"](level,k,st)) --- end --- else --- do_serialize(v,k,level+1) --- end --- else --- do_serialize(v,k,level+1) --- end --- elseif t == "boolean" then --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=%S,"](level,k,v)) --- else --- handle(formatters["%w [%s]=%S,"](level,k,v)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]=%S,"](level,k,v)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s=%S,"](level,k,v)) --- else --- handle(formatters["%w [%q]=%S,"](level,k,v)) --- end --- elseif t == "function" then --- if functions then --- local f = getinfo(v).what == "C" and dump(dummy) or dump(v) --- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v) --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=load(%q),"](level,k,f)) --- else --- handle(formatters["%w [%s]=load(%q),"](level,k,f)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]=load(%q),"](level,k,f)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s=load(%q),"](level,k,f)) --- else --- handle(formatters["%w [%q]=load(%q),"](level,k,f)) --- end --- end --- else --- if tk == "number" then --- if hexify then --- handle(formatters["%w [%04H]=%Q,"](level,k,v)) --- else --- handle(formatters["%w [%s]=%Q,"](level,k,v)) --- end --- elseif tk == "boolean" then --- handle(formatters["%w [%S]=%Q,"](level,k,v)) --- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then --- handle(formatters["%w %s=%Q,"](level,k,v)) --- else --- handle(formatters["%w [%q]=%Q,"](level,k,v)) --- end --- end --- --~ end --- end --- end --- if level > 0 then --- handle(formatters["%w}"](level)) --- end --- end - --- local function serialize(_handle,root,name,specification) -- handle wins --- local tname = type(name) --- if type(specification) == "table" then --- noquotes = specification.noquotes --- hexify = specification.hexify --- handle = _handle or specification.handle or print --- reduce = specification.reduce or false --- functions = specification.functions --- compact = specification.compact --- inline = specification.inline and compact --- if functions == nil then --- functions = true --- end --- if compact == nil then --- compact = true --- end --- if inline == nil then --- inline = compact --- end --- else --- noquotes = false --- hexify = false --- handle = _handle or print --- reduce = false --- compact = true --- inline = true --- functions = true --- end --- if tname == "string" then --- if name == "return" then --- handle("return {") --- else --- handle(name .. 
"={") --- end --- elseif tname == "number" then --- if hexify then --- handle(format("[0x%04X]={",name)) --- else --- handle("[" .. name .. "]={") --- end --- elseif tname == "boolean" then --- if name then --- handle("return {") --- else --- handle("{") --- end --- else --- handle("t={") --- end --- if root then --- -- The dummy access will initialize a table that has a delayed initialization --- -- using a metatable. (maybe explicitly test for metatable) --- if getmetatable(root) then -- todo: make this an option, maybe even per subtable --- local dummy = root._w_h_a_t_e_v_e_r_ --- root._w_h_a_t_e_v_e_r_ = nil --- end --- -- Let's forget about empty tables. --- if next(root) then --- do_serialize(root,name,0) --- end --- end --- handle("}") --- end +-- A version with formatters is some 20% faster than using format (because formatters are +-- much faster) but of course, inlining the format using .. is then again faster .. anyway, +-- as we do some pretty printing as well there is not that much to gain unless we make a +-- 'fast' ugly variant as well. But, we would have to move the formatter to l-string then. -- name: -- diff --git a/tex/context/base/l-unicode.lua b/tex/context/base/l-unicode.lua index 813ffd54b..7ada394d5 100644 --- a/tex/context/base/l-unicode.lua +++ b/tex/context/base/l-unicode.lua @@ -25,7 +25,7 @@ utf.values = utf.values or string.utfvalues -- string.bytepairs local type = type -local char, byte, format, sub = string.char, string.byte, string.format, string.sub +local char, byte, format, sub, gmatch = string.char, string.byte, string.format, string.sub, string.gmatch local concat = table.concat local P, C, R, Cs, Ct, Cmt, Cc, Carg, Cp = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Carg, lpeg.Cp local lpegmatch, patterns = lpeg.match, lpeg.patterns @@ -38,13 +38,14 @@ local replacer = lpeg.replacer local utfvalues = utf.values local utfgmatch = utf.gmatch -- not always present -local p_utftype = patterns.utftype -local p_utfoffset = patterns.utfoffset -local p_utf8char = patterns.utf8char -local p_utf8byte = patterns.utf8byte -local p_utfbom = patterns.utfbom -local p_newline = patterns.newline -local p_whitespace = patterns.whitespace +local p_utftype = patterns.utftype +local p_utfstricttype = patterns.utfstricttype +local p_utfoffset = patterns.utfoffset +local p_utf8char = patterns.utf8char +local p_utf8byte = patterns.utf8byte +local p_utfbom = patterns.utfbom +local p_newline = patterns.newline +local p_whitespace = patterns.whitespace if not unicode then @@ -621,116 +622,273 @@ function utf.magic(f) -- not used return lpegmatch(p_utftype,str) end -local function utf16_to_utf8_be(t) - if type(t) == "string" then - t = lpegmatch(utflinesplitter,t) - end - local result = { } -- we reuse result - for i=1,#t do - local r, more = 0, 0 - for left, right in bytepairs(t[i]) do - if right then - local now = 256*left + right - if more > 0 then - now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong - more = 0 - r = r + 1 - result[r] = utfchar(now) - elseif now >= 0xD800 and now <= 0xDBFF then - more = now - else - r = r + 1 - result[r] = utfchar(now) +local utf16_to_utf8_be, utf16_to_utf8_le +local utf32_to_utf8_be, utf32_to_utf8_le + +local utf_16_be_linesplitter = patterns.utfbom_16_be^-1 * lpeg.tsplitat(patterns.utf_16_be_nl) +local utf_16_le_linesplitter = patterns.utfbom_16_le^-1 * lpeg.tsplitat(patterns.utf_16_le_nl) + +-- we have three possibilities: + +-- bytepairs: 0.048 +-- gmatch : 0.069 +-- lpeg : 0.089 (match time 
captures) + +if bytepairs then + + -- with a little bit more code we could include the linesplitter + + utf16_to_utf8_be = function(t) + if type(t) == "string" then + t = lpegmatch(utf_16_be_linesplitter,t) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, 0 + for left, right in bytepairs(t[i]) do + if right then + local now = 256*left + right + if more > 0 then + now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong + more = 0 + r = r + 1 + result[r] = utfchar(now) + elseif now >= 0xD800 and now <= 0xDBFF then + more = now + else + r = r + 1 + result[r] = utfchar(now) + end end end + t[i] = concat(result,"",1,r) -- we reused tmp, hence t end - t[i] = concat(result,"",1,r) -- we reused tmp, hence t + return t end - return t -end -local function utf16_to_utf8_le(t) - if type(t) == "string" then - t = lpegmatch(utflinesplitter,t) + utf16_to_utf8_le = function(t) + if type(t) == "string" then + t = lpegmatch(utf_16_le_linesplitter,t) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, 0 + for left, right in bytepairs(t[i]) do + if right then + local now = 256*right + left + if more > 0 then + now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong + more = 0 + r = r + 1 + result[r] = utfchar(now) + elseif now >= 0xD800 and now <= 0xDBFF then + more = now + else + r = r + 1 + result[r] = utfchar(now) + end + end + end + t[i] = concat(result,"",1,r) -- we reused tmp, hence t + end + return t end - local result = { } -- we reuse result - for i=1,#t do - local r, more = 0, 0 - for left, right in bytepairs(t[i]) do - if right then - local now = 256*right + left - if more > 0 then - now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong - more = 0 - r = r + 1 - result[r] = utfchar(now) - elseif now >= 0xD800 and now <= 0xDBFF then - more = now + + utf32_to_utf8_be = function(t) + if type(t) == "string" then + t = lpegmatch(utflinesplitter,t) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, -1 + for a,b in bytepairs(t[i]) do + if a and b then + if more < 0 then + more = 256*256*256*a + 256*256*b + else + r = r + 1 + result[t] = utfchar(more + 256*a + b) + more = -1 + end else - r = r + 1 - result[r] = utfchar(now) + break end end + t[i] = concat(result,"",1,r) end - t[i] = concat(result,"",1,r) -- we reused tmp, hence t + return t end - return t -end -local function utf32_to_utf8_be(t) - if type(t) == "string" then - t = lpegmatch(utflinesplitter,t) - end - local result = { } -- we reuse result - for i=1,#t do - local r, more = 0, -1 - for a,b in bytepairs(t[i]) do - if a and b then - if more < 0 then - more = 256*256*256*a + 256*256*b + utf32_to_utf8_le = function(t) + if type(t) == "string" then + t = lpegmatch(utflinesplitter,t) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, -1 + for a,b in bytepairs(t[i]) do + if a and b then + if more < 0 then + more = 256*b + a + else + r = r + 1 + result[t] = utfchar(more + 256*256*256*b + 256*256*a) + more = -1 + end else - r = r + 1 - result[t] = utfchar(more + 256*a + b) - more = -1 + break end - else - break end + t[i] = concat(result,"",1,r) end - t[i] = concat(result,"",1,r) + return t end - return t -end -local function utf32_to_utf8_le(t) - if type(t) == "string" then - t = lpegmatch(utflinesplitter,t) +else + + utf16_to_utf8_be = function(t) + if type(t) == "string" then + t = lpegmatch(utf_16_be_linesplitter,t) + end + local result = { } -- we 
reuse result + for i=1,#t do + local r, more = 0, 0 + for left, right in gmatch(t[i],"(.)(.)") do + if left == "\000" then -- experiment + r = r + 1 + result[r] = utfchar(byte(right)) + elseif right then + local now = 256*byte(left) + byte(right) + if more > 0 then + now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong + more = 0 + r = r + 1 + result[r] = utfchar(now) + elseif now >= 0xD800 and now <= 0xDBFF then + more = now + else + r = r + 1 + result[r] = utfchar(now) + end + end + end + t[i] = concat(result,"",1,r) -- we reused tmp, hence t + end + return t end - local result = { } -- we reuse result - for i=1,#t do - local r, more = 0, -1 - for a,b in bytepairs(t[i]) do - if a and b then - if more < 0 then - more = 256*b + a - else + + utf16_to_utf8_le = function(t) + if type(t) == "string" then + t = lpegmatch(utf_16_le_linesplitter,t) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, 0 + for left, right in gmatch(t[i],"(.)(.)") do + if right == "\000" then r = r + 1 - result[t] = utfchar(more + 256*256*256*b + 256*256*a) - more = -1 + result[r] = utfchar(byte(left)) + elseif right then + local now = 256*byte(right) + byte(left) + if more > 0 then + now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong + more = 0 + r = r + 1 + result[r] = utfchar(now) + elseif now >= 0xD800 and now <= 0xDBFF then + more = now + else + r = r + 1 + result[r] = utfchar(now) + end end - else - break end + t[i] = concat(result,"",1,r) -- we reused tmp, hence t end - t[i] = concat(result,"",1,r) + return t end - return t + + utf32_to_utf8_le = function() return { } end -- never used anyway + utf32_to_utf8_be = function() return { } end -- never used anyway + + -- the next one is slighty slower + + -- local result, lines, r, more = { }, { }, 0, 0 + -- + -- local simple = Cmt( + -- C(1) * C(1), function(str,p,left,right) + -- local now = 256*byte(left) + byte(right) + -- if more > 0 then + -- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong + -- more = 0 + -- r = r + 1 + -- result[r] = utfchar(now) + -- elseif now >= 0xD800 and now <= 0xDBFF then + -- more = now + -- else + -- r = r + 1 + -- result[r] = utfchar(now) + -- end + -- return p + -- end + -- ) + -- + -- local complex = Cmt( + -- C(1) * C(1), function(str,p,left,right) + -- local now = 256*byte(left) + byte(right) + -- if more > 0 then + -- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong + -- more = 0 + -- r = r + 1 + -- result[r] = utfchar(now) + -- elseif now >= 0xD800 and now <= 0xDBFF then + -- more = now + -- else + -- r = r + 1 + -- result[r] = utfchar(now) + -- end + -- return p + -- end + -- ) + -- + -- local lineend = Cmt ( + -- patterns.utf_16_be_nl, function(str,p) + -- lines[#lines+1] = concat(result,"",1,r) + -- r, more = 0, 0 + -- return p + -- end + -- ) + -- + -- local be_1 = patterns.utfbom_16_be^-1 * (simple + complex)^0 + -- local be_2 = patterns.utfbom_16_be^-1 * (lineend + simple + complex)^0 + -- + -- utf16_to_utf8_be = function(t) + -- if type(t) == "string" then + -- local s = t + -- lines, r, more = { }, 0, 0 + -- lpegmatch(be_2,s) + -- if r > 0 then + -- lines[#lines+1] = concat(result,"",1,r) + -- end + -- result = { } + -- return lines + -- else + -- for i=1,#t do + -- r, more = 0, 0 + -- lpegmatch(be_1,t[i]) + -- t[i] = concat(result,"",1,r) + -- end + -- result = { } + -- return t + -- end + -- end + end -utf.utf32_to_utf8_be = utf32_to_utf8_be -utf.utf32_to_utf8_le = 
utf32_to_utf8_le -utf.utf16_to_utf8_be = utf16_to_utf8_be utf.utf16_to_utf8_le = utf16_to_utf8_le +utf.utf16_to_utf8_be = utf16_to_utf8_be +utf.utf32_to_utf8_le = utf32_to_utf8_le +utf.utf32_to_utf8_be = utf32_to_utf8_be function utf.utf8_to_utf8(t) return type(t) == "string" and lpegmatch(utflinesplitter,t) or t @@ -777,11 +935,19 @@ end local _, l_remap = utf.remapper(little) local _, b_remap = utf.remapper(big) +function utf.utf8_to_utf16_be(str) + return char(254,255) .. lpegmatch(b_remap,str) +end + +function utf.utf8_to_utf16_le(str) + return char(255,254) .. lpegmatch(l_remap,str) +end + function utf.utf8_to_utf16(str,littleendian) if littleendian then - return char(255,254) .. lpegmatch(l_remap,str) + return utf.utf8_to_utf16_le(str) else - return char(254,255) .. lpegmatch(b_remap,str) + return utf.utf8_to_utf16_be(str) end end @@ -811,6 +977,22 @@ function utf.xstring(s) return format("0x%05X",type(s) == "number" and s or utfbyte(s)) end +function utf.toeight(str) + if not str then + return nil + end + local utftype = lpegmatch(p_utfstricttype,str) + if utftype == "utf-8" then + return sub(str,4) + elseif utftype == "utf-16-le" then + return utf16_to_utf8_le(str) + elseif utftype == "utf-16-be" then + return utf16_to_utf8_ne(str) + else + return str + end +end + -- local p_nany = p_utf8char / "" diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua index 4624a0507..7b7910fa7 100644 --- a/tex/context/base/l-url.lua +++ b/tex/context/base/l-url.lua @@ -79,12 +79,18 @@ setmetatable(escapes, { __index = function(t,k) return v end }) -local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most -local unescaper = Cs((escapedchar + 1)^0) +local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most +local unescaper = Cs((escapedchar + 1)^0) +local getcleaner = Cs((P("+++") / "%%2B" + P("+") / "%%20" + P(1))^1) -lpegpatterns.urlunescaped = escapedchar -lpegpatterns.urlescaper = escaper -lpegpatterns.urlunescaper = unescaper +lpegpatterns.urlunescaped = escapedchar +lpegpatterns.urlescaper = escaper +lpegpatterns.urlunescaper = unescaper +lpegpatterns.urlgetcleaner = getcleaner + +function url.unescapeget(str) + return lpegmatch(getcleaner,str) +end -- todo: reconsider Ct as we can as well have five return values (saves a table) -- so we can have two parsers, one with and one without diff --git a/tex/context/base/lang-def.mkiv b/tex/context/base/lang-def.mkiv index 9f84e90f0..ecd5b3c22 100644 --- a/tex/context/base/lang-def.mkiv +++ b/tex/context/base/lang-def.mkiv @@ -219,10 +219,9 @@ \c!rightquote=\upperrightsinglesixquote, \c!leftquotation=\lowerleftdoubleninequote, \c!rightquotation=\upperrightdoublesixquote, - \c!date={\v!day,\fourperemspace,\v!month,\space,\v!year} + \c!date={\v!day,{.\fourperemspace},\v!month,\space,\v!year}, \s!lefthyphenmin=2, - \s!righthyphenmin=3 -] + \s!righthyphenmin=3] \installlanguage [\s!sk] @@ -235,10 +234,9 @@ \c!rightquote=\upperrightsinglesixquote, \c!leftquotation=\lowerleftdoubleninequote, \c!rightquotation=\upperrightdoublesixquote, - \c!date={\v!day,\fourperemspace,\v!month,\space,\v!year} + \c!date={\v!day,{.\fourperemspace},\v!month,\space,\v!year}, \s!lefthyphenmin=2, - \s!righthyphenmin=3 -] + \s!righthyphenmin=3] \installlanguage [\s!hr] @@ -271,7 +269,7 @@ \installlanguage [\s!slovak] [\s!sk] \installlanguage [\s!croatian] [\s!hr] \installlanguage [\s!slovenian] [\s!sl] -\installlanguage [slovene] [\s!sl] % both possible 
(mojca: still needed?) +\installlanguage [slovene] [\s!sl] % both possible (mojca: still needed?) \def\doconvertsloveniancharacters{\dodoconvertcharacters{25}} @@ -400,7 +398,7 @@ \c!rightquote=\upperrightsingleninequote, \c!leftquotation=\upperleftdoublesixquote, \c!rightquotation=\upperrightdoubleninequote, - \c!date={\v!year,\space,\v!month,\space,\v!day} + \c!date={\v!year,\space,\v!month,\space,\v!day}, \s!patterns=\s!tk, \s!lefthyphenmin=1, \s!righthyphenmin=2] diff --git a/tex/context/base/lang-ini.mkiv b/tex/context/base/lang-ini.mkiv index 1297fe23e..17d00033b 100644 --- a/tex/context/base/lang-ini.mkiv +++ b/tex/context/base/lang-ini.mkiv @@ -374,15 +374,24 @@ \fi \lang_basics_synchronize_min_max} -\unexpanded\def\nohyphens % % % % % not clever, we still hyphenate but supress application +% \unexpanded\def\nohyphens % % % % % not clever, we still hyphenate but supress application +% {\ifx\dohyphens\relax +% \unexpanded\edef\dohyphens +% {\hyphenpenalty \the\hyphenpenalty +% \exhyphenpenalty\the\exhyphenpenalty +% \relax}% +% \fi +% \hyphenpenalty \plustenthousand +% \exhyphenpenalty\plustenthousand} + +\unexpanded\def\nohyphens % nicer for url's {\ifx\dohyphens\relax \unexpanded\edef\dohyphens - {\hyphenpenalty \the\hyphenpenalty - \exhyphenpenalty\the\exhyphenpenalty - \relax}% + {\hyphenminoffset\the\hyphenminoffset\relax + \lang_basics_synchronize_min_max}% \fi - \hyphenpenalty \plustenthousand - \exhyphenpenalty\plustenthousand} + \hyphenminoffset\plusthousand + \lang_basics_synchronize_min_max} \let\dohyphens\relax @@ -492,6 +501,10 @@ \fi \mainlanguagenumber\normallanguage} +\appendtoks + \normallanguage\mainlanguagenumber +\to \everybeforepagebody + %D New (see nomarking and nolist): \def\splitsequence#1#2% diff --git a/tex/context/base/lang-rep.lua b/tex/context/base/lang-rep.lua new file mode 100644 index 000000000..31ae36e6d --- /dev/null +++ b/tex/context/base/lang-rep.lua @@ -0,0 +1,189 @@ +if not modules then modules = { } end modules ['lang-rep'] = { + version = 1.001, + comment = "companion to lang-rep.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- A BachoTeX 2013 experiment, probably not that useful. Eventually I used a simpler +-- more generic example. 
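Before the module body: a stripped-down sketch of the data structure it builds, a trie whose nodes are nested tables and whose final node carries the replacement record. The sketch is mine and works on plain byte strings for brevity; the module itself keys nodes by utf codepoints (via utf.split) and walks glyph nodes rather than strings.

-- replacement trie, illustration only (bytes instead of utf codepoints)
local function add(root,word,replacement)
    local node = root
    for i=1,#word do
        local c = word:byte(i)
        node[c] = node[c] or { }
        node = node[c]
    end
    node.final = { word = word, replacement = replacement }
end

local function lookup(root,word) -- longest match from the start of word
    local node, hit = root, nil
    for i=1,#word do
        node = node[word:byte(i)]
        if not node then break end
        if node.final then hit = node.final end
    end
    return hit and hit.replacement
end

local tree = { }
add(tree,"colour","color")
print(lookup(tree,"colours"))    -- color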
+ +local utfbyte, utfsplit = utf.byte, utf.split + +local trace_replacements = false trackers.register("languages.replacements", function(v) trace_replacements = v end) +local trace_detail = false trackers.register("languages.replacements.detail", function(v) trace_detail = v end) + +local report_replacement = logs.reporter("languages","replacements") + +local glyph_code = nodes.nodecodes.glyph + +local insert_node_before = nodes.insert_before +local remove_node = nodes.remove +local copy_node = nodes.copy + +local texsetattribute = tex.setattribute +local unsetvalue = attributes.unsetvalue + +local v_reset = interfaces.variables.reset + +local replacements = languages.replacements or { } +languages.replacements = replacements + +local a_replacements = attributes.private("replacements") + +local lists = { } +local last = 0 +local trees = { } + +table.setmetatableindex(lists,function(lists,name) + last = last + 1 + local list = { } + local data = { name = name, list = list, attribute = last } + lists[last] = data + lists[name] = data + trees[last] = list + return data +end) + +local function add(root,word,replacement) + local list = utfsplit(word,true) + for i=1,#list do + local l = utfbyte(list[i]) + if not root[l] then + root[l] = { } + end + if i == #list then + local newlist = utfsplit(replacement,true) + for i=1,#newlist do + newlist[i] = utfbyte(newlist[i]) + end + root[l].final = { + word = word, + replacement = replacement, + oldlength = #list, + newcodes = newlist, + } + end + root = root[l] + end +end + +function replacements.add(category,word,replacement) + local root = lists[category].list + if type(word) == "table" then + for word, replacement in next, word do + add(root,word,replacement) + end + else + add(root,word,replacement or "") + end +end + +local function hit(a,head) + local tree = trees[a] + if tree then + local root = tree[head.char] + if root then + local current = head.next + local lastrun = false + local lastfinal = false + while current and current.id == glyph_code do + local newroot = root[current.char] + if not newroot then + return lastrun, lastfinal + else + local final = newroot.final + if final then + if trace_detail then + report_replacement("hitting word %a, replacement %a",final.word,final.replacement) + end + lastrun = current + lastfinal = final + else + root = newroot + end + end + current = current.next + end + if lastrun then + return lastrun, lastfinal + end + end + end +end + +function replacements.handler(head) + local current = head + local done = false + while current do + if current.id == glyph_code then + local a = getattr(current,a_replacements) + if a then + local last, final = hit(a,current) + if last then + local oldlength = final.oldlength + local newcodes = final.newcodes + local newlength = #newcodes + if report_replacement then + report_replacement("replacing word %a by %a",final.word,final.replacement) + end + if oldlength == newlength then -- #old == #new + for i=1,newlength do + current.char = newcodes[i] + current = current.next + end + elseif oldlength < newlength then -- #old < #new + for i=1,newlength-oldlength do + local n = copy_node(current) + n.char = newcodes[i] + head, current = insert_node_before(head,current,n) + current = current.next + end + for i=newlength-oldlength+1,newlength do + current.char = newcodes[i] + current = current.next + end + else -- #old > #new + for i=1,oldlength-newlength do + head, current = remove_node(head,current,true) + end + for i=1,newlength do + current.char = newcodes[i] + current = 
current.next + end + end + done = true + end + end + end + current = current.next + end + return head, done +end + +local enabled = false + +function replacements.set(n) -- number or 'reset' + if n == v_reset then + n = unsetvalue + else + n = lists[n].attribute + if not enabled then + nodes.tasks.enableaction("processors","languages.replacements.handler") + if trace_replacements then + report_replacement("enabling replacement handler") + end + enabled = true + end + end + texsetattribute(a_replacements,n) +end + +-- interface + +commands.setreplacements = replacements.set +commands.addreplacements = replacements.add + +nodes.tasks.prependaction("processors","words","languages.replacements.handler") +nodes.tasks.disableaction("processors","languages.replacements.handler") diff --git a/tex/context/base/lang-url.lua b/tex/context/base/lang-url.lua index 35381e672..4ed5cdea1 100644 --- a/tex/context/base/lang-url.lua +++ b/tex/context/base/lang-url.lua @@ -8,11 +8,11 @@ if not modules then modules = { } end modules ['lang-url'] = { local utfcharacters, utfvalues, utfbyte, utfchar = utf.characters, utf.values, utf.byte, utf.char -context = context - commands = commands or { } local commands = commands +context = context + --[[

Hyphenating 's is somewhat tricky and a matter of taste. I did consider using a dedicated hyphenation pattern or dealing with it by node diff --git a/tex/context/base/lang-wrd.lua b/tex/context/base/lang-wrd.lua index 06a2311a6..bf066fc09 100644 --- a/tex/context/base/lang-wrd.lua +++ b/tex/context/base/lang-wrd.lua @@ -334,17 +334,17 @@ end -- for the moment we hook it into the attribute handler ---~ languagehacks = { } +-- languagehacks = { } ---~ function languagehacks.process(namespace,attribute,head) ---~ return languages.check(head) ---~ end +-- function languagehacks.process(namespace,attribute,head) +-- return languages.check(head) +-- end ---~ chars.plugins[chars.plugins+1] = { ---~ name = "language", ---~ namespace = languagehacks, ---~ processor = languagehacks.process ---~ } +-- chars.plugins[chars.plugins+1] = { +-- name = "language", +-- namespace = languagehacks, +-- processor = languagehacks.process +-- } -- interface diff --git a/tex/context/base/layo-ini.lua b/tex/context/base/layo-ini.lua index 56ced2c0b..d35d7ef69 100644 --- a/tex/context/base/layo-ini.lua +++ b/tex/context/base/layo-ini.lua @@ -6,17 +6,10 @@ if not modules then modules = { } end modules ['layo-ini'] = { license = "see context related readme files" } --- We need to share information between the TeX and Lua end --- about the typographical model. This happens here. --- --- Code might move. +-- We need to share information between the TeX and Lua end about the typographical +-- model. This happens here. This code might move. --- conditionals.layoutisdoublesided --- conditionals.layoutissinglesided --- texcount.pagenoshift --- texcount.realpageno - -local texcount = tex.count +local texgetcount = tex.getcount local conditionals = tex.conditionals layouts = { @@ -33,14 +26,14 @@ function status.leftorrightpagection(left,right) return left, right elseif conditionals.layoutissinglesided then return left, right - elseif texcount.pagenoshift % 2 == 0 then - if texcount.realpageno % 2 == 0 then + elseif texgetcount("pagenoshift") % 2 == 0 then + if texgetcount("realpageno") % 2 == 0 then return right, left else return left, right end else - if texcount.realpageno % 2 == 0 then + if texgetcount("realpageno") % 2 == 0 then return left, right else return right, left @@ -53,9 +46,9 @@ function status.isleftpage() return false elseif conditionals.layoutissinglesided then return false - elseif texcount.pagenoshift % 2 == 0 then - return texcount.realpageno % 2 == 0 + elseif texgetcount("pagenoshift") % 2 == 0 then + return texgetcount("realpageno") % 2 == 0 else - return not texcount.realpageno % 2 == 0 + return not texgetcount("realpageno") % 2 == 0 end end diff --git a/tex/context/base/lpdf-ano.lua b/tex/context/base/lpdf-ano.lua index adfea3812..3f0e718b3 100644 --- a/tex/context/base/lpdf-ano.lua +++ b/tex/context/base/lpdf-ano.lua @@ -12,7 +12,6 @@ if not modules then modules = { } end modules ['lpdf-ano'] = { local next, tostring = next, tostring local rep, format = string.rep, string.format -local texcount = tex.count local lpegmatch = lpeg.match local formatters = string.formatters @@ -52,6 +51,8 @@ local pdfannotation_node = nodepool.pdfannotation local pdfdestination_node = nodepool.pdfdestination local latelua_node = nodepool.latelua +local texgetcount = tex.getcount + local pdfdictionary = lpdf.dictionary local pdfarray = lpdf.array local pdfreference = lpdf.reference @@ -334,13 +335,38 @@ end -- runners and specials +-- runners["inner"] = function(var,actions) +-- if getinnermethod() == "names" then +-- 
local vi = var.i +-- if vi then +-- local vir = vi.references +-- if vir then +-- local internal = vir.internal +-- if internal then +-- var.inner = "aut:" .. internal +-- end +-- end +-- end +-- else +-- var.inner = nil +-- end +-- local prefix = var.p +-- local inner = var.inner +-- if inner and prefix and prefix ~= "" then +-- inner = prefix .. ":" .. inner -- might not always be ok +-- end +-- return link(nil,nil,inner,var.r,actions) +-- end + runners["inner"] = function(var,actions) + local internal = false if getinnermethod() == "names" then local vi = var.i if vi then local vir = vi.references if vir then - local internal = vir.internal + -- todo: no need for it when we have a real reference + internal = vir.internal if internal then var.inner = "aut:" .. internal end @@ -351,8 +377,9 @@ runners["inner"] = function(var,actions) end local prefix = var.p local inner = var.inner - if inner and prefix and prefix ~= "" then - inner = prefix .. ":" .. inner -- might not always be ok + if not internal and inner and prefix and prefix ~= "" then + -- no prefix with e.g. components + inner = prefix .. ":" .. inner end return link(nil,nil,inner,var.r,actions) end @@ -486,7 +513,7 @@ end function specials.deltapage(var,actions) local p = tonumber(var.operation) if p then - p = references.checkedrealpage(p + texcount.realpageno) + p = references.checkedrealpage(p + texgetcount("realpageno")) return link(nil,nil,nil,p,actions) end end diff --git a/tex/context/base/lpdf-epa.lua b/tex/context/base/lpdf-epa.lua index 034e6d7e2..61d57b8d3 100644 --- a/tex/context/base/lpdf-epa.lua +++ b/tex/context/base/lpdf-epa.lua @@ -15,11 +15,12 @@ local formatters = string.formatters ----- lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns -local trace_links = false trackers.register("figures.links", function(v) trace_links = v end) +local trace_links = false trackers.register("figures.links", function(v) trace_links = v end) +local report_link = logs.reporter("backend","merging") -local report_link = logs.reporter("backend","merging") - -local backends, lpdf = backends, lpdf +local backends = backends +local lpdf = lpdf +local context = context local variables = interfaces.variables local codeinjections = backends.pdf.codeinjections diff --git a/tex/context/base/lpdf-fmt.lua b/tex/context/base/lpdf-fmt.lua index 94c005f65..b444f03c3 100644 --- a/tex/context/base/lpdf-fmt.lua +++ b/tex/context/base/lpdf-fmt.lua @@ -36,7 +36,7 @@ local pdfstring = lpdf.string local pdfverbose = lpdf.verbose local pdfflushstreamfileobject = lpdf.flushstreamfileobject -local texset = tex.set -- we could make tex.setglobal +local texset = tex.set local addtoinfo = lpdf.addtoinfo local injectxmpinfo = lpdf.injectxmpinfo diff --git a/tex/context/base/lpdf-nod.lua b/tex/context/base/lpdf-nod.lua index 60d3fcd5b..6b104d2fa 100644 --- a/tex/context/base/lpdf-nod.lua +++ b/tex/context/base/lpdf-nod.lua @@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['lpdf-nod'] = { license = "see context related readme files" } -local format = string.format +local formatters = string.formatters local copy_node = node.copy local new_node = node.new @@ -59,7 +59,41 @@ end function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) local t = copy_node(pdfsetmatrix) - t.data = format("%s %s %s %s",rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty + t.data = formatters["%s %s %s %s"](rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty + return t +end + +function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) + local t = copy_node(pdfsetmatrix) + if type(rx) == 
"string" then + t.data = rx + else + if not rx then + rx = 1 + elseif rx == 0 then + rx = 0.0001 + end + if not ry then + ry = 1 + elseif ry == 0 then + ry = 0.0001 + end + if not sx then + sx = 0 + end + if not sy then + sy = 0 + end + if sx == 0 and sy == 0 then + if rx == 1 and ry == 1 then + t.data = "1 0 0 1" + else + t.data = formatters["%0.6f 0 0 %0.6f"](rx,ry) + end + else + t.data = formatters["%0.6f %0.6f %0.6f %0.6f"](rx,sx,sy,ry) + end + end return t end @@ -127,8 +161,12 @@ function nodepool.pdfdestination(w,h,d,name,view,n) local m = copy_node(pdfsetmatrix) local r = copy_node(pdfrestore) m.data = "1 0 0 1" - s.next = m m.next = t t.next = r - m.prev = s t.prev = m r.prev = t + s.next = m + m.next = t + t.next = r + m.prev = s + t.prev = m + r.prev = t return s -- a list else return t diff --git a/tex/context/base/lpdf-tag.lua b/tex/context/base/lpdf-tag.lua index 8cdb5f6a4..29ffcd207 100644 --- a/tex/context/base/lpdf-tag.lua +++ b/tex/context/base/lpdf-tag.lua @@ -32,6 +32,8 @@ local pdfflushobject = lpdf.flushobject local pdfreserveobject = lpdf.reserveobject local pdfpagereference = lpdf.pagereference +local texgetcount = tex.getcount + local nodepool = nodes.pool local pdfliteral = nodepool.pdfliteral @@ -69,10 +71,9 @@ local dashsplitter = lpeg.splitat("-") local add_ids = false -- true - ---~ function codeinjections.maptag(original,target,kind) ---~ mapping[original] = { target, kind or "inline" } ---~ end +-- function codeinjections.maptag(original,target,kind) +-- mapping[original] = { target, kind or "inline" } +-- end local function finishstructure() if #structure_kids > 0 then @@ -133,7 +134,7 @@ local pdf_struct_element = pdfconstant("StructElem") local function initializepage() index = 0 - pagenum = tex.count.realpageno + pagenum = texgetcount("realpageno") pageref = pdfreference(pdfpagereference(pagenum)) list = pdfarray() tree[pagenum] = list -- we can flush after done, todo diff --git a/tex/context/base/lpdf-wid.lua b/tex/context/base/lpdf-wid.lua index 9ea4744f1..30bd3572b 100644 --- a/tex/context/base/lpdf-wid.lua +++ b/tex/context/base/lpdf-wid.lua @@ -8,14 +8,18 @@ if not modules then modules = { } end modules ['lpdf-wid'] = { local gmatch, gsub, find, lower, format = string.gmatch, string.gsub, string.find, string.lower, string.format local stripstring = string.strip -local texbox, texcount = tex.box, tex.count local settings_to_array = utilities.parsers.settings_to_array local settings_to_hash = utilities.parsers.settings_to_hash -local report_media = logs.reporter("backend","media") -local report_attachment = logs.reporter("backend","attachment") +local report_media = logs.reporter("backend","media") +local report_attachment = logs.reporter("backend","attachment") -local backends, lpdf, nodes = backends, lpdf, nodes +local backends = backends +local lpdf = lpdf +local nodes = nodes +local context = context + +local texgetcount = tex.getcount local nodeinjections = backends.pdf.nodeinjections local codeinjections = backends.pdf.codeinjections @@ -487,8 +491,8 @@ end local function insertrenderingwindow(specification) local label = specification.label ---~ local openpage = specification.openpage ---~ local closepage = specification.closepage + -- local openpage = specification.openpage + -- local closepage = specification.closepage if specification.option == v_auto then if openpageaction then -- \handlereferenceactions{\v!StartRendering{#2}} @@ -504,7 +508,7 @@ local function insertrenderingwindow(specification) PC = (closepage and 
lpdf.action(closepage)) or nil, } end - local page = tonumber(specification.page) or texcount.realpageno -- todo + local page = tonumber(specification.page) or texgetcount("realpageno") -- todo local r = mu[label] or pdfreserveannotation() -- why the reserve here? local a = pdfdictionary { S = pdfconstant("Rendition"), @@ -536,34 +540,34 @@ local function insertrendering(specification) if not mf[label] then local filename = specification.filename local isurl = find(filename,"://") - --~ local start = pdfdictionary { - --~ Type = pdfconstant("MediaOffset"), - --~ S = pdfconstant("T"), -- time - --~ T = pdfdictionary { -- time - --~ Type = pdfconstant("Timespan"), - --~ S = pdfconstant("S"), - --~ V = 3, -- time in seconds - --~ }, - --~ } - --~ local start = pdfdictionary { - --~ Type = pdfconstant("MediaOffset"), - --~ S = pdfconstant("F"), -- frame - --~ F = 100 -- framenumber - --~ } - --~ local start = pdfdictionary { - --~ Type = pdfconstant("MediaOffset"), - --~ S = pdfconstant("M"), -- mark - --~ M = "somemark", - --~ } - --~ local parameters = pdfdictionary { - --~ BE = pdfdictionary { - --~ B = start, - --~ } - --~ } - --~ local parameters = pdfdictionary { - --~ Type = pdfconstant(MediaPermissions), - --~ TF = pdfstring("TEMPALWAYS") }, -- TEMPNEVER TEMPEXTRACT TEMPACCESS TEMPALWAYS - --~ } + -- local start = pdfdictionary { + -- Type = pdfconstant("MediaOffset"), + -- S = pdfconstant("T"), -- time + -- T = pdfdictionary { -- time + -- Type = pdfconstant("Timespan"), + -- S = pdfconstant("S"), + -- V = 3, -- time in seconds + -- }, + -- } + -- local start = pdfdictionary { + -- Type = pdfconstant("MediaOffset"), + -- S = pdfconstant("F"), -- frame + -- F = 100 -- framenumber + -- } + -- local start = pdfdictionary { + -- Type = pdfconstant("MediaOffset"), + -- S = pdfconstant("M"), -- mark + -- M = "somemark", + -- } + -- local parameters = pdfdictionary { + -- BE = pdfdictionary { + -- B = start, + -- } + -- } + -- local parameters = pdfdictionary { + -- Type = pdfconstant(MediaPermissions), + -- TF = pdfstring("TEMPALWAYS") }, -- TEMPNEVER TEMPEXTRACT TEMPACCESS TEMPALWAYS + -- } local descriptor = pdfdictionary { Type = pdfconstant("Filespec"), F = filename, diff --git a/tex/context/base/luat-cbk.lua b/tex/context/base/luat-cbk.lua index 5aa12005b..4f044f9ac 100644 --- a/tex/context/base/luat-cbk.lua +++ b/tex/context/base/luat-cbk.lua @@ -306,7 +306,7 @@ function garbagecollector.check(size,criterium) end end --- this will move +-- this will move to a module commands = commands or { } diff --git a/tex/context/base/luat-env.lua b/tex/context/base/luat-env.lua index 8753972c6..5558e0303 100644 --- a/tex/context/base/luat-env.lua +++ b/tex/context/base/luat-env.lua @@ -20,6 +20,8 @@ local report_lua = logs.reporter("resolvers","lua") local luautilities = utilities.lua local luasuffixes = luautilities.suffixes +local texgettoks = tex and tex.gettoks + environment = environment or { } local environment = environment @@ -28,7 +30,7 @@ local environment = environment local mt = { __index = function(_,k) if k == "version" then - local version = tex.toks and tex.toks.contextversiontoks + local version = texgettoks and texgettoks("contextversiontoks") if version and version ~= "" then rawset(environment,"version",version) return version @@ -36,7 +38,7 @@ local mt = { return "unknown" end elseif k == "kind" then - local kind = tex.toks and tex.toks.contextkindtoks + local kind = texgettoks and texgettoks("contextkindtoks") if kind and kind ~= "" then rawset(environment,"kind",kind) 
return kind diff --git a/tex/context/base/luat-fio.lua b/tex/context/base/luat-fio.lua index d61c6f142..dcc183167 100644 --- a/tex/context/base/luat-fio.lua +++ b/tex/context/base/luat-fio.lua @@ -84,26 +84,6 @@ if not resolvers.instance then end -local report_system = logs.reporter("system","files") -local report_files = logs.reporter("used files") - -luatex.registerstopactions(function() - local foundintrees = resolvers.instance.foundintrees - if #foundintrees > 0 then - logs.pushtarget("logfile") - logs.newline() - report_system("start used files") - logs.newline() - for i=1,#foundintrees do - report_files("%4i: % T",i,foundintrees[i]) - end - logs.newline() - report_system("stop used files") - logs.newline() - logs.poptarget() - end -end) - statistics.register("resource resolver", function() local scandata = resolvers.scandata() return format("loadtime %s seconds, %s scans with scantime %s seconds, %s shared scans, %s found files, scanned paths: %s", diff --git a/tex/context/base/luat-mac.lua b/tex/context/base/luat-mac.lua index c8be06b63..282dc8ce3 100644 --- a/tex/context/base/luat-mac.lua +++ b/tex/context/base/luat-mac.lua @@ -76,21 +76,23 @@ local function pop() top = remove(stack) end -local leftbrace = P("{") -- will be in patterns -local rightbrace = P("}") -local escape = P("\\") +local leftbrace = P("{") -- will be in patterns +local rightbrace = P("}") +local escape = P("\\") -local space = patterns.space -local spaces = space^1 -local newline = patterns.newline -local nobrace = 1 - leftbrace - rightbrace +local space = patterns.space +local spaces = space^1 +local newline = patterns.newline +local nobrace = 1 - leftbrace - rightbrace local longleft = leftbrace -- P("(") local longright = rightbrace -- P(")") local nolong = 1 - longleft - longright -local name = R("AZ","az")^1 -local csname = (R("AZ","az") + S("@?!_"))^1 +local utf8character = P(1) * R("\128\191")^1 -- unchecked but fast + +local name = (R("AZ","az") + utf8character)^1 +local csname = (R("AZ","az") + S("@?!_") + utf8character)^1 local longname = (longleft/"") * (nolong^1) * (longright/"") local variable = P("#") * Cs(name + longname) local escapedname = escape * csname diff --git a/tex/context/base/lxml-css.lua b/tex/context/base/lxml-css.lua index c5a85c2bd..0deaea4d3 100644 --- a/tex/context/base/lxml-css.lua +++ b/tex/context/base/lxml-css.lua @@ -30,8 +30,9 @@ if tex then local exheights = fonts.hashes.exheights local emwidths = fonts.hashes.emwidths + local texget = tex.get - percentage = function(s,pcf) return tonumber(s) * (pcf or tex.hsize) end + percentage = function(s,pcf) return tonumber(s) * (pcf or texget("hsize")) end exheight = function(s,exf) return tonumber(s) * (exf or exheights[true]) end emwidth = function(s,emf) return tonumber(s) * (emf or emwidths[true]) end pixels = function(s,pxf) return tonumber(s) * (pxf or emwidths[true]/300) end @@ -109,17 +110,17 @@ css.padding = padding -- print(padding("0",pixel,hsize,exheight,emwidth)) --- local currentfont = font.current --- local texdimen = tex.dimen --- local hashes = fonts.hashes --- local quads = hashes.quads --- local xheights = hashes.xheights +-- local currentfont = font.current +-- local texget = tex.get +-- local hashes = fonts.hashes +-- local quads = hashes.quads +-- local xheights = hashes.xheights -- -- local function padding(str) -- local font = currentfont() -- local exheight = xheights[font] -- local emwidth = quads[font] --- local hsize = texdimen.hsize/100 +-- local hsize = texget("hsize")/100 -- local pixel = emwidth/100 
-- return padding(str,pixel,hsize,exheight,emwidth) -- end diff --git a/tex/context/base/lxml-ctx.lua b/tex/context/base/lxml-ctx.lua index 968dbda71..1191d6796 100644 --- a/tex/context/base/lxml-ctx.lua +++ b/tex/context/base/lxml-ctx.lua @@ -10,11 +10,13 @@ if not modules then modules = { } end modules ['lxml-ctx'] = { local format, find = string.format, string.find -local xml = xml - +local xml = xml xml.ctx = { } xml.ctx.enhancers = { } +local context = context +local commands = commands + -- hashen function xml.ctx.enhancers.compound(root,lpath,before,tokens,after) -- todo lpeg diff --git a/tex/context/base/lxml-dir.lua b/tex/context/base/lxml-dir.lua index 3c68664ae..48c0ac41e 100644 --- a/tex/context/base/lxml-dir.lua +++ b/tex/context/base/lxml-dir.lua @@ -24,12 +24,13 @@ local formatters = string.formatters -- -- -local lxml, context = lxml, context +local lxml = lxml +local context = context -local getid = lxml.getid +local getid = lxml.getid -lxml.directives = lxml.directives or { } -local directives = lxml.directives +local directives = lxml.directives or { } +lxml.directives = directives local report_lxml = logs.reporter("xml","tex") @@ -106,9 +107,11 @@ directives.handle = handle_setup function directives.setup(root,attribute,element) handle_setup('setup',root,attribute,element) end + function directives.before(root,attribute,element) handle_setup('before',root,attribute,element) end + function directives.after(root,attribute,element) handle_setup('after',root,attribute,element) end diff --git a/tex/context/base/lxml-ini.mkiv b/tex/context/base/lxml-ini.mkiv index d2d64aa8d..c3ab2dc73 100644 --- a/tex/context/base/lxml-ini.mkiv +++ b/tex/context/base/lxml-ini.mkiv @@ -87,10 +87,10 @@ \def\xmldisplayverbatim #1{\ctxlxml{displayverbatim("#1")}} \def\xmlinlineverbatim #1{\ctxlxml{inlineverbatim("#1")}} -\def\xmlload #1#2{\ctxlxml{load("#1","#2","\p_lxml_entities","\p_lxml_compress")}} -\def\xmlloadbuffer #1#2{\ctxlxml{loadbuffer("#1","#2","\p_lxml_entities","\p_lxml_compress")}} -\def\xmlloaddata #1#2{\ctxlxml{loaddata("#1",\!!bs#2\!!es,"\p_lxml_entities","\p_lxml_compress")}} -\def\xmlloadregistered #1#2{\ctxlxml{loadregistered("#1","\p_lxml_entities","\p_lxml_compress")}} +\def\xmlload #1#2{\ctxlxml{load("#1","#2","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}} +\def\xmlloadbuffer #1#2{\ctxlxml{loadbuffer("#1","#2","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}} +\def\xmlloaddata #1#2{\ctxlxml{loaddata("#1",\!!bs#2\!!es,"\directxmlparameter\c!entities","\directxmlparameter\c!compress")}} +\def\xmlloadregistered #1#2{\ctxlxml{loadregistered("#1","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}} \def\xmlloaddirectives #1{\ctxlxml{directives.load("any:///#1")}} \def\xmlpos #1{\ctxlxml{pos("#1")}} @@ -313,17 +313,6 @@ \xmlprocessingmode\executeifdefined{\??xmldefaults\directxmlparameter\c!default}\plusone \to \everysetupxml -\unexpanded\def\initializexmlprocessing % is this still needed? 
- {\the\everysetupxml} - -\let\p_lxml_entities\empty -\let\p_lxml_compress\empty - -\appendtoks - \edef\p_lxml_entities{\directxmlparameter\c!entities}% - \edef\p_lxml_compress{\directxmlparameter\c!compress}% -\to \everysetupxml - \setupxml [\c!default=, % flush all \c!compress=\v!no, % strip comment diff --git a/tex/context/base/lxml-sor.lua b/tex/context/base/lxml-sor.lua index 951017bcd..aba1c3b8d 100644 --- a/tex/context/base/lxml-sor.lua +++ b/tex/context/base/lxml-sor.lua @@ -9,9 +9,12 @@ if not modules then modules = { } end modules ['lxml-sor'] = { local format, concat, rep = string.format, table.concat, string.rep local lpegmatch = lpeg.match -local xml, lxml = xml, lxml +local xml = xml +local lxml = lxml +local context = context -lxml.sorters = lxml.sorters or { } +local lxmlsorters = lxml.sorters or { } +lxml.sorters = lxmlsorters if not lxml.splitid then local splitter = lpeg.C((1-lpeg.P(":"))^1) * lpeg.P("::") * lpeg.C(lpeg.P(1)^1) @@ -27,7 +30,7 @@ end local lists = { } -function lxml.sorters.reset(name) +function lxmlsorters.reset(name) lists[name] = { sorted = false, entries = { }, @@ -36,7 +39,7 @@ function lxml.sorters.reset(name) } end -function lxml.sorters.add(name,n,key) +function lxmlsorters.add(name,n,key) local list = lists[name] if list.sorted then -- reverse is messed up, we could regenerate it and go on @@ -56,7 +59,7 @@ function lxml.sorters.add(name,n,key) end end -function lxml.sorters.show(name) +function lxmlsorters.show(name) local list = lists[name] local entries = list and list.entries local NC, NR, bold = context.NC, context.NR, context.bold -- somehow bold is not working @@ -92,9 +95,9 @@ function lxml.sorters.show(name) end end -lxml.sorters.compare = sorters.comparers.basic -- (a,b) +lxmlsorters.compare = sorters.comparers.basic -- (a,b) -function lxml.sorters.sort(name) +function lxmlsorters.sort(name) local list = lists[name] local entries = list and list.entries if entries then @@ -117,7 +120,7 @@ function lxml.sorters.sort(name) r.split = splitter(strip(r.key)) end -- sorting - sorters.sort(results,lxml.sorters.compare) + sorters.sort(results,lxmlsorters.compare) -- finalizing list.nofsorted = #results local split = { } @@ -137,7 +140,7 @@ function lxml.sorters.sort(name) end end -function lxml.sorters.flush(name,setup) +function lxmlsorters.flush(name,setup) local list = lists[name] local results = list and list.results local xmlw = context.xmlw diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua index 2bb5844fc..3e10eb96d 100644 --- a/tex/context/base/lxml-tab.lua +++ b/tex/context/base/lxml-tab.lua @@ -34,6 +34,8 @@ as the current variant was written when showed up and it's easier build tables in one go.

--ldx]]-- +if lpeg.setmaxstack then lpeg.setmaxstack(1000) end -- deeply nested xml files + xml = xml or { } local xml = xml @@ -627,7 +629,6 @@ local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * val local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset local simpledoctype = (1-close)^1 -- * balanced^0 local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0) -local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0) local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua index 112f62751..2cbdfc886 100644 --- a/tex/context/base/lxml-tex.lua +++ b/tex/context/base/lxml-tex.lua @@ -27,6 +27,7 @@ local catcodenumbers = catcodes.numbers local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method +local commands = commands local context = context local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing) @@ -1574,12 +1575,17 @@ statistics.register("xml load time", function() end) statistics.register("lxml preparation time", function() - local calls, cached = xml.lpathcalls(), xml.lpathcached() - if calls > 0 or cached > 0 then - return format("%s seconds, %s nodes, %s lpath calls, %s cached calls", - statistics.elapsedtime(lxml), nofindices, calls, cached) + if noffiles > 0 or nofconverted > 0 then + local calls = xml.lpathcalls() + local cached = xml.lpathcached() + if calls > 0 or cached > 0 then + return format("%s seconds, %s nodes, %s lpath calls, %s cached calls", + statistics.elapsedtime(lxml), nofindices, calls, cached) + else + return nil + end else - return nil + -- pretty close to zero so not worth mentioning end end) diff --git a/tex/context/base/m-chart.lua b/tex/context/base/m-chart.lua index c4da2eb63..2b9869379 100644 --- a/tex/context/base/m-chart.lua +++ b/tex/context/base/m-chart.lua @@ -19,7 +19,7 @@ local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match local report_chart = logs.reporter("chart") -local points = number.points +local points = number.points -- we can use %p instead local variables = interfaces.variables @@ -499,7 +499,7 @@ local function process_cells(chart,xoffset,yoffset) local linesettings = settings.line context("flow_shape_line_color := \\MPcolor{%s} ;", linesettings.color) context("flow_shape_fill_color := \\MPcolor{%s} ;", linesettings.backgroundcolor) - context("flow_shape_line_width := %s ; ", points(linesettingsrulethickness)) + context("flow_shape_line_width := %s ; ", points(linesettings.rulethickness)) elseif focus[cell.focus] or focus[cell.name] then local focussettings = settings.focus context("flow_shape_line_color := \\MPcolor{%s} ;", focussettings.framecolor) @@ -580,7 +580,7 @@ local function process_connections(chart,xoffset,yoffset) context("flow_touchshape := %s ;", linesettings.offset == v_none and "true" or "false") context("flow_dsp_x := %s ; flow_dsp_y := %s ;",connection.dx or 0, connection.dy or 0) context("flow_connection_line_color := \\MPcolor{%s} ;",linesettings.color) - context("flow_connection_line_width := 
2pt ;",points(linesettings.rulethickness)) + context("flow_connection_line_width := %s ;",points(linesettings.rulethickness)) context("flow_connect_%s_%s (%s) (%s,%s,%s) (%s,%s,%s) ;",where_cell,where_other,j,cellx,celly,what_cell,otherx,othery,what_other) context("flow_dsp_x := 0 ; flow_dsp_y := 0 ;") end diff --git a/tex/context/base/m-database.lua b/tex/context/base/m-database.lua index 47854daa0..91e9636ee 100644 --- a/tex/context/base/m-database.lua +++ b/tex/context/base/m-database.lua @@ -6,20 +6,22 @@ if not modules then modules = { } end modules ['m-database'] = { license = "see context related readme files" } -local sub, gmatch, format = string.sub, string.gmatch, string.format +local sub, gmatch = string.sub, string.gmatch local concat = table.concat local lpegpatterns, lpegmatch, lpegsplitat = lpeg.patterns, lpeg.match, lpeg.splitat -local lpegP, lpegC, lpegS, lpegCt = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct +local lpegP, lpegC, lpegS, lpegCt, lpegCc, lpegCs = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct, lpeg.Cc, lpeg.Cs local stripstring = string.strip +moduledata.database = moduledata.database or { } +moduledata.database.csv = moduledata.database.csv or { } + -- One also needs to enable context.trace, here we only plug in some code (maybe -- some day this tracker will also toggle the main context tracer. -local trace_flush = false trackers.register("module.database.flush", function(v) trace_flush = v end) - +local trace_flush = false trackers.register("module.database.flush", function(v) trace_flush = v end) local report_database = logs.reporter("database") -buffers.database = buffers.database or { } +local context = context local l_tab = lpegpatterns.tab local l_space = lpegpatterns.space @@ -36,7 +38,7 @@ local separators = { -- not interfaced spaces = l_space^1, } -function buffers.database.process(settings) +function moduledata.database.csv.process(settings) local data if settings.type == "file" then local filename = resolvers.finders.byscheme("any",settings.database) @@ -46,6 +48,8 @@ function buffers.database.process(settings) data = buffers.getlines(settings.database) end if data and #data > 0 then + local catcodes = tonumber(settings.catcodes) or tex.catcodetable + context.pushcatcodes(catcodes) if trace_flush then context.pushlogger(report_database) end @@ -55,7 +59,7 @@ function buffers.database.process(settings) local left, right = settings.left or "", settings.right or "" local setups = settings.setups or "" local strip = settings.strip == v_yes or false - local command = settings.command + local command = settings.command or "" separatorchar = (not separatorchar and ",") or separators[separatorchar] or separatorchar local separator = type(separatorchar) == "string" and lpegS(separatorchar) or separatorchar local whatever = lpegC((1 - separator)^0) @@ -63,7 +67,7 @@ function buffers.database.process(settings) local quotedata = nil for chr in gmatch(quotechar,".") do local quotechar = lpegP(chr) - local quoteword = l_space^0 * quotechar * lpegC((1 - quotechar)^0) * quotechar * l_space^0 + local quoteword = lpegCs(((l_space^0 * quotechar)/"") * (1 - quotechar)^0 * ((quotechar * l_space^0)/"")) if quotedata then quotedata = quotedata + quoteword else @@ -73,12 +77,34 @@ function buffers.database.process(settings) whatever = quotedata + whatever end local checker = commentchar ~= "" and lpegS(commentchar) - local splitter = lpegCt(whatever * (separator * whatever)^0) + if strip then + whatever = whatever / stripstring + end + if left ~= "" then + whatever = lpegCc(left) * whatever + end 
+ if right ~= "" then + whatever = whatever * lpegCc(right) + end + if command ~= "" then + whatever = lpegCc("{") * whatever * lpegCc("}") + end + whatever = whatever * (separator/"" * whatever)^0 + if first ~= "" then + whatever = lpegCc(first) * whatever + end + if last ~= "" then + whatever = whatever * lpegCc(last) + end + if command ~= "" then + whatever = lpegCs(lpegCc(command) * whatever) + else + whatever = lpegCs(whatever) + end local found = false for i=1,#data do local line = data[i] if not lpegmatch(l_empty,line) and (not checker or not lpegmatch(checker,line)) then - local list = lpegmatch(splitter,line) if not found then if setups ~= "" then context.begingroup() @@ -87,39 +113,7 @@ function buffers.database.process(settings) context(before) found = true end - if trace_flush then - local result, r = { }, 0 - r = r + 1 ; result[r] = first - for j=1,#list do - local str = strip and stripstring(list[j]) or list[j] - r = r + 1 ; result[r] = left - if command == "" then - r = r + 1 ; result[r] = str - else - r = r + 1 ; result[r] = command - r = r + 1 ; result[r] = "{" - r = r + 1 ; result[r] = str - r = r + 1 ; result[r] = "}" - end - r = r + 1 ; result[r] = right - end - r = r + 1 ; result[r] = last - context(concat(result)) - else - context(first) - for j=1,#list do - local str = strip and stripstring(list[j]) or list[j] - context(left) - if command == "" then - context(str) - else - context(command) - context(false,str) - end - context(right) - end - context(last) - end + context(lpegmatch(whatever,line)) end end if found then @@ -128,6 +122,7 @@ function buffers.database.process(settings) context.endgroup() end end + context.popcatcodes() if trace_flush then context.poplogger() end diff --git a/tex/context/base/m-database.mkiv b/tex/context/base/m-database.mkiv index 0285d3bcd..cc7dd3d72 100644 --- a/tex/context/base/m-database.mkiv +++ b/tex/context/base/m-database.mkiv @@ -52,7 +52,7 @@ \let\currentdatabasename\currentdatabase \let\currentdatabase\empty \fi - \ctxlua{buffers.database.process { + \ctxlua{moduledata.database.csv.process { name = "\currentdatabase", type = "\currentdatabasetype", database = "\currentdatabasename", @@ -68,6 +68,7 @@ left = \!!bs\databaseparameter\c!left \!!es, right = \!!bs\databaseparameter\c!right \!!es, command = \!!bs\databaseparameter\c!command \!!es, + catcodes = \number\catcodetable }}} \unexpanded\def\processdatabasebuffer{\dodoubleempty\module_database_process_buffer} @@ -120,8 +121,7 @@ first={\endgraf[}, last={]\endgraf}, left={ (}, - right={) }, - command=\ruledhbox] + right={) }] \startbuffer[testbuffer] 1,2,3,4,5 diff --git a/tex/context/base/m-graph.mkiv b/tex/context/base/m-graph.mkiv index c15262cac..e99921c43 100644 --- a/tex/context/base/m-graph.mkiv +++ b/tex/context/base/m-graph.mkiv @@ -64,14 +64,7 @@ \c!method=\s!double] \startMPdefinitions{graph} - if unknown context_grap: input "mp-grap.mpiv" ; fi ; -\stopMPdefinitions - -% For backwards compatibility (for the moment), also load the graph macros in -% the standard MP instance (scaled integer): - -\startMPdefinitions - if unknown context_grap: input "mp-grap.mpiv" ; fi ; + if unknown context_grap : input mp-grap.mpiv ; fi ; \stopMPdefinitions \protect diff --git a/tex/context/base/m-hemistich.mkiv b/tex/context/base/m-hemistich.mkiv new file mode 100644 index 000000000..55fde7b92 --- /dev/null +++ b/tex/context/base/m-hemistich.mkiv @@ -0,0 +1,112 @@ +%D \module +%D [ file=m-hemistich, +%D version=2013.08.26, +%D title=\CONTEXT\ Extra Modules, +%D subtitle=Hemistiches, 
+%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +%D This is an experimental module for Idris. More is possible but not now. + +\unprotect + +\installcorenamespace{hemistich} + +\installcommandhandler \??hemistich {hemistich} \??hemistich + +\setuphemistich + [\c!width=\v!local, + \c!distance=4\emwidth, + \c!separator=\vl\hskip.25em\vl] + +\unexpanded\def\hemistiches + {\dosingleempty\dohemistiches} + +\unexpanded\def\dohemistiches + {\dodohemistiches\empty} + +\unexpanded\def\dodohemistiches#1[#2]#3#4% + {\dontleavehmode + \begingroup + \doifassignmentelse{#2} + {\edef\currenthemistich{#1}% + \setupcurrenthemistich[#2]} + {\def\currenthemistich{#2}}% + \doifelse{\hemistichparameter\c!width}\v!local + {\scratchwidth\availablehsize} + {\scratchwidth\hemistichparameter\c!width\relax}% + \spaceskip\zeropoint\s!plus\plusone\s!fill\relax + \hbox to \scratchwidth\bgroup + \scratchwidth.5\dimexpr\scratchwidth-\hemistichparameter\c!distance\relax + \hbox to \scratchwidth\bgroup + \usehemistichstyleandcolor\c!leftstyle\c!leftcolor#3% + \egroup + \hss + \begingroup + \usehemistichstyleandcolor\c!separatorstyle\c!separatorcolor + \hemistichparameter\c!separator + \endgroup + \hss + \hbox to \scratchwidth\bgroup + \usehemistichstyleandcolor\c!rightstyle\c!rightcolor#4% + \egroup + \egroup + \endgroup} + +\unexpanded\def\hemistichescaesura#1#2#3% + {\dodohemistiches\empty[\c!separator={#2}]{#1}{#3}} + +\appendtoks + \setvalue{\currenthemistich}{\dohemistiches{\currenthemistich}}% +\to \everydefinehemistich + +\protect + +\continueifinputfile{m-hemistich.mkiv} + +\setuphemistich + [leftcolor=darkred, + separatorcolor=darkgreen, + rightcolor=darkblue] + +\setupwhitespace + [big] + +\starttext + +% \righttoleft + +\hemistichescaesura{left side of the brain}{equals}{right side of the brain} + +\hemistiches{left side of the brain}{right side of the brain} + +\startitemize + \startitem + \hemistiches{left side of the brain}{right side of the brain} + \startitemize + \startitem + \hemistiches{left side of the brain}{right side of the brain} + \startitemize + \startitem + \hemistiches{left side of the brain}{right side of the brain} + \stopitem + \stopitemize + \stopitem + \stopitemize + \startitem + \hemistiches{left side of the brain}{right side of the brain} + \stopitem + \stopitem +\stopitemize + +\startitemize +\item \hemistiches{left side of the brain}{right side of the brain} +\stopitemize + +\stoptext + diff --git a/tex/context/base/m-nodechart.lua b/tex/context/base/m-nodechart.lua new file mode 100644 index 000000000..612b73767 --- /dev/null +++ b/tex/context/base/m-nodechart.lua @@ -0,0 +1,175 @@ +if not modules then modules = { } end modules ['m-nodechart'] = { + version = 1.001, + comment = "companion to m-nodechart.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format = string.format +local points = number.nopts +local ptfactor = number.dimenfactors.pt + +local nodecodes = nodes.nodecodes +local kerncodes = nodes.kerncodes +local penaltycodes = nodes.penaltycodes +local gluecodes = nodes.gluecodes +local whatsitcodes = nodes.whatsitcodes + +moduledata.charts = moduledata.charts or { } +moduledata.charts.nodes = moduledata.charts.nodes or { } + +local formatters = { } + +-- subtype 
font char lang left right uchyph components xoffset yoffset width height depth + +function formatters.glyph(n,comment) + return format("\\doFLOWglyphnode{%s}{%s}{%s}{%s}{U+%05X}",comment,n.subtype,n.font,n.char,n.char) +end + +-- pre post replace + +function formatters.disc(n,comment) + return format("\\doFLOWdiscnode{%s}{%s}",comment,n.subtype) +end + +-- subtype kern + +function formatters.kern(n,comment) + -- return format("\\doFLOWkernnode{%s}{%s}{%s}",comment,kerncodes[n.subtype],points(n.kern)) + return format("\\doFLOWkernnode{%s}{%s}{%.4f}",comment,kerncodes[n.subtype],n.kern*ptfactor) +end + +-- subtype penalty + +function formatters.penalty(n,comment) + return format("\\doFLOWpenaltynode{%s}{%s}{%s}",comment,"penalty",n.penalty) +end + +-- subtype width leader spec (stretch shrink ... + +function formatters.glue(n,comment) + local s = n.spec + -- return format("\\doFLOWgluenode{%s}{%s}{%s}{%s}{%s}",comment,gluecodes[n.subtype],points(s.width),points(s.stretch),points(s.shrink)) + return format("\\doFLOWgluenode{%s}{%s}{%.4f}{%.4f}{%.4f}",comment,gluecodes[n.subtype],s.width*ptfactor,s.stretch*ptfactor,s.shrink*ptfactor) +end + +-- subtype width leader spec (stretch shrink ... + +function formatters.whatsit(n,comment) + local subtype = n.subtype + local whatsit = whatsitcodes[subtype] + if whatsit == "dir" or whatsit == "localpar" then + return format("\\doFLOWdirnode{%s}{%s}{%s}",comment,whatsit,n.dir) + else + return nodecodes[n.id] + end +end + +-- I will make a dedicated set of shapes for this. + +local shapes = { + glyph = "procedure", + disc = "procedure", + kern = "action", + penalty = "action", + glue = "action", +} + +local function flow_nodes_to_chart(specification) + local head = specification.head + local box = specification.box + local comment = specification.comment or "" + local x = specification.x or 1 + local y = specification.y or 0 + -- + if box then + box = tex.getbox(tonumber(box)) + head = box and box.list + end + -- + local current = head + -- + while current do + local nodecode = nodecodes[current.id] + local formatter = formatters[nodecode] + local shape = shapes[nodecode] + y = y + 1 + local next = current.next + commands.flow_start_cell { shape = { framecolor = "nodechart:" .. 
nodecode } } + commands.flow_set_name(tostring(current)) + commands.flow_set_location(x,y) + if shape then + commands.flow_set_shape(shape) + end + if formatter then + commands.flow_set_text("node",formatter(current,comment)) + else + commands.flow_set_text("node",nodecode) + end + if next then + commands.flow_set_connection("bt","",tostring(next)) + end + if nodecode == "glyph" then + local components = current.components + if components then + commands.flow_set_connection("rl","",tostring(components)) + commands.flow_stop_cell() + n = flow_nodes_to_chart { head = components, comment = "component",x = x+2, y = y-1 } + else + commands.flow_stop_cell() + end + elseif nodecode == "disc" then + local pre = current.pre + local pos = current.post + local rep = current.replace + if pre and not rep and not rep then + if pre then + commands.flow_set_connection("rl","",tostring(pre)) + end + commands.flow_stop_cell() + if pre then + n = flow_nodes_to_chart { head = pre, comment = "prebreak", x = x+1, y = y-1 } + end + else + if pre then + commands.flow_set_connection("+rl","",tostring(pre)) + end + if rep then + commands.flow_set_connection("rl","",tostring(rep)) + end + if pos then + commands.flow_set_connection("-rl","",tostring(pos)) + end + commands.flow_stop_cell() + if pre then + n = flow_nodes_to_chart{ head = pre, comment = "prebreak", x = x+1, y = y-2 } + end + if rep then + n = flow_nodes_to_chart{ head = rep, comment = "replacement", x = x+1, y = y-1 } + end + if pos then + n = flow_nodes_to_chart{ head = pos, comment = "postbreak", x = x+1, y = y } + end + end + elseif nodecode == "hlist" then + local list = current.list + if list then + commands.flow_set_connection("rl","",tostring(list)) + commands.flow_stop_cell() + n = flow_nodes_to_chart { head = list, comment = "list", x = x+2, y = y-1 } + else + commands.flow_stop_cell() + end + else + commands.flow_stop_cell() + end + current = next + end +end + +function moduledata.charts.nodes.chart(specification) + commands.flow_start_chart(specification.name) + flow_nodes_to_chart(specification) + commands.flow_stop_chart() +end diff --git a/tex/context/base/m-nodechart.mkvi b/tex/context/base/m-nodechart.mkvi index 359d598ce..c9d985850 100644 --- a/tex/context/base/m-nodechart.mkvi +++ b/tex/context/base/m-nodechart.mkvi @@ -1,154 +1,19 @@ -\usemodule[chart] +%D \module +%D [ file=m-nodechart, +%D version=2011.11.11, % nos sure when it started, needed for fonts-mkiv +%D title=\CONTEXT\ Modules, +%D subtitle=Node Visualization, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] + +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. 
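%D A minimal sketch of the new Lua entry point (the chart name and box number are
%D only placeholders): \type{m-nodechart.lua} now takes a specification table with
%D either a \type{box} number or a node list in \type{head}, so the
%D \type{\boxtoFLOWchart} macro defined further down essentially expands to:
%D
%D \starttyping
%D \ctxlua{moduledata.charts.nodes.chart {
%D     name = "dummy", -- the FLOW chart name, typeset later with \FLOWchart[dummy]
%D     box  = 0,       -- the box whose node list gets walked and charted
%D }}
%D \stoptyping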
-\startluacode - -local format = string.format -local points = number.nopts -local ptfactor = number.dimenfactors.pt - -local nodecodes = nodes.nodecodes -local kerncodes = nodes.kerncodes -local penaltycodes = nodes.penaltycodes -local gluecodes = nodes.gluecodes -local whatsitcodes = nodes.whatsitcodes - -local formatters = { } - -function formatters.glyph(n,comment) - -- subtype font char lang left right uchyph components xoffset yoffset width height depth - return format("\\doFLOWglyphnode{%s}{%s}{%s}{%s}{U+%05X}",comment,n.subtype,n.font,n.char,n.char) -end - -function formatters.disc(n,comment) - -- pre post replace - return format("\\doFLOWdiscnode{%s}{%s}",comment,n.subtype) -end - -function formatters.kern(n,comment) - -- subtype kern - -- return format("\\doFLOWkernnode{%s}{%s}{%s}",comment,kerncodes[n.subtype],points(n.kern)) - return format("\\doFLOWkernnode{%s}{%s}{%.4f}",comment,kerncodes[n.subtype],n.kern*ptfactor) -end - -function formatters.penalty(n,comment) - -- subtype penalty - return format("\\doFLOWpenaltynode{%s}{%s}{%s}",comment,"penalty",n.penalty) -end - -function formatters.glue(n,comment) - -- subtype width leader spec (stretch shrink ... - local s = n.spec - -- return format("\\doFLOWgluenode{%s}{%s}{%s}{%s}{%s}",comment,gluecodes[n.subtype],points(s.width),points(s.stretch),points(s.shrink)) - return format("\\doFLOWgluenode{%s}{%s}{%.4f}{%.4f}{%.4f}",comment,gluecodes[n.subtype],s.width*ptfactor,s.stretch*ptfactor,s.shrink*ptfactor) -end - -function formatters.whatsit(n,comment) - -- subtype width leader spec (stretch shrink ... - local subtype = n.subtype - local whatsit = whatsitcodes[subtype] - if whatsit == "dir" or whatsit == "localpar" then - return format("\\doFLOWdirnode{%s}{%s}{%s}",comment,whatsit,n.dir) - else - return nodecodes[n.id] - end -end - -local shapes = { -- I will make a dedicated set of shapes for this. - glyph = "procedure", - disc = "procedure", - kern = "action", - penalty = "action", - glue = "action", -} - -local function flow_nodes_to_chart(head,comment,x,y,how) - local current = head - while current do - local nodecode = nodecodes[current.id] - local formatter = formatters[nodecode] - local shape = shapes[nodecode] - y = y + 1 - local next = current.next - commands.flow_start_cell { shape = { framecolor = "nodechart:" .. 
nodecode } } - commands.flow_set_name(tostring(current)) - commands.flow_set_location(x,y) - if shape then - commands.flow_set_shape(shape) - end - if formatter then - commands.flow_set_text("node",formatter(current,comment)) - else - commands.flow_set_text("node",nodecode) - end - if next then - commands.flow_set_connection("bt","",tostring(next)) - end - if nodecode == "glyph" then - local components = current.components - if components then - commands.flow_set_connection("rl","",tostring(components)) - commands.flow_stop_cell() - n = flow_nodes_to_chart(components,"component",x+2,y-1) - else - commands.flow_stop_cell() - end - elseif nodecode == "disc" then - local pre = current.pre - local pos = current.post - local rep = current.replace - if pre and not rep and not rep then - if pre then - commands.flow_set_connection("rl","",tostring(pre)) - end - commands.flow_stop_cell() - if pre then - n = flow_nodes_to_chart(pre,"prebreak",x+1,y-1) - end - else - if pre then - commands.flow_set_connection("+rl","",tostring(pre)) - end - if rep then - commands.flow_set_connection("rl","",tostring(rep)) - end - if pos then - commands.flow_set_connection("-rl","",tostring(pos)) - end - commands.flow_stop_cell() - if pre then - n = flow_nodes_to_chart(pre,"prebreak",x+1,y-2) - end - if rep then - n = flow_nodes_to_chart(rep,"replacement",x+1,y-1) - end - if pos then - n = flow_nodes_to_chart(pos,"postbreak",x+1,y) - end - end - elseif nodecode == "hlist" then - local list = current.list - if list then - commands.flow_set_connection("rl","",tostring(list)) - commands.flow_stop_cell() - n = flow_nodes_to_chart(list,"list",x+2,y-1) - else - commands.flow_stop_cell() - end - else - commands.flow_stop_cell() - end - current = next - end - return n -end - -function commands.flow_nodes_to_chart(name,head,max) - commands.flow_start_chart(name) - flow_nodes_to_chart(head,"",1,0) - commands.flow_stop_chart() -end - -\stopluacode +\registerctxluafile{m-nodechart}{1.001} + +\usemodule[chart] \unprotect @@ -200,17 +65,20 @@ end % this is a temporary interface ... 
we will have instances and optional settings -\unexpanded\def\boxtoFLOWchart#name#max#box% - {\ctxcommand{flow_nodes_to_chart("#name",tex.box[\number#box].list,\number#max)}} +\unexpanded\def\boxtoFLOWchart[#name]#box% + {\ctxlua{moduledata.charts.nodes.chart { + name = "#name", + box = \number#box, + }}} -\unexpanded\def\nextboxtoFLOWchart#name#max% - {\dowithnextbox{\boxtoFLOWchart{#name}{#max}\nextbox}} +\unexpanded\def\nextboxtoFLOWchart[#name]% + {\dowithnextbox{\boxtoFLOWchart[#name]\nextbox}} -\unexpanded\def\hboxtoFLOWchart#name#max% - {\nextboxtoFLOWchart{#name}{#max}\hbox} +\unexpanded\def\hboxtoFLOWchart[#name]% + {\nextboxtoFLOWchart[#name]\hbox} -\unexpanded\def\vboxtoFLOWchart#name#max% - {\nextboxtoFLOWchart{#name}{#max}\vbox} +\unexpanded\def\vboxtoFLOWchart[#name]% + {\nextboxtoFLOWchart[#name]\vbox} \protect @@ -224,7 +92,7 @@ end \startTEXpage[offset=10pt] - \hboxtoFLOWchart{dummy}{3}{an affil\discretionary{-}{-}{!}iation} + \hboxtoFLOWchart[dummy]{an affil\discretionary{-}{-}{!}iation} \FLOWchart[dummy][width=14em,height=3em,dx=1em,dy=.75em,hcompact=yes] @@ -232,7 +100,7 @@ end \startTEXpage[offset=10pt] - \hboxtoFLOWchart{dummy}{3}{an affiliation} + \hboxtoFLOWchart[dummy]{an affiliation} \FLOWchart[dummy][width=14em,height=3em,dx=.5em,dy=.75em,hcompact=yes] @@ -240,7 +108,7 @@ end \startTEXpage[offset=10pt] - \hboxtoFLOWchart{dummy}{3}{\nl effe fijn fietsen} + \hboxtoFLOWchart[dummy]{\nl effe fijn fietsen} \FLOWchart[dummy][width=14em,height=3em,dx=.5em,dy=.75em,hcompact=yes] @@ -248,7 +116,7 @@ end \startTEXpage[offset=10pt] - \hboxtoFLOWchart{dummy}{3}{\righttoleft t\kern 1pt est} + \hboxtoFLOWchart[dummy]{\righttoleft t\kern 1pt est} \FLOWchart[dummy][width=14em,height=3em,dx=.5em,dy=.75em,hcompact=yes] diff --git a/tex/context/base/m-punk.mkiv b/tex/context/base/m-punk.mkiv index 23b477cb6..6bf92e4c0 100644 --- a/tex/context/base/m-punk.mkiv +++ b/tex/context/base/m-punk.mkiv @@ -110,7 +110,10 @@ function metapost.characters.process(mpxformat, name, instances, scalefactor) data }, false, - flusher + flusher, + false, + false, + "all" ) lists[i] = { characters = characters, diff --git a/tex/context/base/m-r.mkii b/tex/context/base/m-r.mkii new file mode 100644 index 000000000..c2cb7ba88 --- /dev/null +++ b/tex/context/base/m-r.mkii @@ -0,0 +1,174 @@ +%D \module +%D [ file=m-r, +%D version=2006.06.06, +%D title=\CONTEXT\ Modules, +%D subtitle=R Support, +%D author={Johan Sandblom \& Hans Hagen}, +%D date=\currentdate, +%D copyright={Johan Sandblom \& Hans Hagen}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\unprotect + +%D The following R-processor is a variation on Johan Sandblom's +%D prototype. +%D +%D We can combine both variants in one macro definition. Also, we +%D can minimize the number of runs by checking for a change. + +%D JS: The call to R has \type {-q} in order to prevent banner, +%D \type {--save} to make sure it saves the workspace after the run, +%D \type {--restore} to make sure it reads any workspace from a +%D previous session. 
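%D For instance, assuming the buffer file resolves to \type{r-1.tmp}, the
%D \type{\runR} macro defined below issues a system call along the lines of:
%D
%D \starttyping
%D texmfstart --ifchanged=r-1.tmp --direct R CMD BATCH -q --save --restore r-1.tmp r-1.tmp.out
%D \stoptyping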
+ +%D An easier and better solution is to use the buffering mechanisms: + +\def\Rbufferprefix{r-} + +\newcounter\nofRfiles + +\def\Rfile{\TEXbufferfile{\Rbufferprefix\nofRfiles}} + +\def\startR + {\doglobal\increment\nofRfiles + \dostartbuffer[\Rbufferprefix\nofRfiles][startR][stopR]} + +\def\stopR + {\doifmode{*\v!first}\runR + \typefile{\Rfile.out}} + +\def\startRhidden + {\doglobal\increment\nofRfiles + \dostartbuffer[\Rbufferprefix\nofRfiles][startRhidden][stopRhidden]} + +\def\stopRhidden + {\doifmode{*\v!first}\runR} + +\def\runR + {\executesystemcommand{texmfstart + --ifchanged=\Rfile\space --direct R + CMD BATCH -q --save --restore \Rfile\space \Rfile.out}} + +\protect \doifnotmode{demo}{\endinput} + +% Johan's test file: + +\usemodule[r] + +\def\R{R} + +\setupcolors[state=start] + +\setuptyping + [Rtype] + [color=darkgreen] + +\starttext + +First a test of whether the workspace is persistent: +bla + +\startR +a <- "bla" +b <- "blabla" +ls() +\stopR + +One \R run ends, another begins. + +\startR +ls() +\stopR + +Now follows a hidden \R run which cleans the R workspace + +\startRhidden +rm(list=ls()) +save.image() +\stopRhidden + +What is in the workspace now? + +\startR +ls() +\stopR + +Then a small test of generating a graphic, in this case a pdf +\startR +ushape <- c(rexp(500000), 12-rexp(500000)) +pdf("ushape.pdf") +par(mfrow=c(1,2)) +hist(ushape) +plot(density(ushape), main="Density") +dev.off() +\stopR + +The graphic \type{ushape.pdf} can be included in the standard \CONTEXT\ way +\startbuffer +\placefigure{An ugly distribution}{\externalfigure[ushape]} +\stopbuffer +\typebuffer +\getbuffer + +\startR +x <- rnorm(900) +y <- rexp(900) +# test comment +f <- gl(9,9,900) +summary(aov(y~x+Error(f))) +library(lattice) +pdf("lattice.pdf") +xyplot(y~x|f) +dev.off() +\stopR + +With \type{Sweave} lattice graphics calls must be enclosed in +\type{print()} statements but that is not necessary here. + +\startbuffer +\placefigure[here]{Lattice graphics}{\externalfigure[lattice]} +\stopbuffer +\typebuffer +\getbuffer + +A test string with nasty characters. In \R, the result of a statement +is not printed by default. Enclosing the statement in parentheses, +however causes the parser to see only the value of the statement and +applying the \type{print()} method. +\startR +(test <- ".*\\\\ [[{[{]{[{[{}\]\}=?!+%#|<|>@$") +cat(test) +\stopR + +A combination +\startbuffer +\placefigure{A combination of two previously used graphics}{ +\startcombination[2*1] + {\externalfigure[ushape][width=.4\textwidth]}{The first graphic, rescaled} + {\externalfigure[lattice][width=.4\textwidth]}{The second graphic, rescaled}} +\stopcombination +\stopbuffer +\typebuffer +\getbuffer + +Testing a function definition. + +\startR +a.df <- data.frame(a=1:2, b=rnorm(2)) +a.df$a +testfunction <- function(a=NULL, ...) { + for(i in 1:length(a)) { + gsub(a[[i]], "([a-r]|[A-R])", "bla")} + print(a)} +\stopR + +What is in the workspace now? + +\startR +ls() +\stopR + +\stoptext diff --git a/tex/context/base/m-r.tex b/tex/context/base/m-r.tex deleted file mode 100644 index ac895905c..000000000 --- a/tex/context/base/m-r.tex +++ /dev/null @@ -1,174 +0,0 @@ -%D \module -%D [ file=m-r, -%D version=2006.06.06, -%D title=\CONTEXT\ Modules, -%D subtitle=R Support, -%D author={Johan Sandblom \& Hans Hagen}, -%D date=\currentdate, -%D copyright={Johan Sandblom \& Hans Hagen}] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. 
- -\unprotect - -%D The following R-processor is a variation on Johan Sandblom's -%D prototype. -%D -%D We can combine both variants in one macro definition. Also, we -%D can minimize the number of runs by checking for a change. - -%D JS: The call to R has \type {-q} in order to prevent banner, -%D \type {--save} to make sure it saves the workspace after the run, -%D \type {--restore} to make sure it reads any workspace from a -%D previous session. - -%D An easier and better solution is to use the buffering mechanisms: - -\def\Rbufferprefix{r-} - -\newcounter\nofRfiles - -\def\Rfile{\TEXbufferfile{\Rbufferprefix\nofRfiles}}% - -\def\startR - {\doglobal\increment\nofRfiles - \dostartbuffer[\Rbufferprefix\nofRfiles][startR][stopR]} - -\def\stopR - {\doifmode{*\v!first}\runR - \typefile{\Rfile.out}} - -\def\startRhidden - {\doglobal\increment\nofRfiles - \dostartbuffer[\Rbufferprefix\nofRfiles][startRhidden][stopRhidden]} - -\def\stopRhidden - {\doifmode{*\v!first}\runR} - -\def\runR - {\executesystemcommand{texmfstart - --ifchanged=\Rfile\space --direct R - CMD BATCH -q --save --restore \Rfile\space \Rfile.out}} - -\protect \doifnotmode{demo}{\endinput} - -% Johan's test file: - -\usemodule[r] - -\def\R{R} - -\setupcolors[state=start] - -\setuptyping - [Rtype] - [color=darkgreen] - -\starttext - -First a test of whether the workspace is persistent: -bla - -\startR -a <- "bla" -b <- "blabla" -ls() -\stopR - -One \R run ends, another begins. - -\startR -ls() -\stopR - -Now follows a hidden \R run which cleans the R workspace - -\startRhidden -rm(list=ls()) -save.image() -\stopRhidden - -What is in the workspace now? - -\startR -ls() -\stopR - -Then a small test of generating a graphic, in this case a pdf -\startR -ushape <- c(rexp(500000), 12-rexp(500000)) -pdf("ushape.pdf") -par(mfrow=c(1,2)) -hist(ushape) -plot(density(ushape), main="Density") -dev.off() -\stopR - -The graphic \type{ushape.pdf} can be included in the standard \CONTEXT\ way -\startbuffer -\placefigure{An ugly distribution}{\externalfigure[ushape]} -\stopbuffer -\typebuffer -\getbuffer - -\startR -x <- rnorm(900) -y <- rexp(900) -# test comment -f <- gl(9,9,900) -summary(aov(y~x+Error(f))) -library(lattice) -pdf("lattice.pdf") -xyplot(y~x|f) -dev.off() -\stopR - -With \type{Sweave} lattice graphics calls must be enclosed in -\type{print()} statements but that is not necessary here. - -\startbuffer -\placefigure[here]{Lattice graphics}{\externalfigure[lattice]} -\stopbuffer -\typebuffer -\getbuffer - -A test string with nasty characters. In \R, the result of a statement -is not printed by default. Enclosing the statement in parentheses, -however causes the parser to see only the value of the statement and -applying the \type{print()} method. -\startR -(test <- ".*\\\\ [[{[{]{[{[{}\]\}=?!+%#|<|>@$") -cat(test) -\stopR - -A combination -\startbuffer -\placefigure{A combination of two previously used graphics}{ -\startcombination[2*1] - {\externalfigure[ushape][width=.4\textwidth]}{The first graphic, rescaled} - {\externalfigure[lattice][width=.4\textwidth]}{The second graphic, rescaled}} -\stopcombination -\stopbuffer -\typebuffer -\getbuffer - -Testing a function definition. - -\startR -a.df <- data.frame(a=1:2, b=rnorm(2)) -a.df$a -testfunction <- function(a=NULL, ...) { - for(i in 1:length(a)) { - gsub(a[[i]], "([a-r]|[A-R])", "bla")} - print(a)} -\stopR - -What is in the workspace now? 
- -\startR -ls() -\stopR - -\stoptext diff --git a/tex/context/base/m-spreadsheet.lua b/tex/context/base/m-spreadsheet.lua index 9d5106e35..f329acf9a 100644 --- a/tex/context/base/m-spreadsheet.lua +++ b/tex/context/base/m-spreadsheet.lua @@ -172,7 +172,7 @@ function functions._s_(row,col,c,f,t) for i=f,t do local ci = c[i] if type(ci) == "number" then - r = r + c[i] + r = r + ci end end return r diff --git a/tex/context/base/m-spreadsheet.mkiv b/tex/context/base/m-spreadsheet.mkiv index a05968990..5e0499184 100644 --- a/tex/context/base/m-spreadsheet.mkiv +++ b/tex/context/base/m-spreadsheet.mkiv @@ -120,12 +120,15 @@ \let\stopcell \module_spreadsheet_cell_stop \doifassignmentelse{#1} {\module_spreadsheet_start + \directsetup{spreadsheet:before:\currentspreadsheet}% \bTABLE[\c!align=\v!flushright,#1]} {\module_spreadsheet_start[#1]% + \directsetup{spreadsheet:before:\currentspreadsheet}% \bTABLE[\c!align=\v!flushright,#2]}} \unexpanded\def\stopspreadsheettable {\eTABLE + \directsetup{spreadsheet:after:\currentspreadsheet}% \stopspreadsheet \egroup} diff --git a/tex/context/base/m-translate.mkiv b/tex/context/base/m-translate.mkiv index 363f115cb..f36f9a9fb 100644 --- a/tex/context/base/m-translate.mkiv +++ b/tex/context/base/m-translate.mkiv @@ -89,7 +89,6 @@ \continueifinputfile{m-translate.mkiv} - \starttext \translateinput[Moica][Mojca] diff --git a/tex/context/base/m-zint.mkiv b/tex/context/base/m-zint.mkiv index 95b265c57..4957c8461 100644 --- a/tex/context/base/m-zint.mkiv +++ b/tex/context/base/m-zint.mkiv @@ -29,17 +29,21 @@ moduledata.zint = { } local format, lower, gsub = string.format, string.lower, string.gsub local patterns = lpeg.patterns -local zint = "zint" -- '"c:/program files/zint/zint.exe"' +local zint = "zint" -- '"c:/program files/zint/zint.exe"' +local defaultcode = "PDF417" -local whitespace = patterns.whitespace -local spaces = whitespace^0 -local key = (spaces / "") * patterns.digit^0 * (patterns.colon * spaces / "") -local value = (whitespace / "" + (1 - key))^1 -local pattern = lpeg.Cf(lpeg.Ct("") * (lpeg.Cg((lpeg.Cs(key) / tonumber) * (lpeg.Cs(value) / lower)) + patterns.anything)^0,rawset) +local whitespace = patterns.whitespace +local spaces = whitespace^0 +local key = (spaces / "") * patterns.digit^0 * (patterns.colon * spaces / "") +local value = (whitespace / "" + (1 - key))^1 +local pattern = lpeg.Cf(lpeg.Ct("") * (lpeg.Cg((lpeg.Cs(key) / tonumber) * (lpeg.Cs(value) / lower)) + patterns.anything)^0,rawset) local reverse local function cleancode(code) + if not code or code == "" then + code = defaultcode + end return lower(gsub(code," ","")) end @@ -76,6 +80,19 @@ end \stopluacode +\unprotect + +\unexpanded\def\barcode[#1]% [alternative=,text=] + {\bgroup + \getdummyparameters + [\c!alternative=,\c!text=,#1]% + \externalfigure + [\cldcontext{moduledata.zint.generate("\dummyparameter\c!alternative",\!!bs\dummyparameter\c!text\!!es)}]% + [#1,\c!alternative=,\c!text=]% + \egroup} + +\protect + \continueifinputfile{m-zint.mkiv} \starttext @@ -85,6 +102,10 @@ end \externalfigure[\cldcontext{moduledata.zint.generate("PDF417","Ton Otten")}] \blank \externalfigure[\cldcontext{moduledata.zint.generate("ISBN","9789490688011")}] + \blank + \barcode[text=Does It Work?,width=\textwidth] + \blank + \barcode[alternative=isbn,text=9789490688011,width=3cm] \stoptext diff --git a/tex/context/base/math-acc.mkvi b/tex/context/base/math-acc.mkvi new file mode 100644 index 000000000..415f2b91f --- /dev/null +++ b/tex/context/base/math-acc.mkvi @@ -0,0 +1,181 @@ +%D \module +%D 
[ file=math-acc, +%D version=2013.07.31, +%D title=\CONTEXT\ Math Macros, +%D subtitle=Accents, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\writestatus{loading}{ConTeXt Math Macros / Accents} + +% There are probably errors ... too distracted by amazing (piano) music videos running +% on top of scite ... so: experimental code. + +\unprotect + +% This module permits overloading of accents so that we can do fancy things. The +% implementation is similar to stackers. By default accenst are defined in a simple +% way. Contrary to extensibles accents cannot grow indefinitely. Alas the +% implementation of accents is different too, in the sense that they are +% prepositioned i.e. are already raised. (In my opinion for no real reason as they +% need to adapt anyway). +% +% $ \ruledhbox{$H$} \hat{H} \ruledhbox{$\widehat{H}$} \widehat{H} $ +% +% One alternative is: +% +% \definemathoverextensible [top] [hoed] ["FE302] +% \definemathoverextensible [top] [slang] ["FE303] +% +% $ \hoed{H} \ruledhbox{$\hoed{H}$} \ruledhbox{$\hoed{\tf H}$} \slang{H} $ +% +% But that nills the italic correction (and I'm in nood to mess with that again). +% +% \definemathaccents [myaccents] [color=darkred] +% \definemathtopaccent [myaccents] [mywidehat] ["0302] +% +% $ \hat{H} \widehat{H} \mywidehat{H} $ + +% A first variant (kept for educational purposed): +% +% \installcorenamespace{mathaccents} +% +% \installcommandhandler \??mathaccents {mathaccent} \??mathaccents +% +% \let\setupmathaccents\setupmathaccent +% +% \setupmathaccents +% [\c!top=, +% \c!bottom=, +% \c!mathstyle=, +% \c!color=, +% \c!command=\v!yes] +% +% \appendtoks +% \edef\p_top_bottom{\namedmathaccentparameter\currentmathaccent\c!top\namedmathaccentparameter\currentmathaccent\c!bottom}% +% \ifx\p_top_bottom\empty\else +% \edef\p_command{\mathaccentparameter\c!command}% +% \ifx\p_command\v!yes +% \setuevalue\currentmathaccent{\math_accent{\currentmathaccent}}% +% \fi +% \fi +% \to \everydefinemathaccent +% +% \def\math_accented_color_do_push{\pushcolor[\p_math_accent_color]} +% \let\math_accented_color_do_pop \popcolor +% +% \unexpanded\def\math_accent#1#2% +% {\begingroup +% \edef\currentmathaccent{#1}% +% \edef\p_math_accent_top {\mathaccentparameter\c!top}% +% \edef\p_math_accent_bottom{\mathaccentparameter\c!bottom}% +% \edef\p_math_accent_color {\mathaccentparameter\c!color}% +% \startusemathstyleparameter\mathaccentparameter +% \ifx\p_math_accented_color\empty +% \let\math_accented_color_do_pop\donothing +% \else +% \let\math_accented_color_do_pop\popcolor +% \math_accented_color_do_push +% \fi +% \ifx\p_math_accent_top\empty +% \ifx\p_math_accent_bottom\empty +% \else +% \Umathaccent bottom \fam\zerocount\p_math_accent_bottom +% \fi +% \else +% \ifx\p_math_accent_bottom\empty +% \Umathaccent \fam\zerocount\p_math_accent_top +% \else +% \Umathaccent both \fam\zerocount\p_math_accent_top +% \fam\zerocount\p_math_accent_bottom +% \fi +% \fi +% {\popcolor#2}% +% \stopusemathstyleparameter +% \endgroup} +% +% \definemathaccent [myaccents] [\c!color=red] +% \definemathaccent [mywidehat] [myaccents] [\c!top="0302] + +\installcorenamespace{mathaccents} + +\installcommandhandler \??mathaccents {mathaccents} \??mathaccents + +\setupmathaccents + [\c!top=, + \c!bottom=, + \c!mathstyle=, + \c!color=, + \c!command=\v!yes] + +\definemathaccents + [\v!both] + 
+\definemathaccents + [\v!top] + [\v!both] + +\definemathaccents + [\v!bottom] + [\v!both] + +\unexpanded\def\definemathtopaccent {\dotripleempty \math_accents_define_top } +\unexpanded\def\definemathbottomaccent{\dotripleempty \math_accents_define_bottom} +\unexpanded\def\definemathdoubleaccent{\doquadrupleempty\math_accents_define_double} + +\def\math_accents_define_top[#1][#2][#3]% class name top + {\ifthirdargument + \setuevalue{#2}{\math_accents_make_double {#1}\plusone{\number#3}\zerocount}% + \else + \setuevalue{#1}{\math_accents_make_double\noexpand\currentmathaccents\plusone{\number#2}\zerocount}% + \fi} + +\def\math_accents_define_bottom[#1][#2][#3]% class name bottom + {\ifthirdargument + \setuevalue{#2}{\math_accents_make_double {#1}\plustwo\zerocount{\number#3}}% + \else + \setuevalue{#1}{\math_accents_make_double\noexpand\currentmathaccents\plustwo\zerocount{\number#2}}% + \fi} + +\def\math_accents_define_double[#1][#2][#3][#4]% class name top bottom + {\iffourthargument + \setuevalue{#2}{\math_accents_make_double {#1}\plusthree{\number#3}{\number#4}}% + \else + \setuevalue{#1}{\math_accents_make_double\noexpand\currentmathaccents\plusthree{\number#2}{\number#3}}% + \fi} + +\def\math_accents_color_push_yes + {\pushcolor[\p_math_accent_color]% + \let\math_accents_color_pop\popcolor} + +\def\math_accents_color_push_nop + {\let\math_accents_color_pop\donothing} + +\unexpanded\def\math_accents_make_double#class#kind#top#bottom#content% + {\begingroup + \edef\currentmathaccents {#class}% + \edef\p_math_accent_color{\mathaccentsparameter\c!color}% + \startusemathstyleparameter\mathaccentsparameter + \ifx\p_math_accent_color\empty + \math_accents_color_push_nop + \else + \math_accents_color_push_yes + \fi + \ifcase#kind\or + \Umathaccent \fam\zerocount#top + \or + \Umathaccent bottom \fam\zerocount#bottom + \or + \Umathaccent both \fam\zerocount#top + \fam\zerocount#bottom + \fi + {\math_accents_color_pop#content}% + \stopusemathstyleparameter + \endgroup} + +\protect \endinput diff --git a/tex/context/base/math-act.lua b/tex/context/base/math-act.lua index 4f9b3b7e8..fc63a0090 100644 --- a/tex/context/base/math-act.lua +++ b/tex/context/base/math-act.lua @@ -10,19 +10,24 @@ if not modules then modules = { } end modules ['math-act'] = { local type, next = type, next local fastcopy = table.fastcopy +local formatters = string.formatters -local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end) -local report_math = logs.reporter("mathematics","initializing") +local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end) +local trace_collecting = false trackers.register("math.collecting", function(v) trace_collecting = v end) -local context = context -local commands = commands -local mathematics = mathematics -local texdimen = tex.dimen -local abs = math.abs +local report_math = logs.reporter("mathematics","initializing") -local sequencers = utilities.sequencers -local appendgroup = sequencers.appendgroup -local appendaction = sequencers.appendaction +local context = context +local commands = commands +local mathematics = mathematics +local texsetdimen = tex.setdimen +local abs = math.abs + +local sequencers = utilities.sequencers +local appendgroup = sequencers.appendgroup +local appendaction = sequencers.appendaction + +local fontchars = fonts.hashes.characters local mathfontparameteractions = sequencers.new { name = "mathparameters", @@ -286,14 +291,104 @@ end 
sequencers.appendaction("aftercopyingcharacters", "system","mathematics.overloaddimensions") --- a couple of predefined tewaks: +-- a couple of predefined tweaks: local tweaks = { } mathematics.tweaks = tweaks -function tweaks.fixbadprime(target,original) - target.characters[0xFE325] = target.characters[0x2032] -end +-- function tweaks.fixbadprime(target,original) +-- target.characters[0xFE325] = target.characters[0x2032] +-- end + +-- these could go to math-fbk + +-- local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap) +-- local characters = target.characters +-- -- if not characters[newchr] then -- xits needs an enforce +-- local addprivate = fonts.helpers.addprivate +-- local olddata = characters[oldchr] +-- if olddata then +-- if swap then +-- swap = characters[swap] +-- height = swap.depth +-- depth = 0 +-- else +-- height = height or 0 +-- depth = depth or 0 +-- end +-- local correction = swap and { "down", (olddata.height or 0) - height } or { "down", olddata.height } +-- local newdata = { +-- commands = { correction, { "slot", 1, oldchr } }, +-- width = olddata.width, +-- height = height, +-- depth = depth, +-- } +-- characters[newchr] = newdata +-- local nextglyph = olddata.next +-- while nextglyph do +-- local oldnextdata = characters[nextglyph] +-- local newnextdata = { +-- commands = { correction, { "slot", 1, nextglyph } }, +-- width = oldnextdata.width, +-- height = height, +-- depth = depth, +-- } +-- local newnextglyph = addprivate(target,formatters["original-%H"](nextglyph),newnextdata) +-- newdata.next = newnextglyph +-- local nextnextglyph = oldnextdata.next +-- if nextnextglyph == nextglyph then +-- break +-- else +-- olddata = oldnextdata +-- newdata = newnextdata +-- nextglyph = nextnextglyph +-- end +-- end +-- local hv = olddata.horiz_variants +-- if hv then +-- hv = fastcopy(hv) +-- newdata.horiz_variants = hv +-- for i=1,#hv do +-- local hvi = hv[i] +-- local oldglyph = hvi.glyph +-- local olddata = characters[oldglyph] +-- local newdata = { +-- commands = { correction, { "slot", 1, oldglyph } }, +-- width = olddata.width, +-- height = height, +-- depth = depth, +-- } +-- hvi.glyph = addprivate(target,formatters["original-%H"](oldglyph),newdata) +-- end +-- end +-- end +-- -- end +-- end + +-- function tweaks.fixoverline(target,original) +-- local height, depth = 0, 0 +-- local mathparameters = target.mathparameters +-- if mathparameters then +-- height = mathparameters.OverbarVerticalGap +-- depth = mathparameters.UnderbarVerticalGap +-- else +-- height = target.parameters.xheight/4 +-- depth = height +-- end +-- accent_to_extensible(target,0x203E,original,0x0305,height,depth) +-- -- also crappy spacing for our purpose: push to top of baseline +-- accent_to_extensible(target,0xFE3DE,original,0x23DE,height,depth,0x23DF) +-- accent_to_extensible(target,0xFE3DC,original,0x23DC,height,depth,0x23DD) +-- accent_to_extensible(target,0xFE3B4,original,0x23B4,height,depth,0x23B5) +-- -- for symmetry +-- target.characters[0xFE3DF] = original.characters[0x23DF] +-- target.characters[0xFE3DD] = original.characters[0x23DD] +-- target.characters[0xFE3B5] = original.characters[0x23B5] +-- -- inspect(fonts.helpers.expandglyph(target.characters,0x203E)) +-- -- inspect(fonts.helpers.expandglyph(target.characters,0x23DE)) +-- end + +-- sequencers.appendaction("aftercopyingcharacters", "system","mathematics.tweaks.fixoverline") -- for the moment always -- helpers @@ -301,6 +396,7 @@ local setmetatableindex = table.setmetatableindex local family_font = 
node.family_font local fontcharacters = fonts.hashes.characters +local fontdescriptions = fonts.hashes.descriptions local extensibles = utilities.storage.allocate() fonts.hashes.extensibles = extensibles @@ -324,24 +420,34 @@ local function extensiblecode(font,unicode) if not character then return unknown end + local first = character.next local code = unicode - local next = character.next + local next = first while next do code = next character = characters[next] next = character.next end local char = chardata[unicode] - local mathextensible = char and char.mathextensible + if not char then + return unknown + end if character.horiz_variants then if character.vert_variants then return { e_mixed, code, character } else - local e = mathextensible and extensibles[mathextensible] + local m = char.mathextensible + local e = m and extensibles[m] return e and { e, code, character } or unknown end elseif character.vert_variants then - local e = mathextensible and extensibles[mathextensible] + local m = char.mathextensible + local e = m and extensibles[m] + return e and { e, code, character } or unknown + elseif first then + -- assume accent (they seldom stretch .. sizes) + local m = char.mathextensible or char.mathstretch + local e = m and extensibles[m] return e and { e, code, character } or unknown else return unknown @@ -374,31 +480,199 @@ end -- abs(right["start"] - right["end"]) | right.advance | characters[right.glyph].width function commands.horizontalcode(family,unicode) - local font = family_font(family or 0) - local data = extensibles[font][unicode] - local kind = data[1] + local font = family_font(family or 0) + local data = extensibles[font][unicode] + local kind = data[1] + local loffset = 0 + local roffset = 0 if kind == e_left then local charlist = data[3].horiz_variants - local characters = fontcharacters[font] - local left = charlist[1] - texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0)) - texdimen.scratchrightoffset = 0 + if charlist then + local left = charlist[1] + loffset = abs((left["start"] or 0) - (left["end"] or 0)) + end elseif kind == e_right then local charlist = data[3].horiz_variants - local characters = fontcharacters[font] local right = charlist[#charlist] - texdimen.scratchleftoffset = 0 - texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0)) + roffset = abs((right["start"] or 0) - (right["end"] or 0)) elseif kind == e_horizontal then local charlist = data[3].horiz_variants - local characters = fontcharacters[font] - local left = charlist[1] - local right = charlist[#charlist] - texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0)) - texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0)) + if charlist then + local left = charlist[1] + local right = charlist[#charlist] + loffset = abs((left ["start"] or 0) - (left ["end"] or 0)) + roffset = abs((right["start"] or 0) - (right["end"] or 0)) + end else - texdimen.scratchleftoffset = 0 - texdimen.scratchrightoffset = 0 end + texsetdimen("scratchleftoffset",loffset) + texsetdimen("scratchrightoffset",roffset) context(kind) end + +-- experiment + +-- check: when true, only set when present in font +-- force: when false, then not set when already set + +local blocks = characters.blocks -- this will move to char-ini + +blocks["uppercasenormal"] = { first = 0x00041, last = 0x0005A } +blocks["uppercasebold"] = { first = 0x1D400, last = 0x1D419 } +blocks["uppercaseitalic"] = { first = 0x1D434, last = 0x1D44D } 
+blocks["uppercasebolditalic"] = { first = 0x1D468, last = 0x1D481 } +blocks["uppercasescript"] = { first = 0x1D49C, last = 0x1D4B5 } +blocks["uppercaseboldscript"] = { first = 0x1D4D0, last = 0x1D4E9 } +blocks["uppercasefraktur"] = { first = 0x1D504, last = 0x1D51D } +blocks["uppercasedoublestruck"] = { first = 0x1D538, last = 0x1D551 } +blocks["uppercaseboldfraktur"] = { first = 0x1D56C, last = 0x1D585 } +blocks["uppercasesansserifnormal"] = { first = 0x1D5A0, last = 0x1D5B9 } +blocks["uppercasesansserifbold"] = { first = 0x1D5D4, last = 0x1D5ED } +blocks["uppercasesansserifitalic"] = { first = 0x1D608, last = 0x1D621 } +blocks["uppercasesansserifbolditalic"] = { first = 0x1D63C, last = 0x1D655 } +blocks["uppercasemonospace"] = { first = 0x1D670, last = 0x1D689 } +blocks["uppercasegreeknormal"] = { first = 0x00391, last = 0x003AA } +blocks["uppercasegreekbold"] = { first = 0x1D6A8, last = 0x1D6C1 } +blocks["uppercasegreekitalic"] = { first = 0x1D6E2, last = 0x1D6FB } +blocks["uppercasegreekbolditalic"] = { first = 0x1D71C, last = 0x1D735 } +blocks["uppercasegreeksansserifbold"] = { first = 0x1D756, last = 0x1D76F } +blocks["uppercasegreeksansserifbolditalic"] = { first = 0x1D790, last = 0x1D7A9 } + +blocks["lowercasenormal"] = { first = 0x00061, last = 0x0007A } +blocks["lowercasebold"] = { first = 0x1D41A, last = 0x1D433 } +blocks["lowercaseitalic"] = { first = 0x1D44E, last = 0x1D467 } +blocks["lowercasebolditalic"] = { first = 0x1D482, last = 0x1D49B } +blocks["lowercasescript"] = { first = 0x1D4B6, last = 0x1D4CF } +blocks["lowercaseboldscript"] = { first = 0x1D4EA, last = 0x1D503 } +blocks["lowercasefraktur"] = { first = 0x1D51E, last = 0x1D537 } +blocks["lowercasedoublestruck"] = { first = 0x1D552, last = 0x1D56B } +blocks["lowercaseboldfraktur"] = { first = 0x1D586, last = 0x1D59F } +blocks["lowercasesansserifnormal"] = { first = 0x1D5BA, last = 0x1D5D3 } +blocks["lowercasesansserifbold"] = { first = 0x1D5EE, last = 0x1D607 } +blocks["lowercasesansserifitalic"] = { first = 0x1D622, last = 0x1D63B } +blocks["lowercasesansserifbolditalic"] = { first = 0x1D656, last = 0x1D66F } +blocks["lowercasemonospace"] = { first = 0x1D68A, last = 0x1D6A3 } +blocks["lowercasegreeknormal"] = { first = 0x003B1, last = 0x003CA } +blocks["lowercasegreekbold"] = { first = 0x1D6C2, last = 0x1D6DB } +blocks["lowercasegreekitalic"] = { first = 0x1D6FC, last = 0x1D715 } +blocks["lowercasegreekbolditalic"] = { first = 0x1D736, last = 0x1D74F } +blocks["lowercasegreeksansserifbold"] = { first = 0x1D770, last = 0x1D789 } +blocks["lowercasegreeksansserifbolditalic"] = { first = 0x1D7AA, last = 0x1D7C3 } + +blocks["digitsnormal"] = { first = 0x00030, last = 0x00039 } +blocks["digitsbold"] = { first = 0x1D7CE, last = 0x1D7D8 } +blocks["digitsdoublestruck"] = { first = 0x1D7D8, last = 0x1D7E2 } +blocks["digitssansserifnormal"] = { first = 0x1D7E2, last = 0x1D7EC } +blocks["digitssansserifbold"] = { first = 0x1D7EC, last = 0x1D805 } +blocks["digitsmonospace"] = { first = 0x1D7F6, last = 0x1D80F } + +blocks["mathematicaloperators"] = { first = 0x02200, last = 0x022FF } +blocks["miscellaneousmathematicalsymbolsa"] = { first = 0x027C0, last = 0x027EF } +blocks["miscellaneousmathematicalsymbolsb"] = { first = 0x02980, last = 0x029FF } +blocks["supplementalmathematicaloperators"] = { first = 0x02A00, last = 0x02AFF } +blocks["letterlikesymbols"] = { first = 0x02100, last = 0x0214F } +blocks["miscellaneoustechnical"] = { first = 0x02308, last = 0x0230B } +blocks["geometricshapes"] = { first = 0x025A0, last = 0x025FF } 
+blocks["miscellaneoussymbolsandarrows"] = { first = 0x02B30, last = 0x02B4C } +blocks["mathematicalalphanumericsymbols"] = { first = 0x00400, last = 0x1D7FF } + +blocks["digitslatin"] = { first = 0x00030, last = 0x00039 } +blocks["digitsarabicindic"] = { first = 0x00660, last = 0x00669 } +blocks["digitsextendedarabicindic"] = { first = 0x006F0, last = 0x006F9 } +------["digitsdevanagari"] = { first = 0x00966, last = 0x0096F } +------["digitsbengali"] = { first = 0x009E6, last = 0x009EF } +------["digitsgurmukhi"] = { first = 0x00A66, last = 0x00A6F } +------["digitsgujarati"] = { first = 0x00AE6, last = 0x00AEF } +------["digitsoriya"] = { first = 0x00B66, last = 0x00B6F } +------["digitstamil"] = { first = 0x00030, last = 0x00039 } -- no zero +------["digitstelugu"] = { first = 0x00C66, last = 0x00C6F } +------["digitskannada"] = { first = 0x00CE6, last = 0x00CEF } +------["digitsmalayalam"] = { first = 0x00D66, last = 0x00D6F } +------["digitsthai"] = { first = 0x00E50, last = 0x00E59 } +------["digitslao"] = { first = 0x00ED0, last = 0x00ED9 } +------["digitstibetan"] = { first = 0x00F20, last = 0x00F29 } +------["digitsmyanmar"] = { first = 0x01040, last = 0x01049 } +------["digitsethiopic"] = { first = 0x01369, last = 0x01371 } +------["digitskhmer"] = { first = 0x017E0, last = 0x017E9 } +------["digitsmongolian"] = { first = 0x01810, last = 0x01809 } + +-- operators : 0x02200 +-- symbolsa : 0x02701 +-- symbolsb : 0x02901 +-- supplemental : 0x02A00 + +-- todo: tounicode + +function mathematics.injectfallbacks(target,original) + local properties = original.properties + if properties and properties.hasmath then + local specification = target.specification + if specification then + local fallbacks = specification.fallbacks + if fallbacks then + local definitions = fonts.collections.definitions[fallbacks] + if definitions then + if trace_collecting then + report_math("adding fallback characters to font %a",specification.hash) + end + local definedfont = fonts.definers.internal + local copiedglyph = fonts.handlers.vf.math.copy_glyph + local fonts = target.fonts + local size = specification.size -- target.size + local characters = target.characters + if not fonts then + fonts = { } + target.fonts = fonts + target.type = "virtual" + target.properties.virtualized = true + end + if #fonts == 0 then + fonts[1] = { id = 0, size = size } -- sel, will be resolved later + end + local done = { } + for i=1,#definitions do + local definition = definitions[i] + local name = definition.font + local start = definition.start + local stop = definition.stop + local check = definition.check + local force = definition.force + local rscale = definition.rscale or 1 + local offset = definition.offset or start + local id = definedfont { name = name, size = size * rscale } + local index = #fonts + 1 + fonts[index] = { id = id, size = size } + local chars = fontchars[id] + if check then + for unicode = start, stop do + local unic = unicode + offset - start + if not chars[unicode] then + -- not in font + elseif force or (not done[unic] and not characters[unic]) then + if trace_collecting then + report_math("remapping math character, vector %a, font %a, character %C, %s",fallbacks,name,unic,"checked") + end + characters[unic] = copiedglyph(target,characters,chars,unicode,index) + done[unic] = true + end + end + else + for unicode = start, stop do + local unic = unicode + offset - start + if force or (not done[unic] and not characters[unic]) then + if trace_collecting then + report_math("remapping math character, 
vector %a, font %a, character %C, %s",fallbacks,name,unic,"unchecked") + end + characters[unic] = copiedglyph(target,characters,chars,unicode,index) + done[unic] = true + end + end + end + end + end + end + end + end +end + +sequencers.appendaction("aftercopyingcharacters", "system","mathematics.injectfallbacks") diff --git a/tex/context/base/math-ali.mkiv b/tex/context/base/math-ali.mkiv index db960de26..911a0d430 100644 --- a/tex/context/base/math-ali.mkiv +++ b/tex/context/base/math-ali.mkiv @@ -25,7 +25,7 @@ %D Modules may provide additional alignment features. The following %D mechanisms are provided by the core. -% n>1 #### needed, strange # interaction in recurse +% n>1 ### needed, strange # interaction in recurse \newtoks\c_math_align_a \newtoks\c_math_align_b @@ -39,9 +39,9 @@ \def\math_build_eqalign_step {\ifnum\recurselevel>\plusone %\appendtoks - % \tabskip\mathalignmentparameter\c!distance&\tabskip\zeropoint + % \tabskip\mathalignmentparameter\c!distance\aligntab\tabskip\zeropoint %\to\scratchtoks - \scratchtoks\expandafter{\the\scratchtoks\tabskip\mathalignmentparameter\c!distance&\tabskip\zeropoint}% + \scratchtoks\expandafter{\the\scratchtoks\tabskip\mathalignmentparameter\c!distance\aligntab\tabskip\zeropoint}% \fi \normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_a}}% \dorecurse{\numexpr\mathalignmentparameter\c!n-\plusone\relax} @@ -56,9 +56,9 @@ {\emptyhbox \mskip\thinmuskip \vcenter - {\openup\displayopenupvalue % was: \openup\jot + {\math_openup\displayopenupvalue % was: \openup\jot \mathsurround\zeropoint - \ialign{\strut\hfil$\displaystyle{##}$&$\displaystyle{{}##{}}$\hfil\crcr#1\crcr}}% + \ialign{\strut\hfil$\displaystyle{\alignmark\alignmark}$\aligntab$\displaystyle{{}\alignmark\alignmark{}}$\hfil\crcr#1\crcr}}% \mskip\thinmuskip} % preamble is scanned for tabskips so we need the span to prevent an error message @@ -68,14 +68,14 @@ % use zeroskipplusfill \def\math_prepare_r_eqalign_no - {\c_math_align_a{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}% - \c_math_align_b{&\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}% + {\c_math_align_a{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{\alignmark\alignmark}\math_right_of_eqalign\tabskip\zeropoint}% + \c_math_align_b{\aligntab\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{\alignmark\alignmark}\math_right_of_eqalign\tabskip\zeropoint}% \ifnum\mathraggedstatus=\plusone - \c_math_align_c{\hfil&\span\math_text_in_eqalign{##}\tabskip\zeropoint}% + \c_math_align_c{\hfil\aligntab\span\math_text_in_eqalign{\alignmark\alignmark}\tabskip\zeropoint}% \else\ifnum\mathraggedstatus=\plusthree - \c_math_align_c{\hfil\tabskip\zeropoint\s!plus 1\s!fill&\span\math_text_in_eqalign{##}\tabskip\zeropoint}% + \c_math_align_c{\hfil\tabskip\zeropoint\s!plus 1\s!fill\aligntab\span\math_text_in_eqalign{\alignmark\alignmark}\tabskip\zeropoint}% \else - \c_math_align_c{\hfil\tabskip\centering&\llap{\span\math_text_in_eqalign{##}}\tabskip\zeropoint}% + \c_math_align_c{\hfil\tabskip\centering\aligntab\llap{\span\math_text_in_eqalign{\alignmark\alignmark}}\tabskip\zeropoint}% \fi\fi \global\mathnumberstatus\zerocount \math_build_eqalign @@ -83,15 +83,15 @@ \tabskip\centering} \def\math_prepare_l_eqalign_no - 
{\c_math_align_a{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}% - \c_math_align_b{&\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{##}\math_right_of_eqalign\tabskip\zeropoint}% + {\c_math_align_a{\strut\math_first_in_eqalign\hfil\math_left_of_equalign\span\math_math_in_eqalign{\alignmark\alignmark}\math_right_of_eqalign\tabskip\zeropoint}% + \c_math_align_b{\aligntab\math_next_in_eqalign\math_left_of_equalign\span\math_math_in_eqalign{\alignmark\alignmark}\math_right_of_eqalign\tabskip\zeropoint}% % problem: number is handled after rest and so ends up in the margin \ifnum\mathraggedstatus=\plusone - \c_math_align_c{\hfil&\kern-\displaywidth\rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}% + \c_math_align_c{\hfil\aligntab\kern-\displaywidth\rlap{\span\math_text_in_eqalign{\alignmark\alignmark}}\tabskip\displaywidth}% \else\ifnum\mathraggedstatus=\plusthree - \c_math_align_c{\hfil\tabskip\zeropoint\s!plus 1\s!fill&\kern-\displaywidth\span\math_rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}% + \c_math_align_c{\hfil\tabskip\zeropoint\s!plus 1\s!fill\aligntab\kern-\displaywidth\span\math_rlap{\span\math_text_in_eqalign{\alignmark\alignmark}}\tabskip\displaywidth}% \else - \c_math_align_c{\hfil\tabskip\centering&\kern-\displaywidth\rlap{\span\math_text_in_eqalign{##}}\tabskip\displaywidth}% + \c_math_align_c{\hfil\tabskip\centering\aligntab\kern-\displaywidth\rlap{\span\math_text_in_eqalign{\alignmark\alignmark}}\tabskip\displaywidth}% \fi\fi \global\mathnumberstatus\zerocount \math_build_eqalign @@ -192,7 +192,7 @@ \c_math_eqalign_column\zerocount \processcommacommand [\mathalignmentparameter\c!align] - {\advance\c_math_eqalign_column\plusone\doseteqaligncolumn}% takes argument + {\advance\c_math_eqalign_column\plusone\math_eqalign_set_column}% takes argument \global\c_math_eqalign_column\plusone \dostarttagged\t!math\empty \dostarttagged\t!mathtable\currentmathalignment @@ -286,7 +286,7 @@ \fi \fi} -\def\doseteqaligncolumn#1% we could just add to the preamble (as with other alignments) +\def\math_eqalign_set_column#1% we could just add to the preamble (as with other alignments) {\expandafter\let\csname\??mathalignmentvariant\number\c_math_eqalign_column\expandafter\endcsname \csname\??mathalignmentvariant\ifcsname\??mathalignmentvariant#1\endcsname#1\else\v!normal\fi\endcsname} @@ -594,6 +594,8 @@ \setvalue {\e!stop \currentmathmatrix}{\math_matrix_stop}% no u else lookahead problem \to \everydefinemathmatrix +\let\math_matrix_NC\relax + \unexpanded\def\math_matrix_start#1% {\begingroup \edef\currentmathmatrix{#1}% @@ -607,12 +609,12 @@ \math_matrix_align_method_analyze \mathmatrixleft \mathmatrixbox\bgroup - \pushmacro\domatrixNC + \pushmacro\math_matrix_NC \let\endmath\relax - \def\NC{\domatrixNC}% - \def\MC{\domatrixNC\ifmmode\else$\def\endmath{$}\fi}% - \global\let\domatrixNC\dodomatrixNC - \def\NR{\endmath\global\let\domatrixNC\dodomatrixNC\crcr}% + \def\NC{\math_matrix_NC}% + \def\MC{\math_matrix_NC\ifmmode\else\startimath\let\endmath\stopimath\fi}% + \global\let\math_matrix_NC\math_matrix_NC_indeed + \def\NR{\endmath\global\let\math_matrix_NC\math_matrix_NC_indeed\crcr}% \normalbaselines \mathsurround\zeropoint \everycr\emptytoks @@ -620,8 +622,8 @@ \c_math_eqalign_column\zerocount \processcommacommand [\mathmatrixparameter\c!align] - {\advance\c_math_eqalign_column\plusone\doseteqaligncolumn}% was \dosetmatrixcolumn - 
\scratchcounter=\ifnum\c_math_eqalign_column>\zerocount \c_math_eqalign_column \else \plusone \fi + {\advance\c_math_eqalign_column\plusone\math_eqalign_set_column}% + \scratchcounter\ifnum\c_math_eqalign_column>\zerocount \c_math_eqalign_column \else \plusone \fi \global\c_math_eqalign_column\plusone \math_matrix_prepare} @@ -630,7 +632,7 @@ \mathstrut\crcr \noalign{\kern-\baselineskip}% \egroup - \popmacro\domatrixNC + \popmacro\math_matrix_NC \egroup \mathmatrixright \endgroup} @@ -640,13 +642,13 @@ \def\math_matrix_prepare {\c_math_align_a{\strut\math_first_in_eqalign\math_left_of_equalign\span - \math_text_in_eqalign{\mathmatrixparameter\c!style##}\math_right_of_eqalign}% - \c_math_align_b{&\hskip\mathmatrixparameter\c!distance + \math_text_in_eqalign{\mathmatrixparameter\c!style\alignmark\alignmark}\math_right_of_eqalign}% + \c_math_align_b{\aligntab\hskip\mathmatrixparameter\c!distance \math_next_in_eqalign\math_left_of_equalign\span - \math_text_in_eqalign{\mathmatrixparameter\c!style##}\math_right_of_eqalign}% - \c_math_align_c{&&\hskip\mathmatrixparameter\c!distance + \math_text_in_eqalign{\mathmatrixparameter\c!style\alignmark\alignmark}\math_right_of_eqalign}% + \c_math_align_c{\aligntab\aligntab\hskip\mathmatrixparameter\c!distance \math_left_of_equalign\span - \math_text_in_eqalign{\mathmatrixparameter\c!style##}\math_right_of_eqalign}% + \math_text_in_eqalign{\mathmatrixparameter\c!style\alignmark\alignmark}\math_right_of_eqalign}% \scratchtoks\emptytoks \normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_a}}% \dorecurse{\numexpr\scratchcounter-\plusone\relax} @@ -654,8 +656,8 @@ \normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_c}}% \halign \expandafter \bgroup\the\scratchtoks \crcr} -\unexpanded\def\dodomatrixNC - {\gdef\domatrixNC{\endmath&}} +\unexpanded\def\math_matrix_NC_indeed + {\gdef\math_matrix_NC{\endmath\aligntab}} \installcorenamespace{mathmatrixalignmethod} @@ -877,16 +879,16 @@ \lineskip\mathstackvgap \lineskiplimit\lineskip \let\stopmathmode\relax - \def\NC{\domatrixNC}% - \def\MC{\domatrixNC\startmathmode}% - \global\let\domatrixNC\dodomatrixNC + \def\NC{\math_matrix_NC}% + \def\MC{\math_matrix_NC\startmathmode}% + \global\let\math_matrix_NC\math_matrix_NC_indeed \def\NR {\stopmathmode - \global\let\domatrixNC\dodomatrixNC + \global\let\math_matrix_NC\math_matrix_NC_indeed \crcr}% \mathsurround\zeropoint \everycr\emptytoks - \halign\bgroup\hfil$\scriptstyle##$\hfil\crcr} + \halign\bgroup\hfil$\scriptstyle\alignmark\alignmark$\hfil\crcr} \def\stopsubstack {\crcr diff --git a/tex/context/base/math-def.mkiv b/tex/context/base/math-def.mkiv index 9201fc540..250986959 100644 --- a/tex/context/base/math-def.mkiv +++ b/tex/context/base/math-def.mkiv @@ -15,7 +15,7 @@ \unprotect -% this will be done at the lua end +%D Some day this will be done at the lua end. \startluacode mathematics.define(\number\defaultmathfamily) @@ -24,22 +24,8 @@ \activatemathcharacters -% will go to math-ext (if used at all) - -\Umathchardef\braceld=0 \defaultmathfamily "FF07A -\Umathchardef\bracerd=0 \defaultmathfamily "FF07B -\Umathchardef\bracelu=0 \defaultmathfamily "FF07C -\Umathchardef\braceru=0 \defaultmathfamily "FF07D - -% ctx specific - -% \bgroup -% \catcode`|=\othercatcode -% \global\let\|=| -% \egroup - -% The \mfunction macro is an alternative for \hbox with a -% controlable font switch. +%D The \mfunction macro is an alternative for \hbox with a controlable font +%D switch. 
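+%D A minimal sketch of how one can hook into this (the \type {sinc} name below is
+%D only an example, not one of the predefined commands):
+%D
+%D \starttyping
+%D \definemathcommand [sinc] [nolop] {\mfunction{sinc}}
+%D
+%D $\sinc(x) = \frac{\sin(x)}{x}$
+%D \stoptyping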
\definemathcommand [arccos] [nolop] {\mfunctionlabeltext{arccos}} \definemathcommand [arcsin] [nolop] {\mfunctionlabeltext{arcsin}} @@ -69,7 +55,7 @@ \definemathcommand [max] [limop] {\mfunctionlabeltext{max}} \definemathcommand [min] [limop] {\mfunctionlabeltext{min}} \definemathcommand [mod] [limop] {\mfunctionlabeltext{mod}} -%definemathcommand [div] [limop] {\mfunctionlabeltext{div}} % overloads \div symbol +%definemathcommand [div] [limop] {\mfunctionlabeltext{div}} % overloads \div symbol \definemathcommand [projlim] [limop] {\mfunctionlabeltext{projlim}} \definemathcommand [Pr] [limop] {\mfunctionlabeltext{Pr}} \definemathcommand [sec] [nolop] {\mfunctionlabeltext{sec}} @@ -79,157 +65,23 @@ \definemathcommand [tanh] [nolop] {\mfunctionlabeltext{tanh}} \definemathcommand [tan] [nolop] {\mfunctionlabeltext{tan}} -% \definemathcommand [integers] {{\mathblackboard Z}} -% \definemathcommand [reals] {{\mathblackboard R}} -% \definemathcommand [rationals] {{\mathblackboard Q}} -% \definemathcommand [naturalnumbers]{{\mathblackboard N}} -% \definemathcommand [complexes] {{\mathblackboard C}} -% \definemathcommand [primes] {{\mathblackboard P}} - \let\normalmatharg\arg % todo: maybe automatically -% using attributes - -\setnewconstant\bigmathdelimitermethod\plusone - -\def\plainbigdelimiters % traditional method - {\bigmathdelimitermethod\plustwo} - -\plainbigdelimiters % is default for the moment but not so nice - -\def\doplainbigmath#1#2% - {{\hbox{$% - \nulldelimiterspace\zeropoint\relax - \mathsurround\zeropoint - $}}} - -\def\doleftbigmath #1{\ifx#1\relax\else\left#1\expandafter\doleftbigmath \fi} -\def\dorightbigmath#1{\ifx#1\relax\else\right.\expandafter\dorightbigmath\fi} - -\installcorenamespace{mathbig} - -\unexpanded\def\choosemathbig#1#2% so we accent \big{||} as well - {{\hbox{$% - \ifcase\bigmathdelimitermethod - \doleftbigmath#2\relax - \dorightbigmath#2\relax - \or - \attribute\mathsizeattribute#1\relax - \doleftbigmath#2\relax - \dorightbigmath#2\relax - \else - \doleftbigmath#2\relax - \vbox to\getvalue{\??mathbig\number#1}\bodyfontsize{}% - \dorightbigmath#2\relax - \fi - \nulldelimiterspace\zeropoint\relax - \mathsurround\zeropoint - $}}} - -\definemathcommand [big] {\choosemathbig\plusone } \setvalue{\??mathbig1}{0.85} -\definemathcommand [Big] {\choosemathbig\plustwo } \setvalue{\??mathbig2}{1.15} -\definemathcommand [bigg] {\choosemathbig\plusthree} \setvalue{\??mathbig3}{1.45} -\definemathcommand [Bigg] {\choosemathbig\plusfour } \setvalue{\??mathbig4}{1.75} - -\definemathcommand [bigl] [open] [one] {\big} -\definemathcommand [bigm] [rel] [one] {\big} -\definemathcommand [bigr] [close] [one] {\big} -\definemathcommand [Bigl] [open] [one] {\Big} -\definemathcommand [Bigm] [rel] [one] {\Big} -\definemathcommand [Bigr] [close] [one] {\Big} -\definemathcommand [biggl] [open] [one] {\bigg} -\definemathcommand [biggm] [rel] [one] {\bigg} -\definemathcommand [biggr] [close] [one] {\bigg} -\definemathcommand [Biggl] [open] [one] {\Bigg} -\definemathcommand [Biggm] [rel] [one] {\Bigg} -\definemathcommand [Biggr] [close] [one] {\Bigg} - %D This needs checking: -\def\setoperatorlimits#1#2% operator limits +\unexpanded\def\setoperatorlimits#1#2% operator limits {\savenormalmeaning{#1}% - \def#1{\csname normal\strippedcsname#1\endcsname#2}} - -\setoperatorlimits\int \intlimits -\setoperatorlimits\iint \intlimits -\setoperatorlimits\iiint \intlimits -\setoperatorlimits\oint \intlimits -\setoperatorlimits\oiint \intlimits -\setoperatorlimits\oiiint \intlimits 
-\setoperatorlimits\intclockwise \intlimits -\setoperatorlimits\ointclockwise \intlimits -\setoperatorlimits\ointctrclockwise \intlimits - -%D This is a temporary hack until we figure out how to do this correctly, -%D preferably using math parameters but we can also consider doing some -%D node juggling here. - -\unexpanded\def\implies {\mathrel{\;\Longrightarrow\;}} -\unexpanded\def\impliedby{\mathrel{\;\Longleftarrow\;}} -\unexpanded\def\And {\mathrel{\;\internalAnd\;}} -%unexpanded\def\iff {\;\Longleftrightarrow\;} -\setuvalue {iff}{\;\Longleftrightarrow\;} % nicer for if checker - -% todo: virtual in math-vfu - -% \definemathcommand [mapsto] {\mapstochar\rightarrow} -% \definemathcommand [hookrightarrow] {\lhook\joinrel\rightarrow} -% \definemathcommand [hookleftarrow] {\leftarrow\joinrel\rhook} -% \definemathcommand [bowtie] {\mathrel\triangleright\joinrel\mathrel\triangleleft} -% \definemathcommand [models] {\mathrel|\joinrel=} -% \definemathcommand [iff] {\;\Longleftrightarrow\;} - -% hm - -% ldots = 2026 -% vdots = 22EE -% cdots = 22EF -% ddots = 22F1 -% udots = 22F0 - -% \def\PLAINldots{\ldotp\ldotp\ldotp} -% \def\PLAINcdots{\cdotp\cdotp\cdotp} - -% \def\PLAINvdots -% {\vbox{\baselineskip.4\bodyfontsize\lineskiplimit\zeropoint\kern.6\bodyfontsize\hbox{.}\hbox{.}\hbox{.}}} - -% \def\PLAINddots -% {\mkern1mu% -% \raise.7\bodyfontsize\vbox{\kern.7\bodyfontsize\hbox{.}}% -% \mkern2mu% -% \raise.4\bodyfontsize\relax\hbox{.}% -% \mkern2mu% -% \raise.1\bodyfontsize\hbox{.}% -% \mkern1mu} - -% \definemathcommand [ldots] [inner] {\PLAINldots} -% \definemathcommand [cdots] [inner] {\PLAINcdots} -% \definemathcommand [vdots] [nothing] {\PLAINvdots} -% \definemathcommand [ddots] [inner] {\PLAINddots} - -%D \starttyping -%D $\sqrt[3]{10}$ -%D \stoptyping - -\def\rootradical{\Uroot \defaultmathfamily "221A } % can be done in char-def - -\def\root#1\of{\rootradical{#1}} % #2 - -\unexpanded\def\sqrt{\doifnextoptionalelse\rootwithdegree\rootwithoutdegree} - -\def\rootwithdegree [#1]{\rootradical{#1}} -\def\rootwithoutdegree {\rootradical {}} - -\definemathcommand [mathstrut] {\vphantom{(}} -\definemathcommand [joinrel] {\mathrel{\mkern-3mu}} - -\unexpanded\def\{{\mathortext\lbrace\letterleftbrace } % or maybe a chardef -\unexpanded\def\}{\mathortext\rbrace\letterrightbrace} % or maybe a chardef -\unexpanded\def\|{\mathortext\vert \letterbar } % or maybe a chardef - -%D The following colon related definitions are provided by Aditya -%D Mahajan who derived them from \type {mathtools.sty} and \type -%D {colonequals.sty}. + \expandafter\def\expandafter#1\expandafter{\csname normal\strippedcsname#1\endcsname#2}} + +\setoperatorlimits \int \intlimits +\setoperatorlimits \iint \intlimits +\setoperatorlimits \iiint \intlimits +\setoperatorlimits \oint \intlimits +\setoperatorlimits \oiint \intlimits +\setoperatorlimits \oiiint \intlimits +\setoperatorlimits \intclockwise \intlimits +\setoperatorlimits \ointclockwise \intlimits +\setoperatorlimits \ointctrclockwise \intlimits %D \macros %D {centercolon, colonminus, minuscolon, colonequals, equalscolon, @@ -238,6 +90,11 @@ %D equalscoloncolon, coloncolonapprox, approxcoloncolon, %D colonsim, simcoloncolon} %D +%D The following colon related definitions are provided by Aditya +%D Mahajan who derived them from \type {mathtools.sty} and \type +%D {colonequals.sty}. This will be redone as part of the overhaul +%D and font updates. +%D %D In $a := b$ the colon is not vertically centered with the equal %D to. 
Also the distance between colon and equal to is a bit large. %D So, we define a vertically centered colon \tex {centercolon} and @@ -263,16 +120,13 @@ %D \formula {A \colonsim B} %D \formula {A \simcoloncolon B} %D \stoplines - +%D %D The next macros take care of the space between the colon and the %D relation symbol. \definemathcommand [colonsep] {\mkern-1.2mu} \definemathcommand [doublecolonsep] {\mkern-0.9mu} -%D Now we define all the colon relations .. needs checking with char-def.lua ... -%d will move to a separate module. - \definemathcommand [centercolon] [rel] {\mathstylevcenteredhbox\colon} \definemathcommand [colonminus] [rel] {\centercolon\colonsep\mathrel{-}} \definemathcommand [minuscolon] [rel] {\mathrel{-}\colonsep\centercolon} % native char @@ -293,93 +147,6 @@ \definemathcommand [colonsim] [rel] {\coloncolon\colonsep\sim} \definemathcommand [simcoloncolon] [rel] {\sim\coloncolon\colonsep} -%D Goodies. We might move this elsewhere. - -% Be careful in choosing what accents you take (the code below uses a -% combining one): -% -% \startbuffer -% % $\Umathaccent top 0 0 "20D7 {example}$ -% % $\Umathaccent top fixed 0 0 "20D7 {example}$ -% $\Umathaccent 0 0 "20D7 {example}$ -% $\Umathaccent fixed 0 0 "20D7 {example}$ -% $\Umathaccent bottom 0 0 "20D7 {example}$ -% $\Umathaccent bottom fixed 0 0 "20D7 {example}$ -% $\Umathaccent both 0 0 "20D7 -% 0 0 "20D7 {example}$ -% $\Umathaccent both fixed 0 0 "20D7 -% fixed 0 0 "20D7 {example}$ -% $\Umathaccent both 0 0 "20D7 -% fixed 0 0 "20D7 {example}$ -% $\Umathaccent both fixed 0 0 "20D7 -% 0 0 "20D7 {example}$ -% \stopbuffer -% -% \setupbodyfont[modern] \getbuffer -% \setupbodyfont[xits] \getbuffer -% \setupbodyfont[cambria] \getbuffer - -\unexpanded\def\underleftarrow #1{\mathop{\Uunderdelimiter \defaultmathfamily "2190 {#1}}} -\unexpanded\def\overleftarrow #1{\mathop{\Uoverdelimiter \defaultmathfamily "2190 {#1}}} -\unexpanded\def\underrightarrow#1{\mathop{\Uunderdelimiter \defaultmathfamily "2192 {#1}}} -\unexpanded\def\overrightarrow #1{\mathop{\Uoverdelimiter \defaultmathfamily "2192 {#1}}} - -% watch out: here we have a class (zero): - -\unexpanded\def\normaldoublebrace {\Umathaccents 0 \defaultmathfamily "23DE 0 \defaultmathfamily "23DF } -\unexpanded\def\normaldoubleparent{\Umathaccents 0 \defaultmathfamily "23DC 0 \defaultmathfamily "23DD } - -% let's keep this - -\def\Umathbotaccent{\Umathaccent \s!bottom } -\def\Umathaccents {\Umathaccent \s!both } - -\let\normaloverbrace \overbrace -\let\normalunderbrace \underbrace -\let\normaloverparent \overparent -\let\normalunderparent \underparent -\let\normaloverbracket \overbracket -\let\normalunderbracket \underbracket -\let\normalunderleftarrow \underleftarrow -\let\normaloverleftarrow \overleftarrow -\let\normalunderrightarrow\underrightarrow -\let\normaloverrightarrow \overrightarrow - -\unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits} -\unexpanded\def\stackrel #1#2{\mathrel{\mathop{#2}\limits^{#1}}} - -\unexpanded\def\overbrace {\mathopwithlimits\normaloverbrace } -\unexpanded\def\underbrace {\mathopwithlimits\normalunderbrace } -\unexpanded\def\doublebrace {\mathopwithlimits\normaldoublebrace } -\unexpanded\def\overparent {\mathopwithlimits\normaloverparent } -\unexpanded\def\underparent {\mathopwithlimits\normalunderparent } -\unexpanded\def\overbracket {\mathopwithlimits\normaloverbracket } -\unexpanded\def\underbracket {\mathopwithlimits\normalunderbracket } -\unexpanded\def\doubleparent {\mathopwithlimits\normaldoubleparent } -\unexpanded\def\underleftarrow 
{\mathopwithlimits\normalunderleftarrow } -\unexpanded\def\overleftarrow {\mathopwithlimits\normaloverleftarrow } -\unexpanded\def\underrightarrow{\mathopwithlimits\normalunderrightarrow} -\unexpanded\def\overrightarrow {\mathopwithlimits\normaloverrightarrow } - -\let\lceil \lceiling -\let\rceil \rceiling - -\let\normalsurd\surd - -\unexpanded\def\surd{\normalsurd{}} - -% Some special characters: - -\unexpanded\def\nabla{∇} % this one adapts - -% -% todo mathclass=punctuation ord - -% \Umathcode"02C="6 \defaultmathfamily "02C -% \Umathcode"02E="0 \defaultmathfamily "02E - -% tricky .. todo - % \appendtoks % \def\over{\primitive\over}% % \to \everymathematics diff --git a/tex/context/base/math-dir.lua b/tex/context/base/math-dir.lua new file mode 100644 index 000000000..507a24e41 --- /dev/null +++ b/tex/context/base/math-dir.lua @@ -0,0 +1,145 @@ +if not modules then modules = { } end modules ['math-dir'] = { + version = 1.001, + comment = "companion to typo-dir.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- As I'm wrapping up the updated math support (for CTX/TUG 2013) I wondered about numbers in +-- r2l math mode. Googling lead me to TUGboat, Volume 25 (2004), No. 2 where I see numbers +-- running from left to right. Makes me wonder how far we should go. And as I was looking +-- into bidi anyway, it's a nice distraction. +-- +-- I first tried to hook something into noads but that gets pretty messy due to indirectness +-- char noads. If needed, I'll do it that way. With regards to spacing: as we can assume that +-- only numbers are involved we can safely swap them and the same is true for mirroring. But +-- anyway, I'm not too happy with this solution so eventually I'll do something with noads (as +-- an alternative method). Yet another heuristic approach. 
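+--
+-- Schematically (not real node lists, just to make the intention concrete): a run of
+-- digits inside r2l math gets wrapped in text direction nodes, and a lone mirrorable
+-- fence gets replaced by its mirror, so something like
+--
+--   [digit 1] [digit 2]   =>   [textdir +TLT] [digit 1] [digit 2] [textdir -TLT]
+--   [open paren]          =>   [close paren]   -- only when its class is open or close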
+ +local nodes, node = nodes, node + +local trace_directions = false trackers.register("typesetters.directions.math", function(v) trace_directions = v end) + +local report_directions = logs.reporter("typesetting","math directions") + +local insert_node_before = nodes.insert_before +local insert_node_after = nodes.insert_after + +local nodecodes = nodes.nodecodes +local tasks = nodes.tasks + +local glyph_code = nodecodes.glyph +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist + +local nodepool = nodes.pool + +local new_textdir = nodepool.textdir + +local chardirections = characters.directions +local charmirrors = characters.mirrors +local charclasses = characters.textclasses + +local directions = typesetters.directions or { } + +local a_mathbidi = attributes.private('mathbidi') + +local function processmath(head) + local current = head + local done = false + local start = nil + local stop = nil + local function capsulate() + head = insert_node_before(head,start,new_textdir("+TLT")) + insert_node_after(head,stop,new_textdir("-TLT")) + if trace_directions then + report_directions("reversed: %s",nodes.listtoutf(start,false,false,stop)) + end + done = true + start = false + stop = nil + end + while current do + local id = current.id + if id == glyph_code then + local char = current.char + local cdir = chardirections[char] + if cdir == "en" or cdir == "an" then -- we could check for mathclass punctuation + if not start then + start = current + end + stop = current + else + if not start then + -- nothing + elseif start == stop then + start = nil + else + capsulate() + end + if cdir == "on" then + local mirror = charmirrors[char] + if mirror then + local class = charclasses[char] + if class == "open" or class == "close" then + current.char = mirror + if trace_directions then + report_directions("mirrored: %C to %C",char,mirror) + end + done = true + end + end + end + end + elseif not start then + -- nothing + elseif start == stop then + start = nil + else + capsulate(head,start,stop) + -- math can pack things into hlists .. 
we need to make sure we don't process + -- too often: needs checking + if id == hlist_code or id == vlist_code then + local list, d = processmath(current.list) + current.list = list + if d then + done = true + end + end + end + current = current.next + end + if not start then + -- nothing + elseif start == stop then + -- nothing + else + capsulate() + end + return head, done +end + +local enabled = false + +function directions.processmath(head) -- style, penalties + if enabled then + local a = head[a_mathbidi] + if a and a > 0 then + return processmath(head) + end + end + return head, false +end + +function directions.setmath(n) + if not enabled and n and n > 0 then + if trace_directions then + report_directions("enabling directions handler") + end + tasks.enableaction("math","typesetters.directions.processmath") + enabled = true + end +end + +commands.setmathdirection = directions.setmath diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua index eebc4e4e7..b364d1208 100644 --- a/tex/context/base/math-fbk.lua +++ b/tex/context/base/math-fbk.lua @@ -6,17 +6,20 @@ if not modules then modules = { } end modules ['math-fbk'] = { license = "see context related readme files" } -local trace_fallbacks = false trackers.register("math.fallbacks", function(v) trace_fallbacks = v end) +local trace_fallbacks = false trackers.register("math.fallbacks", function(v) trace_fallbacks = v end) -local report_fallbacks = logs.reporter("math","fallbacks") +local report_fallbacks = logs.reporter("math","fallbacks") -local fallbacks = { } -mathematics.fallbacks = fallbacks +local formatters = string.formatters +local fastcopy = table.fastcopy + +local fallbacks = { } +mathematics.fallbacks = fallbacks local virtualcharacters = { } -local identifiers = fonts.hashes.identifiers -local lastmathids = fonts.hashes.lastmathids +local identifiers = fonts.hashes.identifiers +local lastmathids = fonts.hashes.lastmathids -- we need a trick (todo): if we define scriptscript, script and text in -- that order we could use their id's .. i.e. we could always add a font @@ -25,6 +28,11 @@ local lastmathids = fonts.hashes.lastmathids -- -- todo: use index 'true when luatex provides that feature (on the agenda) +-- to be considered: +-- +-- in luatex provide reserve_id (and pass id as field of tfmdata) +-- in context define three sizes but pass them later i.e. do virtualize afterwards + function fallbacks.apply(target,original) local mathparameters = target.mathparameters -- why not hasmath if mathparameters then @@ -39,7 +47,7 @@ function fallbacks.apply(target,original) end -- This is not okay yet ... we have no proper way to refer to 'self' -- otherwise I will make my own id allocator). 
-local self = #usedfonts == 0 and font.nextid() or nil -- will be true + local self = #usedfonts == 0 and font.nextid() or nil -- will be true local textid, scriptid, scriptscriptid local textindex, scriptindex, scriptscriptindex local textdata, scriptdata, scriptscriptdata @@ -48,26 +56,27 @@ local self = #usedfonts == 0 and font.nextid() or nil -- will be true -- textid = nil -- self -- scriptid = nil -- no smaller -- scriptscriptid = nil -- no smaller -textid = self -scriptid = self -scriptscriptid = self + textid = self + scriptid = self + scriptscriptid = self elseif mathsize == 2 then -- scriptsize -- textid = nil -- self -textid = self + textid = self scriptid = lastmathids[3] scriptscriptid = lastmathids[3] else -- textsize -- textid = nil -- self -textid = self + textid = self scriptid = lastmathids[2] scriptscriptid = lastmathids[3] end if textid then textindex = #usedfonts + 1 usedfonts[textindex] = { id = textid } - textdata = identifiers[textid] +-- textdata = identifiers[textid] or target + textdata = target else textdata = target end @@ -87,8 +96,7 @@ textid = self scriptscriptindex = scriptindex scriptscriptdata = scriptdata end --- report_fallbacks("used textid: %s, used script id: %s, used scriptscript id: %s", --- tostring(textid),tostring(scriptid),tostring(scriptscriptid)) + -- report_fallbacks("used textid: %S, used script id: %S, used scriptscript id: %S",textid,scriptid,scriptscriptid) local data = { textdata = textdata, scriptdata = scriptdata, @@ -96,6 +104,9 @@ textid = self textindex = textindex, scriptindex = scriptindex, scriptscriptindex = scriptscriptindex, + textid = textid, + scriptid = scriptid, + scriptscriptid = scriptscriptid, characters = characters, unicode = k, target = target, @@ -103,24 +114,32 @@ textid = self size = size, mathsize = mathsize, } --- inspect(usedfonts) + target.mathrelation = data + -- inspect(usedfonts) for k, v in next, virtualcharacters do if not characters[k] then local tv = type(v) + local cd = nil if tv == "table" then - characters[k] = v + cd = v elseif tv == "number" then - characters[k] = characters[v] + cd = characters[v] elseif tv == "function" then - characters[k] = v(data) + cd = v(data) + end + if cd then + characters[k] = cd + else + -- something else end if trace_fallbacks then if characters[k] then - report_fallbacks("extending font %a with %U",target.properties.fullname,k) + report_fallbacks("extending math font %a with %U",target.properties.fullname,k) end end end end + data.unicode = nil end end @@ -310,3 +329,177 @@ virtualcharacters[0xFE352] = function(data) end end +-- we could move the defs from math-act here + +local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset) + local characters = target.characters + local addprivate = fonts.helpers.addprivate + local olddata = characters[oldchr] + if olddata and not olddata.commands then + if swap then + swap = characters[swap] + height = swap.depth + depth = 0 + else + height = height or 0 + depth = depth or 0 + end + local correction = swap and { "down", (olddata.height or 0) - height } or { "down", olddata.height + (offset or 0)} + local newdata = { + commands = { correction, { "slot", 1, oldchr } }, + width = olddata.width, + height = height, + depth = depth, + } + local glyphdata = newdata + local nextglyph = olddata.next + while nextglyph do + local oldnextdata = characters[nextglyph] + if oldnextdata then + local newnextdata = { + commands = { correction, { "slot", 1, nextglyph } }, + width = oldnextdata.width, + height = 
height, + depth = depth, + } + local newnextglyph = addprivate(target,formatters["M-N-%H"](nextglyph),newnextdata) + newdata.next = newnextglyph + local nextnextglyph = oldnextdata.next + if nextnextglyph == nextglyph then + break + else + olddata = oldnextdata + newdata = newnextdata + nextglyph = nextnextglyph + end + else + report_fallbacks("error in fallback: no valid next, slot %X",nextglyph) + break + end + end + local hv = olddata.horiz_variants + if hv then + hv = fastcopy(hv) + newdata.horiz_variants = hv + for i=1,#hv do + local hvi = hv[i] + local oldglyph = hvi.glyph + local olddata = characters[oldglyph] + if olddata then + local newdata = { + commands = { correction, { "slot", 1, oldglyph } }, + width = olddata.width, + height = height, + depth = depth, + } + hvi.glyph = addprivate(target,formatters["M-H-%H"](oldglyph),newdata) + else + report_fallbacks("error in fallback: no valid horiz_variants, slot %X, index %i",oldglyph,i) + end + end + end + return glyphdata + else + return olddata + end +end + +virtualcharacters[0x203E] = function(data) -- could be FE33E instead + local target = data.target + local height, depth = 0, 0 + local mathparameters = target.mathparameters + if mathparameters then + height = mathparameters.OverbarVerticalGap + depth = mathparameters.UnderbarVerticalGap + else + height = target.parameters.xheight/4 + depth = height + end + return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth) +end + +virtualcharacters[0xFE33E] = virtualcharacters[0x203E] -- convenient +virtualcharacters[0xFE33F] = virtualcharacters[0x203E] -- convenient + +local function smashed(data,unicode,swap,private) + local target = data.target + local original = data.original + local chardata = target.characters[unicode] + if chardata and chardata.height > target.parameters.xheight then + return accent_to_extensible(target,private,original,unicode,0,0,swap) + else + return original.characters[unicode] + end +end + +addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h" } ) +addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h" } ) +addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h" } ) + +virtualcharacters[0xFE3DE] = function(data) return smashed(data,0x23DE,0x23DF,0xFE3DE) end +virtualcharacters[0xFE3DC] = function(data) return smashed(data,0x23DC,0x23DD,0xFE3DC) end +virtualcharacters[0xFE3B4] = function(data) return smashed(data,0x23B4,0x23B5,0xFE3B4) end + +addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h" } ) +addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h" } ) +addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h" } ) + +virtualcharacters[0xFE3DF] = function(data) return data.original.characters[0x23DF] end +virtualcharacters[0xFE3DD] = function(data) return data.original.characters[0x23DD] end +virtualcharacters[0xFE3B5] = function(data) return data.original.characters[0x23B5] end + +-- todo: add some more .. 
numbers might change + +addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h" } ) +addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h" } ) + +local function smashed(data,unicode,private) + local target = data.target + local height = target.parameters.xheight / 2 + local c = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height) + c.top_accent = nil + return c +end + +virtualcharacters[0xFE302] = function(data) return smashed(data,0x0302,0xFE302) end +virtualcharacters[0xFE303] = function(data) return smashed(data,0x0303,0xFE303) end + +-- another crazy hack .. doesn't work as we define scrscr first .. we now have smaller +-- primes so we have smaller primes for the moment, big ones will become an option + +local function smashed(data,unicode,optional) + local oldchar = data.characters[unicode] + if oldchar then + local height = 1.2 * data.target.parameters.xheight + local newchar = { + commands = { + { "down", oldchar.height - height }, + { "char", unicode }, + }, + height = height, + width = oldchar.width, + } + return newchar + elseif not optional then + report_fallbacks("missing %U prime in font %a",unicode,data.target.properties.fullname) + end +end + +addextra(0xFE932, { description="SMASHED PRIME 0x02032", unicodeslot=0xFE932 } ) +addextra(0xFE933, { description="SMASHED PRIME 0x02033", unicodeslot=0xFE933 } ) +addextra(0xFE934, { description="SMASHED PRIME 0x02034", unicodeslot=0xFE934 } ) +addextra(0xFE957, { description="SMASHED PRIME 0x02057", unicodeslot=0xFE957 } ) + +addextra(0xFE935, { description="SMASHED BACKWARD PRIME 0x02035", unicodeslot=0xFE935 } ) +addextra(0xFE936, { description="SMASHED BACKWARD PRIME 0x02036", unicodeslot=0xFE936 } ) +addextra(0xFE937, { description="SMASHED BACKWARD PRIME 0x02037", unicodeslot=0xFE937 } ) + +virtualcharacters[0xFE932] = function(data) return smashed(data,0x02032) end +virtualcharacters[0xFE933] = function(data) return smashed(data,0x02033) end +virtualcharacters[0xFE934] = function(data) return smashed(data,0x02034) end +virtualcharacters[0xFE957] = function(data) return smashed(data,0x02057) end + +virtualcharacters[0xFE935] = function(data) return smashed(data,0x02035,true) end +virtualcharacters[0xFE936] = function(data) return smashed(data,0x02036,true) end +virtualcharacters[0xFE937] = function(data) return smashed(data,0x02037,true) end + diff --git a/tex/context/base/math-fen.mkiv b/tex/context/base/math-fen.mkiv index f7edc582e..94d93e4af 100644 --- a/tex/context/base/math-fen.mkiv +++ b/tex/context/base/math-fen.mkiv @@ -32,18 +32,15 @@ % test $a\fancybracket{\frac{1}{b}}c$ test \par \installcorenamespace{mathfences} -\installcorenamespace{mathfencesleft} -\installcorenamespace{mathfencesmiddle} -\installcorenamespace{mathfencesright} \installcommandhandler \??mathfences {mathfence} \??mathfences \let\setupmathfences\setupmathfence \setupmathfences - [\c!left=0x2E, - \c!right=0x2E, - \c!middle=0x2E, + [\c!left=, + \c!right=, + \c!middle=, \c!mathstyle=, \c!color=, \c!command=] @@ -55,40 +52,61 @@ \fi \to \everydefinemathfence -\def\math_fenced_left {\normalleft \utfchar{\mathfenceparameter\c!left }} % no Uchar here -\def\math_fenced_middle{\normalmiddle\utfchar{\mathfenceparameter\c!middle}} % no Uchar here -\def\math_fenced_right {\normalright \utfchar{\mathfenceparameter\c!right }} % no Uchar here +% we need the direct use of \Udelimiter because of { etc + +\def\math_fenced_left 
{\edef\p_left{\mathfenceparameter\c!left}% + \math_fenced_color_push + \normalleft\ifx\p_left\empty.\else\Udelimiter\plusfour\fam\p_left\relax\fi + \math_fenced_color_pop} +\def\math_fenced_middle{\edef\p_middle{\mathfenceparameter\c!middle}% + \mskip\thinmuskip + \math_fenced_color_push + \normalmiddle\ifx\p_middle\empty.\else\Udelimiter\plusfour\fam\p_middle\relax\fi + \math_fenced_color_pop + \mskip\thinmuskip} +\def\math_fenced_right {\edef\p_right{\mathfenceparameter\c!right}% + \math_fenced_color_push + \normalright\ifx\p_right\empty.\else\Udelimiter\plusfive\fam\p_right\relax\fi + \math_fenced_color_pop} + +\def\math_fenced_color_do_push{\pushcolor[\p_math_fenced_color]} +\let\math_fenced_color_do_pop \popcolor + +\let\math_fenced_color_push\donothing +\let\math_fenced_color_pop \donothing \let\fence \relax \let\fenced\relax -\unexpanded\def\math_fenced_fenced[#1]% - {\begingroup +\newcount\c_math_fenced_nesting + +\unexpanded\def\math_fenced_fenced_start#1% + {\advance\c_math_fenced_nesting\plusone + \begingroup \edef\currentmathfence{#1}% \startusemathstyleparameter\mathfenceparameter \let\fence\math_fenced_middle \edef\p_math_fenced_color{\mathfenceparameter\c!color}% \ifx\p_math_fenced_color\empty - \expandafter\math_fenced_normal + \let\math_fenced_color_push\donothing + \let\math_fenced_color_pop \donothing \else - \expandafter\math_fenced_colored - \fi} + \let\math_fenced_color_push\math_fenced_color_do_push + \let\math_fenced_color_pop \math_fenced_color_do_pop + \fi + \math_fenced_left} -\def\math_fenced_normal#1% - {\math_fenced_left - #1% +\unexpanded\def\math_fenced_fenced_stop#1% + {\edef\currentmathfence{#1}% \math_fenced_right \stopusemathstyleparameter - \endgroup} + \endgroup + \advance\c_math_fenced_nesting\minusone} -\def\math_fenced_colored#1% - {\pushcolor[\p_math_fenced_color]% - \math_fenced_left - \popcolor - #1% - \pushcolor[\p_math_fenced_color]% +\unexpanded\def\math_fenced_fenced[#1]#2% + {\math_fenced_fenced_start{#1}% + #2% \math_fenced_right - \popcolor \stopusemathstyleparameter \endgroup} @@ -96,13 +114,353 @@ \let\fenced\math_fenced_fenced \to \everymathematics -\definemathfence [parenthesis] [\c!left=0x28,\c!right=0x29] -\definemathfence [bracket] [\c!left=0x5B,\c!right=0x5D] -\definemathfence [braces] [\c!left=0x7B,\c!right=0x7D] -\definemathfence [bar] [\c!left=0x7C,\c!right=0x7C] -\definemathfence [doublebar] [\c!left=0x2016,\c!right=0x2016] -\definemathfence [angle] [\c!left=0x3C,\c!right=0x3E] +% todo: | in mathmode letter +% +% \appendtoks +% \let\bar\letterbar +% \to \everymathematics +% +% but then we don't have it in embedded text too so ... 
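+% A possible user level sketch (this assumes that \type {command=yes} is what sets
+% up the \type {\fancybracket} command tested above):
+%
+% \definemathfence [fancybracket] [bracket] [command=yes,color=darkred]
+%
+% test $a\fancybracket{\frac{1}{b}}c$ test \par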
+ +\definemathfence [parenthesis] [\c!left="0028,\c!right="0029] +\definemathfence [bracket] [\c!left="005B,\c!right="005D] +\definemathfence [brace] [\c!left="007B,\c!right="007D] +\definemathfence [bar] [\c!left="007C,\c!right="007C] +\definemathfence [doublebar] [\c!left="2016,\c!right="2016] +\definemathfence [triplebar] [\c!left="2980,\c!right="2980] +\definemathfence [angle] [\c!left="27E8,\c!right="27E9] +\definemathfence [doubleangle] [\c!left="27EA,\c!right="27EB] +\definemathfence [solidus] [\c!left="2044,\c!right="2044] +\definemathfence [nothing] + +\definemathfence [mirrored] % \v!mirrored + +\definemathfence [mirroredparenthesis] [mirrored] [\c!right="0028,\c!left="0029] +\definemathfence [mirroredbracket] [mirrored] [\c!right="005B,\c!left="005D] +\definemathfence [mirroredbrace] [mirrored] [\c!right="007B,\c!left="007D] +\definemathfence [mirroredbar] [mirrored] [\c!right="007C,\c!left="007C] +\definemathfence [mirroreddoublebar] [mirrored] [\c!right="2016,\c!left="2016] +\definemathfence [mirroredtriplebar] [mirrored] [\c!right="2980,\c!left="2980] +\definemathfence [mirroredangle] [mirrored] [\c!right="27E8,\c!left="27E9] +\definemathfence [mirroreddoubleangle] [mirrored] [\c!right="27EA,\c!left="27EB] +\definemathfence [mirroredsolidus] [mirrored] [\c!right="2044,\c!left="2044] +\definemathfence [mirrorednothing] [mirorred] + +%D A bonus: + +\unexpanded\def\Lparent {\math_fenced_fenced_start{parenthesis}} \unexpanded\def\Rparent {\math_fenced_fenced_stop{parenthesis}} +\unexpanded\def\Lbracket {\math_fenced_fenced_start{bracket}} \unexpanded\def\Rbracket {\math_fenced_fenced_stop{bracket}} +\unexpanded\def\Lbrace {\math_fenced_fenced_start{brace}} \unexpanded\def\Rbrace {\math_fenced_fenced_stop{brace}} +\unexpanded\def\Langle {\math_fenced_fenced_start{angle}} \unexpanded\def\Rangle {\math_fenced_fenced_stop{angle}} +\unexpanded\def\Ldoubleangle {\math_fenced_fenced_start{doubleangle}} \unexpanded\def\Rdoubleangle {\math_fenced_fenced_stop{doubleangle}} +\unexpanded\def\Lbar {\math_fenced_fenced_start{bar}} \unexpanded\def\Rbar {\math_fenced_fenced_stop{bar}} +\unexpanded\def\Ldoublebar {\math_fenced_fenced_start{doublebar}} \unexpanded\def\Rdoublebar {\math_fenced_fenced_stop{doublebar}} +\unexpanded\def\Ltriplebar {\math_fenced_fenced_start{triplebar}} \unexpanded\def\Rtriplebar {\math_fenced_fenced_stop{triplebar}} +\unexpanded\def\Lsolidus {\math_fenced_fenced_start{solidus}} \unexpanded\def\Rsolidus {\math_fenced_fenced_stop{solidus}} +\unexpanded\def\Lnothing {\math_fenced_fenced_start{nothing}} \unexpanded\def\Rnothing {\math_fenced_fenced_stop{nothing}} + +\unexpanded\def\Lparentmirrored {\math_fenced_fenced_stop{mirroredparenthesis}} \unexpanded\def\Rparentmirrored {\math_fenced_fenced_start{mirroredparenthesis}} +\unexpanded\def\Lbracketmirrored {\math_fenced_fenced_stop{mirroredbracket}} \unexpanded\def\Rbracketmirrored {\math_fenced_fenced_start{mirroredbracket}} +\unexpanded\def\Lbracemirrored {\math_fenced_fenced_stop{mirroredbrace}} \unexpanded\def\Rbracemirrored {\math_fenced_fenced_start{mirroredbrace}} +\unexpanded\def\Langlemirrored {\math_fenced_fenced_stop{mirroredangle}} \unexpanded\def\Ranglemirrored {\math_fenced_fenced_start{mirroredangle}} +\unexpanded\def\Ldoubleanglemirrored {\math_fenced_fenced_stop{mirroreddoubleangle}} \unexpanded\def\Rdoubleanglemirrored {\math_fenced_fenced_start{mirroreddoubleangle}} +\unexpanded\def\Lbarmirrored {\math_fenced_fenced_stop{mirroredbar}} \unexpanded\def\Rbarmirrored 
{\math_fenced_fenced_start{mirroredbar}} +\unexpanded\def\Ldoublebarmirrored {\math_fenced_fenced_stop{mirroreddoublebar}} \unexpanded\def\Rdoublebarmirrored {\math_fenced_fenced_start{mirroreddoublebar}} +\unexpanded\def\Ltriplebarmirrored {\math_fenced_fenced_stop{mirroredtriplebar}} \unexpanded\def\Rtriplebarmirrored {\math_fenced_fenced_start{mirroredtriplebar}} +\unexpanded\def\Lsolidusmirrored {\math_fenced_fenced_stop{mirroredsolidus}} \unexpanded\def\Rsolidusmirrored {\math_fenced_fenced_start{mirroredsolidus}} +\unexpanded\def\Lnothingmirrored {\math_fenced_fenced_stop{mirrorednothing}} \unexpanded\def\Rnothingmirrored {\math_fenced_fenced_start{mirrorednothing}} + +%D And another one: + +% \setupmathfences[color=darkgreen] +% +% \startformula +% \left{ \frac{1}{a} \right} +% \left[ \frac{1}{b} \right] +% \left( \frac{1}{c} \right) +% \left< \frac{1}{d} \right> +% \left| \frac{1}{e} \right| +% \stopformula + +\installcorenamespace{mathleft} +\installcorenamespace{mathright} +\installcorenamespace{mathmiddle} + +\unexpanded\def\left {\afterassignment\math_left \let\nexttoken} +\unexpanded\def\right {\afterassignment\math_right \let\nexttoken} +\unexpanded\def\middle{\afterassignment\math_middle\let\nexttoken} + +\newconditional\c_math_fenced_done +\newconditional\c_math_fenced_unknown \settrue\c_math_fenced_unknown + +\def\math_left + {\settrue\c_math_fenced_done + \edef\m_math_left{\meaning\nexttoken}% + \csname\??mathleft\ifcsname\??mathleft\m_math_left\endcsname\m_math_left\else\s!unknown\fi\endcsname} + +\def\math_right + {\settrue\c_math_fenced_done + \edef\m_math_right{\meaning\nexttoken}% + \csname\??mathright\ifcsname\??mathright\m_math_right\endcsname\m_math_right\else\s!unknown\fi\endcsname} + +\def\math_middle + {\settrue\c_math_fenced_done + \edef\m_math_middle{\meaning\nexttoken}% + \csname\??mathmiddle\ifcsname\??mathmiddle\m_math_middle\endcsname\m_math_middle\else\s!unknown\fi\endcsname} + +\setvalue{\??mathleft \s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalleft \nexttoken\fi} +\setvalue{\??mathright \s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalright \nexttoken\fi} +\setvalue{\??mathmiddle\s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalmiddle\nexttoken\fi} + +\unexpanded\def\installmathfencepair#1#2#3#4% + {\expandafter\let\csname\??mathleft \meaning#1\endcsname#2% + \expandafter\let\csname\??mathright\meaning#3\endcsname#4} + +\expandafter\let\csname\??mathleft \meaning [\endcsname\Lbracket +\expandafter\let\csname\??mathleft \meaning (\endcsname\Lparent +\expandafter\let\csname\??mathleft \meaning <\endcsname\Langle +\expandafter\let\csname\??mathleft \meaning ⟨\endcsname\Langle +\expandafter\let\csname\??mathleft \meaning ⟪\endcsname\Ldoubleangle +\expandafter\let\csname\??mathleft \meaning {\endcsname\Lbrace +\expandafter\let\csname\??mathleft \meaning |\endcsname\Lbar +\expandafter\let\csname\??mathleft \meaning ‖\endcsname\Ldoublebar +\expandafter\let\csname\??mathleft \meaning ⦀\endcsname\Ltriplebar +\expandafter\let\csname\??mathleft \meaning /\endcsname\Lsolidus +\expandafter\let\csname\??mathleft \meaning .\endcsname\Lnothing + +\expandafter\let\csname\??mathright\meaning ]\endcsname\Rbracket +\expandafter\let\csname\??mathright\meaning )\endcsname\Rparent +\expandafter\let\csname\??mathright\meaning >\endcsname\Rangle +\expandafter\let\csname\??mathright\meaning ⟩\endcsname\Rangle +\expandafter\let\csname\??mathright\meaning 
⟫\endcsname\Rdoubleangle +\expandafter\let\csname\??mathright\meaning }\endcsname\Rbrace +\expandafter\let\csname\??mathright\meaning |\endcsname\Rbar +\expandafter\let\csname\??mathright\meaning ⦀\endcsname\Rtriplebar +\expandafter\let\csname\??mathright\meaning /\endcsname\Rsolidus +\expandafter\let\csname\??mathright\meaning .\endcsname\Rnothing + +\expandafter\let\csname\??mathright\meaning [\endcsname\Lbracketmirrored +\expandafter\let\csname\??mathright\meaning (\endcsname\Lparentmirrored +\expandafter\let\csname\??mathright\meaning <\endcsname\Langlemirrored +\expandafter\let\csname\??mathright\meaning ⟨\endcsname\Langlemirrored +\expandafter\let\csname\??mathright\meaning ⟪\endcsname\Ldoubleanglemirrored +\expandafter\let\csname\??mathright\meaning {\endcsname\Lbracemirrored +%expandafter\let\csname\??mathright\meaning |\endcsname\Lbarmirrored +%expandafter\let\csname\??mathright\meaning ‖\endcsname\Ldoublebarmirrored +%expandafter\let\csname\??mathright\meaning ⦀\endcsname\Ltriplebarmirrored +\expandafter\let\csname\??mathright\meaning /\endcsname\Lsolidusmirrored +\expandafter\let\csname\??mathright\meaning .\endcsname\Lnothingmirrored + +\expandafter\let\csname\??mathleft \meaning ]\endcsname\Rbracketmirrored +\expandafter\let\csname\??mathleft \meaning )\endcsname\Rparentmirrored +\expandafter\let\csname\??mathleft \meaning >\endcsname\Ranglemirrored +\expandafter\let\csname\??mathleft \meaning ⟩\endcsname\Ranglemirrored +\expandafter\let\csname\??mathleft \meaning ⟫\endcsname\Rdoubleanglemirrored +\expandafter\let\csname\??mathleft \meaning }\endcsname\Rbracemirrored +%expandafter\let\csname\??mathleft \meaning |\endcsname\Rbarmirrored +%expandafter\let\csname\??mathleft \meaning ‖\endcsname\Rdoublebarmirrored +%expandafter\let\csname\??mathleft \meaning ⦀\endcsname\Rtriplebarmirrored +\expandafter\let\csname\??mathleft \meaning /\endcsname\Rsolidusmirrored +\expandafter\let\csname\??mathleft \meaning .\endcsname\Rnothingmirrored + +% todo paren parent + +\let\lbrack\lbracket +\let\rbrack\rbracket + +\installmathfencepair \lbrace \Lbrace \rbrace \Rbrace +\installmathfencepair \lbracket \Lbracket \rbracket \Rbracket +\installmathfencepair \lparen \Lparen \rparen \Rparen +\installmathfencepair \lparent \Lparent \rparent \Rparent +\installmathfencepair \langle \Langle \rangle \Rangle +%installmathfencepair \lrangle \Ldoubleangle \rrangle \Rdoubleangle +%installmathfencepair \lbar \Lbar \rbar \Rbar +\installmathfencepair \vert \Lbar \vert \Rbar +\installmathfencepair \solidus \Lsolidus \solidus \Rsolidus + +\unexpanded\def\{{\mathortext\lbrace \letterleftbrace } % or maybe a chardef +\unexpanded\def\}{\mathortext\rbrace \letterrightbrace } % or maybe a chardef +\unexpanded\def\[{\mathortext\lbracket\letterleftbracket } % or maybe a chardef +\unexpanded\def\]{\mathortext\rbracket\letterrightbracket} % or maybe a chardef +\unexpanded\def\({\mathortext\lparent \letterleftparent } % or maybe a chardef +\unexpanded\def\){\mathortext\rparent \letterrightparent } % or maybe a chardef +\unexpanded\def\|{\mathortext\vert \letterbar } % or maybe a chardef +%unexpanded\def\/{\mathortext\solidus \letterslash } % or maybe a chardef + +\installmathfencepair \{ \Lbrace \} \Rbrace +\installmathfencepair \[ \Lbracket \] \Rbracket +\installmathfencepair \( \Lparent \) \Rparent +\installmathfencepair \< \Langle \> \Rangle +\installmathfencepair \| \Lbar \| \Rbar + +%D As we have overloaded \type {\left} and \type {\right} we also need a more +%D clever version of the following: + +% methods: +% +% 
1: none +% 2: lua +% 3: tex + +% variants: +% +% 1: step 1 +% 2: step 2 +% 3: htdp * 1.33^n +% 4: size * 1.33^n + +\setnewconstant\bigmathdelimitermethod \plusone +\setnewconstant\bigmathdelimitervariant\plusthree + +\unexpanded\def\plainbigdelimiters % traditional method + {\bigmathdelimitermethod\plustwo} + +\plainbigdelimiters % is default for the moment but not so nice + +% \setconstant\bigmathdelimitermethod\plusone + +\installcorenamespace{mathbig} + +\unexpanded\def\choosemathbig#1#2% so we accept \big{||} as well + {{\hbox\bgroup + \startimath + \ifcase\bigmathdelimitermethod + \math_fenced_step#2\relax + \or + \attribute\mathsizeattribute\numexpr\bigmathdelimitervariant*\plushundred+#1\relax + \math_fenced_step#2\relax + \else + \math_fenced_step#2{\vbox to\getvalue{\??mathbig\number#1}\bodyfontsize{}}% + \fi + \nulldelimiterspace\zeropoint\relax + \mathsurround\zeropoint + \stopimath + \egroup}} + +\def\math_fenced_step#1#2% + {\setfalse\c_math_fenced_unknown + \setfalse\c_math_fenced_done + \left#1\relax + \ifconditional\c_math_fenced_done + #2% + \right.\relax + \else + \left.\relax + #2% + \setfalse\c_math_fenced_done + \right#1\relax + \ifconditional\c_math_fenced_done + \else + \right.\relax + \fi + \fi} + +\unexpanded\def\mathdelimiterstep#1#2% + {\begingroup + \attribute\mathsizeattribute\numexpr\plushundred+#1\relax + \math_fenced_step#2\relax + \endgroup} + +\setvalue{\??mathbig1}{0.85} +\setvalue{\??mathbig2}{1.15} +\setvalue{\??mathbig3}{1.45} +\setvalue{\??mathbig4}{1.75} + +\definemathcommand [big] {\choosemathbig\plusone } +\definemathcommand [Big] {\choosemathbig\plustwo } +\definemathcommand [bigg] {\choosemathbig\plusthree} +\definemathcommand [Bigg] {\choosemathbig\plusfour } + +\definemathcommand [bigl] [open] [one] {\big} +\definemathcommand [bigm] [rel] [one] {\big} +\definemathcommand [bigr] [close] [one] {\big} +\definemathcommand [Bigl] [open] [one] {\Big} +\definemathcommand [Bigm] [rel] [one] {\Big} +\definemathcommand [Bigr] [close] [one] {\Big} +\definemathcommand [biggl] [open] [one] {\bigg} +\definemathcommand [biggm] [rel] [one] {\bigg} +\definemathcommand [biggr] [close] [one] {\bigg} +\definemathcommand [Biggl] [open] [one] {\Bigg} +\definemathcommand [Biggm] [rel] [one] {\Bigg} +\definemathcommand [Biggr] [close] [one] {\Bigg} %definemathfence [fancybracket] [bracket] [command=yes,color=red] +% experimental accents: +% +% \definemathoverextensible [top] [hoed] ["FE302] +% \definemathoverextensible [top] [slang] ["FE303] + +%D This is needed for mathml (used in mrow, so it gets reset): + +\let\math_fences_saved_left \left +\let\math_fences_saved_middle\middle +\let\math_fences_saved_right \right + +% \def\math_fences_traced#1{\ruledhbox{\ttx#1\low{\the\c_math_fenced_nesting}}} + +\unexpanded\def\math_fences_checked_left + {%\math_fences_traced L% + \math_fences_saved_left} + +\unexpanded\def\math_fences_checked_middle + {%\math_fences_traced M% + \ifcase\c_math_fenced_nesting + \expandafter\math_fences_saved_middle + \else + \expandafter\firstofoneargument + \fi} + +\unexpanded\def\math_fences_checked_right + {%\math_fences_traced R% + \ifcase\c_math_fenced_nesting + \expandafter\firstofoneargument + \else + \expandafter\math_fences_saved_right + \fi} + +\newconditional\c_math_checked_done % only bars + +\unexpanded\def\math_fences_checked_left_or_right + {%\math_fences_traced B% + \ifcase\c_math_fenced_nesting + \settrue\c_math_checked_done + \expandafter\math_fences_saved_left + \else\ifconditional\c_math_checked_done + 
\setfalse\c_math_checked_done + \doubleexpandafter\math_fences_saved_right + \else + \doubleexpandafter\math_fences_saved_middle + \fi\fi} + +\unexpanded\def\math_fences_checked_start + {\c_math_fenced_nesting\zerocount} + +\unexpanded\def\math_fences_checked_stop + {\ifcase\c_math_fenced_nesting\else + \right.\relax % error, todo: nil spacing + \expandafter\math_fences_checked_stop + \fi} + +\unexpanded\def\startcheckedfences + {\begingroup + \let\left \math_fences_checked_left + \let\middle\math_fences_checked_middle + \let\right \math_fences_checked_right + \math_fences_checked_start} + +\unexpanded\def\stopcheckedfences + {\math_fences_checked_stop + \endgroup} + +\let\leftorright\math_fences_checked_left_or_right % for bars + +%D The next characters were used for constructing nicer extensibles but +%D nowadays we have real characters. + +\Umathchardef\braceld=0 \defaultmathfamily "FF07A +\Umathchardef\bracerd=0 \defaultmathfamily "FF07B +\Umathchardef\bracelu=0 \defaultmathfamily "FF07C +\Umathchardef\braceru=0 \defaultmathfamily "FF07D + \protect diff --git a/tex/context/base/math-frc.mkiv b/tex/context/base/math-frc.mkiv index e11e79e82..65fa30942 100644 --- a/tex/context/base/math-frc.mkiv +++ b/tex/context/base/math-frc.mkiv @@ -102,6 +102,7 @@ \setupmathfractions [\c!mathstyle=, \c!alternative=\v!inner, + \c!margin=\zeropoint, \c!rulethickness=.25\exheight, \c!left=0x2E, \c!right=0x2E, @@ -111,9 +112,12 @@ \setuevalue{\currentmathfraction}{\math_frac{\currentmathfraction}}% \to \everydefinemathfraction +\newdimen\d_math_fraction_margin + \unexpanded\def\math_frac#1% {\begingroup \edef\currentmathfraction{#1}% + \d_math_fraction_margin\mathfractionparameter\c!margin \edef\p_math_fractions_color{\mathfractionparameter\c!color}% \ifx\p_math_fractions_color\empty \expandafter\math_frac_normal @@ -139,19 +143,58 @@ \number\dimexpr\mathfractionparameter\c!rulethickness% )}} -\setvalue{\??mathfractionalternative\v!inner}#1#2% +% Having a \withmarginornot{#1}{#2} makes not much sense nor do +% 4 tests or 4 redundant kerns (longer node lists plus possible +% interference). A split in normal and margin also makes testing +% easier. When left and right margins are needed we might merge +% the variants again. After all, these are not real installers. 
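% A usage sketch of the new margin key (the fraction name "marginfrac" and
% the value are just examples); a zero margin selects the normal variant,
% a nonzero one kerns both the numerator and the denominator:
%
% \definemathfraction [marginfrac] [margin=.25\emwidth]
%
% \starttext
%     \m{ \marginfrac{1}{x+1} } versus \m{ \frac{1}{x+1} }
% \stoptext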
+ +\setvalue{\??mathfractionalternative\v!inner}% + {\ifcase\d_math_fraction_margin + \expandafter\math_fraction_inner_normal + \else + \expandafter\math_fraction_inner_margin + \fi} + +\def\math_fraction_inner_normal#1#2% {\Ustack{% {\usemathstyleparameter\mathfractionparameter{#1}}% we should store this one \math_frac_command {\usemathstyleparameter\mathfractionparameter{#2}}% and reuse it here }\endgroup} -\setvalue{\??mathfractionalternative\v!outer}#1#2% +\def\math_fraction_inner_margin#1#2% + {\Ustack{% + {\kern\d_math_fraction_margin + \usemathstyleparameter\mathfractionparameter{#1}% we should store this one + \kern\d_math_fraction_margin}% + \math_frac_command + {\kern\d_math_fraction_margin + \usemathstyleparameter\mathfractionparameter{#2}% and reuse it here + \kern\d_math_fraction_margin}% + }\endgroup} + +\setvalue{\??mathfractionalternative\v!outer}% + {\ifcase\d_math_fraction_margin + \expandafter\math_fraction_outer_normal + \else + \expandafter\math_fraction_outer_margin + \fi} + +\def\math_fraction_outer_normal#1#2% {\Ustack{% \usemathstyleparameter\mathfractionparameter {{#1}\math_frac_command{#2}}% }\endgroup} +\def\math_fraction_outer_margin#1#2% + {\Ustack{% + \usemathstyleparameter\mathfractionparameter + {{\kern\d_math_fraction_margin#1\kern\d_math_fraction_margin}% + \math_frac_command + {\kern\d_math_fraction_margin#2\kern\d_math_fraction_margin}}% + }\endgroup} + \definemathfraction[frac][\c!mathstyle=] \unexpanded\def\xfrac {\begingroup\let\xfrac\xxfrac\math_frac_alternative\scriptstyle} diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua index 7ba1d4514..6be06e634 100644 --- a/tex/context/base/math-ini.lua +++ b/tex/context/base/math-ini.lua @@ -16,19 +16,18 @@ if not modules then modules = { } end modules ['math-ini'] = { -- then we also have to set the other characters (only a subset done now) local formatters, find = string.formatters, string.find -local utfchar, utfbyte = utf.char, utf.byte -local setmathcode, setdelcode = tex.setmathcode, tex.setdelcode -local settexattribute = tex.setattribute +local utfchar, utfbyte, utflength = utf.char, utf.byte, utf.length local floor = math.floor -local context = context +local context = context +local commands = commands -local contextsprint = context.sprint -local contextfprint = context.fprint -- a bit inefficient +local contextsprint = context.sprint +local contextfprint = context.fprint -- a bit inefficient -local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end) +local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end) -local report_math = logs.reporter("mathematics","initializing") +local report_math = logs.reporter("mathematics","initializing") mathematics = mathematics or { } local mathematics = mathematics @@ -40,6 +39,10 @@ local unsetvalue = attributes.unsetvalue local allocate = utilities.storage.allocate local chardata = characters.data +local texsetattribute = tex.setattribute +local setmathcode = tex.setmathcode +local setdelcode = tex.setdelcode + local families = allocate { mr = 0, mb = 1, @@ -87,6 +90,7 @@ local classes = allocate { large = 1, -- op variable = 7, -- alphabetic number = 7, -- alphabetic + root = 16, -- a private one } local open_class = 4 @@ -151,6 +155,10 @@ local function radical(family,slot) return formatters['\\Uradical "%X "%X '](family,slot) end +local function root(family,slot) + return formatters['\\Uroot "%X "%X '](family,slot) +end + local function 
mathchardef(name,class,family,slot) return formatters['\\Umathchardef\\%s "%X "%X "%X '](name,class,family,slot) end @@ -191,29 +199,42 @@ local setmathcharacter = function(class,family,slot,unicode,mset,dset) return mset, dset end +local f_accent = formatters[ [[\ugdef\%s{\Umathaccent 0 "%X "%X }]] ] +local f_topaccent = formatters[ [[\ugdef\%s{\Umathaccent 0 "%X "%X }]] ] +local f_botaccent = formatters[ [[\ugdef\%s{\Umathbotaccent 0 "%X "%X }]] ] +local f_over = formatters[ [[\ugdef\%s{\Udelimiterover "%X "%X }]] ] +local f_under = formatters[ [[\ugdef\%s{\Udelimiterunder "%X "%X }]] ] +local f_fence = formatters[ [[\ugdef\%s{\Udelimiter "%X "%X "%X }]] ] +local f_delimiter = formatters[ [[\ugdef\%s{\Udelimiter 0 "%X "%X }]] ] +local f_radical = formatters[ [[\ugdef\%s{\Uradical "%X "%X }]] ] +local f_root = formatters[ [[\ugdef\%s{\Uroot "%X "%X }]] ] +----- f_char = formatters[ [[\ugdef\%s{\Umathchar "%X "%X "%X }]] +local f_char = formatters[ [[\Umathchardef\%s "%X "%X "%X ]] ] + local setmathsymbol = function(name,class,family,slot) -- hex is nicer for tracing if class == classes.accent then - contextsprint(formatters[ [[\ugdef\%s{\Umathaccent 0 "%X "%X }]] ](name,family,slot)) + contextsprint(f_accent(name,family,slot)) elseif class == classes.topaccent then - contextsprint(formatters[ [[\ugdef\%s{\Umathaccent 0 "%X "%X }]] ](name,family,slot)) + contextsprint(f_topaccent(name,family,slot)) elseif class == classes.botaccent then - contextsprint(formatters[ [[\ugdef\%s{\Umathbotaccent 0 "%X "%X }]] ](name,family,slot)) + contextsprint(f_botaccent(name,family,slot)) elseif class == classes.over then - contextsprint(formatters[ [[\ugdef\%s{\Udelimiterover "%X "%X }]] ](name,family,slot)) + contextsprint(f_over(name,family,slot)) elseif class == classes.under then - contextsprint(formatters[ [[\ugdef\%s{\Udelimiterunder "%X "%X }]] ](name,family,slot)) + contextsprint(f_under(name,family,slot)) elseif class == open_class or class == close_class or class == middle_class then setdelcode("global",slot,{family,slot,0,0}) - contextsprint(formatters[ [[\ugdef\%s{\Udelimiter "%X "%X "%X }]] ](name,class,family,slot)) + contextsprint(f_fence(name,class,family,slot)) elseif class == classes.delimiter then setdelcode("global",slot,{family,slot,0,0}) - contextsprint(formatters[ [[\ugdef\%s{\Udelimiter 0 "%X "%X }]] ](name,family,slot)) + contextsprint(f_delimiter(name,family,slot)) elseif class == classes.radical then - contextsprint(formatters[ [[\ugdef\%s{\Uradical "%X "%X }]] ](name,family,slot)) + contextsprint(f_radical(name,family,slot)) + elseif class == classes.root then + contextsprint(f_root(name,family,slot)) else -- beware, open/close and other specials should not end up here - -- contextsprint(formatters[ [[\ugdef\%s{\Umathchar "%X "%X "%X }]],name,class,family,slot)) - contextsprint(formatters[ [[\Umathchardef\%s "%X "%X "%X ]] ](name,class,family,slot)) + contextsprint(f_char(name,class,family,slot)) end end @@ -313,30 +334,55 @@ function mathematics.define(family) end -- needed for mathml analysis - +-- string with # > 1 are invalid -- we could cache +local lpegmatch = lpeg.match + +local utf8byte = lpeg.patterns.utf8byte * lpeg.P(-1) + +-- function somechar(c) +-- local b = lpegmatch(utf8byte,c) +-- return b and chardata[b] +-- end + + +local somechar = { } + +table.setmetatableindex(somechar,function(t,k) + local b = lpegmatch(utf8byte,k) + local v = b and chardata[b] or false + t[k] = v + return v +end) + local function utfmathclass(chr, default) - local cd = 
chardata[utfbyte(chr)] + local cd = somechar[chr] return cd and cd.mathclass or default or "unknown" end -local function utfmathaccent(chr,default,asked) - local cd = chardata[utfbyte(chr)] +local function utfmathaccent(chr,default,asked1,asked2) + local cd = somechar[chr] if not cd then return default or false end - if asked then + if asked1 and asked1 ~= "" then local mc = cd.mathclass - if mc and mc == asked then + if mc and (mc == asked1 or mc == asked2) then return true end local ms = cd.mathspec + if not ms then + local mp = cd.mathparent + if mp then + ms = chardata[mp].mathspec + end + end if ms then for i=1,#ms do local msi = ms[i] local mc = msi.class - if mc and mc == asked then + if mc and (mc == asked1 or mc == asked2) then return true end end @@ -360,32 +406,38 @@ local function utfmathaccent(chr,default,asked) return default or false end -local function utfmathstretch(chr, default) -- "h", "v", "b", "" - local cd = chardata[utfbyte(chr)] +local function utfmathstretch(chr,default) -- "h", "v", "b", "" + local cd = somechar[chr] return cd and cd.mathstretch or default or "" end -local function utfmathcommand(chr,default,asked) --- local cd = chardata[utfbyte(chr)] --- local cmd = cd and cd.mathname --- return cmd or default or "" - local cd = chardata[utfbyte(chr)] +local function utfmathcommand(chr,default,asked1,asked2) + local cd = somechar[chr] if not cd then return default or "" end - if asked then + if asked1 then local mn = cd.mathname local mc = cd.mathclass - if mn and mc and mc == asked then + if mn and mc and (mc == asked1 or mc == asked2) then return mn end local ms = cd.mathspec + if not ms then + local mp = cd.mathparent + if mp then + ms = chardata[mp].mathspec + end + end if ms then for i=1,#ms do local msi = ms[i] local mn = msi.name - if mn and msi.class == asked then - return mn + if mn then + local mc = msi.class + if mc == asked1 or mc == asked2 then + return mn + end end end end @@ -409,7 +461,7 @@ local function utfmathcommand(chr,default,asked) end local function utfmathfiller(chr, default) - local cd = chardata[utfbyte(chr)] + local cd = somechar[chr] local cmd = cd and (cd.mathfiller or cd.mathname) return cmd or default or "" end @@ -430,17 +482,31 @@ function commands.doifelseutfmathaccent(chr,asked) commands.doifelse(utfmathaccent(chr,nil,asked)) end +function commands.utfmathcommandabove(asked) context(utfmathcommand(asked,nil,"topaccent","over" )) end +function commands.utfmathcommandbelow(asked) context(utfmathcommand(asked,nil,"botaccent","under")) end + +function commands.doifelseutfmathabove(chr) commands.doifelse(utfmathaccent(chr,nil,"topaccent","over" )) end +function commands.doifelseutfmathbelow(chr) commands.doifelse(utfmathaccent(chr,nil,"botaccent","under")) end + -- helpers +-- +-- 1: step 1 +-- 2: step 2 +-- 3: htdp * 1.33^n +-- 4: size * 1.33^n -function mathematics.big(tfmdata,unicode,n) +function mathematics.big(tfmdata,unicode,n,method) local t = tfmdata.characters local c = t[unicode] - if c then + if c and n > 0 then local vv = c.vert_variants or c.next and t[c.next].vert_variants if vv then local vvn = vv[n] return vvn and vvn.glyph or vv[#vv].glyph or unicode - else + elseif method == 1 or method == 2 then + if method == 2 then -- large steps + n = n * 2 + end local next = c.next while next do if n <= 1 then @@ -455,6 +521,27 @@ function mathematics.big(tfmdata,unicode,n) end end end + else + local size = 1.33^n + if method == 4 then + size = tfmdata.parameters.size * size + else -- if method == 3 then + size = (c.height + 
c.depth) * size + end + local next = c.next + while next do + local cn = t[next] + if (cn.height + cn.depth) >= size then + return next + else + local tn = cn.next + if tn then + next = tn + else + return next + end + end + end end end return unicode @@ -491,10 +578,10 @@ end -- -- function commands.taggedmathfunction(tag,label) -- if label then --- settexattribute(a_mathcategory,registercategory(1,tag,tag)) +-- texsetattribute(a_mathcategory,registercategory(1,tag,tag)) -- context.mathlabeltext(tag) -- else --- settexattribute(a_mathcategory,1) +-- texsetattribute(a_mathcategory,1) -- context(tag) -- end -- end @@ -517,13 +604,13 @@ function commands.taggedmathfunction(tag,label,apply) noffunctions = noffunctions + 1 functions[noffunctions] = tag functions[tag] = noffunctions - settexattribute(a_mathcategory,noffunctions + delta) + texsetattribute(a_mathcategory,noffunctions + delta) else - settexattribute(a_mathcategory,n + delta) + texsetattribute(a_mathcategory,n + delta) end context.mathlabeltext(tag) else - settexattribute(a_mathcategory,1000 + delta) + texsetattribute(a_mathcategory,1000 + delta) context(tag) end end @@ -542,6 +629,6 @@ function commands.resetmathattributes() end end for i=1,#list do - settexattribute(list[i],unsetvalue) + texsetattribute(list[i],unsetvalue) end end diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv index 81ec339d6..f2327c22c 100644 --- a/tex/context/base/math-ini.mkiv +++ b/tex/context/base/math-ini.mkiv @@ -44,6 +44,14 @@ \registerctxluafile{math-noa}{1.001} \registerctxluafile{math-tag}{1.001} \registerctxluafile{math-fbk}{1.001} +\registerctxluafile{math-dir}{1.001} + +%D A few compatibility helpers: + +\def\Umathbotaccent{\Umathaccent \s!bottom } +\def\Umathaccents {\Umathaccent \s!both } + +%D The attributes that we will use: \definesystemattribute[mathalphabet] [public] \definesystemattribute[mathsize] [public] @@ -54,6 +62,7 @@ \definesystemattribute[mathcategory] [public] \definesystemattribute[mathmode] [public] \definesystemattribute[mathitalics] [public] +\definesystemattribute[mathbidi] [public] \definesystemattribute[displaymath] [public] @@ -277,9 +286,15 @@ \def\utfmathclassfiltered #1#2{\ctxcommand{utfmathclass (\!!bs#1\!!es,nil,"#2")}} \def\utfmathcommandfiltered#1#2{\ctxcommand{utfmathcommand(\!!bs#1\!!es,nil,"#2")}} +\def\utfmathcommandabove#1{\ctxcommand{utfmathcommandabove(\!!bs#1\!!es)}} +\def\utfmathcommandbelow#1{\ctxcommand{utfmathcommandbelow(\!!bs#1\!!es)}} + \unexpanded\def\doifelseutfmathaccent #1{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es)}} \unexpanded\def\doifelseutfmathaccentfiltered#1#2{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es,"#2")}} +\unexpanded\def\doifelseutfmathabove #1{\ctxcommand{doifelseutfmathabove(\!!bs#1\!!es)}} +\unexpanded\def\doifelseutfmathbelow #1{\ctxcommand{doifelseutfmathbelow(\!!bs#1\!!es)}} + %D Not used that much: \installcorenamespace{mathcodecommand} @@ -349,6 +364,17 @@ \unexpanded\def\mathcommand#1% {\csname\??mathcommand#1\endcsname} +%D Let's define a few comands here: + +\definemathcommand [mathstrut] {\vphantom{(}} +%definemathcommand [joinrel] {\mathrel{\mkern-3mu}} +\definemathcommand [joinrel] [rel] {\mkern-3mu} + +%D We could have a arg variant \unknown\ but not now. + +\unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits} +\unexpanded\def\stackrel #1#2{\mathrel{\mathop{#2}\limits^{#1}}} + %D Moved from font-ini.mkiv: %D %D \macros @@ -388,6 +414,17 @@ {\ifdefined\normalhbox\else\let\normalhbox\hbox\fi % ? 
\let\hbox\mbox} +\unexpanded\def\snappedmath#1% sort of \struttedbox + {\dontleavehmode + \begingroup + \setbox\scratchbox\hbox\bgroup + \startimath#1\stopimath + \egroup + \ht\scratchbox\strutht + \dp\scratchbox\strutht + \box\scratchbox + \endgroup} + %D The next hack is needed needed for sine, cosine etc. \let\mathfunction\firstofoneunexpanded @@ -494,10 +531,6 @@ %D \tt test $\sin{(x^{\sin(x^{\sin(x)})})}$ test %D \stoptyping -%D Some goodies: - -\unexpanded\def\Angstrom{\nomathematics{\Aring}} - %D \macros %D {nonknuthmode, donknuthmode} %D @@ -531,12 +564,13 @@ \def\activatemathcharacters {\the\activatedmathcharacters} -% \setnewconstant\primeasciicode 39 % ' +% beware, not runtime, so has to happen at format generation \activatemathcharacter\circumflexasciicode \activatemathcharacter\underscoreasciicode \activatemathcharacter\ampersandasciicode -\activatemathcharacter\primeasciicode + +% \activatemathcharacter\primeasciicode % not used: % @@ -601,57 +635,57 @@ \Umathchardef\prime "0 "0 "2032 \fi -\let\math_prime_indeed_normal\prime - -\appendtoks - \let\math_prime_indeed_normal\prime % gets defined later - \let\mathfontprime\prime % for tracing - \let\prime\math_prime_indeed % so this is needed -\to \everydump - -\unexpanded\def\math_prime_indeed - {\iffontchar\textfont\zerocount"FE325\relax - ^\bgroup - \expandafter\math_prime_indeed_virtual % virtual mess (using funny signal) - \else - % \expandafter\math_prime_indeed_normal % gets collapsed - \expandafter\math_prime_indeed_crapped % gets collapsed - \fi} - -\def\math_prime_indeed_crapped - {{^{\math_prime_indeed_normal}}} - -\let\prime\math_prime_indeed - -\def\math_prime_indeed_virtual - {\math_prime_indeed_normal - \futurelet\nexttoken\math_prime_indeed_choice} - -\installcorenamespace{mathprime} - -\def\math_prime_indeed_choice - {\csname\??mathprime - \ifx '\nexttoken a\else - \ifx \math_prime_indeed_normal\nexttoken a\else - \ifx \prime\nexttoken a\else - \ifx\superscriptcircumflextoken\nexttoken b\else - \ifx\othercircumflextoken \nexttoken b\else - c\fi\fi\fi\fi\fi - \endcsname} - -\setvalue{\??mathprime a}#1{\math_prime_indeed_virtual} -\setvalue{\??mathprime b}#1#2{#2\egroup} -\setvalue{\??mathprime c}{\egroup} - -\let\activemathprime\math_prime_indeed - -\bgroup - - \catcode\primeasciicode\activecatcode - - \global\everymathematics\expandafter{\the\everymathematics\let'\math_prime_indeed} % todo: do this at the lua end - -\egroup +% \let\math_prime_indeed_normal\prime +% +% \appendtoks +% \let\math_prime_indeed_normal\prime % gets defined later +% \let\mathfontprime\prime % for tracing +% \let\prime\math_prime_indeed % so this is needed +% \to \everydump +% +% \unexpanded\def\math_prime_indeed +% {\iffontchar\textfont\zerocount"FE325\relax +% ^\bgroup +% \expandafter\math_prime_indeed_virtual % virtual mess (using funny signal) +% \else +% % \expandafter\math_prime_indeed_normal % gets collapsed +% \expandafter\math_prime_indeed_crapped % gets collapsed +% \fi} +% +% \def\math_prime_indeed_crapped +% {{^{\math_prime_indeed_normal}}} +% +% % \let\prime\math_prime_indeed +% +% \def\math_prime_indeed_virtual +% {\math_prime_indeed_normal +% \futurelet\nexttoken\math_prime_indeed_choice} +% +% \installcorenamespace{mathprime} +% +% \def\math_prime_indeed_choice +% {\csname\??mathprime +% \ifx '\nexttoken a\else +% \ifx \math_prime_indeed_normal\nexttoken a\else +% \ifx \prime\nexttoken a\else +% \ifx\superscriptcircumflextoken\nexttoken b\else +% \ifx\othercircumflextoken \nexttoken b\else +% c\fi\fi\fi\fi\fi +% \endcsname} 
+% +% \setvalue{\??mathprime a}#1{\math_prime_indeed_virtual} +% \setvalue{\??mathprime b}#1#2{#2\egroup} +% \setvalue{\??mathprime c}{\egroup} +% +% \let\activemathprime\math_prime_indeed +% +% \bgroup +% +% \catcode\primeasciicode\activecatcode +% +% \global\everymathematics\expandafter{\the\everymathematics\let'\math_prime_indeed} % todo: do this at the lua end +% +% \egroup \bgroup @@ -672,8 +706,8 @@ \newconditional \knuthmode -\let\nonknuthmode\relax -\let\donknuthmode\relax +\let\nonknuthmode\relax % no longer needed in MkIV +\let\donknuthmode\relax % no longer needed in MkIV % \def\nonknuthmode % {\pushcatcodetable @@ -784,7 +818,34 @@ \setupmathematics [\c!compact=no] -%D Arabic: +% \enabletrackers[typesetters.directions.math] + +%D Right||to||left typesetting in math is supported by the \type {align} parameter +%D with as option the \type {bidi} parameter. Of course support for special symbols +%D like square roots depends on the font as well. We probably need to mirror a few +%D more characters. +%D +%D \startbuffer +%D \removeunwantedspaces +%D \m{ ( 1 = 1) }\quad +%D \m{ (123 = 123) }\quad +%D \m{ a ( 1 = 1) b }\quad +%D \m{ a (123 = 123) b }\quad +%D \m{ x = 123 y + (1 / \sqrt {x}) } +%D \stopbuffer +%D +%D \typebuffer +%D +%D \starttabulate[|T|T||] +%D \HL +%D \NC align \NC bidi \NC \NC \NR +%D \HL +%D \NC l2r \NC no \NC \setupmathematics[bidi=no] \getbuffer \NC \NR +%D \NC l2r \NC yes \NC \setupmathematics[bidi=yes] \getbuffer \NC \NR +%D \NC r2l \NC no \NC \setupmathematics[align=r2l,bidi=no] \getbuffer \NC \NR +%D \NC r2l \NC yes \NC \setupmathematics[align=r2l,bidi=yes] \getbuffer \NC \NR +%D \HL +%D \stoptabulate \newconditional\c_math_right_to_left @@ -797,9 +858,28 @@ \appendtoks \math_basics_synchronize_direction -%to \everymathematics % comes too late and I'm not in the mood for a mixed mode kludge now +%to \everymathematics % comes too late and I'm not in the mood for a mixed mode kludge now (should be a property of beginmath nodes and passed to callbacks) +\to \everyswitchmathematics + +% experimental (needed for an article) + +\installcorenamespace {mathbidi} + +\newcount\c_math_bidi + +\setvalue{\??mathbidi\v!no }{\ctxcommand{setmathdirection(0)}\c_math_bidi\attributeunsetvalue} +\setvalue{\??mathbidi\v!yes}{\ctxcommand{setmathdirection(1)}\c_math_bidi\plusone} + +\appendtoks + \edef\p_bidi{\mathematicsparameter\c!bidi}% + \csname\??mathbidi\ifcsname\??mathbidi\p_bidi\endcsname\p_bidi\else\v!no\fi\endcsname +\to \everysetupmathematics + +\appendtoks + \attribute\mathbidiattribute\ifconditional\c_math_right_to_left\c_math_bidi\else\attributeunsetvalue\fi \to \everyswitchmathematics + %D Delayed: greek. 
%D %D \starttyping @@ -1008,6 +1088,9 @@ % \global\mathcode\c_math_period\c_math_special % \to \everyjob +% \activatemathcharacter\c_math_comma +% \activatemathcharacter\c_math_period + \appendtoks \mathcode\c_math_comma \c_math_special \mathcode\c_math_period\c_math_special @@ -1639,7 +1722,7 @@ % % \def\displ@y % {\global\dt@ptrue -% \openup\displayopenupvalue % was \openup\jot +% \math_openup\displayopenupvalue % was \openup\jot % \everycr % {\noalign % {\ifdt@p @@ -1660,7 +1743,7 @@ \unexpanded\def\math_display_align_hack % I don't like the global, maybe we should push and pop {\global\let\math_display_align_hack_indeed\math_display_align_hack_remove_skip - \openup\displayopenupvalue % was \openup\jot + \math_openup\displayopenupvalue % was \math_openup\jot \everycr{\noalign{\math_display_align_hack_indeed}}} \def\math_display_align_hack_remove_skip @@ -1723,3 +1806,20 @@ \def\mathhorizontalcode#1#2{\ctxcommand{horizontalcode(\number#1,\number#2)}} \protect \endinput + +% % not used (yet) +% +% \newtoks \everystartimath +% \newtoks \everystopimath +% +% \unexpanded\def\startimath{\Ustartmath\the\everystartimath} +% \unexpanded\def\stopimath {\the\everystopimath\Ustopmath} +% +% \unexpanded\def\m% +% {\relax +% \ifmmode\expandafter\math_m_stay\else\expandafter\math_m_math\fi} +% +% \unexpanded\def\math_m_math#1% +% {\startimath#1\stopimath} +% +% \let\math_m_stay\firstofoneargument diff --git a/tex/context/base/math-map.lua b/tex/context/base/math-map.lua index a0d7457d1..8d301ac33 100644 --- a/tex/context/base/math-map.lua +++ b/tex/context/base/math-map.lua @@ -21,6 +21,8 @@ if not modules then modules = { } end modules ['math-map'] = { -- todo: alphabets namespace -- maybe: script/scriptscript dynamic, +-- superscripped primes get unscripted ! + -- to be looked into once the fonts are ready (will become font -- goodie): -- @@ -36,11 +38,15 @@ local merged = table.merged local extract = bit32.extract local allocate = utilities.storage.allocate -local texattribute = tex.attribute + local otffeatures = fonts.constructors.newfeatures("otf") local registerotffeature = otffeatures.register + local setmetatableindex = table.setmetatableindex +local texgetattribute = tex.getattribute +local texsetattribute = tex.setattribute + local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end) local report_remapping = logs.reporter("mathematics","remapping") @@ -54,30 +60,30 @@ local mathematics = mathematics -- it otherwise. 
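-- A quick illustration of the table below (a standalone sketch, the print is
-- only there for demonstration): each key is a reserved slot in the
-- Mathematical Alphanumeric Symbols block that is redirected to the already
-- existing Letterlike Symbols codepoint, for instance:
--
--   print(string.format("U+%05X -> U+%04X",0x1D455,mathematics.gaps[0x1D455]))
--   -- U+1D455 -> U+210E (planck constant, the italic h)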
mathematics.gaps = { - [0x1D455] = 0x0210E, -- H - [0x1D49D] = 0x0212C, -- script B - [0x1D4A0] = 0x02130, -- script E - [0x1D4A1] = 0x02131, -- script F - [0x1D4A3] = 0x0210B, -- script H - [0x1D4A4] = 0x02110, -- script I - [0x1D4A7] = 0x02112, -- script L - [0x1D4A8] = 0x02133, -- script M - [0x1D4AD] = 0x0211B, -- script R - [0x1D4BA] = 0x0212F, -- script e - [0x1D4BC] = 0x0210A, -- script g - [0x1D4C4] = 0x02134, -- script o - [0x1D506] = 0x0212D, -- fraktur C - [0x1D50B] = 0x0210C, -- fraktur H - [0x1D50C] = 0x02111, -- fraktur I - [0x1D515] = 0x0211C, -- fraktur R - [0x1D51D] = 0x02128, -- fraktur Z - [0x1D53A] = 0x02102, -- bb C - [0x1D53F] = 0x0210D, -- bb H - [0x1D545] = 0x02115, -- bb N - [0x1D547] = 0x02119, -- bb P - [0x1D548] = 0x0211A, -- bb Q - [0x1D549] = 0x0211D, -- bb R - [0x1D551] = 0x02124, -- bb Z + [0x1D455] = 0x0210E, -- ℎ h + [0x1D49D] = 0x0212C, -- ℬ script B + [0x1D4A0] = 0x02130, -- ℰ script E + [0x1D4A1] = 0x02131, -- ℱ script F + [0x1D4A3] = 0x0210B, -- ℋ script H + [0x1D4A4] = 0x02110, -- ℐ script I + [0x1D4A7] = 0x02112, -- ℒ script L + [0x1D4A8] = 0x02133, -- ℳ script M + [0x1D4AD] = 0x0211B, -- ℛ script R + [0x1D4BA] = 0x0212F, -- ℯ script e + [0x1D4BC] = 0x0210A, -- ℊ script g + [0x1D4C4] = 0x02134, -- ℴ script o + [0x1D506] = 0x0212D, -- ℭ fraktur C + [0x1D50B] = 0x0210C, -- ℌ fraktur H + [0x1D50C] = 0x02111, -- ℑ fraktur I + [0x1D515] = 0x0211C, -- ℜ fraktur R + [0x1D51D] = 0x02128, -- ℨ fraktur Z + [0x1D53A] = 0x02102, -- ℂ bb C + [0x1D53F] = 0x0210D, -- ℍ bb H + [0x1D545] = 0x02115, -- ℕ bb N + [0x1D547] = 0x02119, -- ℙ bb P + [0x1D548] = 0x0211A, -- ℚ bb Q + [0x1D549] = 0x0211D, -- ℝ bb R + [0x1D551] = 0x02124, -- ℤ bb Z } local function fillinmathgaps(tfmdata,key,value) @@ -132,6 +138,7 @@ local regular_tf = { }, symbols = { [0x2202]=0x2202, [0x2207]=0x2207, + [0x0027]=0x2032, -- prime }, } @@ -164,6 +171,7 @@ local regular_it = { }, symbols = { [0x2202]=0x1D715, [0x2207]=0x1D6FB, + [0x0027]=0x2032, -- prime }, } @@ -189,6 +197,7 @@ local regular_bf= { }, symbols = { [0x2202]=0x1D6DB, [0x2207]=0x1D6C1, + [0x0027]=0x2032, -- prime }, } @@ -214,6 +223,7 @@ local regular_bi = { }, symbols = { [0x2202]=0x1D74F, [0x2207]=0x1D735, + [0x0027]=0x2032, -- prime }, } @@ -264,6 +274,7 @@ local sansserif_bf = { }, symbols = { [0x2202]=0x1D789, [0x2207]=0x1D76F, + [0x0027]=0x2032, -- prime }, } @@ -289,6 +300,7 @@ local sansserif_bi = { }, symbols = { [0x2202]=0x1D7C3, [0x2207]=0x1D7A9, + [0x0027]=0x2032, -- prime }, } @@ -333,7 +345,8 @@ local blackboard_tf = { [0x0393]=0x0213E, [0x03A0]=0x0213F, }, symbols = { -- sum - [0x2211]=0x02140, + [0x2211]=0x02140, + [0x0027]=0x2032, -- prime }, } @@ -524,7 +537,7 @@ function mathematics.getboth(alphabet,style) end function mathematics.getstyle(style) - local r = mathremap[texattribute[mathalphabet]] + local r = mathremap[texgetattribute(mathalphabet)] local alphabet = r and r.alphabet or "regular" local data = alphabets[alphabet][style] return data and data.attribute @@ -533,22 +546,22 @@ end function mathematics.syncboth(alphabet,style) local data = alphabet and alphabets[alphabet] or regular data = style and data[style] or data.tf - texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet] + texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet]) end function mathematics.syncstyle(style) - local r = mathremap[texattribute[mathalphabet]] + local r = mathremap[texgetattribute(mathalphabet)] local alphabet = r and r.alphabet or "regular" local data = 
alphabets[alphabet][style] - texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet] + texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet]) end function mathematics.syncname(alphabet) -- local r = mathremap[mathalphabet] - local r = mathremap[texattribute[mathalphabet]] + local r = mathremap[texgetattribute(mathalphabet)] local style = r and r.style or "tf" local data = alphabets[alphabet][style] - texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet] + texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet]) end local islcgreek = regular_tf.lcgreek diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua index b309ba077..f3987c12f 100644 --- a/tex/context/base/math-noa.lua +++ b/tex/context/base/math-noa.lua @@ -20,6 +20,7 @@ if not modules then modules = { } end modules ['math-noa'] = { local utfchar, utfbyte = utf.char, utf.byte local formatters = string.formatters +local div = math.div local fonts, nodes, node, mathematics = fonts, nodes, node, mathematics @@ -59,10 +60,10 @@ local insert_node_after = node.insert_after local insert_node_before = node.insert_before local free_node = node.free local new_node = node.new -- todo: pool: math_noad math_sub +local copy_node = node.copy local new_kern = nodes.pool.kern local new_rule = nodes.pool.rule -local concat_nodes = nodes.concat local topoints = number.points @@ -75,7 +76,8 @@ local fontemwidths = fonthashes.emwidths local fontexheights = fonthashes.exheights local variables = interfaces.variables -local texattribute = tex.attribute +local texsetattribute = tex.setattribute +local texgetattribute = tex.getattribute local unsetvalue = attributes.unsetvalue local chardata = characters.data @@ -118,9 +120,11 @@ local hlist_code = nodecodes.hlist local glyph_code = nodecodes.glyph local left_fence_code = 1 +local right_fence_code = 3 local function process(start,what,n,parent) if n then n = n + 1 else n = 0 end + local prev = nil while start do local id = start.id if trace_processing then @@ -148,6 +152,10 @@ local function process(start,what,n,parent) elseif id == math_char or id == math_textchar or id == math_delim then break elseif id == math_noad then +if prev then + -- we have no proper prev in math nodes yet + start.prev = prev +end local noad = start.nucleus if noad then process(noad,what,n,start) end -- list noad = start.sup if noad then process(noad,what,n,start) end -- list noad = start.sub if noad then process(noad,what,n,start) end -- list @@ -183,6 +191,7 @@ local function process(start,what,n,parent) else -- glue, penalty, etc end +prev = start start = start.next end end @@ -233,27 +242,39 @@ families[math_char] = function(pointer) local char = pointer.char local bold = boldmap[char] local newa = a - 3 - if bold then + if not bold then + if trace_families then + report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa]) + end + pointer.fam = newa + elseif not fontcharacters[font_of_family(newa)][bold] then + if trace_families then + report_families("no bold character for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa]) + end + if newa > 3 then + pointer.fam = newa - 3 + end + else pointer[a_exportstatus] = char pointer.char = bold if trace_families then report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap 
%s",char,bold,a,familymap[a],newa,familymap[newa]) end + pointer.fam = newa + end + else + local char = pointer.char + if not fontcharacters[font_of_family(a)][char] then + if trace_families then + report_families("no bold replacement for %C",char) + end else if trace_families then - report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa]) + report_families("family of %C becomes %s with remap %s",char,a,familymap[a]) end + pointer.fam = a end - pointer.fam = newa - else - if trace_families then - local char = pointer.char - report_families("family of %C becomes %s with remap %s",char,a,familymap[a]) - end - pointer.fam = a end - else - -- pointer.fam = 0 end end end @@ -267,8 +288,20 @@ families[math_delim] = function(pointer) -- no bold delimiters in unicode a = a - 3 end - pointer.small_fam = a - pointer.large_fam = a + local char = pointer.small_char + local okay = fontcharacters[font_of_family(a)][char] + if okay then + pointer.small_fam = a + elseif a > 2 then + pointer.small_fam = a - 3 + end + local char = pointer.large_char + local okay = fontcharacters[font_of_family(a)][char] + if okay then + pointer.large_fam = a + elseif a > 2 then + pointer.large_fam = a - 3 + end else pointer.small_fam = 0 pointer.large_fam = 0 @@ -442,16 +475,20 @@ local mathsize = attributes.private("mathsize") local resize = { } processors.resize = resize resize[math_fence] = function(pointer) - if pointer.subtype == left_fence_code then + local subtype = pointer.subtype + if subtype == left_fence_code or subtype == right_fence_code then local a = pointer[mathsize] if a and a > 0 then + local method, size = div(a,100), a % 100 pointer[mathsize] = 0 - local d = pointer.delim - local df = d.small_fam - local id = font_of_family(df) - if id > 0 then - local ch = d.small_char - d.small_char = mathematics.big(fontdata[id],ch,a) + local delimiter = pointer.delim + local chr = delimiter.small_char + if chr > 0 then + local fam = delimiter.small_fam + local id = font_of_family(fam) + if id > 0 then + delimiter.small_char = mathematics.big(fontdata[id],chr,size,method) + end end end end @@ -462,147 +499,35 @@ function handlers.resize(head,style,penalties) return true end --- respacing - --- local mathpunctuation = attributes.private("mathpunctuation") --- --- local respace = { } processors.respace = respace - --- only [nd,ll,ul][po][nd,ll,ul] - --- respace[math_char] = function(pointer,what,n,parent) -- not math_noad .. math_char ... 
and then parent --- pointer = parent --- if pointer and pointer.subtype == noad_ord then --- local a = pointer[mathpunctuation] --- if a and a > 0 then --- pointer[mathpunctuation] = 0 --- local current_nucleus = pointer.nucleus --- if current_nucleus.id == math_char then --- local current_char = current_nucleus.char --- local fc = chardata[current_char] --- fc = fc and fc.category --- if fc == "nd" or fc == "ll" or fc == "lu" then --- local next_noad = pointer.next --- if next_noad and next_noad.id == math_noad and next_noad.subtype == noad_punct then --- local next_nucleus = next_noad.nucleus --- if next_nucleus.id == math_char then --- local next_char = next_nucleus.char --- local nc = chardata[next_char] --- nc = nc and nc.category --- if nc == "po" then --- local last_noad = next_noad.next --- if last_noad and last_noad.id == math_noad and last_noad.subtype == noad_ord then --- local last_nucleus = last_noad.nucleus --- if last_nucleus.id == math_char then --- local last_char = last_nucleus.char --- local lc = chardata[last_char] --- lc = lc and lc.category --- if lc == "nd" or lc == "ll" or lc == "lu" then --- local ord = new_node(math_noad) -- todo: pool --- ord.subtype, ord.nucleus, ord.sub, ord.sup, ord.attr = noad_ord, next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr --- -- next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr = nil, nil, nil, nil --- next_noad.nucleus, next_noad.sub, next_noad.sup = nil, nil, nil -- else crash with attributes ref count --- --~ next_noad.attr = nil --- ord.next = last_noad --- pointer.next = ord --- free_node(next_noad) --- end --- end --- end --- end --- end --- end --- end --- end --- end --- end --- end - --- local comma = 0x002C --- local period = 0x002E --- --- respace[math_char] = function(pointer,what,n,parent) --- pointer = parent --- if pointer and pointer.subtype == noad_punct then --- local current_nucleus = pointer.nucleus --- if current_nucleus.id == math_char then --- local current_nucleus = pointer.nucleus --- if current_nucleus.id == math_char then --- local current_char = current_nucleus.char --- local a = pointer[mathpunctuation] --- if not a or a == 0 then --- if current_char == comma then --- -- default tex: 2,5 or 2, 5 --> 2, 5 --- elseif current_char == period then --- -- default tex: 2.5 or 2. 5 --> 2.5 --- pointer.subtype = noad_ord --- end --- elseif a == 1 then --- local next_noad = pointer.next --- if next_noad and next_noad.id == math_noad then --- local next_nucleus = next_noad.nucleus --- if next_nucleus.id == math_char and next_nucleus.char == 0 then --- nodes.remove(pointer,next_noad,true) --- end --- if current_char == comma then --- -- default tex: 2,5 or 2, 5 --> 2, 5 --- elseif current_char == period then --- -- default tex: 2.5 or 2. 5 --> 2.5 --- pointer.subtype = noad_ord --- end --- end --- elseif a == 2 then --- if current_char == comma or current_char == period then --- local next_noad = pointer.next --- if next_noad and next_noad.id == math_noad then --- local next_nucleus = next_noad.nucleus --- if next_nucleus.id == math_char and next_nucleus.char == 0 then --- if current_char == comma then --- -- adaptive: 2, 5 --> 2, 5 --- elseif current_char == period then --- -- adaptive: 2. 5 --> 2. 
5 --- end --- nodes.remove(pointer,next_noad,true) --- else --- if current_char == comma then --- -- adaptive: 2,5 --> 2,5 --- pointer.subtype = noad_ord --- elseif current_char == period then --- -- adaptive: 2.5 --> 2.5 --- pointer.subtype = noad_ord --- end --- end --- end --- end --- end --- end --- end --- end --- end --- --- function handlers.respace(head,style,penalties) --- processnoads(head,respace,"respace") --- return true --- end - --- The following code is dedicated to Luigi Scarso who pointed me --- to the fact that \not= is not producing valid pdf-a code. --- The code does not solve this for virtual characters but it does --- a decent job on collapsing so that fonts that have the right --- glyph will have a decent unicode point. In the meantime this code --- has been moved elsewhere. local collapse = { } processors.collapse = collapse local mathpairs = characters.mathpairs -mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034 } -- (prime,prime) (prime,doubleprime) -mathpairs[0x2033] = { [0x2032] = 0x2034 } -- (doubleprime,prime) +mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034, [0x2034] = 0x2057 } -- (prime,prime) (prime,doubleprime) (prime,tripleprime) +mathpairs[0x2033] = { [0x2032] = 0x2034, [0x2033] = 0x2057 } -- (doubleprime,prime) (doubleprime,doubleprime) +mathpairs[0x2034] = { [0x2032] = 0x2057 } -- (tripleprime,prime) + +mathpairs[0x2035] = { [0x2035] = 0x2036, [0x2036] = 0x2037 } -- (reversedprime,reversedprime) (reversedprime,doublereversedprime) +mathpairs[0x2036] = { [0x2035] = 0x2037 } -- (doublereversedprime,reversedprime) mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D } mathpairs[0x222C] = { [0x222B] = 0x222D } -mathpairs[0x007C] = { [0x007C] = 0x2016 } -- double bars +mathpairs[0x007C] = { [0x007C] = 0x2016, [0x2016] = 0x2980 } -- bar+bar=double bar+double=triple +mathpairs[0x2016] = { [0x007C] = 0x2980 } -- double+bar=triple + +local movesub = { + -- primes + [0x2032] = 0xFE932, + [0x2033] = 0xFE933, + [0x2034] = 0xFE934, + [0x2057] = 0xFE957, + -- reverse primes + [0x2035] = 0xFE935, + [0x2036] = 0xFE936, + [0x2037] = 0xFE937, +} local validpair = { [noad_rel] = true, @@ -612,48 +537,79 @@ local validpair = { [noad_opnolimits] = true, } -local function collapsepair(pointer,what,n,parent) -- todo: switch to turn in on and off +local function movesubscript(parent,current_nucleus,current_char) + local prev = parent.prev + if prev and prev.id == math_noad then + if not prev.sup and not prev.sub then + current_nucleus.char = movesub[current_char or current_nucleus.char] + -- {f} {'}_n => f_n^' + local nucleus = parent.nucleus + local sub = parent.sub + local sup = parent.sup + prev.sup = nucleus + prev.sub = sub + local dummy = copy_node(nucleus) + dummy.char = 0 + parent.nucleus = dummy + parent.sub = nil + if trace_collapsing then + report_collapsing("fixing subscript") + end + end + end +end + +local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to turn in on and off if parent then if validpair[parent.subtype] then local current_nucleus = parent.nucleus - if not parent.sub and not parent.sup and current_nucleus.id == math_char then + if current_nucleus.id == math_char then local current_char = current_nucleus.char - local mathpair = mathpairs[current_char] - if mathpair then - local next_noad = parent.next - if next_noad and next_noad.id == math_noad then - if validpair[next_noad.subtype] then - local next_nucleus = next_noad.nucleus - if next_nucleus.id == math_char then - local next_char = 
next_nucleus.char - local newchar = mathpair[next_char] - if newchar then - local fam = current_nucleus.fam - local id = font_of_family(fam) - local characters = fontcharacters[id] - if characters and characters[newchar] then - if trace_collapsing then - report_collapsing("%U + %U => %U",current_char,next_char,newchar) - end - current_nucleus.char = newchar - local next_next_noad = next_noad.next - if next_next_noad then - parent.next = next_next_noad - next_next_noad.prev = parent - else - parent.next = nil + if not parent.sub and not parent.sup then + local mathpair = mathpairs[current_char] + if mathpair then + local next_noad = parent.next + if next_noad and next_noad.id == math_noad then + if validpair[next_noad.subtype] then + local next_nucleus = next_noad.nucleus + if next_nucleus.id == math_char then + local next_char = next_nucleus.char + local newchar = mathpair[next_char] + if newchar then + local fam = current_nucleus.fam + local id = font_of_family(fam) + local characters = fontcharacters[id] + if characters and characters[newchar] then + if trace_collapsing then + report_collapsing("%U + %U => %U",current_char,next_char,newchar) + end + current_nucleus.char = newchar + local next_next_noad = next_noad.next + if next_next_noad then + parent.next = next_next_noad + next_next_noad.prev = parent + else + parent.next = nil + end + parent.sup = next_noad.sup + parent.sub = next_noad.sub + next_noad.sup = nil + next_noad.sub = nil + free_node(next_noad) + collapsepair(pointer,what,n,parent,true) + if not nested and movesub[current_char] then + movesubscript(parent,current_nucleus) + end end - parent.sup = next_noad.sup - parent.sub = next_noad.sub - next_noad.sup = nil - next_noad.sub = nil - free_node(next_noad) - collapsepair(pointer,what,n,parent) end end end end + elseif not nested and movesub[current_char] then + movesubscript(parent,current_nucleus,current_char) end + elseif not nested and movesub[current_char] then + movesubscript(parent,current_nucleus,current_char) end end end @@ -824,7 +780,7 @@ function mathematics.setalternate(fam,tag) local mathalternates = tfmdata.shared and tfmdata.shared.mathalternates if mathalternates then local m = mathalternates[tag] - tex.attribute[a_mathalternate] = m and m.attribute or unsetvalue + texsetattribute(a_mathalternate,m and m.attribute or unsetvalue) end end @@ -947,11 +903,8 @@ trackers.register("math.italics", function(v) if k > 0 then return setcolor(new_rule(k,ex,ex),c_positive_d) else - return concat_nodes { - old_kern(k), - setcolor(new_rule(-k,ex,ex),c_negative_d), - old_kern(k), - } + -- influences un* + return old_kern(k) .. setcolor(new_rule(-k,ex,ex),c_negative_d) .. 
old_kern(k) end end else @@ -1063,14 +1016,14 @@ function mathematics.setitalics(n) enable() end if n == variables.reset then - texattribute[a_mathitalics] = unsetvalue + texsetattribute(a_mathitalics,unsetvalue) else - texattribute[a_mathitalics] = tonumber(n) or unsetvalue + texsetattribute(a_mathitalics,tonumber(n) or unsetvalue) end end function mathematics.resetitalics() - texattribute[a_mathitalics] = unsetvalue + texsetattribute(a_mathitalics,unsetvalue) end -- variants @@ -1134,6 +1087,50 @@ function handlers.variants(head,style,penalties) return true end +-- for manuals + +local classes = { } + +local colors = { + [noadcodes.rel] = "trace:dr", + [noadcodes.ord] = "trace:db", + [noadcodes.bin] = "trace:dg", + [noadcodes.open] = "trace:dm", + [noadcodes.close] = "trace:dm", + [noadcodes.punct] = "trace:dc", + -- [noadcodes.opdisplaylimits] = "", + -- [noadcodes.oplimits] = "", + -- [noadcodes.opnolimits] = "", + -- [noadcodes.inner = "", + -- [noadcodes.under = "", + -- [noadcodes.over = "", + -- [noadcodes.vcenter = "", +} + +classes[math_char] = function(pointer,what,n,parent) + local color = colors[parent.subtype] + if color then + setcolor(pointer,color) + else + resetcolor(pointer) + end +end + +function handlers.classes(head,style,penalties) + processnoads(head,classes,"classes") + return true +end + +trackers.register("math.classes",function(v) tasks.setaction("math","noads.handlers.classes",v) end) + +-- just for me + +function handlers.showtree(head,style,penalties) + inspect(nodes.totree(head)) +end + +trackers.register("math.showtree",function(v) tasks.setaction("math","noads.handlers.showtree",v) end) + -- the normal builder function builders.kernel.mlist_to_hlist(head,style,penalties) diff --git a/tex/context/base/math-pln.mkiv b/tex/context/base/math-pln.mkiv index b862bb4cb..5e4c43c81 100644 --- a/tex/context/base/math-pln.mkiv +++ b/tex/context/base/math-pln.mkiv @@ -87,13 +87,31 @@ \unexpanded\def\pmatrix#1% {\left(\matrix{#1}\right)} -\unexpanded\def\openup - {\afterassignment\math_openup\scratchdimen=} - -\def\math_openup - {\advance\lineskip \scratchdimen - \advance\baselineskip \scratchdimen - \advance\lineskiplimit\scratchdimen} +% \unexpanded\def\openup +% {\afterassignment\math_openup\scratchdimen=} +% +% \def\math_openup +% {\advance\lineskip \scratchdimen +% \advance\baselineskip \scratchdimen +% \advance\lineskiplimit\scratchdimen} + +\let\math_closeup\relax + +\unexpanded\def\math_openup + {\afterassignment\math_openup_indeed\scratchdimen} + +\def\math_openup_indeed + {\unexpanded\edef\math_closeup + {\lineskip \the\lineskip + \baselineskip \the\baselineskip + \lineskiplimit\the\lineskiplimit + \relax}% + \advance \lineskip \scratchdimen + \advance \baselineskip \scratchdimen + \advance \lineskiplimit \scratchdimen} + +\let\openup \math_openup +\def\closeup{\math_closeup} % dynamic \unexpanded\def\displaylines#1% {\the\mathdisplayaligntweaks diff --git a/tex/context/base/math-rad.mkvi b/tex/context/base/math-rad.mkvi new file mode 100644 index 000000000..541a7038e --- /dev/null +++ b/tex/context/base/math-rad.mkvi @@ -0,0 +1,287 @@ +%D \module +%D [ file=math-rad, +%D version=2013.07.13, +%D title=\CONTEXT\ Math Macros, +%D subtitle=Radicals, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. 
+ +\writestatus{loading}{ConTeXt Math Macros / Radicals} + +\unprotect + +%D \starttyping +%D $\sqrt[3]{10}$ +%D \stoptyping +%D +%D This root command will be overloaded later: + +%D Old stuff: + +% \def\rootradical{\Uroot \defaultmathfamily "221A } % can be done in char-def +% \def\surdradical{\Uradical \defaultmathfamily "221A } % can be done in char-def + +\def\root#1\of{\rootradical{#1}} % #2 + +\unexpanded\def\sqrt{\doifnextoptionalelse\rootwithdegree\rootwithoutdegree} + +\def\rootwithdegree[#1]{\rootradical{#1}} +\def\rootwithoutdegree {\rootradical {}} + +%D Even older stuff: + +% % is now an ordinary character +% +% \let\normalsurd\surd % \Uradical "0 "221A +% \unexpanded\def\surd{\normalsurd{}} + + +%D The real thing: + +\installcorenamespace{mathradical} +\installcorenamespace{mathradicalalternative} + +\installcommandhandler \??mathradical {mathradical} \??mathradical + +\setupmathradical + [\c!alternative=\v!normal, + \c!mpoffset=.25\exheight] + +\appendtoks + \setuevalue{\currentmathradical}{\math_radical_handle{\currentmathradical}} +\to \everydefinemathradical + +\unexpanded\def\math_radical_handle#tag% + {\begingroup + \edef\currentmathradical{#tag}% + \doifnextoptionalelse\math_radical_degree_yes\math_radical_degree_nop} + +\def\math_radical_alternative{\csname\??mathradicalalternative\mathradicalparameter\c!alternative\endcsname} + +\def\m_math_no_degree{{}} + +\def\math_radical_degree_yes[#degree]{\edef\currentmathradicaldegree{#degree}\math_radical_indeed} +\def\math_radical_degree_nop {\let\currentmathradicaldegree\m_math_no_degree\math_radical_indeed} + +\def\math_radical_indeed#body% + {\math_radical_alternative{#body}\endgroup} + +\setvalue{\??mathradicalalternative\v!default}% #1% + {\rootradical{\currentmathradicaldegree}} + +\setvalue{\??mathradicalalternative\v!normal}#body% + {\edef\p_color{\mathradicalparameter\c!color}% + \ifx\p_color\empty + \rootradical{\currentmathradicaldegree}{#body}% {} really needed as \rootradical expands first + \else\ifx\currentmathradicaldegree\empty + \pushcolor[\p_color]% + \rootradical{\currentmathradicaldegree}% + {\popcolor#body}% + \else + \pushcolor[\p_color]% + \rootradical{\popcolor\currentmathradicaldegree\pushcolor[\p_color]}% + {\popcolor#body}% + \fi\fi} + +% As I had a long standing wish to see a proper final root element I decided +% to make one of my own.
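+
+% A possible usage sketch of the mechanism defined above (the instance name
+% \fancysqrt is made up for illustration; only \definemathradical and the
+% alternative/color keys from this module are assumed):
+%
+% \definemathradical [fancysqrt] [alternative=normal,color=darkblue]
+%
+% \starttext
+%     $\sqrt{2}$, $\sqrt[3]{2}$ and $\fancysqrt[3]{x+1}$
+% \stoptext
+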
+% +% \startMPcode +% path p ; p := unitsquare xysized(4cm,1cm) ; +% +% path q ; q := boundingbox p enlarged (bbheight(p)/10) ; +% numeric h ; h := bbheight(q) ; +% +% draw p ; +% draw +% llcorner q shifted (-h/2,h/2) -- +% llcorner q shifted (-h/4,0) -- +% ulcorner q -- +% urcorner q -- +% urcorner q shifted (0,-h/10) ; +% \stopMPcode +% +% \startMPextensions +% vardef math_root(expr w,h,d,o) = +% path q ; q := boundingbox unitsquare xysized(w,h) enlarged (o); +% llcorner q shifted (-h/2,h/2) -- +% llcorner q shifted (-h/4,-d) -- +% ulcorner q -- +% urcorner q -- +% urcorner q shifted (0,-h/10) +% enddef ; +% \stopMPextensions +% +% \startuniqueMPgraphic{root}{width,height,depth,offset,linewidth} +% pickup pencircle scaled \MPvar{linewidth} ; +% draw math_root(\MPvar{width},\MPvar{height},\MPvar{depth},\MPvar{offset}) ; +% \stopuniqueMPgraphic +% +% \unexpanded\def\sqrt#1% +% {\begingroup +% \setbox\scratchbox\mathstylehbox{#1}% +% \scratchoffset\MPrawvar{root}{offset}%.25\exheight +% \scratchwidth \wd\scratchbox +% \scratchheight\ht\scratchbox +% \scratchdepth \dp\scratchbox +% \setbox2=\hbox\bgroup % todo: tag this box as sqrt +% \uniqueMPgraphic +% {root}% +% {width=\the\scratchwidth,% +% depth=\the\scratchdepth,% +% height=\the\scratchheight,% +% offset=\the\scratchoffset, +% linewidth=\the\linewidth}% +% \egroup +% \scratchdimen\wd2 +% \lower\dimexpr\scratchoffset+\scratchdepth\relax\box2 +% \hskip-\scratchdimen +% \hbox to \scratchdimen{\hss\box\scratchbox\hskip\scratchoffset}% +% \endgroup} + +\startMPextensions + vardef math_radical_simple(expr w,h,d,o) = + (-h/2-o,h/2-o) -- + (-h/4-o,-d-o) -- + (-o,h+o) -- + (w+o,h+o) -- + (w+o,h-h/10+o) + enddef ; +\stopMPextensions + +\startuniqueMPgraphic{math:radical:default}%{...} + draw + math_radical_simple(OverlayWidth,OverlayHeight,OverlayDepth,OverlayOffset) + withpen pencircle xscaled (2OverlayLineWidth) yscaled (3OverlayLineWidth/4) rotated 30 + % dashed evenly + withcolor OverlayLineColor ; +\stopuniqueMPgraphic + +% todo: spacing .. 
this is just an experiment (article driven) + +\setvalue{\??mathradicalalternative\v!mp}#body% we could use dowithnextbox + {\begingroup + \scratchoffset\mathradicalparameter\c!mpoffset + \setbox\nextbox\mathstylehbox{#body}% + % we use the \overlay variables as these are passed anyway and + % it's more efficient than using parameters + \edef\overlaywidth {\the\wd\nextbox}% + \edef\overlayheight {\the\ht\nextbox}% + \edef\overlaydepth {\the\dp\nextbox}% + \edef\overlayoffset {\the\scratchoffset}% + \edef\overlaylinewidth{\the\linewidth}% + \edef\overlaylinecolor{\mathradicalparameter\c!color}% + % + \edef\p_mp{\mathradicalparameter\c!mp}% + % + \setbox\scratchbox\hbox\bgroup % todo: tag this box as sqrt + \uniqueMPgraphic + {\p_mp}% + %{...}% + \egroup + \scratchdimen \wd\scratchbox + \scratchtopoffset \dimexpr\scratchoffset+\dp\nextbox\relax + \scratchbottomoffset\dimexpr\scratchoffset+\ht\nextbox/2\relax + \hbox to \scratchdimen{\hss\box\nextbox\hskip\scratchoffset}% + \hskip-\scratchdimen + \lower\dimexpr\scratchtopoffset\box\scratchbox% + \ifx\currentmathradicaldegree\empty \else + \setbox\scratchbox\mathstylehbox{\scriptscriptstyle\currentmathradicaldegree\hss}% + \wd\scratchbox\scratchdimen + \hskip-\scratchdimen + \raise\dimexpr\scratchbottomoffset\box\scratchbox + \fi + \endgroup} + +\definemathradical[sqrt][mp=math:radical:default] + +% \setupmathradical[sqrt][alternative=normal,color=darkblue] +% \setupmathradical[sqrt][alternative=mp,color=darkgreen] + +%D Because I wanted to illustrate some more fun stuff another mechanism +%D is provided as well ... let's put some dangerous tools in the hands of +%D math jugglers like Aditya. + +\installcorenamespace{mathornament} +\installcorenamespace{mathornamentalternative} + +\installcommandhandler \??mathornament {mathornament} \??mathornament + +\setupmathornament + [\c!alternative=\v!mp, % currently mp only .. 
maybe some day layer too + \c!mpoffset=.25\exheight] + +\appendtoks + \setuevalue{\currentmathornament}{\math_ornament_handle{\currentmathornament}} +\to \everydefinemathornament + +\unexpanded\def\math_ornament_handle#tag#body% + {\begingroup + \edef\currentmathornament{#tag}% + \csname\??mathornamentalternative\mathornamentparameter\c!alternative\endcsname{#body}% + \endgroup} + +\setvalue{\??mathornamentalternative\v!mp}#body% we could use dowithnextbox + {\begingroup + \scratchoffset\mathornamentparameter\c!mpoffset + \setbox\nextbox\mathstylehbox{#body}% + \edef\overlaywidth {\the\wd\nextbox}% + \edef\overlayheight {\the\ht\nextbox}% + \edef\overlaydepth {\the\dp\nextbox}% + \edef\overlayoffset {\the\scratchoffset}% + \edef\overlaylinewidth{\the\linewidth}% + \edef\overlaylinecolor{\mathornamentparameter\c!color}% + \edef\p_mp{\mathornamentparameter\c!mp}% + % thw width of the graphic determines the width of the final result + \setbox\scratchbox\hbox{\uniqueMPgraphic{\p_mp}}% todo: add code key + tag + \scratchdimen \wd\scratchbox + % \scratchtopoffset \dimexpr\scratchoffset+\dp\nextbox\relax + % \scratchbottomoffset\dimexpr\scratchoffset+\ht\nextbox/2\relax + \hbox to \scratchdimen{\hss\box\nextbox\hss}% + \hskip-\scratchdimen + \box\scratchbox + \endgroup} + +% \startMPextensions +% vardef math_ornament_hat(expr w,h,d,o,l) = +% image ( path p ; p := +% (w/2,h + 10l) -- +% (o + w,h + o) -- +% (w/2,h + 7l) -- +% (-o,h + o) -- +% cycle ; +% fill p ; +% setbounds currentpicture to (-o,0) -- (w+o,0) -- (w+o,h+2o) -- (-o,h+2o) -- cycle ; +% ) +% enddef ; +% \stopMPextensions +% +% \startuniqueMPgraphic{math:ornament:hat} +% draw +% math_ornament_hat( +% OverlayWidth, +% OverlayHeight, +% OverlayDepth, +% OverlayOffset, +% OverlayLineWidth +% ) +% withpen +% pencircle +% xscaled (2OverlayLineWidth) +% yscaled (3OverlayLineWidth/4) +% rotated 30 +% withcolor +% OverlayLineColor ; +% draw boundingbox currentpicture; +% \stopuniqueMPgraphic +% +% \definemathornament [mathhat] [mp=math:ornament:hat] +% +% \dorecurse{8}{$\mathhat{\blackrule[width=#1ex,color=gray]}$ } + +\protect \endinput diff --git a/tex/context/base/math-ren.lua b/tex/context/base/math-ren.lua index 2e7dba13d..5c4c13369 100644 --- a/tex/context/base/math-ren.lua +++ b/tex/context/base/math-ren.lua @@ -63,7 +63,3 @@ mathematics.renderset = renderset function commands.mathrenderset(list) context(renderset(list)) end - --- function commands.setmatrendering(list) --- tex.setattribute(renderset(list)) --- end diff --git a/tex/context/base/math-stc.mkvi b/tex/context/base/math-stc.mkvi index 2dc2b2c22..76a07db5c 100644 --- a/tex/context/base/math-stc.mkvi +++ b/tex/context/base/math-stc.mkvi @@ -48,6 +48,10 @@ %D %D In the end we have a more flexible mechanism which also handles text variants. +%D When wrapping up some math developments I decided to add mp support here +%D as well. A nice evening job with Joe Bonamassa performing live on the big +%D screen (real nice bluray's). See meta-imp-mat.mkiv for examples. + % possible improvements: % % - we could skip the left/right offsets when offset=normal, this saves some access time @@ -56,27 +60,40 @@ \installcorenamespace {mathextensiblefallbacks} +% currently no italic correction ... 
problem is that we don't know yet if we have an italic +% below so we we need to postpone + \def\math_stackers_fallback {\hbox to \scratchwidth{\csname\??mathextensiblefallbacks\ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname\number\scratchunicode\fi\endcsname}} %{\csname\??mathextensiblefallbacks\ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname\number\scratchunicode\fi\endcsname } +% \def\math_stackers_regular +% {\mathstylehbox{\Umathaccent\fam\zerocount\scratchunicode{\hskip\scratchwidth}}} + \def\math_stackers_regular - {\mathstylehbox{\Umathaccent\fam\zerocount\scratchunicode{\hskip\scratchwidth}}} + {\mathstylehbox{\usemathstackerscolorparameter\c!color + \Umathaccent\fam\zerocount\scratchunicode{\hskip\scratchwidth}}} \def\math_stackers_stretch % we don't have that one yet - {\mathstylehbox{\Umathaccent\fam\zerocount\scratchunicode{\hskip\hsize}}} + {\mathstylehbox{\usemathstackerscolorparameter\c!color + \Umathaccent\fam\zerocount\scratchunicode{\hskip\hsize}}} + +% these delimiters are a unuseable as theu don't center for small arguments: +% +% $\Umathaccent 0 0 "2190{x}$ \par $\Umathaccent 0 0 "27F8{x}$\par +% $\Udelimiterunder 0 "2190{x}$ \par $\Udelimiterunder 0 "27F8{x}$\par \setvalue{\??mathextensiblefallbacks}{\hbox{\vrule\!!width\scratchwidth\!!height.1\exheight\!!depth\zeropoint}} -\def\math_stackers_with_fallback#codepoint% - {\begingroup - \scratchunicode#codepoint\relax - \ifcase\mathextensiblecode\fam\scratchunicode\relax - \math_stackers_fallback - \else - \math_stackers_stretch - \fi - \endgroup} +% \def\math_stackers_with_fallback#codepoint% +% {\begingroup +% \scratchunicode#codepoint\relax +% \ifcase\mathextensiblecode\fam\scratchunicode\relax +% \math_stackers_fallback +% \else +% \math_stackers_stretch +% \fi +% \endgroup} %D We don't really need this because we can assume that fonts have the right %D extensibles. If needed I will make a general virtual extender for \OPENTYPE\ @@ -114,6 +131,12 @@ [%c!alternative=\v!text, % text | mathematics \c!left=, \c!right=, + \c!mathclass=\s!rel, + \c!alternative=\v!normal, + \c!mp=math:stacker:\number\scratchunicode, + \c!mpheight=\exheight, + \c!mpdepth=\exheight, + \c!mpoffset=.25\exheight, \c!voffset=.25\exheight, \c!hoffset=.5\emwidth, \c!minheight=\exheight, @@ -121,7 +144,7 @@ \c!minwidth=\emwidth, \c!order=\v!normal, \c!strut=, - %\c!color=, % todo: when I need it + \c!color=, % todo: when I need it \c!topcommand=, \c!middlecommand=, \c!bottomcommand=, @@ -132,6 +155,7 @@ %D top of the baseline by default. 
\installcorenamespace {mathstackerslocation} +\installcorenamespace {mathstackersalternative} \letvalue{\??mathstackerslocation\v!top }\plusone % on top of baseline \letvalue{\??mathstackerslocation\v!high }\plustwo % 25 % down @@ -172,6 +196,23 @@ \math_stackers_fallback \fi} +% no checking, we assume sane use + +\letvalue{\??mathstackersalternative\v!normal }\math_stackers_content +\letvalue{\??mathstackersalternative\v!default}\math_stackers_content + +\setvalue{\??mathstackersalternative\v!mp}% + {\hbox\bgroup % todo: add code key + tag + \edef\overlaywidth {\the\scratchwidth}% + \edef\overlayheight {\the\dimexpr\mathstackersparameter\c!mpheight}% + \edef\overlaydepth {\the\dimexpr\mathstackersparameter\c!mpdepth}% + \edef\overlayoffset {\the\dimexpr\mathstackersparameter\c!mpoffset}% + \edef\overlaylinewidth{\the\linewidth}% + \edef\overlaylinecolor{\mathstackersparameter\c!color}% + \edef\p_mp{\mathstackersparameter\c!mp}% + \uniqueMPgraphic{\p_mp}% + \egroup} + \def\math_stackers_check_unicode#codepoint% {\scratchunicode#codepoint\relax \scratchhoffset\mathstackersparameter\c!hoffset\relax @@ -223,11 +264,12 @@ {\begingroup \edef\currentmathstackers{#category}% \mathstackersparameter\c!left\relax - \ifmmode\mathrel\else\dontleavehmode\fi - {\edef\p_offset {\mathstackersparameter\c!offset}% - \edef\p_location{\mathstackersparameter\c!location}% - \edef\p_order {\mathstackersparameter\c!order}% - \edef\p_strut {\mathstackersparameter\c!strut}% + \ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi + {\edef\p_offset {\mathstackersparameter\c!offset}% + \edef\p_location {\mathstackersparameter\c!location}% + \edef\p_order {\mathstackersparameter\c!order}% + \edef\p_strut {\mathstackersparameter\c!strut}% + \edef\p_alternative{\mathstackersparameter\c!alternative}% \ifx\p_order\v!reverse \edef\m_math_stackers_text_top {#bottomtext}% \edef\m_math_stackers_text_bottom{#toptext}% @@ -285,17 +327,17 @@ \advance\scratchwidth2\scratchhoffset % \ifcase#method\relax - \setbox\scratchboxthree\math_stackers_content + \setbox\scratchboxthree\csname\??mathstackersalternative\p_alternative\endcsname \fi % \ifdim\wd\scratchboxone<\scratchwidth - \setbox\scratchboxone\hbox to \scratchwidth{\hss\box\scratchboxone\hss}% + \setbox\scratchboxone\hbox to \scratchwidth{\hss\unhbox\scratchboxone\hss}% unhboxing makes leaders work \fi \ifdim\wd\scratchboxtwo<\scratchwidth - \setbox\scratchboxtwo\hbox to \scratchwidth{\hss\box\scratchboxtwo\hss}% + \setbox\scratchboxtwo\hbox to \scratchwidth{\hss\unhbox\scratchboxtwo\hss}% \fi \ifdim\wd\scratchboxthree<\scratchwidth - \setbox\scratchboxthree\hbox to \scratchwidth{\hss\box\scratchboxthree\hss}% + \setbox\scratchboxthree\hbox to \scratchwidth{\hss\unhbox\scratchboxthree\hss}% \fi % \ifcsname\??mathstackerslocation\p_location\endcsname @@ -395,17 +437,30 @@ %D The next one deals with under and over extensibles (arrows mostly): -\unexpanded\def\math_stackers_double#where#category#codepoint#text% +\installcorenamespace {mathclasses} + +\letvalue{\??mathclasses }\mathord +\letvalue{\??mathclasses rel}\mathrel +\letvalue{\??mathclasses ord}\mathord + +\def\math_class_by_parameter#1% + {\normalexpanded{\noexpand\math_class_by_parameter_indeed{#1\c!mathclass}}} + +\def\math_class_by_parameter_indeed#1% + {\csname\??mathclasses\ifcsname\??mathclasses#1\endcsname#1\fi\endcsname} + +\unexpanded\def\math_stackers_make_double#top#bottom#category#codepoint#codeextra#text% {\begingroup \edef\currentmathstackers{#category}% 
\mathstackersparameter\c!left\relax - \ifmmode\mathrel\else\dontleavehmode\fi + \ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi {\edef\currentmathstackers{#category}% \edef\m_math_stackers_text_middle {#text}% % - \edef\p_offset {\mathstackersparameter\c!offset}% - \edef\p_location{\mathstackersparameter\c!location}% - \edef\p_strut {\mathstackersparameter\c!strut}% + \edef\p_offset {\mathstackersparameter\c!offset}% + \edef\p_location {\mathstackersparameter\c!location}% + \edef\p_strut {\mathstackersparameter\c!strut}% + \edef\p_alternative{\mathstackersparameter\c!alternative}% % \scratchleftoffset \zeropoint \scratchrightoffset\zeropoint @@ -425,7 +480,7 @@ \fi \advance\scratchwidth2\scratchhoffset % - \setbox\scratchboxtwo \math_stackers_content + \setbox\scratchboxtwo \csname\??mathstackersalternative\p_alternative\endcsname \setbox\scratchboxthree\hbox to \scratchwidth{\hss\box\scratchboxthree\hss}% % \math_stackers_normalize_three @@ -436,49 +491,74 @@ % \ifdim\htdp\scratchboxtwo>\zeropoint \kern-\scratchwidth - \ifcase#where\relax + \ifcase#top\else \math_stackers_top\bgroup - \raise\dimexpr\scratchheight+\scratchtopoffset\relax + % \raise\dimexpr\scratchheight+\scratchtopoffset\relax + \raise\dimexpr\scratchheight+\mathstackersparameter\c!voffset\relax \box\scratchboxtwo \egroup - \else + \fi + \scratchunicode#codeextra\relax + \ifcase\scratchunicode\else + \kern-\scratchwidth + \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname + \fi + \ifcase#bottom\else \math_stackers_bottom\bgroup - \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\scratchbottomoffset\relax + % \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\scratchbottomoffset\relax + \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\mathstackersparameter\c!voffset\relax \box\scratchboxtwo \egroup \fi \fi}% \mathstackersparameter\c!right\relax - \endgroup} - -\unexpanded\def\definemathoverextensible {\dotripleempty\math_extensiblies_define_over } -\unexpanded\def\definemathunderextensible{\dotripleempty\math_extensiblies_define_under} - -\def\math_extensiblies_define_over[#1][#2][#3]% + \edef\p_limits{\mathstackersparameter\c!mathlimits}% + \ifx\p_limits\v!yes + \expandafter\endgroup\expandafter\limits + \else + \expandafter\endgroup + \fi} + +\unexpanded\def\definemathoverextensible {\dotripleempty \math_extensibles_define_over } +\unexpanded\def\definemathunderextensible {\dotripleempty \math_extensibles_define_under} +\unexpanded\def\definemathdoubleextensible{\doquadrupleempty\math_extensibles_define_double} + +\def\math_extensibles_define_over[#1][#2][#3]% {\ifthirdargument - \setuevalue{#2}{\math_stackers_double\zerocount{#1}{\number#3}}% + \setuevalue{#2}{\math_stackers_make_double\plusone \zerocount{#1}{\number#3}{0}}% \else - \setuevalue{#1}{\math_stackers_double\zerocount\noexpand\currentmathstackers{\number#2}}% + \setuevalue{#1}{\math_stackers_make_double\plusone \zerocount\noexpand\currentmathstackers{\number#2}{0}}% \fi} -\def\math_extensiblies_define_under[#1][#2][#3]% +\def\math_extensibles_define_under[#1][#2][#3]% {\ifthirdargument - \setuevalue{#2}{\math_stackers_double\plusone{#1}{\number#3}}% + \setuevalue{#2}{\math_stackers_make_double\zerocount\plusone{#1}{\number#3}{0}}% + \else + \setuevalue{#1}{\math_stackers_make_double\zerocount\plusone\noexpand\currentmathstackers{\number#2}{0}}% + \fi} + +\def\math_extensibles_define_double[#1][#2][#3][#4]% + {\iffourthargument + \setuevalue{#2}{\math_stackers_make_double\plusone 
\plusone{#1}{\number#3}{\number#4}}% \else - \setuevalue{#1}{\math_stackers_double\plusone\noexpand\currentmathstackers{\number#2}}% + \setuevalue{#1}{\math_stackers_make_double\plusone \plusone\noexpand\currentmathstackers{\number#2}{\number#3}}% \fi} -\unexpanded\def\mathover {\begingroup\dosingleempty\math_stackers_handle_over } -\unexpanded\def\mathunder{\begingroup\dosingleempty\math_stackers_handle_under} +\unexpanded\def\mathover {\begingroup\dosingleempty\math_stackers_handle_over } +\unexpanded\def\mathunder {\begingroup\dosingleempty\math_stackers_handle_under } +\unexpanded\def\mathdouble{\begingroup\dodoubleempty\math_stackers_handle_double} \def\math_stackers_handle_over[#category]% - {\math_stackers_handle_double\zerocount{\iffirstargument#category\else\v!top \fi}} % will be defined later on + {\math_stackers_direct_double\plusone\zerocount{\iffirstargument#category\else\v!top \fi}} % will be defined later on \def\math_stackers_handle_under[#category]#codepoint#bottomtext% - {\math_stackers_handle_double\plusone {\iffirstargument#category\else\v!bottom\fi}} % will be defined later on + {\math_stackers_direct_double\zerocount\plusone{\iffirstargument#category\else\v!bottom\fi}} % will be defined later on + +\def\math_stackers_handle_double[#category]#codepoint#bottomtext% + {\math_stackers_direct_double\plusone\plusone {\iffirstargument#category\else\v!bottom\fi}} % will be defined later on -\def\math_stackers_handle_double#location#category#codepoint#text% - {\math_stackers_double#location{#category}{#codepoint}{#text}% +\def\math_stackers_direct_double#top#bottom#category#codepoint#text% + {\math_stackers_make_double#top#bottom{#category}{#codepoint}{#text}% \endgroup} %D Here is a bonus macro that takes three texts. It can be used to get consistent @@ -558,23 +638,79 @@ [\c!order=\v!reverse] \definemathstackers - [\v!top] + [\v!both] [\v!mathematics] - [\c!location=\v!top, + [\c!location=\v!top, % ? \c!strut=\v!no, \c!middlecommand=\mathematics, \c!hoffset=\zeropoint] +\definemathstackers + [\v!top] + [\v!both] + \definemathstackers [\v!bottom] - [\v!mathematics] - [\c!location=\v!top, - \c!strut=\v!no, - \c!middlecommand=\mathematics, - \c!hoffset=\zeropoint] + [\v!both] + +\definemathstackers + [vfenced] + [\v!both] + [\c!mathclass=\s!ord, + \c!mathlimits=\v!yes] % These are compatibity definitions, math only. +% todo: top= bottom= middle= is nicer (compare math-fen) + +%D We save a few definitions that we automatically got from the \type {char-def.lua} +%D database. 
+ +% Be careful in choosing what accents you take (the code below uses a +% combining one): +% +% \startbuffer +% % $\Umathaccent top 0 0 "20D7 {example}$ +% % $\Umathaccent top fixed 0 0 "20D7 {example}$ +% $\Umathaccent 0 0 "20D7 {example}$ +% $\Umathaccent fixed 0 0 "20D7 {example}$ +% $\Umathaccent bottom 0 0 "20D7 {example}$ +% $\Umathaccent bottom fixed 0 0 "20D7 {example}$ +% $\Umathaccent both 0 0 "20D7 +% 0 0 "20D7 {example}$ +% $\Umathaccent both fixed 0 0 "20D7 +% fixed 0 0 "20D7 {example}$ +% $\Umathaccent both 0 0 "20D7 +% fixed 0 0 "20D7 {example}$ +% $\Umathaccent both fixed 0 0 "20D7 +% 0 0 "20D7 {example}$ +% \stopbuffer +% +% \setupbodyfont[modern] \getbuffer +% \setupbodyfont[xits] \getbuffer +% \setupbodyfont[cambria] \getbuffer + +\unexpanded\def\normaldoublebrace {\Umathaccents 0 \defaultmathfamily "23DE 0 \defaultmathfamily "23DF } +\unexpanded\def\normaldoubleparent{\Umathaccents 0 \defaultmathfamily "23DC 0 \defaultmathfamily "23DD } + +% let's keep this + +\let\normaloverbrace \overbrace +\let\normalunderbrace \underbrace +\let\normaloverparent \overparent +\let\normalunderparent \underparent +\let\normaloverbracket \overbracket +\let\normalunderbracket \underbracket +\let\normalunderleftarrow \underleftarrow +\let\normaloverleftarrow \overleftarrow +\let\normalunderrightarrow\underrightarrow +\let\normaloverrightarrow \overrightarrow + +\let\lceil \lceiling +\let\rceil \rceiling + +%D Here come the new ones: + \definemathstackers [\v!none] [\v!mathematics] [\c!hoffset=\zeropoint] \definemathstackers [\v!normal] [\v!mathematics] [\c!hoffset=0.5\emwidth] % the default \definemathstackers [\v!small] [\v!mathematics] [\c!hoffset=1\emwidth] @@ -583,8 +719,12 @@ \definemathextensible [\v!reverse] [xrel] ["002D] \definemathextensible [\v!reverse] [xequal] ["003D] -\definemathextensible [\v!reverse] [xleftarrow] ["2190] -\definemathextensible [\v!reverse] [xrightarrow] ["2192] +\definemathextensible [\v!reverse] [xleftarrow] ["2190] % ["27F5] +\definemathextensible [\v!reverse] [xrightarrow] ["2192] % ["27F6] +\definemathextensible [\v!reverse] [xleftrightarrow] ["27F7] +\definemathextensible [\v!reverse] [xLeftarrow] ["27F8] +\definemathextensible [\v!reverse] [xRightarrow] ["27F9] +\definemathextensible [\v!reverse] [xLeftrightarrow] ["27FA] \definemathextensible [\v!reverse] [xtwoheadleftarrow] ["219E] \definemathextensible [\v!reverse] [xtwoheadrightarrow] ["21A0] \definemathextensible [\v!reverse] [xmapsto] ["21A6] @@ -598,15 +738,15 @@ \definemathextensible [\v!reverse] [xleftrightharpoons] ["21CB] \definemathextensible [\v!reverse] [xrightleftharpoons] ["21CC] \definemathextensible [\v!reverse] [xtriplerel] ["2261] -\definemathextensible [\v!reverse] [xleftrightarrow] ["27F7] -\definemathextensible [\v!reverse] [xLeftarrow] ["27F8] -\definemathextensible [\v!reverse] [xRightarrow] ["27F9] -\definemathextensible [\v!reverse] [xLeftrightarrow] ["27FA] \definemathextensible [\v!mathematics] [mrel] ["002D] \definemathextensible [\v!mathematics] [mequal] ["003D] -\definemathextensible [\v!mathematics] [mleftarrow] ["2190] -\definemathextensible [\v!mathematics] [mrightarrow] ["2192] +\definemathextensible [\v!mathematics] [mleftarrow] ["2190] % ["27F5] +\definemathextensible [\v!mathematics] [mrightarrow] ["2192] % ["27F6] +\definemathextensible [\v!mathematics] [mleftrightarrow] ["27F7] +\definemathextensible [\v!mathematics] [mLeftarrow] ["27F8] +\definemathextensible [\v!mathematics] [mRightarrow] ["27F9] +\definemathextensible [\v!mathematics] [mLeftrightarrow] 
["27FA] \definemathextensible [\v!mathematics] [mtwoheadleftarrow] ["219E] \definemathextensible [\v!mathematics] [mtwoheadrightarrow] ["21A0] \definemathextensible [\v!mathematics] [mmapsto] ["21A6] @@ -620,18 +760,18 @@ \definemathextensible [\v!mathematics] [mleftrightharpoons] ["21CB] \definemathextensible [\v!mathematics] [mrightleftharpoons] ["21CC] \definemathextensible [\v!mathematics] [mtriplerel] ["2261] -\definemathextensible [\v!mathematics] [mleftrightarrow] ["27F7] -\definemathextensible [\v!mathematics] [mLeftarrow] ["27F8] -\definemathextensible [\v!mathematics] [mRightarrow] ["27F9] -\definemathextensible [\v!mathematics] [mLeftrightarrow] ["27FA] \definemathextensible [\v!text] [trel] ["002D] \definemathextensible [\v!text] [tequal] ["003D] -\definemathextensible [\v!text] [tleftarrow] ["2190] -\definemathextensible [\v!text] [trightarrow] ["2192] +\definemathextensible [\v!text] [tmapsto] ["21A6] +\definemathextensible [\v!text] [tleftarrow] ["2190] % ["27F5] +\definemathextensible [\v!text] [trightarrow] ["2192] % ["27F6] +\definemathextensible [\v!text] [tleftrightarrow] ["27F7] +\definemathextensible [\v!text] [tLeftarrow] ["27F8] +\definemathextensible [\v!text] [tRightarrow] ["27F9] +\definemathextensible [\v!text] [tLeftrightarrow] ["27FA] \definemathextensible [\v!text] [ttwoheadleftarrow] ["219E] \definemathextensible [\v!text] [ttwoheadrightarrow] ["21A0] -\definemathextensible [\v!text] [tmapsto] ["21A6] \definemathextensible [\v!text] [thookleftarrow] ["21A9] \definemathextensible [\v!text] [thookrightarrow] ["21AA] \definemathextensible [\v!text] [tleftharpoondown] ["21BD] @@ -642,30 +782,59 @@ \definemathextensible [\v!text] [tleftrightharpoons] ["21CB] \definemathextensible [\v!text] [trightleftharpoons] ["21CC] \definemathextensible [\v!text] [ttriplerel] ["2261] -\definemathextensible [\v!text] [tleftrightarrow] ["27F7] -\definemathextensible [\v!text] [tLeftarrow] ["27F8] -\definemathextensible [\v!text] [tRightarrow] ["27F9] -\definemathextensible [\v!text] [tLeftrightarrow] ["27FA] -\definemathoverextensible [\v!top] [overleftarrow] ["2190] -\definemathoverextensible [\v!top] [overrightarrow] ["2192] +\definemathoverextensible [\v!top] [overleftarrow] ["2190] % ["27F5] +\definemathoverextensible [\v!top] [overrightarrow] ["2192] % ["27F6] +\definemathoverextensible [\v!top] [overleftrightarrow] ["27F7] +\definemathoverextensible [\v!top] [overtwoheadleftarrow] ["27F8] +\definemathoverextensible [\v!top] [overtwoheadrightarrow] ["27F9] \definemathoverextensible [\v!top] [overleftharpoondown] ["21BD] \definemathoverextensible [\v!top] [overleftharpoonup] ["21BC] \definemathoverextensible [\v!top] [overrightharpoondown] ["21C1] \definemathoverextensible [\v!top] [overrightharpoonup] ["21C0] -\definemathoverextensible [\v!top] [overleftrightarrow] ["27F7] -\definemathoverextensible [\v!top] [overtwoheadleftarrow] ["27F8] -\definemathoverextensible [\v!top] [overtwoheadrightarrow] ["27F9] -\definemathunderextensible [\v!bottom] [underleftarrow] ["2190] -\definemathunderextensible [\v!bottom] [underrightarrow] ["2192] +\definemathunderextensible [\v!bottom] [underleftarrow] ["2190] % ["27F5] +\definemathunderextensible [\v!bottom] [underrightarrow] ["2192] % ["27F6] +\definemathunderextensible [\v!bottom] [underleftrightarrow] ["27F7] +\definemathunderextensible [\v!bottom] [undertwoheadleftarrow] ["27F8] +\definemathunderextensible [\v!bottom] [undertwoheadrightarrow] ["27F9] \definemathunderextensible [\v!bottom] [underleftharpoondown] ["21BD] 
\definemathunderextensible [\v!bottom] [underleftharpoonup] ["21BC] \definemathunderextensible [\v!bottom] [underrightharpoondown] ["21C1] \definemathunderextensible [\v!bottom] [underrightharpoonup] ["21C0] -\definemathunderextensible [\v!bottom] [underleftrightarrow] ["27F7] -\definemathunderextensible [\v!bottom] [undertwoheadleftarrow] ["27F8] -\definemathunderextensible [\v!bottom] [undertwoheadrightarrow] ["27F9] + +% We don't use overline and underline. This is one of the overlooked aspects of +% unicode cq. opentype math: why treat rules different than e.g. arrows and +% accents. It is a bit unfortunate that the opportunity to move math to new +% technologies happened outside the tex domain (and/or some aspects were kept +% while in fact they were side effects of limitations of traditional fonts). +% From the unicode aware tex engines' implementation point of view things +% could have been done a bit nicer but then: the community didn't seem to care +% too much and just has to follow now. +% +% Anyhow, we use a character based approach so that at least we get unicode +% stuff in the backend (okay, we still need to deal with some cut and paste +% issues but at least we now know what we deal with. + +% alternatively we can move the original to FE* + +\definemathoverextensible [vfenced] [overbar] ["FE33E] % ["203E] +\definemathunderextensible [vfenced] [underbar] ["FE33F] % ["203E] +\definemathdoubleextensible [vfenced] [doublebar] ["FE33E] ["FE33F] + +\definemathoverextensible [vfenced] [overbrace] ["FE3DE] % ["023DE] +\definemathunderextensible [vfenced] [underbrace] ["FE3DF] % ["023DF] +\definemathdoubleextensible [vfenced] [doublebrace] ["FE3DE] ["FE3DF] + +\definemathoverextensible [vfenced] [overparent] ["FE3DC] % ["023DC] +\definemathunderextensible [vfenced] [underparent] ["FE3DD] % ["023DD] +\definemathdoubleextensible [vfenced] [doubleparent] ["FE3DC] ["FE3DD] + +\definemathoverextensible [vfenced] [overbracket] ["FE3B4] % ["023B4] +\definemathunderextensible [vfenced] [underbracket] ["FE3B5] % ["023B5] +\definemathdoubleextensible [vfenced] [doublebracket] ["FE3B4] ["FE3B5] + +% \unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits} %D Some bonus ones (for the moment here): @@ -696,7 +865,7 @@ #3% \endgroup}} -% These will be defined in char-def as well: +% These will be defined in char-def as well once we have \leaders \unexpanded\def\rightarrowfill {\math_stackers_hacked_fill \relbar \relbar \rightarrow} \unexpanded\def\leftarrowfill {\math_stackers_hacked_fill \leftarrow \relbar \relbar } @@ -734,6 +903,7 @@ {\expandafter\let\csname\??mathextensiblefallbacks\number#2\expandafter\endcsname\csname#1\endcsname \expandafter\let\csname #1\expandafter\endcsname\csname#1\endcsname} +\defineextensiblefiller [barfill] ["203E] \defineextensiblefiller [relfill] ["002D] \defineextensiblefiller [equalfill] ["003D] \defineextensiblefiller [leftarrowfill] ["2190] @@ -755,6 +925,7 @@ \defineextensiblefiller [Leftarrowfill] ["27F8] \defineextensiblefiller [Rightarrowfill] ["27F9] \defineextensiblefiller [Leftrightarrowfill] ["27FA] +\defineextensiblefiller [Leftrightarrowfill] ["27FA] %D Extra: @@ -766,6 +937,40 @@ % \mathchardef\doublebond"003D % \mathchardef\triplebond"2261 +%D Also handy: + +\unexpanded\def\definemathunstacked + {\dotripleempty\math_stackers_define_unstacked_normal} + +\def\math_stackers_define_unstacked_normal[#1][#2][#3]% category name unicode + {\ifthirdargument + \setuevalue{#2}{\math_stackers_unstacked_normal{#1}{\number#3}}% + \else + 
\setuevalue{#1}{\math_stackers_unstacked_normal\noexpand\currentmathstackers{\number#2}}% + \fi} + +\unexpanded\def\math_stackers_unstacked_normal#category#codepoint% + {\begingroup + \edef\currentmathstackers{#category}% + \edef\p_moffset{\mathstackersparameter\c!moffset}% + \ifx\p_moffset\empty \else + \mskip\scratchmuskip + \fi + \ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi + {\usemathstackerscolorparameter\c!color + \Umathchar\zerocount\defaultmathfamily#codepoint}% + \ifx\p_moffset\empty \else + \mskip\scratchmuskip + \fi + \endgroup} + +\definemathstackers [\v!wide] [\c!moffset=\thickmuskip,\c!mathclass=\s!rel] + +\definemathunstacked [\v!wide] [And] ["0026] % \mathrel{\;&\;} +\definemathunstacked [\v!wide] [impliedby] ["27F8] % \mathrel{\;\Longleftarrow\;} +\definemathunstacked [\v!wide] [implies] ["27F9] % \mathrel{\;\Longrightarrow\;} +\definemathunstacked [\v!wide] [iff] ["27FA] % \mathrel{\;\Longleftrightarrow\;} + \protect \endinput % \mathrel{\mathop{\hbox to \dimen0{\hss\copy4\hss}} diff --git a/tex/context/base/math-vfu.lua b/tex/context/base/math-vfu.lua index 2f7c0507b..6d9a9f903 100644 --- a/tex/context/base/math-vfu.lua +++ b/tex/context/base/math-vfu.lua @@ -26,6 +26,7 @@ local type, next = type, next local max = math.max local format = string.format local utfchar = utf.char +local fastcopy = table.copy local fonts, nodes, mathematics = fonts, nodes, mathematics @@ -38,6 +39,7 @@ local report_virtual = logs.reporter("fonts","virtual math") local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex +local formatters = string.formatters local mathencodings = allocate() fonts.encodings.math = mathencodings -- better is then: fonts.encodings.vectors @@ -199,21 +201,44 @@ end -- { "node", nodeinjections.transform(.7,0,0,.7) }, -- commands[#commands+1] = { "node", nodeinjections.restore() } -local done = { } +-- local done = { } +-- +-- local function raise(main,characters,id,size,unicode,private,n,id_of_smaller) -- this is a real fake mess +-- local raised = characters[private] +-- if raised then +-- if not done[unicode] then +-- report_virtual("temporary too large %U due to issues in luatex backend",unicode) +-- done[unicode] = true +-- end +-- local up = 0.85 * main.parameters.x_height +-- local slot = { "slot", id, private } +-- local commands = { +-- push, +-- { "down", - up }, +-- -- { "scale", .7, 0, 0, .7 }, +-- slot, +-- } +-- for i=2,n do +-- commands[#commands+1] = slot +-- end +-- commands[#commands+1] = pop +-- characters[unicode] = { +-- width = .7 * n * raised.width, +-- height = .7 * (raised.height + up), +-- depth = .7 * (raised.depth - up), +-- commands = commands, +-- } +-- end +-- end -local function raise(main,characters,id,size,unicode,private,n) -- this is a real fake mess - local raised = characters[private] +local function raise(main,characters,id,size,unicode,private,n,id_of_smaller) -- this is a real fake mess + local raised = fonts.hashes.characters[main.fonts[id_of_smaller].id][private] -- characters[private] if raised then - if not done[unicode] then - report_virtual("temporary too large %U due to issues in luatex backend",unicode) - done[unicode] = true - end local up = 0.85 * main.parameters.x_height - local slot = { "slot", id, private } + local slot = { "slot", id_of_smaller, private } local commands = { push, { "down", - up }, - -- { "scale", .7, 0, 0, .7 }, slot, } for i=2,n do @@ -221,9 +246,10 @@ local function raise(main,characters,id,size,unicode,private,n) -- this is a 
rea end commands[#commands+1] = pop characters[unicode] = { - width = .7 * n * raised.width, - height = .7 * (raised.height + up), - depth = .7 * (raised.depth - up), + width = n * raised.width, + height = (raised.height or 0) + up, + depth = (raised.depth or 0) - up, + italic = raised.italic, commands = commands, } end @@ -406,7 +432,25 @@ local function repeated(main,characters,id,size,unicode,u,n,private,fraction) -- end end +-- we use the fact that context defines the smallest sizes first .. a real dirty and ugly hack + +local data_of_smaller = nil +local size_of_smaller = 0 + function vfmath.addmissing(main,id,size) + + local id_of_smaller = nil + + if size < size_of_smaller or size_of_smaller == 0 then + data_of_smaller = main.fonts[id] + id_of_smaller = id + else + id_of_smaller = #main.fonts + 1 + main.fonts[id_of_smaller] = data_of_smaller + end + + -- here id is the index in fonts (normally 14 or so) and that slot points to self + local characters = main.characters local shared = main.shared local variables = main.goodies.mathematics and main.goodies.mathematics.variables or { } @@ -504,9 +548,11 @@ function vfmath.addmissing(main,id,size) repeated(main,characters,id,size,0x222C,0x222B,2,0xFF800,1/3) repeated(main,characters,id,size,0x222D,0x222B,3,0xFF810,1/3) - -- raise (main,characters,id,size,0x02032,0xFE325,1) -- prime - -- raise (main,characters,id,size,0x02033,0xFE325,2) -- double prime - -- raise (main,characters,id,size,0x02034,0xFE325,3) -- triple prime + characters[0xFE325] = fastcopy(characters[0x2032]) + + raise (main,characters,id,size,0x02032,0xFE325,1,id_of_smaller) -- prime + raise (main,characters,id,size,0x02033,0xFE325,2,id_of_smaller) -- double prime + raise (main,characters,id,size,0x02034,0xFE325,3,id_of_smaller) -- triple prime -- there are more (needs discussion first): @@ -515,6 +561,9 @@ function vfmath.addmissing(main,id,size) characters[0x02B9] = characters[0x2032] -- we're nice + data_of_smaller = main.fonts[id] + size_of_smaller = size + end local unique = 0 -- testcase: \startTEXpage \math{!\text{-}\text{-}\text{-}} \stopTEXpage @@ -534,6 +583,82 @@ setmetatableindex(reverse, function(t,name) return r end) +local function copy_glyph(main,target,original,unicode,slot) + local addprivate = fonts.helpers.addprivate + local olddata = original[unicode] + if olddata then + local newdata = { + width = olddata.width, + height = olddata.height, + depth = olddata.depth, + italic = olddata.italic, + kerns = olddata.kerns, + commands = { { "slot", slot, unicode } }, + } + local glyphdata = newdata + local nextglyph = olddata.next + while nextglyph do + local oldnextdata = original[nextglyph] + local newnextdata = { + commands = { { "slot", slot, nextglyph } }, + width = oldnextdata.width, + height = oldnextdata.height, + depth = oldnextdata.depth, + } + local newnextglyph = addprivate(main,formatters["M-N-%H"](nextglyph),newnextdata) + newdata.next = newnextglyph +-- report_virtual("copied next: %X",newdata.next) + local nextnextglyph = oldnextdata.next + if nextnextglyph == nextglyph then + break + else + olddata = oldnextdata + newdata = newnextdata + nextglyph = nextnextglyph + end + end + local hv = olddata.horiz_variants + if hv then + hv = fastcopy(hv) + newdata.horiz_variants = hv + for i=1,#hv do + local hvi = hv[i] + local oldglyph = hvi.glyph + local olddata = original[oldglyph] + local newdata = { + commands = { { "slot", slot, oldglyph } }, + width = olddata.width, + height = olddata.height, + depth = olddata.depth, + } + hvi.glyph = 
addprivate(main,formatters["M-H-%H"](oldglyph),newdata) +-- report_virtual("copied h variant: %X at index %i",hvi.glyph,i) + end + end + local vv = olddata.vert_variants + if vv then + vv = fastcopy(vv) + newdata.vert_variants = vv + for i=1,#vv do + local vvi = vv[i] + local oldglyph = vvi.glyph + local olddata = original[oldglyph] + local newdata = { + commands = { { "slot", slot, oldglyph } }, + width = olddata.width, + height = olddata.height, + depth = olddata.depth, + } + vvi.glyph = addprivate(main,formatters["M-V-%H"](oldglyph),newdata) +-- report_virtual("copied v variant: %X at index %i",vvi.glyph,i) + end + end + return newdata + end +end + +vfmath.copy_glyph = copy_glyph + function vfmath.define(specification,set,goodies) local name = specification.name -- symbolic name local size = specification.size -- given size @@ -576,7 +701,7 @@ function vfmath.define(specification,set,goodies) shared[n] = { } end if trace_virtual then - report_virtual("loading font %a subfont %s with name %a at %p as id %s using encoding %p",name,s,ssname,size,id,ss.vector) + report_virtual("loading font %a subfont %s with name %a at %p as id %s using encoding %a",name,s,ssname,size,id,ss.vector) end if not ss.checked then ss.checked = true @@ -677,6 +802,7 @@ function vfmath.define(specification,set,goodies) parameters.x_height = parameters.x_height or 0 -- local already_reported = false + local parameters_done = false for s=1,n do local ss, fs = okset[s], loaded[s] if not fs then @@ -685,7 +811,13 @@ function vfmath.define(specification,set,goodies) -- skip, redundant else local newparameters = fs.parameters - if not newparameters then + local newmathparameters = fs.mathparameters + if newmathparameters then + if not parameters_done or ss.parameters then + mathparameters = newmathparameters + parameters_done = true + end + elseif not newparameters then report_virtual("no parameters set in font %a",name) elseif ss.extension then mathparameters.math_x_height = newparameters.x_height or 0 -- math_x_height : height of x @@ -716,187 +848,202 @@ function vfmath.define(specification,set,goodies) mathparameters.axis_height = newparameters[22] or 0 -- axis_height : height of fraction lines above the baseline -- report_virtual("loading and virtualizing font %a at size %p, setting sy parameters",name,size) end - local vectorname = ss.vector - if vectorname then - local offset = 0xFF000 - local vector = mathencodings[vectorname] - local rotcev = reverse[vectorname] - local isextension = ss.extension - if vector and rotcev then - local fc, fd, si = fs.characters, fs.descriptions, shared[s] - local skewchar = ss.skewchar - for unicode, index in next, vector do - local fci = fc[index] - if not fci then - local fontname = fs.properties.name or "unknown" - local rf = reported[fontname] - if not rf then rf = { } reported[fontname] = rf end - local rv = rf[vectorname] - if not rv then rv = { } rf[vectorname] = rv end - local ru = rv[unicode] - if not ru then - if trace_virtual then - report_virtual("unicode slot %U has no index %H in vector %a for font %a",unicode,index,vectorname,fontname) - elseif not already_reported then - report_virtual("the mapping is incomplete for %a at %p",name,size) - already_reported = true - end - rv[unicode] = true - end - else - local ref = si[index] - if not ref then - ref = { { 'slot', s, index } } - si[index] = ref - end - local kerns = fci.kerns - local width = fci.width - local italic = fci.italic - if italic and italic > 0 then - -- int_a^b - if isextension then - width = width + 
italic -- for obscure reasons the integral as a width + italic correction - end - end - if kerns then - local krn = { } - for k, v in next, kerns do -- kerns is sparse - local rk = rotcev[k] - if rk then - krn[rk] = v -- kerns[k] - end - end - if not next(krn) then - krn = nil - end - local t = { - width = width, - height = fci.height, - depth = fci.depth, - italic = italic, - kerns = krn, - commands = ref, - } - if skewchar then - local k = kerns[skewchar] - if k then - t.top_accent = width/2 + k + if ss.overlay then + local fc = fs.characters + local first = ss.first + if first then + local last = ss.last or first + for unicode = first, last do + characters[unicode] = copy_glyph(main,characters,fc,unicode,s) + end + else + for unicode, data in next, fc do + characters[unicode] = copy_glyph(main,characters,fc,unicode,s) + end + end + else + local vectorname = ss.vector + if vectorname then + local offset = 0xFF000 + local vector = mathencodings[vectorname] + local rotcev = reverse[vectorname] + local isextension = ss.extension + if vector and rotcev then + local fc, fd, si = fs.characters, fs.descriptions, shared[s] + local skewchar = ss.skewchar + for unicode, index in next, vector do + local fci = fc[index] + if not fci then + local fontname = fs.properties.name or "unknown" + local rf = reported[fontname] + if not rf then rf = { } reported[fontname] = rf end + local rv = rf[vectorname] + if not rv then rv = { } rf[vectorname] = rv end + local ru = rv[unicode] + if not ru then + if trace_virtual then + report_virtual("unicode slot %U has no index %H in vector %a for font %a",unicode,index,vectorname,fontname) + elseif not already_reported then + report_virtual("the mapping is incomplete for %a at %p",name,size) + already_reported = true end + rv[unicode] = true end - characters[unicode] = t else - characters[unicode] = { - width = width, - height = fci.height, - depth = fci.depth, - italic = italic, - commands = ref, - } - end - end - end - if isextension then - -- todo: if multiple ex, then 256 offsets per instance - local extension = mathencodings["large-to-small"] - local variants_done = fs.variants_done - for index, fci in next, fc do -- the raw ex file - if type(index) == "number" then local ref = si[index] if not ref then ref = { { 'slot', s, index } } si[index] = ref end + local kerns = fci.kerns + local width = fci.width local italic = fci.italic - local t = { - width = fci.width, - height = fci.height, - depth = fci.depth, - italic = italic, - commands = ref, - } - local n = fci.next - if n then - t.next = offset + n - elseif variants_done then - local vv = fci.vert_variants - if vv then - t.vert_variants = vv - end - local hv = fci.horiz_variants - if hv then - t.horiz_variants = hv + if italic and italic > 0 then + -- int_a^b + if isextension then + width = width + italic -- for obscure reasons the integral as a width + italic correction end - else - local vv = fci.vert_variants - if vv then - for i=1,#vv do - local vvi = vv[i] - vvi.glyph = vvi.glyph + offset + end + if kerns then + local krn = { } + for k, v in next, kerns do -- kerns is sparse + local rk = rotcev[k] + if rk then + krn[rk] = v -- kerns[k] end - t.vert_variants = vv end - local hv = fci.horiz_variants - if hv then - for i=1,#hv do - local hvi = hv[i] - hvi.glyph = hvi.glyph + offset + if not next(krn) then + krn = nil + end + local t = { + width = width, + height = fci.height, + depth = fci.depth, + italic = italic, + kerns = krn, + commands = ref, + } + if skewchar then + local k = kerns[skewchar] + if k 
then + t.top_accent = width/2 + k end - t.horiz_variants = hv end + characters[unicode] = t + else + characters[unicode] = { + width = width, + height = fci.height, + depth = fci.depth, + italic = italic, + commands = ref, + } end - characters[offset + index] = t end end - fs.variants_done = true - for unicode, index in next, extension do - local cu = characters[unicode] - if cu then - cu.next = offset + index - else - local fci = fc[index] - if not fci then - -- do nothing - else - -- probably never entered + if isextension then + -- todo: if multiple ex, then 256 offsets per instance + local extension = mathencodings["large-to-small"] + local variants_done = fs.variants_done + for index, fci in next, fc do -- the raw ex file + if type(index) == "number" then local ref = si[index] if not ref then ref = { { 'slot', s, index } } si[index] = ref end - local kerns = fci.kerns - if kerns then - local krn = { } - -- for k=1,#kerns do - -- krn[offset + k] = kerns[k] - -- end - for k, v in next, kerns do -- is kerns sparse? - krn[offset + k] = v + local italic = fci.italic + local t = { + width = fci.width, + height = fci.height, + depth = fci.depth, + italic = italic, + commands = ref, + } + local n = fci.next + if n then + t.next = offset + n + elseif variants_done then + local vv = fci.vert_variants + if vv then + t.vert_variants = vv + end + local hv = fci.horiz_variants + if hv then + t.horiz_variants = hv end - characters[unicode] = { - width = fci.width, - height = fci.height, - depth = fci.depth, - italic = fci.italic, - commands = ref, - kerns = krn, - next = offset + index, - } else - characters[unicode] = { - width = fci.width, - height = fci.height, - depth = fci.depth, - italic = fci.italic, - commands = ref, - next = offset + index, - } + local vv = fci.vert_variants + if vv then + for i=1,#vv do + local vvi = vv[i] + vvi.glyph = vvi.glyph + offset + end + t.vert_variants = vv + end + local hv = fci.horiz_variants + if hv then + for i=1,#hv do + local hvi = hv[i] + hvi.glyph = hvi.glyph + offset + end + t.horiz_variants = hv + end + end + characters[offset + index] = t + end + end + fs.variants_done = true + for unicode, index in next, extension do + local cu = characters[unicode] + if cu then + cu.next = offset + index + else + local fci = fc[index] + if not fci then + -- do nothing + else + -- probably never entered + local ref = si[index] + if not ref then + ref = { { 'slot', s, index } } + si[index] = ref + end + local kerns = fci.kerns + if kerns then + local krn = { } + -- for k=1,#kerns do + -- krn[offset + k] = kerns[k] + -- end + for k, v in next, kerns do -- is kerns sparse? 
+ krn[offset + k] = v + end + characters[unicode] = { + width = fci.width, + height = fci.height, + depth = fci.depth, + italic = fci.italic, + commands = ref, + kerns = krn, + next = offset + index, + } + else + characters[unicode] = { + width = fci.width, + height = fci.height, + depth = fci.depth, + italic = fci.italic, + commands = ref, + next = offset + index, + } + end end end end end + else + report_virtual("error in loading %a, problematic vector %a",name,vectorname) end - else - report_virtual("error in loading %a, problematic vector %a",name,vectorname) end end mathematics.extras.copy(main) --not needed here (yet) diff --git a/tex/context/base/meta-fig.mkiv b/tex/context/base/meta-fig.mkiv index 7fbc33be9..46dc4cffc 100644 --- a/tex/context/base/meta-fig.mkiv +++ b/tex/context/base/meta-fig.mkiv @@ -54,7 +54,7 @@ \unexpanded\def\MPfigure#1#2% test for dup figure, can be replaced by a textext {\bgroup - \getfiguredimensionsonly[#1]% [\c!object=\v!no] already set + \getfiguredimensions[#1]% [\c!object=\v!no] already set \startMPcode externalfigure "#1" xscaled \the\dimexpr\figurewidth \relax\space % must be points diff --git a/tex/context/base/meta-fnt.lua b/tex/context/base/meta-fnt.lua new file mode 100644 index 000000000..cf47f0c92 --- /dev/null +++ b/tex/context/base/meta-fnt.lua @@ -0,0 +1,269 @@ +if not modules then modules = { } end modules ['meta-fnt'] = { + version = 1.001, + comment = "companion to meta-fnt.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local concat = table.concat +local format = string.format +local formatters = string.formatters +local chardata = characters.data +local fontdata = fonts.hashes.identifiers + +local vffonts = fonts.handlers.vf + +local mpfonts = fonts.mp or { } +fonts.mp = mpfonts + +mpfonts.version = mpfonts.version or 1.20 +mpfonts.inline = true +mpfonts.cache = containers.define("fonts", "mp", mpfonts.version, true) + +metapost.fonts = metapost.fonts or { } + +-- a few glocals + +local characters, descriptions = { }, { } +local factor, code, slot, width, height, depth, total, variants, bbox, llx, lly, urx, ury = 100, { }, 0, 0, 0, 0, 0, 0, true, 0, 0, 0, 0 + +-- The next variant of ActualText is what Taco and I could come up with +-- eventually. As of September 2013 Acrobat copies okay, Summatra copies a +-- question mark, pdftotext injects an extra space and Okular adds a +-- newline plus space. 
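+
+-- The idea as a rough sketch (not the helper used below; the function name and
+-- the operator string are made up): wrap a glyph's drawing operators in a
+-- marked content span whose /ActualText entry carries the UTF-16BE code of the
+-- intended character, so that text extraction returns that character.
+--
+-- local function taggedglyph(unicode,operators)
+--     -- <feff....> is a UTF-16BE hex string starting with the byte order mark
+--     return string.format("/Span << /ActualText <feff%04x> >> BDC %s EMC",unicode,operators)
+-- end
+--
+-- print(taggedglyph(0x41,"0 0 10 10 re f")) -- a filled square that copies as "A"
+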
+ +-- return formatters["BT /Span << /ActualText (CONTEXT) >> BDC [] TJ % t EMC ET"](code) + +local function topdf(n,code) + if n < 0x10000 then + return formatters["BT /Span << /ActualText >> BDC [] TJ % t EMC ET"](n,code) + else + return formatters["BT /Span << /ActualText >> BDC [] TJ % t EMC ET"](n/1024+0xD800,n%1024+0xDC00,code) + end +end + +-- local function topdf(n,code) +-- return formatters["/Span << /ActualText (CTX) >> BDC % t EMC"](code) +-- end + +local flusher = { + startfigure = function(_chr_,_llx_,_lly_,_urx_,_ury_) + code = { } + slot = _chr_ + llx = _llx_ + lly = _lly_ + urx = _urx_ + ury = _ury_ + width = urx - llx + height = ury + depth = -lly + total = total + 1 + inline = mpfonts.inline + end, + flushfigure = function(t) + for i=1,#t do + code[#code+1] = t[i] + end + end, + stopfigure = function() + local cd = chardata[n] + descriptions[slot] = { + -- unicode = slot, + name = cd and cd.adobename, + width = width * 100, + height = height * 100, + depth = depth * 100, + boundingbox = { llx, lly, urx, ury }, + } + if inline then + characters[slot] = { + commands = { + { "special", "pdf: " .. topdf(slot,code) }, + } + } + else + characters[slot] = { + commands = { + { + "image", + { + stream = topdf(slot,code), + bbox = { 0, -depth * 65536, width * 65536, height * 65536 } + }, + }, + } + } + end + end +} + +local function process(mpxformat,name,instances,scalefactor) + local filename = resolvers.findfile(name) + local attributes = filename and lfs.isfile(filename) and lfs.attributes(filename) + if attributes then + statistics.starttiming(metapost.fonts) + scalefactor = scalefactor or 1 + instances = instances or metapost.fonts.instances or 1 -- maybe store in liost too + local fontname = file.removesuffix(file.basename(name)) + local modification = attributes.modification + local filesize = attributes.size + local hash = file.robustname(formatters["%s %05i %03i"](fontname,scalefactor*1000,instances)) + local lists = containers.read(mpfonts.cache,hash) + if not lists or lists.modification ~= modification or lists.filesize ~= filesize or lists.instances ~= instances or lists.scalefactor ~= scalefactor then + statistics.starttiming(flusher) + local data = io.loaddata(filename) + metapost.reset(mpxformat) + metapost.setoutercolor(2) -- no outer color and no reset either + lists = { } + for i=1,instances do + characters = { } + descriptions = { } + metapost.process( + mpxformat, + { + formatters["randomseed := %s ;"](i*10), + formatters["charscale := %s ;"](scalefactor), + data, + }, + false, + flusher, + false, + false, + "all" + ) + lists[i] = { + characters = characters, + descriptions = descriptions, + parameters = { + designsize = 655360, + slant = 0, + space = 333 * scalefactor, + space_stretch = 166.5 * scalefactor, + space_shrink = 111 * scalefactor, + x_height = 431 * scalefactor, + quad = 1000 * scalefactor, + extra_space = 0, + }, + properties = { + name = formatters["%s-%03i"](hash,i), + virtualized = true, + spacer = "space", + } + } + end + lists.version = metapost.variables.fontversion or "1.000" + lists.modification = modification + lists.filesize = filesize + lists.instances = instances + lists.scalefactor = scalefactor + metapost.reset(mpxformat) -- saves memory + lists = containers.write(mpfonts.cache, hash, lists) + statistics.stoptiming(flusher) + end + variants = variants + #lists + statistics.stoptiming(metapost.fonts) + return lists + else + return { } + end +end + +metapost.fonts.flusher = flusher +metapost.fonts.instances = 1 +metapost.fonts.process 
= process + +local function build(g,v) + local size = g.specification.size + local data = process(v[2],v[3],v[4],size/655360,v[6]) + local list = { } + local t = { } + for d=1,#data do + t = fonts.constructors.scale(data[d],-1000) + local id = font.nextid() + t.fonts = { { id = id } } + fontdata[id] = t + if v[5] then + vffonts.helpers.composecharacters(t) + end + list[d] = font.define(t) + end + for k, v in next, t do -- last t + g[k] = v -- kind of replace, when not present, make nil + end + g.properties.virtualized = true + g.variants = list +end + +vffonts.combiner.commands.metapost = build +vffonts.combiner.commands.metafont = build + +statistics.register("metapost font generation", function() + if total > 0 then + local time = statistics.elapsedtime(flusher) + if total > 0 then + return format("%i glyphs, %.3f seconds runtime, %i glyphs/second", total, time, total/time) + else + return format("%i glyphs, %.3f seconds runtime", total, time) + end + end +end) + +statistics.register("metapost font loading",function() + if variants > 0 then + local time = statistics.elapsedtime(metapost.fonts) + if variants > 0 then + return format("%.3f seconds, %i instances, %0.3f instances/second", time, variants, variants/time) + else + return format("%.3f seconds, %i instances", time, variants) + end + end +end) + +-- fonts.definers.methods.install( "bidi", { +-- { +-- "metapost", -- method +-- "metafun", -- format +-- "fontoeps.mp", -- filename +-- 1, -- instances +-- false, -- compose +-- }, +-- } ) + +local report = logs.reporter("metapost","fonts") + +function metapost.fonts.define(specification) + local fontname = specification.fontname or "" + local filename = specification.filename or "" + local format = specification.format or "metafun" + if fontname == "" then + report("no fontname given") + return + end + if filename == "" then + report("no filename given for %a",fontname) + return + end + local fullname = resolvers.findfile(filename) + if fullname == "" then + report("unable to locate file %a",filename) + return + end + report("generating font %a using format %a and file %a",fontname,format,filename) + fonts.definers.methods.install(fontname, { + { + specification.engine or "metapost", + format, + filename, + specification.instances or 1, + specification.compose or false, + }, + } ) +end + +commands.definemetafont = metapost.fonts.define + +-- metapost.fonts.define { +-- fontname = "bidi", +-- filename = "bidi-symbols.mp", +-- } diff --git a/tex/context/base/meta-fnt.mkiv b/tex/context/base/meta-fnt.mkiv new file mode 100644 index 000000000..603fcf14d --- /dev/null +++ b/tex/context/base/meta-fnt.mkiv @@ -0,0 +1,36 @@ +%D \module +%D [ file=meta-fnt, +%D version=2013.09.06, +%D title=\METAPOST\ Graphics, +%D subtitle=Fonts, +%D author=Hans Hagen, +%D date=\ currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. 
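(Editorial aside on the topdf helper in meta-fnt.lua above: for a codepoint beyond the BMP the /ActualText entry has to carry a UTF-16 surrogate pair. The following standalone Lua sketch shows the standard decomposition for reference only; the helper name tosurrogates is illustrative and is not part of this patch.)

-- illustrative only: standard UTF-16 surrogate decomposition for n > 0xFFFF

local floor = math.floor

local function tosurrogates(n)
    if n < 0x10000 then
        return n                                            -- fits in a single UTF-16 code unit
    else
        local m = n - 0x10000
        return 0xD800 + floor(m/0x400), 0xDC00 + m % 0x400  -- high surrogate, low surrogate
    end
end

-- example: tosurrogates(0x1D49C) --> 0xD835, 0xDC9C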
+ +\writestatus{loading}{MetaPost Graphics / Fonts} + +\registerctxluafile{meta-fnt}{1.001} + +\unprotect + +\unexpanded\def\definemetafont + {\dotripleempty\meta_font_define} + +\def\meta_font_define[#1][#2][#3]% + {\ctxcommand{definemetafont { + fontname = "#1", + filename = "#2" + % no #3 settings yet (compose, instances) + }}} + +% \startluacode +% metapost.fonts.define { fontname = "bidi-symbols", filename = "bidi-symbols.mp" } +% \stopluacode + +% \definemetafont[bidi-symbols][bidi-symbols.mp] + +\protect \endinput diff --git a/tex/context/base/meta-imp-txt.mkiv b/tex/context/base/meta-imp-txt.mkiv index 3a9ad5927..bcfc5513f 100644 --- a/tex/context/base/meta-imp-txt.mkiv +++ b/tex/context/base/meta-imp-txt.mkiv @@ -170,12 +170,12 @@ % we default to nothing \stopuseMPgraphic -\unexpanded\def\followtokens#1% +\unexpanded\def\dofollowtokens#1#2% {\vbox\bgroup \forgetall \dontcomplain \startMPenvironment - \doifundefined{RotFont}{\definefont[RotFont][RegularBold]} + \doifundefined{RotFont}{\definefont[RotFont][RegularBold]}% \stopMPenvironment \MPtoks\emptytoks \resetMPdrawing @@ -183,13 +183,13 @@ \includeMPgraphic{followtokens} ; picture pic[] ; numeric len[], n ; n := 0 ; \stopMPdrawing - \handletokens#1\with\processfollowingtoken + \handletokens#2\with\processfollowingtoken \startMPdrawing if unknown RotPath : path RotPath ; RotPath := origin ; fi ; if unknown RotColor : color RotColor ; RotColor := black ; fi ; if unknown TraceRot : boolean TraceRot ; TraceRot := false ; fi ; if unknown ExtraRot : numeric ExtraRot ; ExtraRot := 0 ; fi ; - numeric al, at, pl, wid, pos ; pair ap, ad ; + numeric al, at, pl, pc, wid, pos ; pair ap, ad ; al := arclength RotPath ; if al=0 : al := len[n] + ExtraRot ; @@ -199,13 +199,19 @@ RotPath := RotPath scaled ((len[n]+ExtraRot)/al) ; al := arclength RotPath ; fi ; - pl := (al-len[n])/(if n>1 : (n-1) else : 1 fi) ; + if \number#1 = 1 : + pl := (al-len[n])/(if n>1 : (n-1) else : 1 fi) ; + pc := 0 ; + else : % centered / MP + pl := 0 ; + pc := arclength RotPath/2 - len[n]/2 ; + fi ; if TraceRot : draw RotPath withpen pencircle scaled 1pt withcolor blue ; fi ; for i=1 upto n : wid := abs(xpart urcorner pic[i] - xpart llcorner pic[i]) ; - pos := len[i]-wid/2 + (i-1)*pl ; + pos := len[i]-wid/2 + (i-1)*pl + pc ; at := arctime pos of RotPath ; ap := point at of RotPath ; ad := direction at of RotPath ; @@ -225,6 +231,11 @@ \resetMPdrawing \egroup} +\unexpanded\def\followtokens {\dofollowtokens1} +\unexpanded\def\followtokenscentered{\dofollowtokens0} + +% stretched variant: +% % \followtokens % {This is just a dummy text, kerned by T{\kern % -.1667em\lower .5ex\hbox {E}}{\kern -.125emX} and typeset @@ -232,6 +243,27 @@ % E}{\setMFPfont T}{\setMFPfont A}{\setMFPfont % P}{\setMFPfont O}{\setMFPfont S}{\setMFPfont T}.\quad} +% centered variant: +% +% \def\followtokengraphicscale#1{%% +% \startuseMPgraphic {followtokens} +% path RotPath; RotPath := reverse halfcircle scaled #1 ; +% draw RotPath ; +% setbounds currentpicture to boundingbox fullcircle scaled 12cm ; +% \stopuseMPgraphic} +% +% \startoverlay +% {\followtokengraphicscale{12cm}%% +% \followtokenscentered{There was question on the list about this kind of graphics.}} +% {\followtokengraphicscale{10cm}%% +% \followtokenscentered{And Marco patched followingtokens to handle a centered text.}} +% {\followtokengraphicscale{8cm}%% +% \followtokenscentered{That ended up as variant branch in the main macro.}} +% {\followtokengraphicscale{6cm}%% +% \followtokenscentered{So now we have two commands.}} +% 
\stopoverlay + + \startuseMPgraphic{fuzzycount} begingroup save height, span, drift, d, cp ; diff --git a/tex/context/base/meta-ini.mkiv b/tex/context/base/meta-ini.mkiv index 6502047fb..32f72cfe6 100644 --- a/tex/context/base/meta-ini.mkiv +++ b/tex/context/base/meta-ini.mkiv @@ -429,16 +429,19 @@ \hskip\cldcontext{fonts.hashes.parameters[font.current()].designsize}sp\relax \endgroup} +\definefontsynonym[MetafunDefault][Regular*default] + \startMPinitializations % scale is not yet ok - defaultfont:="\truefontname{Regular}"; - defaultscale:=\the\bodyfontsize/10pt; + defaultfont:="\truefontname{MetafunDefault}"; + % defaultscale:=\the\bodyfontsize/10pt; % only when hard coded 10pt + defaultscale:=1; \stopMPinitializations % watch out, this is a type1 font because mp can only handle 8 bit fonts -\startMPinitializations % scale is not yet ok - defaultfont:="rm-lmtt10"; -\stopMPinitializations +% \startMPinitializations % scale is not yet ok +% defaultfont:="rm-lmtt10"; +% \stopMPinitializations %D A signal that we're in combined \CONTEXT||\METAFUN mode: @@ -593,7 +596,7 @@ %D \stoptyping \def\overlaystamp % watch the \MPcolor, since colors can be redefined - {\overlaywidth:\overlayheight:\overlaydepth:\MPcolor\overlaycolor:\MPcolor\overlaylinecolor} + {\overlaywidth:\overlayheight:\overlaydepth:\overlayoffset:\overlaylinewidth:\MPcolor\overlaycolor:\MPcolor\overlaylinecolor} %D A better approach is to let additional variables play a role %D in determining the uniqueness. In the next macro, the @@ -1094,8 +1097,9 @@ CurrentLayout:="\currentlayout"; OverlayWidth:=\overlaywidth; OverlayHeight:=\overlayheight; - OverlayDepth:=\overlayheight; + OverlayDepth:=\overlaydepth; OverlayLineWidth:=\overlaylinewidth; + OverlayOffset:=\overlayoffset; % \m_meta_colo_initializations % @@ -1360,6 +1364,67 @@ defaultcolormodel := \ifcase\MPcolormethod1\or1\or3\else3\fi; \stopMPinitializations +%D macros +%D {mprunvar,mpruntab,mprunset} +%D +%D \starttyping +%D \startMPcode +%D passvariable("version","1.0") ; +%D passvariable("number",123) ; +%D passvariable("string","whatever") ; +%D passvariable("point",(1.5,2.8)) ; +%D passvariable("triplet",(1/1,1/2,1/3)) ; +%D passvariable("quad",(1.1,2.2,3.3,4.4)) ; +%D passvariable("boolean",false) ; +%D passvariable("path",fullcircle scaled 1cm) ; +%D draw fullcircle scaled 20pt ; +%D \stopMPcode +%D +%D \ctxlua{inspect(metapost.variables)} +%D +%D \MPrunvar{version} \MPruntab{quad}{3} (\MPrunset{triplet}{,}) +%D +%D $(x,y) = (\MPruntab{point}{1},\MPruntab{point}{2})$ +%D $(x,y) = (\MPrunset{point}{,})$ +%D \stoptyping + +\def\MPrunvar #1{\ctxcommand{mprunvar("#1")}} \let\mprunvar\MPrunvar +\def\MPruntab#1#2{\ctxcommand{mprunvar("#1",\number#2)}} \let\mpruntab\MPruntab +\def\MPrunset#1#2{\ctxcommand{mprunvar("#1","#2")}} \let\mprunset\MPrunset + +%D We also provide an outputless run: + +\unexpanded\def\startMPcalculation + {\begingroup + \setbox\nextbox\hbox\bgroup + \dosinglegroupempty\meta_start_calculation} + +\def\meta_start_calculation + {\iffirstargument + \expandafter\meta_start_calculation_instance + \else + \expandafter\meta_start_calculation_standard + \fi} + +\def\meta_start_calculation_instance#1#2\stopMPcalculation + {\edef\currentMPinstance{#1}% + \let\currentMPgraphicname\empty + \edef\currentMPformat{\MPinstanceparameter\s!format}% + \meta_enable_include + \meta_process_graphic{#2;draw origin}% + \egroup + \endgroup} + +\def\meta_start_calculation_standard#1#2\stopMPcalculation + {\let\currentMPinstance\defaultMPinstance + \let\currentMPgraphicname\empty 
+ \edef\currentMPformat{\MPinstanceparameter\s!format}% + \meta_process_graphic{#2;draw origin}% + \egroup + \endgroup} + +\let\stopMPcalculation\relax + %D \macros %D {setupMPgraphics} %D diff --git a/tex/context/base/meta-pag.mkiv b/tex/context/base/meta-pag.mkiv index 7124902e7..a25353b18 100644 --- a/tex/context/base/meta-pag.mkiv +++ b/tex/context/base/meta-pag.mkiv @@ -41,7 +41,7 @@ % maybe always set as frozen anyway \startMPinitializations - def LoadPageState = + % def LoadPageState = OnRightPage:=\MPonrightpage; OnOddPage:=\MPonoddpage; RealPageNumber:=\the\realpageno; @@ -88,6 +88,8 @@ LayoutColumns:=\the\layoutcolumns; LayoutColumnDistance:=\the\layoutcolumndistance; LayoutColumnWidth:=\the\layoutcolumnwidth; + def LoadPageState = + % now always set .. this dummy can move to the mp code enddef; \stopMPinitializations diff --git a/tex/context/base/meta-pdf.lua b/tex/context/base/meta-pdf.lua index e51290e42..c49ad92ae 100644 --- a/tex/context/base/meta-pdf.lua +++ b/tex/context/base/meta-pdf.lua @@ -23,6 +23,8 @@ local report_mptopdf = logs.reporter("graphics","mptopdf") local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context +local texgetattribute = tex.getattribute + local pdfrgbcode = lpdf.rgbcode local pdfcmykcode = lpdf.cmykcode local pdfgraycode = lpdf.graycode @@ -84,7 +86,7 @@ end local function flushconcat() if m_stack_concat then - mpscode(f_concatm(unpack(m_stack_concat))) + mpscode(f_concat(unpack(m_stack_concat))) m_stack_concat = nil end end @@ -539,7 +541,7 @@ function mptopdf.convertmpstopdf(name) resetall() local ok, m_data, n = resolvers.loadbinfile(name, 'tex') -- we need a binary load ! if ok then - mps.colormodel = tex.attribute[a_colorspace] + mps.colormodel = texgetattribute(a_colorspace) statistics.starttiming(mptopdf) mptopdf.nofconverted = mptopdf.nofconverted + 1 pdfcode(formatters["\\letterpercent\\space mptopdf begin: n=%s, file=%s"](mptopdf.nofconverted,file.basename(name))) diff --git a/tex/context/base/meta-pdf.mkiv b/tex/context/base/meta-pdf.mkiv index a8fdaff42..3469419d4 100644 --- a/tex/context/base/meta-pdf.mkiv +++ b/tex/context/base/meta-pdf.mkiv @@ -37,7 +37,7 @@ \def\PDFMPformoffset{\ifdefined\objectoffset\objectoffset\else\zeropoint\fi} % obsolete, will go -\def\convertMPtoPDF#1#2#3% scaling no longer supported at this level (so #2 & #3 are ignored) +\unexpanded\def\convertMPtoPDF#1#2#3% scaling no longer supported at this level (so #2 & #3 are ignored) {\dostarttagged\t!mpgraphic\empty \naturalvbox attr \imageattribute 1 \bgroup \message{[MP to PDF]}% diff --git a/tex/context/base/mlib-ctx.lua b/tex/context/base/mlib-ctx.lua index 04e0efcb4..a1a4e645a 100644 --- a/tex/context/base/mlib-ctx.lua +++ b/tex/context/base/mlib-ctx.lua @@ -43,6 +43,7 @@ local function setmpsformat(specification) specification.method = method end specification.mpx = metapost.format(instance,format,method) + return specification end local extensiondata = metapost.extensiondata or storage.allocate { } diff --git a/tex/context/base/mlib-pdf.lua b/tex/context/base/mlib-pdf.lua index 963309951..d5f22cd7e 100644 --- a/tex/context/base/mlib-pdf.lua +++ b/tex/context/base/mlib-pdf.lua @@ -10,8 +10,9 @@ if not modules then modules = { } end modules ['mlib-pdf'] = { local format, concat, gsub = string.format, table.concat, string.gsub local abs, sqrt, round = math.abs, math.sqrt, math.round -local setmetatable = setmetatable -local Cf, C, Cg, Ct, P, S, lpegmatch = lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.match +local setmetatable, rawset, 
tostring, tonumber, type = setmetatable, rawset, tostring, tonumber, type +local P, S, C, Ct, Cc, Cg, Cf, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Carg +local lpegmatch = lpeg.match local formatters = string.formatters local report_metapost = logs.reporter("metapost") @@ -30,13 +31,12 @@ metapost.flushers = metapost.flushers or { } local pdfflusher = { } metapost.flushers.pdf = pdfflusher -metapost.multipass = false +metapost.multipass = false -- to be stacked metapost.n = 0 -metapost.optimize = true -- false +metapost.optimize = true -- false local experiment = true -- uses context(node) that already does delayed nodes - -local savedliterals = nil -- needs checking +local savedliterals = nil -- needs checking local mpsliteral = nodes.pool.register(node.new("whatsit",nodes.whatsitcodes.pdfliteral)) -- pdfliteral.mode = 1 local pdfliteral = function(s) @@ -268,7 +268,48 @@ metapost.flushnormalpath = flushnormalpath -- performance penalty, but so is passing extra arguments (result, flusher, after) -- and returning stuff. -local function ignore() end +local ignore = function () end + +local space = P(" ") +local equal = P("=") +local key = C((1-equal)^1) * equal +local newline = S("\n\r")^1 +local number = (((1-space-newline)^1) / tonumber) * (space^0) +local variable = + lpeg.P("1:") * key * number + + lpeg.P("2:") * key * C((1-newline)^0) + + lpeg.P("3:") * key * (P("false") * Cc(false) + P("true") * Cc(true)) + + lpeg.S("4568") * P(":") * key * Ct(number^1) + + lpeg.P("7:") * key * Ct(Ct(number * number^-5)^1) + +local pattern = Cf ( Carg(1) * (Cg(variable * newline^0)^0), rawset) + +metapost.variables = { } -- to be stacked +metapost.llx = 0 -- to be stacked +metapost.lly = 0 -- to be stacked +metapost.urx = 0 -- to be stacked +metapost.ury = 0 -- to be stacked + +function commands.mprunvar(key,n) + local value = metapost.variables[key] + if value ~= nil then + local tvalue = type(value) + if tvalue == "table" then + local ntype = type(n) + if ntype == "number" then + context(value[n]) + elseif ntype == "string" then + context(concat(value,n)) + else + context(concat(value," ")) + end + elseif tvalue == "number" or tvalue == "boolean" then + context(tostring(value)) + elseif tvalue == "string" then + context(value) + end + end +end function metapost.flush(result,flusher,askedfig) if result then @@ -283,15 +324,29 @@ function metapost.flush(result,flusher,askedfig) local stopfigure = flusher.stopfigure local flushfigure = flusher.flushfigure local textfigure = flusher.textfigure - for f=1, #figures do + for f=1,#figures do local figure = figures[f] local objects = getobjects(result,figure,f) - local fignum = figure:charcode() or 0 + local fignum = figure:charcode() or 0 if askedfig == "direct" or askedfig == "all" or askedfig == fignum then local t = { } local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false local bbox = figure:boundingbox() local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4] + local variables = { } + metapost.variables = variables + metapost.properties = { + llx = llx, + lly = lly, + urx = urx, + ury = ury, + slot = figure:charcode(), + width = figure:width(), + height = figure:height(), + depth = figure:depth(), + italic = figure:italcorr(), + } + -- replaced by the above metapost.llx = llx metapost.lly = lly metapost.urx = urx @@ -308,8 +363,10 @@ function metapost.flush(result,flusher,askedfig) for o=1,#objects do local object = objects[o] local objecttype = object.type - if objecttype == "start_bounds" or objecttype == 
"stop_bounds" or objecttype == "special" then + if objecttype == "start_bounds" or objecttype == "stop_bounds" then -- skip + elseif objecttype == "special" then + lpegmatch(pattern,object.prescript,1,variables) elseif objecttype == "start_clip" then t[#t+1] = "q" flushnormalpath(object.path,t,false) diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua index 93bddc2dd..9f6cedad1 100644 --- a/tex/context/base/mlib-pps.lua +++ b/tex/context/base/mlib-pps.lua @@ -6,20 +6,20 @@ if not modules then modules = { } end modules ['mlib-pps'] = { license = "see context related readme files", } --- todo: make a hashed textext variant where we only process the text once (normally --- we cannot assume that no macros are involved which influence a next textext +-- todo: pass multipass nicer local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split local tonumber, type = tonumber, type local round = math.round -local insert, concat = table.insert, table.concat +local insert, remove, concat = table.insert, table.remove, table.concat local Cs, Cf, C, Cg, Ct, P, S, V, Carg = lpeg.Cs, lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.V, lpeg.Carg local lpegmatch = lpeg.match local formatters = string.formatters local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context -local texbox = tex.box +local texgetbox = tex.getbox +local texsetbox = tex.setbox local copy_list = node.copy_list local free_list = node.flush_list local setmetatableindex = table.setmetatableindex @@ -211,52 +211,65 @@ local function checkandconvert(ca,cb) end end -local current_format, current_graphic, current_initializations - -metapost.multipass = false +local stack = { } -- quick hack, we will pass topofstack around +local top = nil +local nofruns = 0 -- askedfig: "all", "first", number -local textexts = { } -- all boxes, optionally with a different color -local texslots = { } -- references to textexts in order or usage -local texorder = { } -- references to textexts by mp index -local textrial = 0 -local texfinal = 0 -local scratchbox = 0 +local function startjob(texmode) + top = { + textexts = { }, -- all boxes, optionally with a different color + texslots = { }, -- references to textexts in order or usage + texorder = { }, -- references to textexts by mp index + textrial = 0, + texfinal = 0, + -- used by tx plugin + texhash = { }, + texlast = 0, + texmode = texmode, -- some day we can then skip all pre/postscripts + } + insert(stack,top) + if trace_runs then + report_metapost("starting run at level %i",#stack) + end + return top +end -local function freeboxes() - for n, box in next, textexts do - local tn = textexts[n] - if tn then +local function stopjob() + if top then + for n, tn in next, top.textexts do free_list(tn) - -- texbox[scratchbox] = tn - -- texbox[scratchbox] = nil -- this frees too if trace_textexts then report_textexts("freeing box %s",n) end end + if trace_runs then + report_metapost("stopping run at level %i",#stack) + end + remove(stack) + top = stack[#stack] + return top end - textexts = { } - texslots = { } - texorder = { } - textrial = 0 - texfinal = 0 end -metapost.resettextexts = freeboxes +function metapost.settextexts () end -- obsolete +function metapost.resettextexts() end -- obsolete + +-- end of new function metapost.settext(box,slot) - textexts[slot] = copy_list(texbox[box]) - texbox[box] = nil + top.textexts[slot] = copy_list(texgetbox(box)) + texsetbox(box,nil) -- this will become - -- textexts[slot] = texbox[box] + -- 
top.textexts[slot] = texgetbox(box) -- unsetbox(box) end function metapost.gettext(box,slot) - texbox[box] = copy_list(textexts[slot]) + texsetbox(box,copy_list(top.textexts[slot])) if trace_textexts then report_textexts("putting text %s in box %s",slot,box) end - -- textexts[slot] = nil -- no, pictures can be placed several times + -- top.textexts[slot] = nil -- no, pictures can be placed several times end -- rather generic pdf, so use this elsewhere too it no longer pays @@ -489,7 +502,10 @@ local do_safeguard = ";" local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"] function metapost.textextsdata() - local t, nt, n = { }, 0, 0 + local texorder = top.texorder + local textexts = top.textexts + local collected = { } + local nofcollected = 0 for n=1,#texorder do local box = textexts[texorder[n]] if box then @@ -497,25 +513,23 @@ function metapost.textextsdata() if trace_textexts then report_textexts("passed data item %s: (%p,%p,%p)",n,wd,ht,dp) end - nt = nt + 1 - t[nt] = f_text_data(n,wd,n,ht,n,dp) + nofcollected = nofcollected + 1 + collected[nofcollected] = f_text_data(n,wd,n,ht,n,dp) else break end end --- inspect(t) - return t + return collected end -metapost.intermediate = metapost.intermediate or {} -metapost.intermediate.actions = metapost.intermediate.actions or {} -metapost.intermediate.needed = false +metapost.intermediate = metapost.intermediate or { } +metapost.intermediate.actions = metapost.intermediate.actions or { } metapost.method = 1 -- 1:dumb 2:clever -- maybe we can latelua the texts some day -local nofruns = 0 -- askedfig: "all", "first", number +local processmetapost = metapost.process local function checkaskedfig(askedfig) -- return askedfig, wrappit if not askedfig then @@ -534,18 +548,42 @@ local function checkaskedfig(askedfig) -- return askedfig, wrappit end end -function metapost.graphic_base_pass(specification) - local mpx = specification.mpx -- mandate - local data = specification.data or "" - local definitions = specification.definitions or "" --- local extensions = metapost.getextensions(specification.instance,specification.useextensions) - local extensions = specification.extensions or "" - local inclusions = specification.inclusions or "" +local function extrapass() + if trace_runs then + report_metapost("second run of job %s, asked figure %a",top.nofruns,top.askedfig) + end + processmetapost(top.mpx, { + top.wrappit and do_begin_fig or "", + no_trial_run, + concat(metapost.textextsdata()," ;\n"), + top.initializations, + do_safeguard, + top.data, + top.wrappit and do_end_fig or "", + }, false, nil, false, true, top.askedfig) + -- context.MPLIBresettexts() -- must happen afterwards +end + +function metapost.graphic_base_pass(specification) -- name will change (see mlib-ctx.lua) + local top = startjob(true) + -- + local mpx = specification.mpx -- mandate + local data = specification.data or "" + local definitions = specification.definitions or "" + -- local extensions = metapost.getextensions(specification.instance,specification.useextensions) + local extensions = specification.extensions or "" + local inclusions = specification.inclusions or "" local initializations = specification.initializations or "" - local askedfig = specification.figure -- no default else no wrapper + local askedfig = specification.figure -- no default else no wrapper -- - nofruns = nofruns + 1 local askedfig, wrappit = checkaskedfig(askedfig) + -- + nofruns = nofruns + 1 + -- + top.askedfig = askedfig + top.wrappit = wrappit + 
top.nofruns = nofruns + -- local done_1, done_2, done_3, forced_1, forced_2, forced_3 data, done_1, forced_1 = checktexts(data) -- we had preamble = extensions + inclusions @@ -559,12 +597,12 @@ function metapost.graphic_base_pass(specification) else inclusions, done_3, forced_3 = checktexts(inclusions) end - metapost.intermediate.needed = false - metapost.multipass = false -- no needed here - current_format = mpx - current_graphic = data - current_initializations = initializations - local method = metapost.method + top.intermediate = false + top.multipass = false -- no needed here + top.mpx = mpx + top.data = data + top.initializations = initializations + local method = metapost.method if trace_runs then if method == 1 then report_metapost("forcing two runs due to library configuration") @@ -585,19 +623,19 @@ function metapost.graphic_base_pass(specification) report_metapost("first run of job %s, asked figure %a",nofruns,askedfig) end -- first true means: trialrun, second true means: avoid extra run if no multipass - local flushed = metapost.process(mpx, { + local flushed = processmetapost(mpx, { definitions, extensions, inclusions, wrappit and do_begin_fig or "", do_first_run, do_trial_run, - current_initializations, + initializations, do_safeguard, - current_graphic, + data, wrappit and do_end_fig or "", }, true, nil, not (forced_1 or forced_2 or forced_3), false, askedfig) - if metapost.intermediate.needed then + if top.intermediate then for _, action in next, metapost.intermediate.actions do action() end @@ -605,44 +643,35 @@ function metapost.graphic_base_pass(specification) if not flushed or not metapost.optimize then -- tricky, we can only ask once for objects and therefore -- we really need a second run when not optimized - context.MPLIBextrapass(askedfig) + -- context.MPLIBextrapass(askedfig) + context(extrapass) end else if trace_runs then report_metapost("running job %s, asked figure %a",nofruns,askedfig) end - metapost.process(mpx, { + processmetapost(mpx, { preamble, wrappit and do_begin_fig or "", do_first_run, no_trial_run, - current_initializations, + initializations, do_safeguard, - current_graphic, + data, wrappit and do_end_fig or "", }, false, nil, false, false, askedfig) end + context(stopjob) end -function metapost.graphic_extra_pass(askedfig) - if trace_runs then - report_metapost("second run of job %s, asked figure %a",nofruns,askedfig) - end - local askedfig, wrappit = checkaskedfig(askedfig) - metapost.process(current_format, { - wrappit and do_begin_fig or "", - no_trial_run, - concat(metapost.textextsdata()," ;\n"), - current_initializations, - do_safeguard, - current_graphic, - wrappit and do_end_fig or "", - }, false, nil, false, true, askedfig) - context.MPLIBresettexts() -- must happen afterwards +function metapost.process(...) + startjob(false) + processmetapost(...) 
+ stopjob() end local start = [[\starttext]] -local preamble = [[\long\def\MPLIBgraphictext#1{\startTEXpage[scale=10000]#1\stopTEXpage}]] +local preamble = [[\def\MPLIBgraphictext#1{\startTEXpage[scale=10000]#1\stopTEXpage}]] local stop = [[\stoptext]] function makempy.processgraphics(graphics) @@ -658,15 +687,20 @@ function makempy.processgraphics(graphics) os.execute(command) if io.exists(pdffile) then command = format("pstoedit -ssp -dt -f mpost %s %s", pdffile, mpyfile) + logs.newline() + report_metapost("running: %s",command) + logs.newline() os.execute(command) local result, r = { }, 0 if io.exists(mpyfile) then local data = io.loaddata(mpyfile) - for figure in gmatch(data,"beginfig(.-)endfig") do - r = r + 1 - result[r] = formatters["begingraphictextfig%sendgraphictextfig ;\n"](figure) + if data and #data > 0 then + for figure in gmatch(data,"beginfig(.-)endfig") do + r = r + 1 + result[r] = formatters["begingraphictextfig%sendgraphictextfig ;\n"](figure) + end + io.savedata(mpyfile,concat(result,"")) end - io.savedata(mpyfile,concat(result,"")) end end stoptiming(makempy) @@ -687,9 +721,9 @@ local resetteractions = sequencers.new { arguments = "t" } local analyzeractions = sequencers.new { arguments = "object,prescript" } local processoractions = sequencers.new { arguments = "object,prescript,before,after" } -appendgroup(resetteractions, "system") -appendgroup(analyzeractions, "system") -appendgroup(processoractions, "system") +appendgroup(resetteractions, "system") +appendgroup(analyzeractions, "system") +appendgroup(processoractions,"system") -- later entries come first @@ -728,48 +762,56 @@ end -- end function metapost.pluginactions(what,t,flushfigure) -- before/after object, depending on what - for i=1,#what do - local wi = what[i] - if type(wi) == "function" then - -- assume injection - flushfigure(t) -- to be checked: too many 0 g 0 G - t = { } - wi() - else - t[#t+1] = wi + if top.texmode then + for i=1,#what do + local wi = what[i] + if type(wi) == "function" then + -- assume injection + flushfigure(t) -- to be checked: too many 0 g 0 G + t = { } + wi() + else + t[#t+1] = wi + end end + return t end - return t end function metapost.resetplugins(t) -- intialize plugins, before figure - -- plugins can have been added - resetter = resetteractions .runner - analyzer = analyzeractions .runner - processor = processoractions .runner - -- let's apply one runner - resetter(t) + if top.texmode then + -- plugins can have been added + resetter = resetteractions.runner + analyzer = analyzeractions.runner + processor = processoractions.runner + -- let's apply one runner + resetter(t) + end end function metapost.analyzeplugins(object) -- each object (first pass) - local prescript = object.prescript -- specifications - if prescript and #prescript > 0 then - return analyzer(object,splitprescript(prescript)) + if top.texmode then + local prescript = object.prescript -- specifications + if prescript and #prescript > 0 then + return analyzer(object,splitprescript(prescript)) + end end end function metapost.processplugins(object) -- each object (second pass) - local prescript = object.prescript -- specifications - if prescript and #prescript > 0 then - local before = { } - local after = { } - processor(object,splitprescript(prescript),before,after) - return #before > 0 and before, #after > 0 and after - else - local c = object.color - if c and #c > 0 then - local b, a = colorconverter(c) - return { b }, { a } + if top.texmode then + local prescript = object.prescript -- specifications + if 
prescript and #prescript > 0 then + local before = { } + local after = { } + processor(object,splitprescript(prescript),before,after) + return #before > 0 and before, #after > 0 and after + else + local c = object.color + if c and #c > 0 then + local b, a = colorconverter(c) + return { b }, { a } + end end end end @@ -799,29 +841,37 @@ local function cl_reset(t) t[#t+1] = metapost.colorinitializer() -- only color end -local tx_hash = { } -local tx_last = 0 - local function tx_reset() - tx_hash = { } - tx_last = 0 + if top then + top.texhash = { } + top.texlast = 0 + end end local fmt = formatters["%s %s %s % t"] +local pat = lpeg.tsplitat(":") local function tx_analyze(object,prescript) -- todo: hash content and reuse them local tx_stage = prescript.tx_stage if tx_stage == "trial" then - textrial = textrial + 1 + local tx_trial = top.textrial + 1 + top.textrial = tx_trial local tx_number = tonumber(prescript.tx_number) local s = object.postscript or "" local c = object.color -- only simple ones, no transparency + if #c == 0 then + local txc = prescript.tx_color + if txc then + c = lpegmatch(pat,txc) + end + end local a = prescript.tr_alternative local t = prescript.tr_transparency - local h = fmt(tx_number,a or "?",t or "?",c) - local n = tx_hash[h] -- todo: hashed variant with s (nicer for similar labels) + local h = fmt(tx_number,a or "-",t or "-",c or "-") + local n = top.texhash[h] -- todo: hashed variant with s (nicer for similar labels) if not n then - tx_last = tx_last + 1 + local tx_last = top.texlast + 1 + top.texlast = tx_last if not c then -- no color elseif #c == 1 then @@ -844,31 +894,35 @@ local function tx_analyze(object,prescript) -- todo: hash content and reuse them end end context.MPLIBsettext(tx_last,s) - metapost.multipass = true - tx_hash[h] = tx_last - texslots[textrial] = tx_last - texorder[tx_number] = tx_last + top.multipass = true + metapost.multipass = true -- ugly + top.texhash[h] = tx_last + top.texslots[tx_trial] = tx_last + top.texorder[tx_number] = tx_last if trace_textexts then - report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,tx_last,h) + report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,tx_trial,tx_number,tx_last,h) end else - texslots[textrial] = n + top.texslots[tx_trial] = n if trace_textexts then - report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,n,h) + report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,tx_trial,tx_number,n,h) end end elseif tx_stage == "extra" then - textrial = textrial + 1 + local tx_trial = top.textrial + 1 + top.textrial = tx_trial local tx_number = tonumber(prescript.tx_number) - if not texorder[tx_number] then + if not top.texorder[tx_number] then local s = object.postscript or "" - tx_last = tx_last + 1 + local tx_last = top.texlast + 1 + top.texlast = tx_last context.MPLIBsettext(tx_last,s) - metapost.multipass = true - texslots[textrial] = tx_last - texorder[tx_number] = tx_last + top.multipass = true + metapost.multipass = true -- ugly + top.texslots[tx_trial] = tx_last + top.texorder[tx_number] = tx_last if trace_textexts then - report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,textrial,tx_number,tx_last) + report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,tx_trial,tx_number,tx_last) end end end @@ -880,13 +934,13 @@ local function tx_process(object,prescript,before,after) tx_number = tonumber(tx_number) local tx_stage = prescript.tx_stage if tx_stage == 
"final" then - texfinal = texfinal + 1 - local n = texslots[texfinal] + top.texfinal = top.texfinal + 1 + local n = top.texslots[top.texfinal] if trace_textexts then - report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,texfinal,tx_number,n) + report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,top.texfinal,tx_number,n) end local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function - local box = textexts[n] + local box = top.textexts[n] if box then before[#before+1] = function() -- flush always happens, we can have a special flush function injected before @@ -928,8 +982,9 @@ local function gt_analyze(object,prescript) local gt_stage = prescript.gt_stage if gt_stage == "trial" then graphics[#graphics+1] = formatters["\\MPLIBgraphictext{%s}"](object.postscript or "") - metapost.intermediate.needed = true - metapost.multipass = true + top.intermediate = true + top.multipass = true + metapost.multipass = true -- ugly end end diff --git a/tex/context/base/mlib-pps.mkiv b/tex/context/base/mlib-pps.mkiv index f21d84e0d..e16827585 100644 --- a/tex/context/base/mlib-pps.mkiv +++ b/tex/context/base/mlib-pps.mkiv @@ -33,18 +33,41 @@ \newbox \MPtextbox \newtoks\everyMPLIBsettext % not used -\newconditional\MPLIBtextgetdone - % \def\MPLIBsettext#1% #2% % {\dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox} +% +% \def\MPLIBresettexts +% {\ctxlua{metapost.resettextexts()}} +% +% \newconditional\MPLIBtextgetdone +% +% \def\MPLIBsettext#1% #2% +% {\ifconditional\MPLIBtextgetdone +% \else +% \cldcontext{metapost.tex.get()}% MPenvironments are depricated +% \settrue\MPLIBtextgetdone % no \global needed +% \fi +% \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox} +% +% \def\MPLIBresettexts +% {\ctxlua{metapost.resettextexts()}% +% \setfalse\MPLIBtextgetdone} + +\def\doMPLIBflushenvironment + {%\writestatus\m!metapost{flushing environment}% + \cldcontext{metapost.tex.get()}% + \let\MPLIBflushenvironment\relax}% MPenvironments are depricated} + +\let\MPLIBflushenvironment\doMPLIBflushenvironment \def\MPLIBsettext#1% #2% - {\ifconditional\MPLIBtextgetdone - \else - \cldcontext{metapost.tex.get()}% MPenvironments are depricated - \settrue\MPLIBtextgetdone % no \global needed - \fi - \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox} + {\MPLIBflushenvironment + \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox\bgroup + \let\MPLIBflushenvironment\doMPLIBflushenvironment + \let\next} % gobble open brace + +\def\MPLIBresettexts + {\ctxlua{metapost.resettextexts()}} \def\MPLIBgettextscaled#1#2#3% why a copy .. can be used more often {\ctxlua{metapost.gettext(\number\MPtextbox,#1)}% @@ -55,7 +78,7 @@ \ctxlua{metapost.edefsxsy(\number\wd\scratchbox,\number\ht\scratchbox,0)}% \vbox to \zeropoint{\vss\hbox to \zeropoint{\scale[\c!sx=\sx,\c!sy=\sy]{\box\scratchbox}\hss}}} -% horrible: +% horrible (we could inline scale and matrix code): \def\MPLIBgettextscaledcm#1#2#3#4#5#6#7#8#9% 2-7: sx,rx,ry,sy,tx,ty {\ctxlua{metapost.gettext(\number\MPtextbox,#1)}% @@ -64,16 +87,16 @@ \vbox to \zeropoint\bgroup \vss \hbox to \zeropoint \bgroup -% \scale[\c!sx=#8,\c!sy=#9]{\raise\dp\MPtextbox\box\MPtextbox}% -% \scale[\c!sx=#8,\c!sy=#9,\c!depth=\v!no]{\box\MPtextbox}% + % \scale[\c!sx=#8,\c!sy=#9]{\raise\dp\MPtextbox\box\MPtextbox}% + % \scale[\c!sx=#8,\c!sy=#9,\c!depth=\v!no]{\box\MPtextbox}% \fastsxsy{#8}{#9}{\raise\dp\MPtextbox\box\MPtextbox}% - % This gives: LuaTeX warning: Misplaced \pdfrestore .. don't ask me why. 
- % but I'll retry it some day soon. - % \dostartscaling{#8}{#9}% - % \raise\dp\MPtextbox\box\MPtextbox - % \dostopscaling - \forcecolorhack % needed ? already in the scale macro - \hss + % This gives: LuaTeX warning: Misplaced \pdfrestore .. don't ask me why. + % but I'll retry it some day soon. + % \dostartscaling{#8}{#9}% + % \raise\dp\MPtextbox\box\MPtextbox + % \dostopscaling + \forcecolorhack % can go away ... already in the scale macro + \hss \egroup \egroup \egroup @@ -112,15 +135,8 @@ \def\MPLIBpositionwhd#1#2#3#4#5% bp ! {\dosavepositionwhd{#1}\zerocount{#2\onebasepoint}{#3\onebasepoint}{#4\onebasepoint}{#5\onebasepoint}\zeropoint} -\def\MPLIBextrapass#1% - {\ctxlua{metapost.graphic_extra_pass("#1")}} - -% \def\MPLIBresettexts -% {\ctxlua{metapost.resettextexts()}} - -\def\MPLIBresettexts - {\ctxlua{metapost.resettextexts()}%$ - \setfalse\MPLIBtextgetdone} +% \def\MPLIBextrapass#1% +% {\ctxlua{metapost.graphic_extra_pass("#1")}} %D Experiment diff --git a/tex/context/base/mlib-run.lua b/tex/context/base/mlib-run.lua index 1fc36dd80..e2585998e 100644 --- a/tex/context/base/mlib-run.lua +++ b/tex/context/base/mlib-run.lua @@ -589,3 +589,37 @@ function metapost.directrun(formatname,filename,outputformat,astable,mpdata) end end end + +-- goodie + +function metapost.quickanddirty(mpxformat,data) + if not data then + mpxformat = "metafun" + data = mpxformat + end + local code, bbox + local flusher = { + startfigure = function(n,llx,lly,urx,ury) + code = { } + bbox = { llx, lly, urx, ury } + end, + flushfigure = function(t) + for i=1,#t do + code[#code+1] = t[i] + end + end, + stopfigure = function() + end + } + local data = format("; beginfig(1) ;\n %s\n ; endfig ;",data) + metapost.process(mpxformat, { data }, false, flusher, false, false, "all") + if code then + return { + bbox = bbox or { 0, 0, 0, 0 }, + code = code, + data = data, + } + else + report_metapost("invalid quick and dirty run") + end +end diff --git a/tex/context/base/mtx-context-xml.tex b/tex/context/base/mtx-context-xml.tex new file mode 100644 index 000000000..63b1d6e9e --- /dev/null +++ b/tex/context/base/mtx-context-xml.tex @@ -0,0 +1,75 @@ +%D \module +%D [ file=mtx-context-xml, +%D version=2013.05.30, +%D title=\CONTEXT\ Extra Trickry, +%D subtitle=Analyzing XML files, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +% This module replaces mkii analyzers. 
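(Editorial aside on the metapost.quickanddirty goodie added in mlib-run.lua above: it wraps a snippet in beginfig/endfig, runs it through a private flusher and returns the bounding box together with the flushed PDF code, or nothing when the run fails. A minimal usage sketch follows, assuming a MkIV session where the metafun instance is available; the reporter name is illustrative.)

local report_demo = logs.reporter("metapost","demo")

local result = metapost.quickanddirty("metafun","draw fullcircle scaled 1cm withpen pencircle scaled 1pt ;")
if result then
    local llx, lly, urx, ury = unpack(result.bbox)
    report_demo("bbox (%s,%s) (%s,%s), %s pdf literals",llx,lly,urx,ury,#result.code)
end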
+ +% begin help +% +% usage: context --extra=xml [options] list-of-files +% +% --analyze : show elements and characters +% --topspace=dimension : distance above first line +% --backspace=dimension : distance before left margin +% --bodyfont=list : additional bodyfont settings +% --paperformat=spec : paper*print or paperxprint +% +% end help + +\input mtx-context-common.tex + +\setupbodyfont + [dejavu,11pt,tt,\getdocumentargument{bodyfont}] + +\setuptyping + [lines=yes] + +\setuplayout + [header=0cm, + footer=1.5cm, + topspace=\getdocumentargumentdefault{topspace}{1.5cm}, + backspace=\getdocumentargumentdefault{backspace}{1.5cm}, + width=middle, + height=middle] + +\setuppapersize + [\getdocumentargument{paperformat_paper}] + [\getdocumentargument{paperformat_print}] + +\usemodule[xml-analyzers] + +\starttext + +\startluacode + local pattern = document.arguments.pattern + local files = document.files + + if pattern then + files = dir.glob(pattern) + context.setupfootertexts( { pattern }, { "pagenumber" }) + else + context.setupfootertexts( { table.concat(files," ") }, { "pagenumber" }) + end + + if #files > 0 then + if document.arguments.analyze then + moduledata.xml.analyzers.structure (files) + moduledata.xml.analyzers.characters(files) + else + context("no action given") + end + else + context("no files given") + end +\stopluacode + +\stoptext diff --git a/tex/context/base/mult-aux.lua b/tex/context/base/mult-aux.lua index 3c4cbcc0f..bdc626d4c 100644 --- a/tex/context/base/mult-aux.lua +++ b/tex/context/base/mult-aux.lua @@ -12,6 +12,8 @@ interfaces.namespaces = interfaces.namespaces or { } local namespaces = interfaces.namespaces local variables = interfaces.variables +local context = context + local trace_namespaces = false trackers.register("interfaces.namespaces", function(v) trace_namespaces = v end) local report_namespaces = logs.reporter("interface","namespaces") diff --git a/tex/context/base/mult-de.mkii b/tex/context/base/mult-de.mkii index 893a9d358..5f2714ce6 100644 --- a/tex/context/base/mult-de.mkii +++ b/tex/context/base/mult-de.mkii @@ -281,6 +281,7 @@ \setinterfacevariable{marginedge}{marginalkante} \setinterfacevariable{margintitle}{marginaltitel} \setinterfacevariable{marking}{beschriftung} +\setinterfacevariable{math}{math} \setinterfacevariable{mathalignment}{mathalignment} \setinterfacevariable{mathcases}{mathcases} \setinterfacevariable{mathematics}{mathematik} @@ -414,12 +415,14 @@ \setinterfacevariable{sectionblockenvironment}{sectionblockenvironment} \setinterfacevariable{sectionnumber}{abschnittsnummer} \setinterfacevariable{see}{sieh} +\setinterfacevariable{selectfont}{selectfont} \setinterfacevariable{september}{september} \setinterfacevariable{serif}{serif} \setinterfacevariable{serried}{kleinerabstand} \setinterfacevariable{setups}{impostazioni} \setinterfacevariable{sheet}{sheet} \setinterfacevariable{short}{kurz} +\setinterfacevariable{simplefonts}{simplefonts} \setinterfacevariable{singlesided}{einzelseitig} \setinterfacevariable{slanted}{geneigt} \setinterfacevariable{slantedbold}{geneigtfett} @@ -593,6 +596,12 @@ \setinterfaceconstant{blockway}{blockauf} \setinterfaceconstant{bodyfont}{fliesstext} \setinterfaceconstant{boffset}{boffset} +\setinterfaceconstant{boldfeatures}{boldfeatures} +\setinterfaceconstant{boldfont}{boldfont} +\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures} +\setinterfaceconstant{bolditalicfont}{bolditalicfont} +\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures} 
+\setinterfaceconstant{boldslantedfont}{boldslantedfont} \setinterfaceconstant{bookmark}{bookmark} \setinterfaceconstant{bottom}{unten} \setinterfaceconstant{bottomafter}{bottomafter} @@ -608,6 +617,7 @@ \setinterfaceconstant{calculate}{berechnen} \setinterfaceconstant{category}{category} \setinterfaceconstant{ccommand}{cbefehl} +\setinterfaceconstant{check}{check} \setinterfaceconstant{click}{klick} \setinterfaceconstant{clickin}{klickin} \setinterfaceconstant{clickout}{klickaus} @@ -630,6 +640,7 @@ \setinterfaceconstant{component}{component} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{compress} +\setinterfaceconstant{concerns}{concerns} \setinterfaceconstant{connector}{connector} \setinterfaceconstant{continue}{fortsetzen} \setinterfaceconstant{contrastcolor}{kontrastfarbe} @@ -682,6 +693,7 @@ \setinterfaceconstant{factor}{faktor} \setinterfaceconstant{fallback}{fallback} \setinterfaceconstant{family}{familie} +\setinterfaceconstant{features}{features} \setinterfaceconstant{fieldbackgroundcolor}{feldhintergrundfarbe} \setinterfaceconstant{fieldframecolor}{feldrahmenfarbe} \setinterfaceconstant{fieldlayer}{fieldlayer} @@ -711,6 +723,7 @@ \setinterfaceconstant{from}{von} \setinterfaceconstant{get}{hole} \setinterfaceconstant{global}{global} +\setinterfaceconstant{goodies}{goodies} \setinterfaceconstant{grid}{gritter} \setinterfaceconstant{hang}{haengend} \setinterfaceconstant{hcompact}{hcompact} @@ -747,6 +760,8 @@ \setinterfaceconstant{interaction}{interaktion} \setinterfaceconstant{interlinespace}{zeilenabstande} \setinterfaceconstant{internalgrid}{internalgrid} +\setinterfaceconstant{italicfeatures}{italicfeatures} +\setinterfaceconstant{italicfont}{italicfont} \setinterfaceconstant{itemalign}{itemalign} \setinterfaceconstant{items}{posten} \setinterfaceconstant{juniorsep}{juniorsep} @@ -798,9 +813,12 @@ \setinterfaceconstant{marginedge}{marginalkante} \setinterfaceconstant{marginedgetext}{marginalkantetext} \setinterfaceconstant{margintext}{marginaltext} +\setinterfaceconstant{mark}{mark} \setinterfaceconstant{marking}{beschriftung} \setinterfaceconstant{marstyle}{beschrstil} \setinterfaceconstant{mask}{mask} +\setinterfaceconstant{mathclass}{mathclass} +\setinterfaceconstant{mathlimits}{mathlimits} \setinterfaceconstant{mathstyle}{mathstyle} \setinterfaceconstant{max}{max} \setinterfaceconstant{maxdepth}{maxdepth} @@ -818,8 +836,13 @@ \setinterfaceconstant{mindepth}{mindepth} \setinterfaceconstant{minheight}{minhoehe} \setinterfaceconstant{minwidth}{minbreite} +\setinterfaceconstant{moffset}{moffset} \setinterfaceconstant{monthconversion}{monthconversion} \setinterfaceconstant{more}{more} +\setinterfaceconstant{mpdepth}{mpdepth} +\setinterfaceconstant{mpheight}{mpheight} +\setinterfaceconstant{mpoffset}{mpoffset} +\setinterfaceconstant{mpwidth}{mpwidth} \setinterfaceconstant{n}{n} \setinterfaceconstant{name}{name} \setinterfaceconstant{namesep}{namesep} @@ -859,6 +882,7 @@ \setinterfaceconstant{openaction}{oeffenaktion} \setinterfaceconstant{openpage}{openpage} \setinterfaceconstant{openpageaction}{openpageaction} +\setinterfaceconstant{opticalsize}{opticalsize} \setinterfaceconstant{option}{option} \setinterfaceconstant{order}{order} \setinterfaceconstant{orientation}{orientation} @@ -926,6 +950,8 @@ \setinterfaceconstant{region}{region} \setinterfaceconstant{regionin}{regionin} \setinterfaceconstant{regionout}{regionaus} +\setinterfaceconstant{regularfeatures}{regularfeatures} +\setinterfaceconstant{regularfont}{regularfont} 
\setinterfaceconstant{repeat}{wiederholen} \setinterfaceconstant{reset}{reset} \setinterfaceconstant{resetnumber}{resetnumber} @@ -972,6 +998,8 @@ \setinterfaceconstant{sectionstarter}{sectionstarter} \setinterfaceconstant{sectionstopper}{sectionstopper} \setinterfaceconstant{separator}{seperator} +\setinterfaceconstant{separatorcolor}{separatorcolor} +\setinterfaceconstant{separatorstyle}{separatorstyle} \setinterfaceconstant{set}{set} \setinterfaceconstant{setups}{setups} \setinterfaceconstant{shrink}{shrink} @@ -982,7 +1010,11 @@ \setinterfaceconstant{sidespacebefore}{nebenvorspatium} \setinterfaceconstant{sign}{zeichen} \setinterfaceconstant{size}{groesse} +\setinterfaceconstant{slantedfeatures}{slantedfeatures} +\setinterfaceconstant{slantedfont}{slantedfont} \setinterfaceconstant{small}{klein} +\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures} +\setinterfaceconstant{smallcapsfont}{smallcapsfont} \setinterfaceconstant{solution}{solution} \setinterfaceconstant{sort}{sort} \setinterfaceconstant{sorttype}{sorttype} diff --git a/tex/context/base/mult-def.lua b/tex/context/base/mult-def.lua index fdb8803bf..afd466531 100644 --- a/tex/context/base/mult-def.lua +++ b/tex/context/base/mult-def.lua @@ -6454,6 +6454,34 @@ return { }, }, ["constants"]={ + -- select/simplefonts + ["regularfont"] ={ ["en"]="regularfont" }, + ["boldfont"] ={ ["en"]="boldfont" }, + ["italicfont"] ={ ["en"]="italicfont" }, + ["slantedfont"] ={ ["en"]="slantedfont" }, + ["bolditalicfont"] ={ ["en"]="bolditalicfont" }, + ["boldslantedfont"] ={ ["en"]="boldslantedfont" }, + ["smallcapsfont"] ={ ["en"]="smallcapsfont" }, + ["features"] ={ ["en"]="features" }, + ["regularfeatures"] ={ ["en"]="regularfeatures" }, + ["boldfeatures"] ={ ["en"]="boldfeatures" }, + ["italicfeatures"] ={ ["en"]="italicfeatures" }, + ["slantedfeatures"] ={ ["en"]="slantedfeatures" }, + ["bolditalicfeatures"] ={ ["en"]="bolditalicfeatures" }, + ["boldslantedfeatures"]={ ["en"]="boldslantedfeatures" }, + ["smallcapsfeatures"] ={ ["en"]="smallcapsfeatures" }, + ["opticalsize"] ={ ["en"]="opticalsize" }, + ["goodies"] ={ ["en"]="goodies" }, + ["check"] ={ ["en"]="check" }, + -- + ["separatorcolor"]={ + ["en"]="separatorcolor", + ["nl"]="scheiderkleur", + }, + ["separatorstyle"]={ + ["en"]="separatorstyle", + ["nl"]="scheiderletter", + }, ["less"]={ ["en"]="less", ["nl"]="minder", @@ -6982,6 +7010,10 @@ return { ["pe"]="قبل‌ازسر", ["ro"]="inaintetitlu", }, + ["concerns"]={ + ["en"]="concerns", + ["nl"]="betreft", + }, ["bet"]={ ["cs"]="bet", ["de"]="bet", @@ -8361,6 +8393,10 @@ return { ["pe"]="آیتمها", ["ro"]="elemente", }, + ["mark"]={ + ["en"]="mark", + ["nl"]="kernmerk", + }, ["ken"]={ ["cs"]="ken", ["de"]="ken", @@ -8924,6 +8960,32 @@ return { ["pe"]="کمترین‌عرض", ["ro"]="latimeminima", }, + ["moffset"]={ + ["cs"]="moffset", + ["de"]="moffset", + ["en"]="moffset", + ["fr"]="moffset", + ["it"]="moffset", + ["nl"]="moffset", + ["pe"]="moffset", + ["ro"]="moffset", + }, + ["mpwidth"]={ + ["en"]="mpwidth", + ["nl"]="mpbreedte", + }, + ["mpheight"]={ + ["en"]="mpheight", + ["nl"]="mphoogte", + }, + ["mpdepth"]={ + ["en"]="mpdepth", + ["nl"]="mpdiepte", + }, + ["mpoffset"]={ + ["en"]="mpoffset", + ["nl"]="mpoffset", + }, ["monthconversion"]={ ["en"]="monthconversion", ["nl"]="maandconversie", @@ -10363,9 +10425,15 @@ return { ["pe"]="بست", ["ro"]="strut", }, + ["mathclass"]={ + ["en"]="mathclass", + }, ["mathstyle"]={ ["en"]="mathstyle", }, + ["mathlimits"]={ + ["en"]="mathlimits", + }, ["style"]={ ["cs"]="pismeno", ["de"]="stil", @@ 
-11325,6 +11393,15 @@ return { }, }, ["variables"]={ + ["math"]={ + ["en"]="math", + }, + ["selectfont"]={ + ["en"]="selectfont", + }, + ["simplefonts"]={ + ["en"]="simplefonts", + }, ["more"]={ ["en"]="more", ["nl"]="meer", diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv index cb165b055..0c52d5a92 100644 --- a/tex/context/base/mult-def.mkiv +++ b/tex/context/base/mult-def.mkiv @@ -34,6 +34,8 @@ % start todo: +\def\c!fences {fences} + \def\c!language {language} \def\c!compressseparator{compressseparator} \def\c!renderingsetup {renderingsetup} @@ -72,6 +74,7 @@ \def\v!words {words} \def\v!combination {combination} \def\v!norepeat {norepeat} +\def\v!mixed {mixed} \def\s!lcgreek {lcgreek} \def\s!ucgreek {ucgreek} @@ -88,11 +91,15 @@ \def\s!current {current} +\def\s!rel {rel} +\def\s!ord {ord} + \def\c!HL {HL} \def\c!VL {VL} \def\c!NL {NL} \ifdefined\v!kerncharacters\else \def\v!kerncharacters{kerncharacters} \fi % no time now for translations should be a e! actually +\ifdefined\v!letterspacing \else \def\v!letterspacing {letterspacing} \fi % no time now for translations should be a e! actually \ifdefined\v!stretched \else \def\v!stretched {stretched} \fi \ifdefined\v!vulgarfraction\else \def\v!vulgarfraction{vulgarfraction} \fi \ifdefined\v!block \else \def\v!block {block} \fi diff --git a/tex/context/base/mult-en.mkii b/tex/context/base/mult-en.mkii index c3ab2fc16..97732dab7 100644 --- a/tex/context/base/mult-en.mkii +++ b/tex/context/base/mult-en.mkii @@ -281,6 +281,7 @@ \setinterfacevariable{marginedge}{marginedge} \setinterfacevariable{margintitle}{margintitle} \setinterfacevariable{marking}{marking} +\setinterfacevariable{math}{math} \setinterfacevariable{mathalignment}{mathalignment} \setinterfacevariable{mathcases}{mathcases} \setinterfacevariable{mathematics}{mathematics} @@ -414,12 +415,14 @@ \setinterfacevariable{sectionblockenvironment}{sectionblockenvironment} \setinterfacevariable{sectionnumber}{sectionnumber} \setinterfacevariable{see}{see} +\setinterfacevariable{selectfont}{selectfont} \setinterfacevariable{september}{september} \setinterfacevariable{serif}{serif} \setinterfacevariable{serried}{serried} \setinterfacevariable{setups}{setups} \setinterfacevariable{sheet}{sheet} \setinterfacevariable{short}{short} +\setinterfacevariable{simplefonts}{simplefonts} \setinterfacevariable{singlesided}{singlesided} \setinterfacevariable{slanted}{slanted} \setinterfacevariable{slantedbold}{slantedbold} @@ -593,6 +596,12 @@ \setinterfaceconstant{blockway}{blockway} \setinterfaceconstant{bodyfont}{bodyfont} \setinterfaceconstant{boffset}{boffset} +\setinterfaceconstant{boldfeatures}{boldfeatures} +\setinterfaceconstant{boldfont}{boldfont} +\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures} +\setinterfaceconstant{bolditalicfont}{bolditalicfont} +\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures} +\setinterfaceconstant{boldslantedfont}{boldslantedfont} \setinterfaceconstant{bookmark}{bookmark} \setinterfaceconstant{bottom}{bottom} \setinterfaceconstant{bottomafter}{bottomafter} @@ -608,6 +617,7 @@ \setinterfaceconstant{calculate}{calculate} \setinterfaceconstant{category}{category} \setinterfaceconstant{ccommand}{ccommand} +\setinterfaceconstant{check}{check} \setinterfaceconstant{click}{click} \setinterfaceconstant{clickin}{clickin} \setinterfaceconstant{clickout}{clickout} @@ -630,6 +640,7 @@ \setinterfaceconstant{component}{component} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{compress} 
+\setinterfaceconstant{concerns}{concerns} \setinterfaceconstant{connector}{connector} \setinterfaceconstant{continue}{continue} \setinterfaceconstant{contrastcolor}{contrastcolor} @@ -682,6 +693,7 @@ \setinterfaceconstant{factor}{factor} \setinterfaceconstant{fallback}{fallback} \setinterfaceconstant{family}{family} +\setinterfaceconstant{features}{features} \setinterfaceconstant{fieldbackgroundcolor}{fieldbackgroundcolor} \setinterfaceconstant{fieldframecolor}{fieldframecolor} \setinterfaceconstant{fieldlayer}{fieldlayer} @@ -711,6 +723,7 @@ \setinterfaceconstant{from}{from} \setinterfaceconstant{get}{get} \setinterfaceconstant{global}{global} +\setinterfaceconstant{goodies}{goodies} \setinterfaceconstant{grid}{grid} \setinterfaceconstant{hang}{hang} \setinterfaceconstant{hcompact}{hcompact} @@ -747,6 +760,8 @@ \setinterfaceconstant{interaction}{interaction} \setinterfaceconstant{interlinespace}{interlinespace} \setinterfaceconstant{internalgrid}{internalgrid} +\setinterfaceconstant{italicfeatures}{italicfeatures} +\setinterfaceconstant{italicfont}{italicfont} \setinterfaceconstant{itemalign}{itemalign} \setinterfaceconstant{items}{items} \setinterfaceconstant{juniorsep}{juniorsep} @@ -798,9 +813,12 @@ \setinterfaceconstant{marginedge}{marginedge} \setinterfaceconstant{marginedgetext}{marginedgetext} \setinterfaceconstant{margintext}{margintext} +\setinterfaceconstant{mark}{mark} \setinterfaceconstant{marking}{marking} \setinterfaceconstant{marstyle}{marstyle} \setinterfaceconstant{mask}{mask} +\setinterfaceconstant{mathclass}{mathclass} +\setinterfaceconstant{mathlimits}{mathlimits} \setinterfaceconstant{mathstyle}{mathstyle} \setinterfaceconstant{max}{max} \setinterfaceconstant{maxdepth}{maxdepth} @@ -818,8 +836,13 @@ \setinterfaceconstant{mindepth}{mindepth} \setinterfaceconstant{minheight}{minheight} \setinterfaceconstant{minwidth}{minwidth} +\setinterfaceconstant{moffset}{moffset} \setinterfaceconstant{monthconversion}{monthconversion} \setinterfaceconstant{more}{more} +\setinterfaceconstant{mpdepth}{mpdepth} +\setinterfaceconstant{mpheight}{mpheight} +\setinterfaceconstant{mpoffset}{mpoffset} +\setinterfaceconstant{mpwidth}{mpwidth} \setinterfaceconstant{n}{n} \setinterfaceconstant{name}{name} \setinterfaceconstant{namesep}{namesep} @@ -859,6 +882,7 @@ \setinterfaceconstant{openaction}{openaction} \setinterfaceconstant{openpage}{openpage} \setinterfaceconstant{openpageaction}{openpageaction} +\setinterfaceconstant{opticalsize}{opticalsize} \setinterfaceconstant{option}{option} \setinterfaceconstant{order}{order} \setinterfaceconstant{orientation}{orientation} @@ -926,6 +950,8 @@ \setinterfaceconstant{region}{region} \setinterfaceconstant{regionin}{regionin} \setinterfaceconstant{regionout}{regionout} +\setinterfaceconstant{regularfeatures}{regularfeatures} +\setinterfaceconstant{regularfont}{regularfont} \setinterfaceconstant{repeat}{repeat} \setinterfaceconstant{reset}{reset} \setinterfaceconstant{resetnumber}{resetnumber} @@ -972,6 +998,8 @@ \setinterfaceconstant{sectionstarter}{sectionstarter} \setinterfaceconstant{sectionstopper}{sectionstopper} \setinterfaceconstant{separator}{separator} +\setinterfaceconstant{separatorcolor}{separatorcolor} +\setinterfaceconstant{separatorstyle}{separatorstyle} \setinterfaceconstant{set}{set} \setinterfaceconstant{setups}{setups} \setinterfaceconstant{shrink}{shrink} @@ -982,7 +1010,11 @@ \setinterfaceconstant{sidespacebefore}{sidespacebefore} \setinterfaceconstant{sign}{sign} \setinterfaceconstant{size}{size} 
+\setinterfaceconstant{slantedfeatures}{slantedfeatures} +\setinterfaceconstant{slantedfont}{slantedfont} \setinterfaceconstant{small}{small} +\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures} +\setinterfaceconstant{smallcapsfont}{smallcapsfont} \setinterfaceconstant{solution}{solution} \setinterfaceconstant{sort}{sort} \setinterfaceconstant{sorttype}{sorttype} diff --git a/tex/context/base/mult-fr.mkii b/tex/context/base/mult-fr.mkii index 1ba4f3c8c..520f8e1a6 100644 --- a/tex/context/base/mult-fr.mkii +++ b/tex/context/base/mult-fr.mkii @@ -281,6 +281,7 @@ \setinterfacevariable{marginedge}{bordmarge} \setinterfacevariable{margintitle}{titremarge} \setinterfacevariable{marking}{marquage} +\setinterfacevariable{math}{math} \setinterfacevariable{mathalignment}{mathalignment} \setinterfacevariable{mathcases}{mathcases} \setinterfacevariable{mathematics}{mathematique} @@ -414,12 +415,14 @@ \setinterfacevariable{sectionblockenvironment}{environementblocsection} \setinterfacevariable{sectionnumber}{numerosection} \setinterfacevariable{see}{voit} +\setinterfacevariable{selectfont}{selectfont} \setinterfacevariable{september}{septembre} \setinterfacevariable{serif}{serif} \setinterfacevariable{serried}{serried} \setinterfacevariable{setups}{reglages} \setinterfacevariable{sheet}{sheet} \setinterfacevariable{short}{short} +\setinterfacevariable{simplefonts}{simplefonts} \setinterfacevariable{singlesided}{recto} \setinterfacevariable{slanted}{incline} \setinterfacevariable{slantedbold}{grasincline} @@ -593,6 +596,12 @@ \setinterfaceconstant{blockway}{blockway} \setinterfaceconstant{bodyfont}{policecorps} \setinterfaceconstant{boffset}{boffset} +\setinterfaceconstant{boldfeatures}{boldfeatures} +\setinterfaceconstant{boldfont}{boldfont} +\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures} +\setinterfaceconstant{bolditalicfont}{bolditalicfont} +\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures} +\setinterfaceconstant{boldslantedfont}{boldslantedfont} \setinterfaceconstant{bookmark}{marquepage} \setinterfaceconstant{bottom}{inf} \setinterfaceconstant{bottomafter}{bottomafter} @@ -608,6 +617,7 @@ \setinterfaceconstant{calculate}{calculer} \setinterfaceconstant{category}{category} \setinterfaceconstant{ccommand}{ccommande} +\setinterfaceconstant{check}{check} \setinterfaceconstant{click}{clic} \setinterfaceconstant{clickin}{clicinterieur} \setinterfaceconstant{clickout}{clicexterieur} @@ -630,6 +640,7 @@ \setinterfaceconstant{component}{composant} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{compress} +\setinterfaceconstant{concerns}{concerns} \setinterfaceconstant{connector}{connector} \setinterfaceconstant{continue}{continue} \setinterfaceconstant{contrastcolor}{coleurcontraste} @@ -682,6 +693,7 @@ \setinterfaceconstant{factor}{facteur} \setinterfaceconstant{fallback}{fallback} \setinterfaceconstant{family}{famille} +\setinterfaceconstant{features}{features} \setinterfaceconstant{fieldbackgroundcolor}{couleurarrierreplanchamp} \setinterfaceconstant{fieldframecolor}{couleurcadrechamp} \setinterfaceconstant{fieldlayer}{calquechamp} @@ -711,6 +723,7 @@ \setinterfaceconstant{from}{de} \setinterfaceconstant{get}{obtient} \setinterfaceconstant{global}{global} +\setinterfaceconstant{goodies}{goodies} \setinterfaceconstant{grid}{grille} \setinterfaceconstant{hang}{suspend} \setinterfaceconstant{hcompact}{hcompact} @@ -747,6 +760,8 @@ \setinterfaceconstant{interaction}{interaction} \setinterfaceconstant{interlinespace}{espaceinterligne} 
\setinterfaceconstant{internalgrid}{internalgrid} +\setinterfaceconstant{italicfeatures}{italicfeatures} +\setinterfaceconstant{italicfont}{italicfont} \setinterfaceconstant{itemalign}{itemalign} \setinterfaceconstant{items}{elements} \setinterfaceconstant{juniorsep}{juniorsep} @@ -798,9 +813,12 @@ \setinterfaceconstant{marginedge}{bordmarge} \setinterfaceconstant{marginedgetext}{textebordmarge} \setinterfaceconstant{margintext}{textemarge} +\setinterfaceconstant{mark}{mark} \setinterfaceconstant{marking}{marquage} \setinterfaceconstant{marstyle}{stylemarquage} \setinterfaceconstant{mask}{mask} +\setinterfaceconstant{mathclass}{mathclass} +\setinterfaceconstant{mathlimits}{mathlimits} \setinterfaceconstant{mathstyle}{mathstyle} \setinterfaceconstant{max}{max} \setinterfaceconstant{maxdepth}{maxdepth} @@ -818,8 +836,13 @@ \setinterfaceconstant{mindepth}{profondeurmin} \setinterfaceconstant{minheight}{hauteurmin} \setinterfaceconstant{minwidth}{largeurmin} +\setinterfaceconstant{moffset}{moffset} \setinterfaceconstant{monthconversion}{monthconversion} \setinterfaceconstant{more}{more} +\setinterfaceconstant{mpdepth}{mpdepth} +\setinterfaceconstant{mpheight}{mpheight} +\setinterfaceconstant{mpoffset}{mpoffset} +\setinterfaceconstant{mpwidth}{mpwidth} \setinterfaceconstant{n}{n} \setinterfaceconstant{name}{nom} \setinterfaceconstant{namesep}{namesep} @@ -859,6 +882,7 @@ \setinterfaceconstant{openaction}{actionouverture} \setinterfaceconstant{openpage}{openpage} \setinterfaceconstant{openpageaction}{actionouverturepage} +\setinterfaceconstant{opticalsize}{opticalsize} \setinterfaceconstant{option}{option} \setinterfaceconstant{order}{order} \setinterfaceconstant{orientation}{orientation} @@ -926,6 +950,8 @@ \setinterfaceconstant{region}{region} \setinterfaceconstant{regionin}{entreregion} \setinterfaceconstant{regionout}{regionexterieure} +\setinterfaceconstant{regularfeatures}{regularfeatures} +\setinterfaceconstant{regularfont}{regularfont} \setinterfaceconstant{repeat}{repete} \setinterfaceconstant{reset}{reset} \setinterfaceconstant{resetnumber}{raznumero} @@ -972,6 +998,8 @@ \setinterfaceconstant{sectionstarter}{sectionstarter} \setinterfaceconstant{sectionstopper}{sectionstopper} \setinterfaceconstant{separator}{separateur} +\setinterfaceconstant{separatorcolor}{separatorcolor} +\setinterfaceconstant{separatorstyle}{separatorstyle} \setinterfaceconstant{set}{set} \setinterfaceconstant{setups}{reglages} \setinterfaceconstant{shrink}{shrink} @@ -982,7 +1010,11 @@ \setinterfaceconstant{sidespacebefore}{espacelateralavant} \setinterfaceconstant{sign}{signe} \setinterfaceconstant{size}{dimension} +\setinterfaceconstant{slantedfeatures}{slantedfeatures} +\setinterfaceconstant{slantedfont}{slantedfont} \setinterfaceconstant{small}{petit} +\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures} +\setinterfaceconstant{smallcapsfont}{smallcapsfont} \setinterfaceconstant{solution}{solution} \setinterfaceconstant{sort}{sort} \setinterfaceconstant{sorttype}{sorttype} diff --git a/tex/context/base/mult-fun.lua b/tex/context/base/mult-fun.lua index a661c53bb..3867489bf 100644 --- a/tex/context/base/mult-fun.lua +++ b/tex/context/base/mult-fun.lua @@ -96,6 +96,10 @@ return { "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", -- + "undashed", + -- "decorated", "redecorated", "undecorated", + -- + 
"passvariable", }, } diff --git a/tex/context/base/mult-ini.lua b/tex/context/base/mult-ini.lua index 3b18738de..e3ff904a6 100644 --- a/tex/context/base/mult-ini.lua +++ b/tex/context/base/mult-ini.lua @@ -10,6 +10,9 @@ local format, gmatch, match = string.format, string.gmatch, string.match local lpegmatch = lpeg.match local serialize = table.serialize +local context = context +local commands = commands + local allocate = utilities.storage.allocate local mark = utilities.storage.mark local prtcatcodes = catcodes.numbers.prtcatcodes diff --git a/tex/context/base/mult-it.mkii b/tex/context/base/mult-it.mkii index 0d1ea911d..2b31e8e10 100644 --- a/tex/context/base/mult-it.mkii +++ b/tex/context/base/mult-it.mkii @@ -281,6 +281,7 @@ \setinterfacevariable{marginedge}{bordomargine} \setinterfacevariable{margintitle}{titoloinmargine} \setinterfacevariable{marking}{marcatura} +\setinterfacevariable{math}{math} \setinterfacevariable{mathalignment}{mathalignment} \setinterfacevariable{mathcases}{mathcases} \setinterfacevariable{mathematics}{matematica} @@ -414,12 +415,14 @@ \setinterfacevariable{sectionblockenvironment}{ambientebloccosezione} \setinterfacevariable{sectionnumber}{numerosezione} \setinterfacevariable{see}{vedi} +\setinterfacevariable{selectfont}{selectfont} \setinterfacevariable{september}{settembre} \setinterfacevariable{serif}{serif} \setinterfacevariable{serried}{vicino} \setinterfacevariable{setups}{nastaveni} \setinterfacevariable{sheet}{sheet} \setinterfacevariable{short}{short} +\setinterfacevariable{simplefonts}{simplefonts} \setinterfacevariable{singlesided}{facciasingola} \setinterfacevariable{slanted}{inclinato} \setinterfacevariable{slantedbold}{inclinatograssetto} @@ -593,6 +596,12 @@ \setinterfaceconstant{blockway}{blockway} \setinterfaceconstant{bodyfont}{fonttesto} \setinterfaceconstant{boffset}{boffset} +\setinterfaceconstant{boldfeatures}{boldfeatures} +\setinterfaceconstant{boldfont}{boldfont} +\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures} +\setinterfaceconstant{bolditalicfont}{bolditalicfont} +\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures} +\setinterfaceconstant{boldslantedfont}{boldslantedfont} \setinterfaceconstant{bookmark}{segnalibro} \setinterfaceconstant{bottom}{fondo} \setinterfaceconstant{bottomafter}{bottomafter} @@ -608,6 +617,7 @@ \setinterfaceconstant{calculate}{calcola} \setinterfaceconstant{category}{category} \setinterfaceconstant{ccommand}{ccomando} +\setinterfaceconstant{check}{check} \setinterfaceconstant{click}{click} \setinterfaceconstant{clickin}{clickdentro} \setinterfaceconstant{clickout}{clickfuori} @@ -630,6 +640,7 @@ \setinterfaceconstant{component}{component} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{compress} +\setinterfaceconstant{concerns}{concerns} \setinterfaceconstant{connector}{connector} \setinterfaceconstant{continue}{continua} \setinterfaceconstant{contrastcolor}{colorecontrasto} @@ -682,6 +693,7 @@ \setinterfaceconstant{factor}{fattore} \setinterfaceconstant{fallback}{fallback} \setinterfaceconstant{family}{famiglia} +\setinterfaceconstant{features}{features} \setinterfaceconstant{fieldbackgroundcolor}{coloresfondocampo} \setinterfaceconstant{fieldframecolor}{colorecornicecampo} \setinterfaceconstant{fieldlayer}{fieldlayer} @@ -711,6 +723,7 @@ \setinterfaceconstant{from}{da} \setinterfaceconstant{get}{prendi} \setinterfaceconstant{global}{globale} +\setinterfaceconstant{goodies}{goodies} \setinterfaceconstant{grid}{griglia} 
\setinterfaceconstant{hang}{sospendi} \setinterfaceconstant{hcompact}{hcompact} @@ -747,6 +760,8 @@ \setinterfaceconstant{interaction}{interazione} \setinterfaceconstant{interlinespace}{interlinea} \setinterfaceconstant{internalgrid}{internalgrid} +\setinterfaceconstant{italicfeatures}{italicfeatures} +\setinterfaceconstant{italicfont}{italicfont} \setinterfaceconstant{itemalign}{itemalign} \setinterfaceconstant{items}{elementi} \setinterfaceconstant{juniorsep}{juniorsep} @@ -798,9 +813,12 @@ \setinterfaceconstant{marginedge}{bordomargine} \setinterfaceconstant{marginedgetext}{testobordomargine} \setinterfaceconstant{margintext}{testomargine} +\setinterfaceconstant{mark}{mark} \setinterfaceconstant{marking}{marcatura} \setinterfaceconstant{marstyle}{stilemarcatura} \setinterfaceconstant{mask}{mask} +\setinterfaceconstant{mathclass}{mathclass} +\setinterfaceconstant{mathlimits}{mathlimits} \setinterfaceconstant{mathstyle}{mathstyle} \setinterfaceconstant{max}{max} \setinterfaceconstant{maxdepth}{maxdepth} @@ -818,8 +836,13 @@ \setinterfaceconstant{mindepth}{mindeoth} \setinterfaceconstant{minheight}{altezzamin} \setinterfaceconstant{minwidth}{ampiezzamin} +\setinterfaceconstant{moffset}{moffset} \setinterfaceconstant{monthconversion}{monthconversion} \setinterfaceconstant{more}{more} +\setinterfaceconstant{mpdepth}{mpdepth} +\setinterfaceconstant{mpheight}{mpheight} +\setinterfaceconstant{mpoffset}{mpoffset} +\setinterfaceconstant{mpwidth}{mpwidth} \setinterfaceconstant{n}{n} \setinterfaceconstant{name}{nome} \setinterfaceconstant{namesep}{namesep} @@ -859,6 +882,7 @@ \setinterfaceconstant{openaction}{azioneapri} \setinterfaceconstant{openpage}{openpage} \setinterfaceconstant{openpageaction}{azioneapripagina} +\setinterfaceconstant{opticalsize}{opticalsize} \setinterfaceconstant{option}{opzione} \setinterfaceconstant{order}{order} \setinterfaceconstant{orientation}{orientation} @@ -926,6 +950,8 @@ \setinterfaceconstant{region}{region} \setinterfaceconstant{regionin}{entraregione} \setinterfaceconstant{regionout}{esciregione} +\setinterfaceconstant{regularfeatures}{regularfeatures} +\setinterfaceconstant{regularfont}{regularfont} \setinterfaceconstant{repeat}{ripeti} \setinterfaceconstant{reset}{reset} \setinterfaceconstant{resetnumber}{resetnumber} @@ -972,6 +998,8 @@ \setinterfaceconstant{sectionstarter}{sectionstarter} \setinterfaceconstant{sectionstopper}{sectionstopper} \setinterfaceconstant{separator}{separatore} +\setinterfaceconstant{separatorcolor}{separatorcolor} +\setinterfaceconstant{separatorstyle}{separatorstyle} \setinterfaceconstant{set}{set} \setinterfaceconstant{setups}{setups} \setinterfaceconstant{shrink}{shrink} @@ -982,7 +1010,11 @@ \setinterfaceconstant{sidespacebefore}{spaziolateraleprima} \setinterfaceconstant{sign}{segno} \setinterfaceconstant{size}{dimensione} +\setinterfaceconstant{slantedfeatures}{slantedfeatures} +\setinterfaceconstant{slantedfont}{slantedfont} \setinterfaceconstant{small}{piccolo} +\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures} +\setinterfaceconstant{smallcapsfont}{smallcapsfont} \setinterfaceconstant{solution}{solution} \setinterfaceconstant{sort}{sort} \setinterfaceconstant{sorttype}{sorttype} diff --git a/tex/context/base/mult-low.lua b/tex/context/base/mult-low.lua index 47e31978b..f82be039c 100644 --- a/tex/context/base/mult-low.lua +++ b/tex/context/base/mult-low.lua @@ -96,7 +96,7 @@ return { "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", 
"stopnotext","startdocument", "stopdocument", "documentvariable", "setupdocument", - "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", + "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule","setupmodule","currentmoduleparameter","moduleparameter", -- "startTEXpage", "stopTEXpage", -- "startMPpage", "stopMPpage", -- already catched by nested lexer @@ -116,6 +116,14 @@ return { "continueifinputfile", -- "luastringsep", "!!bs", "!!es", + -- + "lefttorightmark", "righttoleftmark", + -- + "breakablethinspace", "nobreakspace", "narrownobreakspace", "zerowidthnobreakspace", + "ideographicspace", "ideographichalffillspace", + "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", + "figurespace", "punctuationspace", "hairspace", + "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj", }, ["helpers"] = { -- @@ -172,7 +180,7 @@ return { -- "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", - "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", + "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", @@ -186,6 +194,8 @@ return { "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", -- + "normalbaselineskip", "normallineskip", "normallineskiplimit", + -- "availablehsize", "localhsize", "setlocalhsize", -- "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", @@ -288,6 +298,8 @@ return { -- "twodigits","threedigits", -- + "leftorright", + -- "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", -- "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", @@ -342,6 +354,16 @@ return { -- "carryoverpar", -- + "assumelongusagecs", + -- "Umathbotaccent", + -- + "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", + "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", + "autodirhbox", "autodirvbox", "autodirvtop", + "lefttoright", "righttoleft","synchronizelayoutdirection","synchronizedisplaydirection","synchronizeinlinedirection", + -- + "lesshyphens", "morehyphens", "nohyphens", "dohyphens", + -- } } diff --git a/tex/context/base/mult-mps.lua b/tex/context/base/mult-mps.lua index 59411cd97..5a320487a 100644 --- a/tex/context/base/mult-mps.lua +++ b/tex/context/base/mult-mps.lua @@ -51,15 +51,17 @@ return { "top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt", -- "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart", + "prescriptpart", "postscriptpart", "rgbcolor", "cmykcolor", "greycolor", "graycolor", "colormodel", "graypart", "dashpart", "penpart", -- "colorpart", - "stroked", "filled", "textual", "clipped", "bounded", + "stroked", "filled", "textual", "clipped", "bounded", "pathpart", "expandafter", }, commands = { "beginfig", "endfig", + "beginglyph", "endglyph", "charscale", "rotatedaround", "reflectedabout", "arrowhead", "currentpen", "currentpicture", "cuttings", @@ -103,6 +105,8 @@ return { "graypart", "graycolor", -- "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in", + -- + "triplet", "quadruplet", }, internals = { -- we need to remove duplicates above -- diff --git 
a/tex/context/base/mult-nl.mkii b/tex/context/base/mult-nl.mkii index 5f1bada7a..9f91515cb 100644 --- a/tex/context/base/mult-nl.mkii +++ b/tex/context/base/mult-nl.mkii @@ -281,6 +281,7 @@ \setinterfacevariable{marginedge}{kantlijn} \setinterfacevariable{margintitle}{margetitel} \setinterfacevariable{marking}{markering} +\setinterfacevariable{math}{math} \setinterfacevariable{mathalignment}{wiskundeuitlijnen} \setinterfacevariable{mathcases}{mathcases} \setinterfacevariable{mathematics}{wiskunde} @@ -414,12 +415,14 @@ \setinterfacevariable{sectionblockenvironment}{sectieblokomgeving} \setinterfacevariable{sectionnumber}{sectienummer} \setinterfacevariable{see}{zie} +\setinterfacevariable{selectfont}{selectfont} \setinterfacevariable{september}{september} \setinterfacevariable{serif}{serif} \setinterfacevariable{serried}{aanelkaar} \setinterfacevariable{setups}{instellingen} \setinterfacevariable{sheet}{sheet} \setinterfacevariable{short}{kort} +\setinterfacevariable{simplefonts}{simplefonts} \setinterfacevariable{singlesided}{enkelzijdig} \setinterfacevariable{slanted}{schuin} \setinterfacevariable{slantedbold}{schuinvet} @@ -593,6 +596,12 @@ \setinterfaceconstant{blockway}{blokwijze} \setinterfaceconstant{bodyfont}{korps} \setinterfaceconstant{boffset}{boffset} +\setinterfaceconstant{boldfeatures}{boldfeatures} +\setinterfaceconstant{boldfont}{boldfont} +\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures} +\setinterfaceconstant{bolditalicfont}{bolditalicfont} +\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures} +\setinterfaceconstant{boldslantedfont}{boldslantedfont} \setinterfaceconstant{bookmark}{bookmark} \setinterfaceconstant{bottom}{onder} \setinterfaceconstant{bottomafter}{bottomafter} @@ -608,6 +617,7 @@ \setinterfaceconstant{calculate}{bereken} \setinterfaceconstant{category}{categorie} \setinterfaceconstant{ccommand}{ccommando} +\setinterfaceconstant{check}{check} \setinterfaceconstant{click}{klik} \setinterfaceconstant{clickin}{klikin} \setinterfaceconstant{clickout}{klikuit} @@ -630,6 +640,7 @@ \setinterfaceconstant{component}{component} \setinterfaceconstant{compoundhyphen}{koppelteken} \setinterfaceconstant{compress}{comprimeren} +\setinterfaceconstant{concerns}{betreft} \setinterfaceconstant{connector}{connector} \setinterfaceconstant{continue}{doorgaan} \setinterfaceconstant{contrastcolor}{contrastkleur} @@ -682,6 +693,7 @@ \setinterfaceconstant{factor}{factor} \setinterfaceconstant{fallback}{terugval} \setinterfaceconstant{family}{soort} +\setinterfaceconstant{features}{features} \setinterfaceconstant{fieldbackgroundcolor}{veldachtergrondkleur} \setinterfaceconstant{fieldframecolor}{veldkaderkleur} \setinterfaceconstant{fieldlayer}{veldlaag} @@ -711,6 +723,7 @@ \setinterfaceconstant{from}{van} \setinterfaceconstant{get}{haal} \setinterfaceconstant{global}{globaal} +\setinterfaceconstant{goodies}{goodies} \setinterfaceconstant{grid}{grid} \setinterfaceconstant{hang}{hang} \setinterfaceconstant{hcompact}{hcomprimeer} @@ -747,6 +760,8 @@ \setinterfaceconstant{interaction}{interactie} \setinterfaceconstant{interlinespace}{interlinie} \setinterfaceconstant{internalgrid}{interngrid} +\setinterfaceconstant{italicfeatures}{italicfeatures} +\setinterfaceconstant{italicfont}{italicfont} \setinterfaceconstant{itemalign}{itemuitlijnen} \setinterfaceconstant{items}{items} \setinterfaceconstant{juniorsep}{juniorsep} @@ -798,9 +813,12 @@ \setinterfaceconstant{marginedge}{kantlijn} \setinterfaceconstant{marginedgetext}{kantlijntekst} 
\setinterfaceconstant{margintext}{margetekst} +\setinterfaceconstant{mark}{kernmerk} \setinterfaceconstant{marking}{markering} \setinterfaceconstant{marstyle}{marletter} \setinterfaceconstant{mask}{masker} +\setinterfaceconstant{mathclass}{mathclass} +\setinterfaceconstant{mathlimits}{mathlimits} \setinterfaceconstant{mathstyle}{mathstyle} \setinterfaceconstant{max}{max} \setinterfaceconstant{maxdepth}{maxdepth} @@ -818,8 +836,13 @@ \setinterfaceconstant{mindepth}{mindiepte} \setinterfaceconstant{minheight}{minhoogte} \setinterfaceconstant{minwidth}{minbreedte} +\setinterfaceconstant{moffset}{moffset} \setinterfaceconstant{monthconversion}{maandconversie} \setinterfaceconstant{more}{meer} +\setinterfaceconstant{mpdepth}{mpdiepte} +\setinterfaceconstant{mpheight}{mphoogte} +\setinterfaceconstant{mpoffset}{mpoffset} +\setinterfaceconstant{mpwidth}{mpbreedte} \setinterfaceconstant{n}{n} \setinterfaceconstant{name}{naam} \setinterfaceconstant{namesep}{namesep} @@ -859,6 +882,7 @@ \setinterfaceconstant{openaction}{openactie} \setinterfaceconstant{openpage}{openpagina} \setinterfaceconstant{openpageaction}{openpaginaactie} +\setinterfaceconstant{opticalsize}{opticalsize} \setinterfaceconstant{option}{optie} \setinterfaceconstant{order}{volgorde} \setinterfaceconstant{orientation}{orientatie} @@ -926,6 +950,8 @@ \setinterfaceconstant{region}{gebied} \setinterfaceconstant{regionin}{gebiedin} \setinterfaceconstant{regionout}{gebieduit} +\setinterfaceconstant{regularfeatures}{regularfeatures} +\setinterfaceconstant{regularfont}{regularfont} \setinterfaceconstant{repeat}{herhaal} \setinterfaceconstant{reset}{reset} \setinterfaceconstant{resetnumber}{resetnummer} @@ -972,6 +998,8 @@ \setinterfaceconstant{sectionstarter}{sectionstarter} \setinterfaceconstant{sectionstopper}{sectionstopper} \setinterfaceconstant{separator}{scheider} +\setinterfaceconstant{separatorcolor}{scheiderkleur} +\setinterfaceconstant{separatorstyle}{scheiderletter} \setinterfaceconstant{set}{set} \setinterfaceconstant{setups}{setups} \setinterfaceconstant{shrink}{krimp} @@ -982,7 +1010,11 @@ \setinterfaceconstant{sidespacebefore}{zijvoorwit} \setinterfaceconstant{sign}{teken} \setinterfaceconstant{size}{formaat} +\setinterfaceconstant{slantedfeatures}{slantedfeatures} +\setinterfaceconstant{slantedfont}{slantedfont} \setinterfaceconstant{small}{klein} +\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures} +\setinterfaceconstant{smallcapsfont}{smallcapsfont} \setinterfaceconstant{solution}{oplossing} \setinterfaceconstant{sort}{sort} \setinterfaceconstant{sorttype}{sortering} diff --git a/tex/context/base/mult-pe.mkii b/tex/context/base/mult-pe.mkii index f55a7ab59..240130cdf 100644 --- a/tex/context/base/mult-pe.mkii +++ b/tex/context/base/mult-pe.mkii @@ -281,6 +281,7 @@ \setinterfacevariable{marginedge}{لبه‌حاشیه} \setinterfacevariable{margintitle}{عنوان‌حاشیه} \setinterfacevariable{marking}{نشانه‌گذاری} +\setinterfacevariable{math}{math} \setinterfacevariable{mathalignment}{تنظیم‌ریاضی} \setinterfacevariable{mathcases}{حالتهای‌ریاضی} \setinterfacevariable{mathematics}{ریاضی} @@ -414,12 +415,14 @@ \setinterfacevariable{sectionblockenvironment}{محیط‌بلوک‌بخش} \setinterfacevariable{sectionnumber}{شماره‌بخش} \setinterfacevariable{see}{ببینید} +\setinterfacevariable{selectfont}{selectfont} \setinterfacevariable{september}{سپتامبر} \setinterfacevariable{serif}{سریف} \setinterfacevariable{serried}{تنگ‌هم} \setinterfacevariable{setups}{بارگذاریها} \setinterfacevariable{sheet}{ورقه} \setinterfacevariable{short}{short} 
+\setinterfacevariable{simplefonts}{simplefonts} \setinterfacevariable{singlesided}{یک‌رو} \setinterfacevariable{slanted}{خوابیده} \setinterfacevariable{slantedbold}{مشکی‌خوابیده} @@ -593,6 +596,12 @@ \setinterfaceconstant{blockway}{راه‌بلوک} \setinterfaceconstant{bodyfont}{قلم‌بدنه} \setinterfaceconstant{boffset}{boffset} +\setinterfaceconstant{boldfeatures}{boldfeatures} +\setinterfaceconstant{boldfont}{boldfont} +\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures} +\setinterfaceconstant{bolditalicfont}{bolditalicfont} +\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures} +\setinterfaceconstant{boldslantedfont}{boldslantedfont} \setinterfaceconstant{bookmark}{چوبخط} \setinterfaceconstant{bottom}{پایین} \setinterfaceconstant{bottomafter}{bottomafter} @@ -608,6 +617,7 @@ \setinterfaceconstant{calculate}{محاسبه} \setinterfaceconstant{category}{category} \setinterfaceconstant{ccommand}{فرمان} +\setinterfaceconstant{check}{check} \setinterfaceconstant{click}{فشردن} \setinterfaceconstant{clickin}{فشردن‌داخل} \setinterfaceconstant{clickout}{فشردن‌خارج} @@ -630,6 +640,7 @@ \setinterfaceconstant{component}{مولفه} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{فشردن} +\setinterfaceconstant{concerns}{concerns} \setinterfaceconstant{connector}{connector} \setinterfaceconstant{continue}{ادامه} \setinterfaceconstant{contrastcolor}{contrastcolor} @@ -682,6 +693,7 @@ \setinterfaceconstant{factor}{عامل} \setinterfaceconstant{fallback}{عقب‌ریختن} \setinterfaceconstant{family}{خانواده} +\setinterfaceconstant{features}{features} \setinterfaceconstant{fieldbackgroundcolor}{رنگ‌پس‌زمینه‌میدان} \setinterfaceconstant{fieldframecolor}{رنگ‌قالب‌میدان} \setinterfaceconstant{fieldlayer}{لایه‌میدان} @@ -711,6 +723,7 @@ \setinterfaceconstant{from}{از} \setinterfaceconstant{get}{بگیر} \setinterfaceconstant{global}{سراسری} +\setinterfaceconstant{goodies}{goodies} \setinterfaceconstant{grid}{توری} \setinterfaceconstant{hang}{بیاویز} \setinterfaceconstant{hcompact}{hcompact} @@ -747,6 +760,8 @@ \setinterfaceconstant{interaction}{پانل} \setinterfaceconstant{interlinespace}{فضای‌بین‌خط} \setinterfaceconstant{internalgrid}{internalgrid} +\setinterfaceconstant{italicfeatures}{italicfeatures} +\setinterfaceconstant{italicfont}{italicfont} \setinterfaceconstant{itemalign}{تنظیم‌آیتم} \setinterfaceconstant{items}{آیتمها} \setinterfaceconstant{juniorsep}{juniorsep} @@ -798,9 +813,12 @@ \setinterfaceconstant{marginedge}{لبه‌حاشیه} \setinterfaceconstant{marginedgetext}{متن‌لبه‌حاشیه} \setinterfaceconstant{margintext}{متن‌حاشیه} +\setinterfaceconstant{mark}{mark} \setinterfaceconstant{marking}{نشانه‌گذاری} \setinterfaceconstant{marstyle}{سبک‌حاش} \setinterfaceconstant{mask}{mask} +\setinterfaceconstant{mathclass}{mathclass} +\setinterfaceconstant{mathlimits}{mathlimits} \setinterfaceconstant{mathstyle}{mathstyle} \setinterfaceconstant{max}{بیشترین} \setinterfaceconstant{maxdepth}{maxdepth} @@ -818,8 +836,13 @@ \setinterfaceconstant{mindepth}{کمترین‌عمق} \setinterfaceconstant{minheight}{کمترین‌ارتفاع} \setinterfaceconstant{minwidth}{کمترین‌عرض} +\setinterfaceconstant{moffset}{moffset} \setinterfaceconstant{monthconversion}{monthconversion} \setinterfaceconstant{more}{more} +\setinterfaceconstant{mpdepth}{mpdepth} +\setinterfaceconstant{mpheight}{mpheight} +\setinterfaceconstant{mpoffset}{mpoffset} +\setinterfaceconstant{mpwidth}{mpwidth} \setinterfaceconstant{n}{n} \setinterfaceconstant{name}{نام} \setinterfaceconstant{namesep}{namesep} @@ -859,6 +882,7 @@ 
\setinterfaceconstant{openaction}{عمل‌باز} \setinterfaceconstant{openpage}{openpage} \setinterfaceconstant{openpageaction}{عمل‌صفحه‌باز} +\setinterfaceconstant{opticalsize}{opticalsize} \setinterfaceconstant{option}{گزینه} \setinterfaceconstant{order}{order} \setinterfaceconstant{orientation}{جهت‌دهی} @@ -926,6 +950,8 @@ \setinterfaceconstant{region}{region} \setinterfaceconstant{regionin}{ناحیه‌درون} \setinterfaceconstant{regionout}{ناحیه‌بیرون} +\setinterfaceconstant{regularfeatures}{regularfeatures} +\setinterfaceconstant{regularfont}{regularfont} \setinterfaceconstant{repeat}{تکرار} \setinterfaceconstant{reset}{بازنشانی} \setinterfaceconstant{resetnumber}{بازنشانی‌شماره} @@ -972,6 +998,8 @@ \setinterfaceconstant{sectionstarter}{sectionstarter} \setinterfaceconstant{sectionstopper}{sectionstopper} \setinterfaceconstant{separator}{جداکننده} +\setinterfaceconstant{separatorcolor}{separatorcolor} +\setinterfaceconstant{separatorstyle}{separatorstyle} \setinterfaceconstant{set}{قراربده} \setinterfaceconstant{setups}{بارگذاریها} \setinterfaceconstant{shrink}{shrink} @@ -982,7 +1010,11 @@ \setinterfaceconstant{sidespacebefore}{فضای‌کناری‌قبل} \setinterfaceconstant{sign}{علامت} \setinterfaceconstant{size}{اندازه} +\setinterfaceconstant{slantedfeatures}{slantedfeatures} +\setinterfaceconstant{slantedfont}{slantedfont} \setinterfaceconstant{small}{کوچک} +\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures} +\setinterfaceconstant{smallcapsfont}{smallcapsfont} \setinterfaceconstant{solution}{solution} \setinterfaceconstant{sort}{sort} \setinterfaceconstant{sorttype}{ترتیب‌تایپ} diff --git a/tex/context/base/mult-ro.mkii b/tex/context/base/mult-ro.mkii index 34dd385a3..3b7206e44 100644 --- a/tex/context/base/mult-ro.mkii +++ b/tex/context/base/mult-ro.mkii @@ -281,6 +281,7 @@ \setinterfacevariable{marginedge}{marginebordura} \setinterfacevariable{margintitle}{titlumarginal} \setinterfacevariable{marking}{marcaje} +\setinterfacevariable{math}{math} \setinterfacevariable{mathalignment}{mathalignment} \setinterfacevariable{mathcases}{mathcases} \setinterfacevariable{mathematics}{matematica} @@ -414,12 +415,14 @@ \setinterfacevariable{sectionblockenvironment}{blocsectiuneambient} \setinterfacevariable{sectionnumber}{numarsetiune} \setinterfacevariable{see}{vezi} +\setinterfacevariable{selectfont}{selectfont} \setinterfacevariable{september}{septembrie} \setinterfacevariable{serif}{serif} \setinterfacevariable{serried}{serried} \setinterfacevariable{setups}{setari} \setinterfacevariable{sheet}{sheet} \setinterfacevariable{short}{short} +\setinterfacevariable{simplefonts}{simplefonts} \setinterfacevariable{singlesided}{ofata} \setinterfacevariable{slanted}{inclinat} \setinterfacevariable{slantedbold}{inclinataldin} @@ -593,6 +596,12 @@ \setinterfaceconstant{blockway}{blockway} \setinterfaceconstant{bodyfont}{fonttext} \setinterfaceconstant{boffset}{boffset} +\setinterfaceconstant{boldfeatures}{boldfeatures} +\setinterfaceconstant{boldfont}{boldfont} +\setinterfaceconstant{bolditalicfeatures}{bolditalicfeatures} +\setinterfaceconstant{bolditalicfont}{bolditalicfont} +\setinterfaceconstant{boldslantedfeatures}{boldslantedfeatures} +\setinterfaceconstant{boldslantedfont}{boldslantedfont} \setinterfaceconstant{bookmark}{semncarte} \setinterfaceconstant{bottom}{jos} \setinterfaceconstant{bottomafter}{bottomafter} @@ -608,6 +617,7 @@ \setinterfaceconstant{calculate}{calculeaza} \setinterfaceconstant{category}{category} \setinterfaceconstant{ccommand}{comandac} +\setinterfaceconstant{check}{check} 
\setinterfaceconstant{click}{click} \setinterfaceconstant{clickin}{clickintru} \setinterfaceconstant{clickout}{clickies} @@ -630,6 +640,7 @@ \setinterfaceconstant{component}{component} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{compress} +\setinterfaceconstant{concerns}{concerns} \setinterfaceconstant{connector}{connector} \setinterfaceconstant{continue}{continua} \setinterfaceconstant{contrastcolor}{culoarecontrast} @@ -682,6 +693,7 @@ \setinterfaceconstant{factor}{factor} \setinterfaceconstant{fallback}{fallback} \setinterfaceconstant{family}{familie} +\setinterfaceconstant{features}{features} \setinterfaceconstant{fieldbackgroundcolor}{culoarefundalcamp} \setinterfaceconstant{fieldframecolor}{culoareframecamp} \setinterfaceconstant{fieldlayer}{fieldlayer} @@ -711,6 +723,7 @@ \setinterfaceconstant{from}{dela} \setinterfaceconstant{get}{adu} \setinterfaceconstant{global}{global} +\setinterfaceconstant{goodies}{goodies} \setinterfaceconstant{grid}{grid} \setinterfaceconstant{hang}{suspenda} \setinterfaceconstant{hcompact}{hcompact} @@ -747,6 +760,8 @@ \setinterfaceconstant{interaction}{interactiune} \setinterfaceconstant{interlinespace}{spatiereinterliniara} \setinterfaceconstant{internalgrid}{internalgrid} +\setinterfaceconstant{italicfeatures}{italicfeatures} +\setinterfaceconstant{italicfont}{italicfont} \setinterfaceconstant{itemalign}{itemalign} \setinterfaceconstant{items}{elemente} \setinterfaceconstant{juniorsep}{juniorsep} @@ -798,9 +813,12 @@ \setinterfaceconstant{marginedge}{coltbordura} \setinterfaceconstant{marginedgetext}{textcoltbordura} \setinterfaceconstant{margintext}{textmargine} +\setinterfaceconstant{mark}{mark} \setinterfaceconstant{marking}{marcaje} \setinterfaceconstant{marstyle}{stilmarcaj} \setinterfaceconstant{mask}{mask} +\setinterfaceconstant{mathclass}{mathclass} +\setinterfaceconstant{mathlimits}{mathlimits} \setinterfaceconstant{mathstyle}{mathstyle} \setinterfaceconstant{max}{max} \setinterfaceconstant{maxdepth}{maxdepth} @@ -818,8 +836,13 @@ \setinterfaceconstant{mindepth}{mindepth} \setinterfaceconstant{minheight}{inaltimeminima} \setinterfaceconstant{minwidth}{latimeminima} +\setinterfaceconstant{moffset}{moffset} \setinterfaceconstant{monthconversion}{monthconversion} \setinterfaceconstant{more}{more} +\setinterfaceconstant{mpdepth}{mpdepth} +\setinterfaceconstant{mpheight}{mpheight} +\setinterfaceconstant{mpoffset}{mpoffset} +\setinterfaceconstant{mpwidth}{mpwidth} \setinterfaceconstant{n}{n} \setinterfaceconstant{name}{nume} \setinterfaceconstant{namesep}{namesep} @@ -859,6 +882,7 @@ \setinterfaceconstant{openaction}{actiunedeschidere} \setinterfaceconstant{openpage}{openpage} \setinterfaceconstant{openpageaction}{actiunedeschiderepagina} +\setinterfaceconstant{opticalsize}{opticalsize} \setinterfaceconstant{option}{optiune} \setinterfaceconstant{order}{order} \setinterfaceconstant{orientation}{orientation} @@ -926,6 +950,8 @@ \setinterfaceconstant{region}{region} \setinterfaceconstant{regionin}{regiuneintrare} \setinterfaceconstant{regionout}{regiuneiesire} +\setinterfaceconstant{regularfeatures}{regularfeatures} +\setinterfaceconstant{regularfont}{regularfont} \setinterfaceconstant{repeat}{repeta} \setinterfaceconstant{reset}{reset} \setinterfaceconstant{resetnumber}{resetnumber} @@ -972,6 +998,8 @@ \setinterfaceconstant{sectionstarter}{sectionstarter} \setinterfaceconstant{sectionstopper}{sectionstopper} \setinterfaceconstant{separator}{separator} +\setinterfaceconstant{separatorcolor}{separatorcolor} 
+\setinterfaceconstant{separatorstyle}{separatorstyle} \setinterfaceconstant{set}{set} \setinterfaceconstant{setups}{setups} \setinterfaceconstant{shrink}{shrink} @@ -982,7 +1010,11 @@ \setinterfaceconstant{sidespacebefore}{spatiulateralinainte} \setinterfaceconstant{sign}{semn} \setinterfaceconstant{size}{dimensiune} +\setinterfaceconstant{slantedfeatures}{slantedfeatures} +\setinterfaceconstant{slantedfont}{slantedfont} \setinterfaceconstant{small}{mic} +\setinterfaceconstant{smallcapsfeatures}{smallcapsfeatures} +\setinterfaceconstant{smallcapsfont}{smallcapsfont} \setinterfaceconstant{solution}{solution} \setinterfaceconstant{sort}{sort} \setinterfaceconstant{sorttype}{sorttype} diff --git a/tex/context/base/mult-sys.mkiv b/tex/context/base/mult-sys.mkiv index f0db9fa67..6c6db58e1 100644 --- a/tex/context/base/mult-sys.mkiv +++ b/tex/context/base/mult-sys.mkiv @@ -163,6 +163,9 @@ \definesystemconstant {both} +\definesystemconstant {internal} +\definesystemconstant {external} + \definesystemconstant {attribute} \definesystemconstant {none} @@ -558,11 +561,12 @@ %D calls to other files), old macros, to guarantee compatibility and new macros not %D yet present in the format. -\definefileconstant {errfilename} {cont-err} -\definefileconstant {sysfilename} {cont-sys} -\definefileconstant {newfilename} {cont-new} -\definefileconstant {locfilename} {cont-loc} -\definefileconstant {expfilename} {cont-exp} +\definefileconstant {sysfilename} {cont-sys.mkiv} +\definefileconstant {newfilename} {cont-new.mkiv} +\definefileconstant {locfilename} {cont-loc.mkiv} +\definefileconstant {expfilename} {cont-exp.mkiv} +\definefileconstant {fntfilename} {cont-fnt.mkiv} % not yet used +\definefileconstant {gdsfilename} {cont-fnt.lfg} % not yet used %D The setup files for the language, font, color and special subsystems have a common %D prefix. This means that we have at most three characters for unique filenames. 
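The interface hunks above all feed one naming convention that the mult-def.mkiv fragment makes explicit: \c!... names are parameter keys, \v!... names are parameter values, and \s!... names are internal system strings, while the per-language mult-*.mkii files supply the translated keywords behind them. A minimal sketch of how such prefixed names are consumed follows; it is an illustration rather than part of the patch, it only uses names the patch itself defines (\c!fences, \v!mixed, \s!rel), the macro \showprefixednames is made up for the example, and the assumption that these names expand to plain keyword strings is inferred from the surrounding definitions.

    % hedged sketch: using the prefixed interface names defined above
    \unprotect
    % between \unprotect and \protect the ! is part of the macro name, so
    % \c!fences, \v!mixed and \s!rel are ordinary expandable control sequences
    \def\showprefixednames
      {key: \c!fences, value: \v!mixed, system string: \s!rel}
    \protect

    \starttext
        \showprefixednames
    \stoptext

With the English interface this should typeset "key: fences, value: mixed, system string: rel"; the repeated per-language hunks above presumably let the same \c!/\v! names resolve to translated keywords in localized (MkII) interfaces while setup code stays unchanged.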
diff --git a/tex/context/base/node-acc.lua b/tex/context/base/node-acc.lua index 4380ec3a4..81ae496b2 100644 --- a/tex/context/base/node-acc.lua +++ b/tex/context/base/node-acc.lua @@ -35,7 +35,7 @@ local function injectspaces(head) while n do local id = n.id if id == glue_code then -- todo: check for subtype related to spacing (13/14 but most seems to be 0) ---~ if n.spec.width > 0 then -- threshold + -- if n.spec.width > 0 then -- threshold if p and p.id == glyph_code then local g = copy_node(p) local c = g.components @@ -56,7 +56,7 @@ local function injectspaces(head) s[a_characters] = 0 n[a_characters] = 0 end ---~ end + -- end elseif id == hlist_code or id == vlist_code then injectspaces(n.list,attribute) -- elseif id == kern_code then -- the backend already collapses @@ -83,58 +83,58 @@ nodes.handlers.accessibility = injectspaces -- todo: ---~ local a_hyphenated = attributes.private('hyphenated') ---~ ---~ local hyphenated, codes = { }, { } ---~ ---~ local function compact(n) ---~ local t = { } ---~ for n in traverse_id(glyph_code,n) do ---~ t[#t+1] = utfchar(n.char) -- check for unicode ---~ end ---~ return concat(t,"") ---~ end ---~ ---~ local function injectspans(head) ---~ for n in traverse_nodes(head) do ---~ local id = n.id ---~ if id == disc then ---~ local r, p = n.replace, n.pre ---~ if r and p then ---~ local str = compact(r) ---~ local hsh = hyphenated[str] ---~ if not hsh then ---~ hsh = #codes + 1 ---~ hyphenated[str] = hsh ---~ codes[hsh] = str ---~ end ---~ n[a_hyphenated] = hsh ---~ end ---~ elseif id == hlist_code or id == vlist_code then ---~ injectspans(n.list) ---~ end ---~ end ---~ return head, true ---~ end ---~ ---~ nodes.injectspans = injectspans ---~ ---~ tasks.appendaction("processors", "words", "nodes.injectspans") ---~ ---~ local function injectspans(head) ---~ for n in traverse_nodes(head) do ---~ local id = n.id ---~ if id == disc then ---~ local a = n[a_hyphenated] ---~ if a then ---~ local str = codes[a] ---~ local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str))) ---~ local e = new_pdfliteral("EMC") ---~ node.insert_before(head,n,b) ---~ node.insert_after(head,n,e) ---~ end ---~ elseif id == hlist_code or id == vlist_code then ---~ injectspans(n.list) ---~ end ---~ end ---~ end +-- local a_hyphenated = attributes.private('hyphenated') +-- +-- local hyphenated, codes = { }, { } +-- +-- local function compact(n) +-- local t = { } +-- for n in traverse_id(glyph_code,n) do +-- t[#t+1] = utfchar(n.char) -- check for unicode +-- end +-- return concat(t,"") +-- end +-- +-- local function injectspans(head) +-- for n in traverse_nodes(head) do +-- local id = n.id +-- if id == disc then +-- local r, p = n.replace, n.pre +-- if r and p then +-- local str = compact(r) +-- local hsh = hyphenated[str] +-- if not hsh then +-- hsh = #codes + 1 +-- hyphenated[str] = hsh +-- codes[hsh] = str +-- end +-- n[a_hyphenated] = hsh +-- end +-- elseif id == hlist_code or id == vlist_code then +-- injectspans(n.list) +-- end +-- end +-- return head, true +-- end +-- +-- nodes.injectspans = injectspans +-- +-- tasks.appendaction("processors", "words", "nodes.injectspans") +-- +-- local function injectspans(head) +-- for n in traverse_nodes(head) do +-- local id = n.id +-- if id == disc then +-- local a = n[a_hyphenated] +-- if a then +-- local str = codes[a] +-- local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str))) +-- local e = new_pdfliteral("EMC") +-- node.insert_before(head,n,b) +-- node.insert_after(head,n,e) 
+-- end +-- elseif id == hlist_code or id == vlist_code then +-- injectspans(n.list) +-- end +-- end +-- end diff --git a/tex/context/base/node-aux.lua b/tex/context/base/node-aux.lua index e3fc7ad6f..443c78547 100644 --- a/tex/context/base/node-aux.lua +++ b/tex/context/base/node-aux.lua @@ -43,7 +43,7 @@ local unsetvalue = attributes.unsetvalue local current_font = font.current -local texbox = tex.box +local texgetbox = tex.getbox local report_error = logs.reporter("node-aux:error") @@ -195,7 +195,7 @@ function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255 end function nodes.firstcharinbox(n) - local l = texbox[n].list + local l = texgetbox(n).list if l then for g in traverse_id(glyph_code,l) do return g.char @@ -370,20 +370,21 @@ end nodes.locate = locate -function nodes.concat(list) - local head, tail - for i=1,#list do - local li = list[i] - if not li then - -- skip - elseif head then - tail.next = li - li.prev = tail - tail = li.next and slide_nodes(li) or li - else - head = li - tail = li.next and slide_nodes(li) or li - end - end - return head, tail -end +-- I have no use for this yet: +-- +-- \skip0=10pt plus 2pt minus 2pt +-- \cldcontext{"\letterpercent p",tex.stretch_amount(tex.skip[0],1000)} -- 14.30887pt +-- +-- local gluespec_code = nodes.nodecodes.gluespec +-- +-- function tex.badness_to_ratio(badness) +-- return (badness/100)^(1/3) +-- end +-- +-- function tex.stretch_amount(skip,badness) +-- if skip.id == gluespec_code then +-- return skip.width + (badness and (badness/100)^(1/3) or 1) * skip.stretch +-- else +-- return 0 +-- end +-- end diff --git a/tex/context/base/node-fin.lua b/tex/context/base/node-fin.lua index 2e62ebcb5..63a5ef83e 100644 --- a/tex/context/base/node-fin.lua +++ b/tex/context/base/node-fin.lua @@ -41,93 +41,13 @@ local unsetvalue = attributes.unsetvalue -- these two will be like trackers -function states.enabletriggering() - triggering = true -end -function states.disabletriggering() - triggering = false -end - --- the following code is no longer needed due to the new backend --- but we keep it around for a while as an example --- --- states.collected = states.collected or { } --- --- storage.register("states/collected", states.collected, "states.collected") --- --- local collected = states.collected --- --- function states.collect(str) --- collected[#collected+1] = str --- end --- --- function states.flush() --- if #collected > 0 then --- for i=1,#collected do --- context(collected[i]) -- we're in context mode anyway --- end --- collected = { } --- states.collected = collected --- end --- end --- --- function states.check() --- logs.report("states",concat(collected,"\n")) --- end - --- we used to do the main processor loop here and call processor for each node --- but eventually this was too much a slow down (1 sec on 23 for 120 pages mk) --- so that we moved looping to the processor itself; this may lead to a bit of --- duplicate code once that we have more state handlers - --- local function process_attribute(head,plugin) -- head,attribute,enabled,initializer,resolver,processor,finalizer --- local namespace = plugin.namespace --- if namespace.enabled ~= false then -- this test will go away --- starttiming(attributes) -- in principle we could delegate this to the main caller --- local done, used, ok = false, nil, false --- local attribute = namespace.attribute or numbers[plugin.name] -- todo: plugin.attribute --- local processor = plugin.processor --- if processor then --- local initializer = plugin.initializer --- local resolver = 
plugin.resolver --- local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip ! --- if initializer then --- initializer(namespace,attribute,head) --- end --- head, ok = processor(namespace,attribute,head,inheritance) --- if ok then --- local finalizer = plugin.finalizer --- if finalizer then --- head, ok, used = finalizer(namespace,attribute,head) --- if used then --- local flusher = plugin.flusher --- if flusher then --- head = flusher(namespace,attribute,head,used) --- end --- end --- end --- done = true --- end --- end --- stoptiming(attributes) --- return head, done --- else --- return head, false --- end --- end --- --- function nodes.installattributehandler(plugin) -- we need to avoid this nested function --- return function(head) --- return process_attribute(head,plugin) --- end --- end - --- An experiment: lean and mean functions. It is not really faster but --- with upcoming functionality it might make a difference, e.g. features --- like 'casing' and 'italics' can be called a lot so there it makes sense. +function states.enabletriggering () triggering = true end +function states.disabletriggering() triggering = false end nodes.plugindata = nil +-- inheritance: -0x7FFFFFFF -- we can best use nil and skip ! + local template = [[ local plugin = nodes.plugindata local starttiming = statistics.starttiming @@ -146,8 +66,10 @@ if not processor then elseif initializer or finalizer or resolver then return function(head) starttiming(attributes) - local done, used, ok = false, nil, false - local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip ! + local done, used, ok, inheritance = false, nil, false, nil + if resolver then + inheritance = resolver() + end if initializer then initializer(namespace,attribute,head) end @@ -228,309 +150,6 @@ end -- we need to deal with literals too (reset as well as oval) -- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then --- local function process(namespace,attribute,head,inheritance,default) -- one attribute --- local stack, done = head, false --- while stack do --- local id = stack.id --- if id == glyph_code or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current ~= c then --- head = insert_node_before(head,stack,copy_node(nsdata[c])) --- current = c --- done = true --- end --- -- here ? 
compare selective --- if id == glue_code then --leader --- -- same as *list --- local content = stack.leader --- if content then --- local savedcurrent = current --- local ci = content.id --- if ci == hlist_code or ci == vlist_code then --- -- else we reset inside a box unneeded, okay, the downside is --- -- that we trigger color in each repeated box, so there is room --- -- for improvement here --- current = 0 --- end --- local ok = false --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.leader, ok = process(namespace,attribute,content,inheritance,outer) --- else --- stack.leader, ok = process(namespace,attribute,content,inheritance,default) --- end --- else --- stack.leader, ok = process(namespace,attribute,content,inheritance,default) --- end --- current = savedcurrent --- done = done or ok --- end --- end --- elseif default and inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current = 0 --- done = true --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then --- local ok = false --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.list, ok = process(namespace,attribute,content,inheritance,outer) --- else --- stack.list, ok = process(namespace,attribute,content,inheritance,default) --- end --- else --- stack.list, ok = process(namespace,attribute,content,inheritance,default) --- end --- done = done or ok --- end --- end --- stack = stack.next --- end --- return head, done --- end - --- local function process(namespace,attribute,head,inheritance,default) -- one attribute --- local stack, done = head, false - --- local function check() --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current ~= c then --- head = insert_node_before(head,stack,copy_node(nsdata[c])) --- current = c --- done = true --- end --- elseif default and inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current = 0 --- done = true --- end --- return c --- end - --- local function nested(content) --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- return process(namespace,attribute,content,inheritance,outer) --- else --- return process(namespace,attribute,content,inheritance,default) --- end --- else --- return process(namespace,attribute,content,inheritance,default) --- end --- end - --- while stack do --- local id = stack.id --- if id == glyph_code then --- check() --- elseif id == glue_code then --- local content = stack.leader --- if content and check() then --- local savedcurrent = current --- local ci = content.id --- if ci == hlist_code or ci == vlist_code then --- -- else we reset inside a box unneeded, okay, the downside is --- -- that we trigger color in each repeated box, so there is room --- -- for improvement here --- current = 0 --- end - --- local ok = false 
--- stack.leader, ok = nested(content) --- done = done or ok - --- current = savedcurrent --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then - --- local ok = false --- stack.list, ok = nested(content) --- done = done or ok - --- end --- elseif id == rule_code then --- if stack.width ~= 0 then --- check() --- end --- end --- stack = stack.next --- end --- return head, done --- end - --- local function process(namespace,attribute,head,inheritance,default) -- one attribute --- local stack, done = head, false --- while stack do --- local id = stack.id --- if id == glyph_code then --- -- begin of check --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current ~= c then --- head = insert_node_before(head,stack,copy_node(nsdata[c])) --- current = c --- done = true --- end --- elseif default and inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current = 0 --- done = true --- end --- -- end of check --- elseif id == glue_code then --- local content = stack.leader --- if content then --- -- begin of check --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current ~= c then --- head = insert_node_before(head,stack,copy_node(nsdata[c])) --- current = c --- done = true --- end --- -- begin special to this check --- local savedcurrent = current --- local ci = content.id --- if ci == hlist_code or ci == vlist_code then --- -- else we reset inside a box unneeded, okay, the downside is --- -- that we trigger color in each repeated box, so there is room --- -- for improvement here --- current = 0 --- end --- -- begin nested -- --- local ok = false --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.leader, ok = process(namespace,attribute,content,inheritance,outer) --- else --- stack.leader, ok = process(namespace,attribute,content,inheritance,default) --- end --- else --- stack.leader, ok = process(namespace,attribute,content,inheritance,default) --- end --- -- end nested -- --- done = done or ok --- current = savedcurrent --- -- end special to this check --- elseif default and inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current = 0 --- done = true --- end --- -- end of check --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then --- -- begin nested -- --- local ok --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.list, ok = process(namespace,attribute,content,inheritance,outer) --- else --- stack.list, ok = process(namespace,attribute,content,inheritance,default) --- end --- else --- stack.list, ok = process(namespace,attribute,content,inheritance,default) --- end 
--- -- end nested -- --- done = done or ok --- end --- elseif id == rule_code then --- if stack.width ~= 0 then --- -- begin of check --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current ~= c then --- head = insert_node_before(head,stack,copy_node(nsdata[c])) --- current = c --- done = true --- end --- elseif default and inheritance then --- if current ~= default then --- head = insert_node_before(head,stack,copy_node(nsdata[default])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current = 0 --- done = true --- end --- -- end of check --- end --- end --- stack = stack.next --- end --- return head, done --- end - local function process(namespace,attribute,head,inheritance,default) -- one attribute local stack = head local done = false @@ -633,184 +252,6 @@ states.process = process -- state changes while the main state stays the same (like two glyphs following -- each other with the same color but different color spaces e.g. \showcolor) --- local function selective(namespace,attribute,head,inheritance,default) -- two attributes --- local stack, done = head, false --- while stack do --- local id = stack.id --- -- we need to deal with literals too (reset as well as oval) --- -- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code --- if id == glyph_code -- or id == disc_code --- or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- local data = nsdata[default] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = default --- done = true --- end --- else --- local s = stack[nsselector] --- if current ~= c or current_selector ~= s then --- local data = nsdata[c] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = c --- current_selector = s --- done = true --- end --- end --- elseif default and inheritance then --- if current ~= default then --- local data = nsdata[default] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current, current_selector, done = 0, 0, true --- end --- if id == glue_code then -- leader --- -- same as *list --- local content = stack.leader --- if content then --- local savedcurrent = current --- local ci = content.id --- if ci == hlist_code or ci == vlist_code then --- -- else we reset inside a box unneeded, okay, the downside is --- -- that we trigger color in each repeated box, so there is room --- -- for improvement here --- current = 0 --- end --- local ok = false --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.leader, ok = selective(namespace,attribute,content,inheritance,outer) --- else --- stack.leader, ok = selective(namespace,attribute,content,inheritance,default) --- end --- else --- stack.leader, ok 
= selective(namespace,attribute,content,inheritance,default) --- end --- current = savedcurrent --- done = done or ok --- end --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then --- local ok = false --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- stack.list, ok = selective(namespace,attribute,content,inheritance,outer) --- else --- stack.list, ok = selective(namespace,attribute,content,inheritance,default) --- end --- else --- stack.list, ok = selective(namespace,attribute,content,inheritance,default) --- end --- done = done or ok --- end --- end --- stack = stack.next --- end --- return head, done --- end - --- local function selective(namespace,attribute,head,inheritance,default) -- two attributes --- local stack, done = head, false - --- local function check() --- local c = stack[attribute] --- if c then --- if default and c == inheritance then --- if current ~= default then --- local data = nsdata[default] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = default --- done = true --- end --- else --- local s = stack[nsselector] --- if current ~= c or current_selector ~= s then --- local data = nsdata[c] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = c --- current_selector = s --- done = true --- end --- end --- elseif default and inheritance then --- if current ~= default then --- local data = nsdata[default] --- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector])) --- current = default --- done = true --- end --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- current, current_selector, done = 0, 0, true --- end --- return c --- end - --- local function nested(content) --- if nstrigger and stack[nstrigger] then --- local outer = stack[attribute] --- if outer ~= inheritance then --- return selective(namespace,attribute,content,inheritance,outer) --- else --- return selective(namespace,attribute,content,inheritance,default) --- end --- else --- return selective(namespace,attribute,content,inheritance,default) --- end --- end - --- while stack do --- local id = stack.id --- if id == glyph_code then --- check() --- elseif id == glue_code then --- local content = stack.leader --- if content and check() then --- -- local savedcurrent = current --- -- local ci = content.id --- -- if ci == hlist_code or ci == vlist_code then --- -- -- else we reset inside a box unneeded, okay, the downside is --- -- -- that we trigger color in each repeated box, so there is room --- -- -- for improvement here --- -- current = 0 --- -- end - --- local ok = false --- stack.leader, ok = nested(content) --- done = done or ok - --- -- current = savedcurrent --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then - --- local ok = false --- stack.list, ok = nested(content) --- done = done or ok - --- end --- elseif id == rule_code then --- if stack.width ~= 0 then --- check() --- end --- end --- stack = stack.next --- end --- return head, done --- end - local function selective(namespace,attribute,head,inheritance,default) -- two attributes local stack = head local done = false @@ -914,77 +355,6 @@ states.selective = selective -- Todo: make a better stacker. Keep track (in attribute) about nesting level. 
Not -- entirely trivial and a generic solution is nicer (compares to the exporter). --- local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise --- local stack, done = head, false --- local current, depth = default or 0, 0 --- --- local function check() --- local a = stack[attribute] --- if a then --- if current ~= a then --- head = insert_node_before(head,stack,copy_node(nsdata[a])) --- depth = depth + 1 --- current, done = a, true --- end --- elseif default > 0 then --- -- --- elseif current > 0 then --- head = insert_node_before(head,stack,copy_node(nsnone)) --- depth = depth - 1 --- current, done = 0, true --- end --- return a --- end --- --- while stack do --- local id = stack.id --- if id == glyph_code then --- check() --- elseif id == glue_code then --- local content = stack.leader --- if content and check() then --- local ok = false --- stack.leader, ok = stacked(namespace,attribute,content,current) --- done = done or ok --- end --- elseif id == hlist_code or id == vlist_code then --- local content = stack.list --- if content then --- -- the problem is that broken lines gets the attribute which can be a later one --- if nslistwise then --- local a = stack[attribute] --- if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below --- local p = current --- current, done = a, true --- head = insert_node_before(head,stack,copy_node(nsdata[a])) --- stack.list = stacked(namespace,attribute,content,current) --- head, stack = insert_node_after(head,stack,copy_node(nsnone)) --- current = p --- else --- local ok = false --- stack.list, ok = stacked(namespace,attribute,content,current) --- done = done or ok --- end --- else --- local ok = false --- stack.list, ok = stacked(namespace,attribute,content,current) --- done = done or ok --- end --- end --- elseif id == rule_code then --- if stack.width ~= 0 then --- check() --- end --- end --- stack = stack.next --- end --- while depth > 0 do --- head = insert_node_after(head,stack,copy_node(nsnone)) --- depth = depth - 1 --- end --- return head, done --- end - local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise local stack = head local done = false @@ -1066,76 +436,6 @@ states.stacked = stacked -- experimental --- local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise --- nsbegin() --- local current, previous, done, okay = head, head, false, false --- local attrib = default or unsetvalue --- --- local function check() --- local a = current[attribute] or unsetvalue --- if a ~= attrib then --- local n = nsstep(a) --- if n then --- -- !!!! TEST CODE !!!! 
--- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a --- head = insert_node_before(head,current,n) -- a --- end --- attrib, done, okay = a, true, true --- end --- return a --- end --- --- while current do --- local id = current.id --- if id == glyph_code then --- check() --- elseif id == glue_code then --- local content = current.leader --- if content and check() then --- -- tricky as a leader has to be a list so we cannot inject before --- local _, ok = stacker(namespace,attribute,content,attrib) --- done = done or ok --- end --- elseif id == hlist_code or id == vlist_code then --- local content = current.list --- if not content then --- -- skip --- elseif nslistwise then --- local a = current[attribute] --- if a and attrib ~= a and nslistwise[a] then -- viewerlayer --- done = true --- head = insert_node_before(head,current,copy_node(nsdata[a])) --- current.list = stacker(namespace,attribute,content,a) --- head, current = insert_node_after(head,current,copy_node(nsnone)) --- else --- local ok = false --- current.list, ok = stacker(namespace,attribute,content,attrib) --- done = done or ok --- end --- else --- local ok = false --- current.list, ok = stacker(namespace,attribute,content,default) --- done = done or ok --- end --- elseif id == rule_code then --- if current.width ~= 0 then --- check() --- end --- end --- previous = current --- current = current.next --- end --- if okay then --- local n = nsend() --- if n then --- -- !!!! TEST CODE !!!! --- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)])) --- head = insert_node_after(head,previous,n) --- end --- end --- return head, done --- end - local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise nsbegin() local current = head diff --git a/tex/context/base/node-fin.mkiv b/tex/context/base/node-fin.mkiv index 09bac6c08..2eb033fc1 100644 --- a/tex/context/base/node-fin.mkiv +++ b/tex/context/base/node-fin.mkiv @@ -23,8 +23,12 @@ % we might have two variants at some point (efficiency) -\def\finalizeobjectbox #1{\ctxcommand{finalizebox(\number#1)}} -\def\finalizeshipoutbox#1{\ctxcommand{finalizebox(\number#1)}} +\unexpanded\def\finalizeobjectbox #1{\ctxcommand{finalizebox(\number#1)}} +\unexpanded\def\finalizeshipoutbox#1{\ctxcommand{finalizebox(\number#1)}} + +% Experimental (for Aditya): + +\unexpanded\def\cleanupbox#1{\ctxcommand{cleanupbox(\number#1)}} % Tricky stuff: this might become obsolete. 
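The stacker experiment in node-fin.lua above boils down to walking a node list and injecting start/stop nodes whenever the attribute value changes, closing whatever is still open at the end. The reduced sketch below is not the actual implementation: the name simplestacker is made up, it borrows the names used in the code above (insert_node_before, insert_node_after, copy_node, nsdata, nsnone, glyph_code, unsetvalue), and it ignores leaders, nested lists, triggers and inheritance.

local function simplestacker(attribute,head)
    local current  = head
    local previous = head
    local done     = false
    local attrib   = unsetvalue -- nothing opened yet
    while current do
        if current.id == glyph_code then
            local a = current[attribute] or unsetvalue
            if a ~= attrib then
                if attrib ~= unsetvalue then
                    head = insert_node_before(head,current,copy_node(nsnone))    -- close the open one
                end
                if a ~= unsetvalue then
                    head = insert_node_before(head,current,copy_node(nsdata[a])) -- open the new one
                end
                attrib = a
                done   = true
            end
        end
        previous = current
        current  = current.next
    end
    if attrib ~= unsetvalue then
        head = insert_node_after(head,previous,copy_node(nsnone)) -- close whatever is still open
    end
    return head, done
end

The real variants above additionally recurse into hlist/vlist content and leaders, which is where the bookkeeping (and the todo about nesting levels) comes from.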
diff --git a/tex/context/base/node-fnt.lua b/tex/context/base/node-fnt.lua index 54359117e..2f59d513c 100644 --- a/tex/context/base/node-fnt.lua +++ b/tex/context/base/node-fnt.lua @@ -30,6 +30,7 @@ local nodecodes = nodes.nodecodes local handlers = nodes.handlers local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc local setmetatableindex = table.setmetatableindex @@ -48,12 +49,31 @@ local run = 0 local setfontdynamics = { } local fontprocesses = { } +-- setmetatableindex(setfontdynamics, function(t,font) +-- local tfmdata = fontdata[font] +-- local shared = tfmdata.shared +-- local v = shared and shared.dynamics and otf.setdynamics or false +-- t[font] = v +-- return v +-- end) + setmetatableindex(setfontdynamics, function(t,font) local tfmdata = fontdata[font] local shared = tfmdata.shared - local v = shared and shared.dynamics and otf.setdynamics or false - t[font] = v - return v + local f = shared and shared.dynamics and otf.setdynamics or false + if f then + local v = { } + t[font] = v + setmetatableindex(v,function(t,k) + local v = f(font,k) + t[k] = v + return v + end) + return v + else + t[font] = false + return false + end end) setmetatableindex(fontprocesses, function(t,font) @@ -72,11 +92,18 @@ end) fonts.hashes.setdynamics = setfontdynamics fonts.hashes.processes = fontprocesses +-- if we forget about basemode we don't need to test too much here and we can consider running +-- over sub-ranges .. this involves a bit more initializations but who cares .. in that case we +-- also need to use the stop criterium (we already use head too) ... we cannot use traverse +-- then, so i'll test it on some local clone first ... the only pitfall is changed directions +-- inside a run which means that we need to keep track of this which in turn complicates matters +-- in a way i don't like + function handlers.characters(head) -- either next or not, but definitely no already processed list starttiming(nodes) - local usedfonts, attrfonts, done = { }, { }, false - local a, u, prevfont, prevattr = 0, 0, nil, 0 + local usedfonts, attrfonts = { }, { } + local a, u, prevfont, prevattr, done = 0, 0, nil, 0, false if trace_fontrun then run = run + 1 report_fonts() @@ -88,9 +115,11 @@ function handlers.characters(head) if id == glyph_code then local font = n.font local attr = n[0] or 0 - report_fonts("font %03i, dynamic %03i, glyph %s",font,attr,utf.char(n.char)) + report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,n.char) + elseif id == disc_code then + report_fonts("[disc] %s",nodes.listtoutf(n,true,false,n)) else - report_fonts("[%s]",nodecodes[n.id]) + report_fonts("[%s]",nodecodes[id]) end n = n.next end @@ -107,15 +136,10 @@ function handlers.characters(head) attrfonts[font] = used end if not used[attr] then - local sd = setfontdynamics[font] - if sd then -- always true ? - local d = sd(font,attr) -- can we cache this one? - if d then - used[attr] = d - a = a + 1 - else - -- can't happen ... otherwise best use nil/false distinction - end + local fd = setfontdynamics[font] + if fd then + used[attr] = fd[attr] + a = a + 1 end end else @@ -125,9 +149,7 @@ function handlers.characters(head) if fp then usedfonts[font] = fp u = u + 1 - else - -- can't happen ... 
otherwise best use nil/false distinction - end + end end end prevfont = font @@ -141,34 +163,25 @@ function handlers.characters(head) report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none") report_fonts() end + -- in context we always have at least 2 processors if u == 0 then -- skip elseif u == 1 then local font, processors = next(usedfonts) - local n = #processors - if n > 0 then - local h, d = processors[1](head,font,0) - head = h or head - done = done or d - if n > 1 then - for i=2,n do - local h, d = processors[i](head,font,0) - head = h or head - done = done or d - end + for i=1,#processors do + local h, d = processors[i](head,font,0) + if d then + head = h or head + done = true end end else for font, processors in next, usedfonts do - local n = #processors - local h, d = processors[1](head,font,0) - head = h or head - done = done or d - if n > 1 then - for i=2,n do - local h, d = processors[i](head,font,0) + for i=1,#processors do + local h, d = processors[i](head,font,0) + if d then head = h or head - done = done or d + done = true end end end @@ -178,38 +191,22 @@ function handlers.characters(head) elseif a == 1 then local font, dynamics = next(attrfonts) for attribute, processors in next, dynamics do -- attr can switch in between - local n = #processors - if n == 0 then - report_fonts("no processors associated with dynamic %s",attribute) - else - local h, d = processors[1](head,font,attribute) - head = h or head - done = done or d - if n > 1 then - for i=2,n do - local h, d = processors[i](head,font,attribute) - head = h or head - done = done or d - end + for i=1,#processors do + local h, d = processors[i](head,font,attribute) + if d then + head = h or head + done = true end end end else for font, dynamics in next, attrfonts do for attribute, processors in next, dynamics do -- attr can switch in between - local n = #processors - if n == 0 then - report_fonts("no processors associated with dynamic %s",attribute) - else - local h, d = processors[1](head,font,attribute) - head = h or head - done = done or d - if n > 1 then - for i=2,n do - local h, d = processors[i](head,font,attribute) - head = h or head - done = done or d - end + for i=1,#processors do + local h, d = processors[i](head,font,attribute) + if d then + head = h or head + done = true end end end @@ -222,5 +219,177 @@ function handlers.characters(head) return head, true end +-- local formatters = string.formatters + +-- local function make(processors,font,attribute) +-- _G.__temp = processors +-- local t = { } +-- for i=1,#processors do +-- if processors[i] then +-- t[#t+1] = formatters["local p_%s = _G.__temp[%s]"](i,i) +-- end +-- end +-- t[#t+1] = "return function(head,done)" +-- if #processors == 1 then +-- t[#t+1] = formatters["return p_%s(head,%s,%s)"](1,font,attribute or 0) +-- else +-- for i=1,#processors do +-- if processors[i] then +-- t[#t+1] = formatters["local h,d=p_%s(head,%s,%s) if d then head=h or head done=true end"](i,font,attribute or 0) +-- end +-- end +-- t[#t+1] = "return head, done" +-- end +-- t[#t+1] = "end" +-- t = concat(t,"\n") +-- t = load(t)(processors) +-- _G.__temp = nil +-- return t +-- end + +-- setmetatableindex(fontprocesses, function(t,font) +-- local tfmdata = fontdata[font] +-- local shared = tfmdata.shared -- we need to check shared, only when same features +-- local processes = shared and shared.processes +-- if processes and #processes > 0 then +-- processes = make(processes,font,0) +-- t[font] = processes +-- return processes +-- else +-- t[font] = false 
+-- return false +-- end +-- end) + +-- setmetatableindex(setfontdynamics, function(t,font) +-- local tfmdata = fontdata[font] +-- local shared = tfmdata.shared +-- local f = shared and shared.dynamics and otf.setdynamics or false +-- if f then +-- local v = { } +-- t[font] = v +-- setmetatableindex(v,function(t,k) +-- local v = f(font,k) +-- v = make(v,font,k) +-- t[k] = v +-- return v +-- end) +-- return v +-- else +-- t[font] = false +-- return false +-- end +-- end) + +-- function handlers.characters(head) +-- -- either next or not, but definitely no already processed list +-- starttiming(nodes) +-- local usedfonts, attrfonts +-- local a, u, prevfont, prevattr, done = 0, 0, nil, 0, false +-- if trace_fontrun then +-- run = run + 1 +-- report_fonts() +-- report_fonts("checking node list, run %s",run) +-- report_fonts() +-- local n = head +-- while n do +-- local id = n.id +-- if id == glyph_code then +-- local font = n.font +-- local attr = n[0] or 0 +-- report_fonts("font %03i, dynamic %03i, glyph %s",font,attr,utf.char(n.char)) +-- else +-- report_fonts("[%s]",nodecodes[n.id]) +-- end +-- n = n.next +-- end +-- end +-- for n in traverse_id(glyph_code,head) do +-- -- if n.subtype<256 then -- all are 1 +-- local font = n.font +-- local attr = n[0] or 0 -- zero attribute is reserved for fonts in context +-- if font ~= prevfont or attr ~= prevattr then +-- if attr > 0 then +-- if not attrfonts then +-- attrfonts = { +-- [font] = { +-- [attr] = setfontdynamics[font][attr] +-- } +-- } +-- a = 1 +-- else +-- local used = attrfonts[font] +-- if not used then +-- attrfonts[font] = { +-- [attr] = setfontdynamics[font][attr] +-- } +-- a = a + 1 +-- elseif not used[attr] then +-- used[attr] = setfontdynamics[font][attr] +-- a = a + 1 +-- end +-- end +-- else +-- if not usedfonts then +-- local fp = fontprocesses[font] +-- if fp then +-- usedfonts = { +-- [font] = fp +-- } +-- u = 1 +-- end +-- else +-- local used = usedfonts[font] +-- if not used then +-- local fp = fontprocesses[font] +-- if fp then +-- usedfonts[font] = fp +-- u = u + 1 +-- end +-- end +-- end +-- end +-- prevfont = font +-- prevattr = attr +-- end +-- -- end +-- end +-- if trace_fontrun then +-- report_fonts() +-- report_fonts("statics : %s",(u > 0 and concat(keys(usedfonts)," ")) or "none") +-- report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none") +-- report_fonts() +-- end +-- if not usedfonts then +-- -- skip +-- elseif u == 1 then +-- local font, processors = next(usedfonts) +-- head, done = processors(head,done) +-- else +-- for font, processors in next, usedfonts do +-- head, done = processors(head,done) +-- end +-- end +-- if not attrfonts then +-- -- skip +-- elseif a == 1 then +-- local font, dynamics = next(attrfonts) +-- for attribute, processors in next, dynamics do +-- head, done = processors(head,done) +-- end +-- else +-- for font, dynamics in next, attrfonts do +-- for attribute, processors in next, dynamics do +-- head, done = processors(head,done) +-- end +-- end +-- end +-- stoptiming(nodes) +-- if trace_characters then +-- nodes.report(head,done) +-- end +-- return head, true +-- end + handlers.protectglyphs = node.protect_glyphs handlers.unprotectglyphs = node.unprotect_glyphs diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua index 5a3986c3a..652b46caf 100644 --- a/tex/context/base/node-ini.lua +++ b/tex/context/base/node-ini.lua @@ -13,13 +13,10 @@ modules.

-- this module is being reconstructed -local next, type = next, type -local format, match, gsub = string.format, string.match, string.gsub +local next, type, tostring = next, type, tostring +local gsub = string.gsub local concat, remove = table.concat, table.remove -local sortedhash, sortedkeys, swapped, tohash = table.sortedhash, table.sortedkeys, table.swapped, table.tohash -local utfchar = utf.char -local lpegmatch = lpeg.match -local formatcolumns = utilities.formatters.formatcolumns +local sortedhash, sortedkeys, swapped = table.sortedhash, table.sortedkeys, table.swapped --[[ldx--

Access to nodes is what gives LuaTeX its power. Here we

also ignore the empty nodes. [This is obsolete!]

--ldx]]-- -local traverse = node.traverse -local traverse_id = node.traverse_id -local free_node = node.free -local remove_node = node.remove -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after -local node_fields = node.fields - -local allocate = utilities.storage.allocate +nodes = nodes or { } +local nodes = nodes +nodes.handlers = nodes.handlers or { } -nodes = nodes or { } -local nodes = nodes - -nodes.handlers = nodes.handlers or { } +local allocate = utilities.storage.allocate +local formatcolumns = utilities.formatters.formatcolumns -- there will be more of this: @@ -103,7 +92,7 @@ local penaltycodes = allocate { -- unfortunately not used table.setmetatableindex(penaltycodes,function(t,k) return "userpenalty" end) -- not used anyway -local noadcodes = allocate { +local noadcodes = allocate { -- simple nodes [ 0] = "ord", [ 1] = "opdisplaylimits", [ 2] = "oplimits", @@ -170,6 +159,20 @@ local disccodes = allocate { [5] = "second", -- hard second item } +local accentcodes = allocate { + [0] = "bothflexible", + [1] = "fixedtop", + [2] = "fixedbottom", + [3] = "fixedboth", +} + +local fencecodes = allocate { + [0] = "unset", + [1] = "left", + [2] = "middle", + [3] = "right", +} + local function simplified(t) local r = { } for k, v in next, t do @@ -193,6 +196,8 @@ mathcodes = allocate(swapped(mathcodes,mathcodes)) fillcodes = allocate(swapped(fillcodes,fillcodes)) margincodes = allocate(swapped(margincodes,margincodes)) disccodes = allocate(swapped(disccodes,disccodes)) +accentcodes = allocate(swapped(accentcodes,accentcodes)) +fencecodes = allocate(swapped(fencecodes,fencecodes)) nodes.skipcodes = skipcodes nodes.gluecodes = skipcodes -- more official nodes.noadcodes = noadcodes @@ -206,6 +211,8 @@ nodes.mathcodes = mathcodes nodes.fillcodes = fillcodes nodes.margincodes = margincodes nodes.disccodes = disccodes nodes.discretionarycodes = disccodes +nodes.accentcodes = accentcodes +nodes.fencecodes = fencecodes listcodes.row = listcodes.alignment listcodes.column = listcodes.alignment @@ -227,6 +234,8 @@ nodes.codes = allocate { -- mostly for listing margin = margincodes, disc = disccodes, whatsit = whatcodes, + accent = accentcodes, + fence = fencecodes, } local report_codes = logs.reporter("nodes","codes") @@ -248,174 +257,4 @@ function nodes.showcodes() end end -local whatsit_node = nodecodes.whatsit - -local messyhack = tohash { -- temporary solution - nodecodes.attributelist, - nodecodes.attribute, - nodecodes.gluespec, - nodecodes.action, -} - -function nodes.fields(n) - local id = n.id - if id == whatsit_node then - return node_fields(id,n.subtype) - else - local t = node_fields(id) - if messyhack[id] then - for i=1,#t do - if t[i] == "subtype" then - remove(t,i) - break - end - end - end - return t - end -end - trackers.register("system.showcodes", nodes.showcodes) - -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glue_code = nodecodes.glue - --- if t.id == glue_code then --- local s = t.spec --- print(t) --- print(s,s and s.writable) --- if s and s.writable then --- free_node(s) --- end --- t.spec = nil --- end - -local function remove(head, current, free_too) - local t = current - head, current = remove_node(head,current) - if t then - if free_too then - free_node(t) - t = nil - else - t.next = nil - t.prev = nil - end - end - return head, current, t -end - -nodes.remove = remove - -function nodes.delete(head,current) - return remove(head,current,true) -end - -nodes.before = insert_node_before -nodes.after 
= insert_node_after - --- we need to test this, as it might be fixed now - -function nodes.before(h,c,n) - if c then - if c == h then - n.next = h - n.prev = nil - h.prev = n - else - local cp = c.prev - n.next = c - n.prev = cp - if cp then - cp.next = n - end - c.prev = n - return h, n - end - end - return n, n -end - -function nodes.after(h,c,n) - if c then - local cn = c.next - if cn then - n.next = cn - cn.prev = n - else - n.next = nil - end - c.next = n - n.prev = c - return h, n - end - return n, n -end - --- local h, c = nodes.replace(head,current,new) --- local c = nodes.replace(false,current,new) --- local c = nodes.replace(current,new) - -function nodes.replace(head,current,new) -- no head returned if false - if not new then - head, current, new = false, head, current - end - local prev, next = current.prev, current.next - if next then - new.next = next - next.prev = new - end - if prev then - new.prev = prev - prev.next = new - end - if head then - if head == current then - head = new - end - free_node(current) - return head, new - else - free_node(current) - return new - end -end - --- will move - -local function count(stack,flat) - local n = 0 - while stack do - local id = stack.id - if not flat and id == hlist_code or id == vlist_code then - local list = stack.list - if list then - n = n + 1 + count(list) -- self counts too - else - n = n + 1 - end - else - n = n + 1 - end - stack = stack.next - end - return n -end - -nodes.count = count - -local left, space = lpeg.P("<"), lpeg.P(" ") - -local reference = left * (1-left)^0 * left * space^0 * lpeg.C((1-space)^0) - -function nodes.reference(n) - return lpegmatch(reference,tostring(n)) -end - -if not node.next then - - function node.next(n) return n and n.next end - function node.prev(n) return n and n.prev end - -end diff --git a/tex/context/base/node-ini.mkiv b/tex/context/base/node-ini.mkiv index 39d48a00a..e99653327 100644 --- a/tex/context/base/node-ini.mkiv +++ b/tex/context/base/node-ini.mkiv @@ -18,6 +18,10 @@ \newcount\filterstate \filterstate\plusone % hm, public \registerctxluafile{node-ini}{1.001} +\registerctxluafile{node-met}{1.001} + +\ctxlua{if nodes.gonuts then context.registerctxluafile("node-nut","1.001") end} + \registerctxluafile{node-res}{1.001} \registerctxluafile{node-dir}{1.001} \registerctxluafile{node-aux}{1.001} diff --git a/tex/context/base/node-inj.lua b/tex/context/base/node-inj.lua index 697370cfb..ae48150a6 100644 --- a/tex/context/base/node-inj.lua +++ b/tex/context/base/node-inj.lua @@ -11,6 +11,8 @@ if not modules then modules = { } end modules ['node-inj'] = { -- test fonts. Btw, future versions of luatex will have extended glyph properties -- that can be of help. Some optimizations can go away when we have faster machines. 
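The accentcodes and fencecodes tables added to node-ini.lua above follow the same convention as the other code tables: after the swapped call a single table maps in both directions, number to name and name to number. A small standalone illustration of that pattern, using table.swapped from l-table.lua (the allocate wrapper is left out here):

local swapped = table.swapped -- from l-table.lua

local fencecodes = {
    [0] = "unset",
    [1] = "left",
    [2] = "middle",
    [3] = "right",
}

fencecodes = swapped(fencecodes,fencecodes) -- merge the reverse mapping into a new table

print(fencecodes[1])      -- left
print(fencecodes.middle)  -- 2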
+-- todo: make a special one for context + local next = next local utfchar = utf.char @@ -106,7 +108,7 @@ function injections.setkern(current,factor,rlmode,x,tfmchr) end end -function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanchor, ma=markanchor +function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this local bound = base[a_markbase] -- fails again we should pass it local index = 1 @@ -129,7 +131,7 @@ function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanch base[a_markbase] = bound start[a_markmark] = bound start[a_markdone] = index - marks[bound] = { [index] = { dx, dy, rlmode } } + marks[bound] = { [index] = { dx, dy, rlmode, baseismark } } return dx, dy, bound end @@ -383,6 +385,11 @@ function injections.handler(head,where,keep) else n.xoffset = p.xoffset - d[1] end + local w = n.width + if w ~= 0 then + insert_node_before(head,n,newkern(-w/2)) + insert_node_after(head,n,newkern(-w/2)) + end end -- -- if mk[p] then diff --git a/tex/context/base/node-ltp.lua b/tex/context/base/node-ltp.lua new file mode 100644 index 000000000..97e61cf18 --- /dev/null +++ b/tex/context/base/node-ltp.lua @@ -0,0 +1,3207 @@ +if not modules then modules = { } end modules ['node-par'] = { + version = 1.001, + comment = "companion to node-par.mkiv", + author = "Hans Hagen", + copyright = "ConTeXt Development Team", + license = "see context related readme files", + comment = "a translation of the built in parbuilder, initial convertsin by Taco Hoekwater", +} + +-- todo: remove nest_stack from linebreak.w +-- todo: use ex field as signal (index in ?) +-- todo: attr driven unknown/on/off +-- todo: permit global steps i.e. using an attribute that sets min/max/step and overloads the font parameters +-- todo: split the three passes into three functions +-- todo: simplify the direction stack, no copy needed +-- todo: add more mkiv like tracing +-- todo: add a couple of plugin hooks +-- todo: maybe split expansion code paths +-- todo: fix line numbers (cur_list.pg_field needed) +-- todo: make kerns stretch an option and disable it by default (definitely not shrink) +-- todo: check and improve protrusion +-- todo: arabic etc (we could use pretty large scales there) .. marks and cursive + +--[[ + + This code is derived from traditional TeX and has bits of pdfTeX, Aleph (Omega), and of course LuaTeX. So, + the basic algorithm for sure is not our work. On the other hand, the directional model in LuaTeX is cleaned + up as is other code. And of course there are hooks for callbacks. + + The first version of the code below was a conversion of the C code that in turn was a conversion from the + original Pascal code. Around September 2008 we experimented with cq. discussed possible approaches to improved + typesetting of Arabic and as our policy is that extensions happen in Lua this means that we need a parbuilder + in Lua. Taco's first conversion still looked quite C-ish and in the process of cleaning up we uncovered some odd + bits and pieces in the original code as well. I did some first cleanup to get rid of C-artefacts, and Taco and I + spent the usual amount of Skyping to sort out problems. At that point we diverted to other LuaTeX issues. + + A while later I decided to pick up this thread and decided to look into better ways to deal with font expansion + (aka hz). I got it running using a simpler method. 
One reason why the built-in mechanism is slow is that there is
+    lots of redundancy in calculations. Expanded widths are recalculated each time and because the hpack routine does
+    it again that gives some overhead. In the process extra fonts are created with different dimensions so that the
+    backend can deal with it. The alternative method doesn't create fonts but passes an expansion factor to the
+    pdf generator. The small patch needed for the backend code worked more or less okay but was never integrated into
+    LuaTeX due to lack of time.
+
+    This all happened in 2010 while listening to Peter Gabriel's "Scratch My Back" and Camel's "Rajaz" so it was a
+    rather relaxed job.
+
+    In 2012 I picked up this thread. Because both languages are similar but also quite different it took some time
+    to get compatible output. Because the C code uses macros, careful checking was needed. Of course Lua's table model
+    and local variables brought some work as well. And still the code looks a bit C-ish. We could not diverge too much
+    from the original model simply because it's well documented, but future versions (or variants) might as well look
+    different.
+
+    Eventually I'll split this code into passes so that we can better see what happens, but first we need to reach
+    a decent level of stability. The current expansion results are not the same as the built-in ones but that was never
+    the objective. It all has to do with slightly different calculations.
+
+    The original C code related to protrusion and expansion is not that efficient as many (redundant) function
+    calls take place in the linebreaker and packer. As most work related to fonts is done in the backend, we
+    can simply stick to width calculations here. Also, it is no problem at all that we use floating point
+    calculations (as Lua has only floats). The final result will look ok as the hpack will nicely compensate
+    for rounding errors as it will normally distribute the content well enough. And let's admit: most texies
+    won't see it anyway. As long as we're cross-platform compatible it's fine.
+
+    We use the table checked_expansion to keep track of font related parameters (per paragraph). The table is
+    also the signal that we have adjustments > 1. In retrospect one might wonder if adjusting kerns is such a
+    good idea because other spacing is also not treated. If we were to stick to the regular hpack routine
+    we would have to follow the same logic, but I decided to use a Lua hpacker so that constraint went away. And
+    anyway, instead of doing a lookup in the kern table (which we don't have in node mode) the set kern value
+    is used. Disabling kern scaling will become an option in LuaTeX some day. You can blame me for all errors
+    that crept in and I know that there are some.
+
+    To be honest, I am slowly starting to grasp the magic here, as normally I start from scratch when implementing
+    something (as it's the only way I can understand things). This time I had a recently acquired stack of
+    Porcupine Tree disks to get me through.
+
+    Picking up this effort was inspired by discussions between Luigi Scarso and me about efficiency of Lua
+    code, and we needed some stress tests to compare regular LuaTeX and LuajitTeX. One of the tests was
+    processing tufte.tex as that one has lots of hyphenations and is a tough one to get right.
+ + tufte: boxed 1000 times, no flushing in backend: + + \testfeatureonce{1000}{\setbox0\hbox{\tufte}} + \testfeatureonce{1000}{\setbox0\vbox{\tufte}} + \startparbuilder[basic]\testfeatureonce{1000}{\setbox0\vbox{\tufte}}\stopparbuilder + + method normal hz comment + + luatex tex hbox 9.64 9.64 baseline font feature processing, hyphenation etc: 9.74 + tex vbox 9.84 10.16 0.20 linebreak / 0.52 with hz -> 0.32 hz overhead (150pct more) + lua vbox 17.28 18.43 7.64 linebreak / 8.79 with hz -> 1.33 hz overhead ( 20pct more) + + luajittex tex hbox 6.33 6.33 baseline font feature processing, hyphenation etc: 6.33 + tex vbox 6.53 6.81 0.20 linebreak / 0.48 with hz -> 0.28 hz overhead (expected 0.32) + lua vbox 11.06 11.81 4.53 linebreak / 5.28 with hz -> 0.75 hz overhead + + Interesting is that the runtime for the built-in parbuilder indeed increases much when expansion + is enabled, but in the Lua variant the extra overhead is way less significant. This means that when we + retrofit the same approach into the core, the overhead of expansion can be sort of nilled. + +]]-- + +local utfchar = utf.char +local write, write_nl = texio.write, texio.write_nl +local sub, format = string.sub, string.format +local round = math.round +local insert, remove = table.insert, table.remove + +local fonts, nodes, node = fonts, nodes, node + +local trace_basic = false trackers.register("builders.paragraphs.basic", function(v) trace_basic = v end) +local trace_lastlinefit = false trackers.register("builders.paragraphs.lastlinefit", function(v) trace_lastlinefit = v end) +local trace_adjusting = false trackers.register("builders.paragraphs.adjusting", function(v) trace_adjusting = v end) +local trace_protruding = false trackers.register("builders.paragraphs.protruding", function(v) trace_protruding = v end) +local trace_expansion = false trackers.register("builders.paragraphs.expansion", function(v) trace_expansion = v end) +local trace_quality = false trackers.register("builders.paragraphs.quality", function(v) trace_quality = v end) + +local report_parbuilders = logs.reporter("nodes","parbuilders") +local report_hpackers = logs.reporter("nodes","hpackers") + +local calculate_badness = tex.badness +local texnest = tex.nest +local texlists = tex.lists + +-- (t == 0 and 0) or (s <= 0 and 10000) or calculate_badness(t,s) + +-- local function calculate_badness(t,s) +-- if t == 0 then +-- return 0 +-- elseif s <= 0 then +-- return 10000 -- infinite_badness +-- else +-- local r +-- if t <= 7230584 then +-- r = (t * 297) / s +-- elseif s >= 1663497 then +-- r = t / (s / 297) +-- else +-- r = t +-- end +-- if r > 1290 then +-- return 10000 -- infinite_badness +-- else +-- return (r * r * r + 0x20000) / 0x40000 +-- end +-- end +-- end + +local parbuilders = builders.paragraphs +local constructors = parbuilders.constructors + +local setmetatableindex = table.setmetatableindex + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local chardata = fonthashes.characters +local quaddata = fonthashes.quads +local parameters = fonthashes.parameters + +local slide_nodes = node.slide +local new_node = node.new +local copy_node = node.copy +local copy_node_list = node.copy_list +local flush_node = node.free +local flush_node_list = node.flush_list +local hpack_nodes = node.hpack +local xpack_nodes = node.hpack +local replace_node = nodes.replace +local insert_node_after = node.insert_after +local insert_node_before = node.insert_before +local traverse_by_id = node.traverse_id + +local setnodecolor = 
nodes.tracers.colors.set + +local nodepool = nodes.pool + +local nodecodes = nodes.nodecodes +local whatcodes = nodes.whatcodes +local kerncodes = nodes.kerncodes +local glyphcodes = nodes.glyphcodes +local gluecodes = nodes.gluecodes +local margincodes = nodes.margincodes +local disccodes = nodes.disccodes +local mathcodes = nodes.mathcodes +local fillcodes = nodes.fillcodes + +local temp_code = nodecodes.temp +local glyph_code = nodecodes.glyph +local ins_code = nodecodes.ins +local mark_code = nodecodes.mark +local adjust_code = nodecodes.adjust +local penalty_code = nodecodes.penalty +local whatsit_code = nodecodes.whatsit +local disc_code = nodecodes.disc +local math_code = nodecodes.math +local kern_code = nodecodes.kern +local glue_code = nodecodes.glue +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local unset_code = nodecodes.unset +local marginkern_code = nodecodes.marginkern + +local leaders_code = gluecodes.leaders + +local localpar_code = whatcodes.localpar +local dir_code = whatcodes.dir +local pdfrefximage_code = whatcodes.pdfrefximage +local pdfrefxform_code = whatcodes.pdfrefxform + +local kerning_code = kerncodes.kerning -- font kern +local userkern_code = kerncodes.userkern + +local ligature_code = glyphcodes.ligature + +local stretch_orders = nodes.fillcodes + +local leftmargin_code = margincodes.left +local rightmargin_code = margincodes.right + +local automatic_disc_code = disccodes.automatic +local regular_disc_code = disccodes.regular +local first_disc_code = disccodes.first +local second_disc_code = disccodes.second + +local endmath_code = mathcodes.endmath + +local nosubtype_code = 0 + +local unhyphenated_code = nodecodes.unhyphenated or 1 +local hyphenated_code = nodecodes.hyphenated or 2 +local delta_code = nodecodes.delta or 3 +local passive_code = nodecodes.passive or 4 + +local maxdimen = number.maxdimen + +local max_halfword = 0x7FFFFFFF +local infinite_penalty = 10000 +local eject_penalty = -10000 +local infinite_badness = 10000 +local awful_badness = 0x3FFFFFFF + +local fit_very_loose_class = 0 -- fitness for lines stretching more than their stretchability +local fit_loose_class = 1 -- fitness for lines stretching 0.5 to 1.0 of their stretchability +local fit_decent_class = 2 -- fitness for all other lines +local fit_tight_class = 3 -- fitness for lines shrinking 0.5 to 1.0 of their shrinkability + +local new_penalty = nodepool.penalty +local new_dir = nodepool.textdir +local new_leftmarginkern = nodepool.leftmarginkern +local new_rightmarginkern = nodepool.rightmarginkern +local new_leftskip = nodepool.leftskip +local new_rightskip = nodepool.rightskip +local new_lineskip = nodepool.lineskip +local new_baselineskip = nodepool.baselineskip +local new_temp = nodepool.temp +local new_rule = nodepool.rule + +local is_rotated = nodes.is_rotated +local is_parallel = nodes.textdir_is_parallel +local is_opposite = nodes.textdir_is_opposite +local textdir_is_equal = nodes.textdir_is_equal +local pardir_is_equal = nodes.pardir_is_equal +local glyphdir_is_equal = nodes.glyphdir_is_equal + +local dir_pops = nodes.dir_is_pop +local dir_negations = nodes.dir_negation +local is_skipable = node.protrusion_skippable + +-- helpers -- + +-- It makes more sense to move the somewhat messy dir state tracking +-- out of the main functions. First we create a stack allocator. + +local function new_dir_stack(dir) -- also use elsewhere + return { n = 0, dir } +end + +-- The next function checks a dir node and returns the new dir state. 
By +-- using s static table we are quite efficient. This function is used +-- in the parbuilder. + +local function checked_line_dir(stack,current) + if not dir_pops[current] then + local n = stack.n + 1 + stack.n = n + stack[n] = current + return current.dir + elseif n > 0 then + local n = stack.n + local dirnode = stack[n] + dirstack.n = n - 1 + return dirnode.dir + else + report_parbuilders("warning: missing pop node (%a)",1) -- in line ... + end +end + +-- The next function checks a dir nodes in a list and appends the negations +-- that are currently needed (some day LuaTeX will be more tolerant). We use +-- the negations for the next line. + +local function inject_dirs_at_end_of_line(stack,current,start,stop) + local e = start + local n = stack.n + local h = nil + while start and start ~= stop do + if start.id == whatsit_code and start.subtype == dir_code then + if not dir_pops[start.dir] then + n = n + 1 + stack[n] = start + elseif n > 0 then + n = n - 1 + else + report_parbuilders("warning: missing pop node (%a)",2) -- in line ... + end + end + start = start.next + end + for i=n,1,-1 do + h, current = insert_node_after(current,current,new_dir(dir_negations[stack[i].dir])) + end + stack.n = n + return current +end + +local function inject_dirs_at_begin_of_line(stack,current) + local h = nil + for i=stack.n,1,-1 do + h, current = insert_node_after(current,current,new_dir(stack[i])) + end + stack.n = 0 + return current +end + +-- diagnostics -- + +local dummy = function() end + +local diagnostics = { + start = dummy, + stop = dummy, + current_pass = dummy, + break_node = dummy, + feasible_break = dummy, +} + +-- statistics -- + +local nofpars, noflines, nofprotrudedlines, nofadjustedlines = 0, 0, 0, 0 + +local function register_statistics(par) + local statistics = par.statistics + nofpars = nofpars + 1 + noflines = noflines + statistics.noflines + nofprotrudedlines = nofprotrudedlines + statistics.nofprotrudedlines + nofadjustedlines = nofadjustedlines + statistics.nofadjustedlines +end + +-- resolvers -- + +local whatsiters = { + get_width = { }, + get_dimensions = { }, +} + +local get_whatsit_width = whatsiters.get_width +local get_whatsit_dimensions = whatsiters.get_dimensions + +local function get_width (n) return n.width end +local function get_dimensions(n) return n.width, n.height, n.depth end + +get_whatsit_width[pdfrefximage_code] = get_width +get_whatsit_width[pdfrefxform_code ] = get_width + +get_whatsit_dimensions[pdfrefximage_code] = get_dimensions +get_whatsit_dimensions[pdfrefxform_code ] = get_dimensions + +-- expansion etc -- + +local function calculate_fraction(x,n,d,max_answer) + local the_answer = x * n/d + 1/2 -- round ? + if the_answer > max_answer then + return max_answer + elseif the_answer < -max_answer then + return -max_answer + else + return the_answer + end +end + +local function check_shrinkage(par,n) + -- called often, so maybe move inline + if n.shrink_order ~= 0 and n.shrink ~= 0 then + if par.no_shrink_error_yet then + par.no_shrink_error_yet = false + report_parbuilders("infinite glue shrinkage found in a paragraph and removed") + end + n = copy_node(n) + n.shrink_order = 0 + end + return n +end + +-- It doesn't really speed up much but the additional memory usage is +-- rather small so it doesn't hurt too much. 
+ +local expansions = { } +local nothing = { stretch = 0, shrink = 0 } + +setmetatableindex(expansions,function(t,font) + local expansion = parameters[font].expansion -- can be an extra hash + if expansion and expansion.auto then + local factors = { } + local c = chardata[font] + setmetatableindex(factors,function(t,char) + local fc = c[char] + local ef = fc.expansion_factor + if ef and ef > 0 then + local stretch = expansion.stretch + local shrink = expansion.shrink + if stretch ~= 0 or shrink ~= 0 then + local factor = ef / 1000 + local ef_quad = factor * quaddata[font] / 1000 + local v = { + glyphstretch = stretch * ef_quad, + glyphshrink = shrink * ef_quad, + factor = factor, + stretch = stretch, + shrink = shrink , + } + t[char] = v + return v + end + end + t[char] = nothing + return nothing + end) + t[font] = factors + return factors + else + t[font] = false + return false + end +end) + +-- local function char_stretch_shrink(p) +-- local data = expansions[p.font][p.char] +-- if data then +-- return data.glyphstretch, data.glyphshrink +-- else +-- return 0, 0 +-- end +-- end +-- +-- local cal_margin_kern_var = char_stretch_shrink + +-- local function kern_stretch_shrink(p,d) +-- local l = p.prev +-- if l and l.id == glyph_code then -- how about disc nodes? +-- local r = p.next +-- if r and r.id == glyph_code then +-- local lf, rf = l.font, r.font +-- if lf == rf then +-- local data = expansions[lf][l.char] +-- if data then +-- local stretch = data.stretch +-- local shrink = data.shrink +-- if stretch ~= 0 then +-- -- stretch = data.factor * (d * stretch - d) +-- stretch = data.factor * d * (stretch - 1) +-- end +-- if shrink ~= 0 then +-- -- shrink = data.factor * (d * shrink - d) +-- shrink = data.factor * d * (shrink - 1) +-- end +-- return stretch, shrink +-- end +-- end +-- end +-- end +-- return 0, 0 +-- end + +local function kern_stretch_shrink(p,d) + local left = p.prev + if left and left.id == glyph_code then -- how about disc nodes? 
+ local data = expansions[left.font][left.char] + if data then + local stretch = data.stretch + local shrink = data.shrink + if stretch ~= 0 then + -- stretch = data.factor * (d * stretch - d) + stretch = data.factor * d * (stretch - 1) + end + if shrink ~= 0 then + -- shrink = data.factor * (d * shrink - d) + shrink = data.factor * d * (shrink - 1) + end + return stretch, shrink + end + end + return 0, 0 +end + +local function kern_stretch_shrink(p,d) + return 0, 0 +end + +-- state: + +local function check_expand_pars(checked_expansion,f) + local expansion = parameters[f].expansion + if not expansion then + checked_expansion[f] = false + return false + end + local step = expansion.step or 0 + local stretch = expansion.stretch or 0 + local shrink = expansion.shrink or 0 + if step == 0 or (stretch == 0 and schrink == 0) then + checked_expansion[f] = false + return false + end + local par = checked_expansion.par + if par.cur_font_step < 0 then + par.cur_font_step = step + elseif par.cur_font_step ~= step then + report_parbuilders("using fonts with different step of expansion in one paragraph is not allowed") + checked_expansion[f] = false + return false + end + if stretch == 0 then + -- okay + elseif par.max_stretch_ratio < 0 then + par.max_stretch_ratio = stretch -- expansion_factor + elseif par.max_stretch_ratio ~= stretch then + report_parbuilders("using fonts with different stretch limit of expansion in one paragraph is not allowed") + checked_expansion[f] = false + return false + end + if shrink == 0 then + -- okay + elseif par.max_shrink_ratio < 0 then + par.max_shrink_ratio = shrink -- - expansion_factor + elseif par.max_shrink_ratio ~= shrink then + report_parbuilders("using fonts with different shrink limit of expansion in one paragraph is not allowed") + checked_expansion[f] = false + return false + end + if trace_adjusting then + report_parbuilders("expanding font %a using step %a, shrink %a and stretch %a",f,step,stretch,shrink) + end + local e = expansions[f] + checked_expansion[f] = e + return e +end + +local function check_expand_lines(checked_expansion,f) + local expansion = parameters[f].expansion + if not expansion then + checked_expansion[f] = false + return false + end + local step = expansion.step or 0 + local stretch = expansion.stretch or 0 + local shrink = expansion.shrink or 0 + if step == 0 or (stretch == 0 and schrink == 0) then + checked_expansion[f] = false + return false + end + if trace_adjusting then + report_parbuilders("expanding font %a using step %a, shrink %a and stretch %a",f,step,stretch,shrink) + end + local e = expansions[f] + checked_expansion[f] = e + return e +end + +-- protrusion + +local function find(head) -- do we really want to recurse into an hlist? + while head do + local id = head.id + if id == glyph_code then + return head + elseif id == hlist_code then + local found = find(head.list) + if found then + return found + else + head = head.next + end + elseif is_skipable(head) then + head = head.next + else + return head + end + end + return nil +end + +local function find_protchar_left(l) -- weird function + local ln = l.next + if ln and ln.id == hlist_code and not ln.list and ln.width == 0 and ln.height == 0 and ln.depth == 0 then + l = l.next + else -- if d then -- was always true + local id = l.id + while ln and not (id == glyph_code or id < math_code) do -- is there always a glyph? 
+ l = ln + ln = l.next + id = ln.id + end + end + -- if l.id == glyph_code then + -- return l + -- end + return find(l) or l +end + +local function find(head,tail) + local tail = tail or slide_nodes(head) + while tail do + local id = tail.id + if id == glyph_code then + return tail + elseif id == hlist_code then + local found = find(tail.list) + if found then + return found + else + tail = tail.prev + end + elseif is_skipable(tail) then + tail = tail.prev + else + return tail + end + end + return nil +end + +local function find_protchar_right(l,r) + return r and find(l,r) or r +end + +local function left_pw(p) + local font = p.font + local prot = chardata[font][p.char].left_protruding + if not prot or prot == 0 then + return 0 + end + return prot * quaddata[font] / 1000, p +end + +local function right_pw(p) + local font = p.font + local prot = chardata[font][p.char].right_protruding + if not prot or prot == 0 then + return 0 + end + return prot * quaddata[font] / 1000, p +end + +-- par parameters + +local function reset_meta(par) + local active = { + id = hyphenated_code, + line_number = max_halfword, + } + active.next = par.active -- head of metalist + par.active = active + par.passive = nil +end + +local function add_to_width(line_break_dir,checked_expansion,s) -- split into two loops (normal and expansion) + local size = 0 + local adjust_stretch = 0 + local adjust_shrink = 0 + while s do + local id = s.id + if id == glyph_code then + if is_rotated[line_break_dir] then -- can be shared + size = size + s.height + s.depth + else + size = size + s.width + end + if checked_expansion then + local data = checked_expansion[s.font] + if data then + data = data[s.char] + if data then + adjust_stretch = adjust_stretch + data.glyphstretch + adjust_shrink = adjust_shrink + data.glyphshrink + end + end + end + elseif id == hlist_code or id == vlist_code then + if is_parallel[s.dir][line_break_dir] then + size = size + s.width + else + size = size + s.depth + s.height + end + elseif id == kern_code then + if checked_expansion and s.subtype == kerning_code then + local d = s.kern + if d ~= 0 then + local stretch, shrink = kern_stretch_shrink(s,d) + adjust_stretch = adjust_stretch + stretch + adjust_shrink = adjust_shrink + shrink + end + end + size = size + s.kern + elseif id == rule_code then + size = size + s.width + else + report_parbuilders("unsupported node at location %a",6) + end + s = s.next + end + return size, adjust_stretch, adjust_shrink +end + +local function compute_break_width(par,break_type,p) -- split in two + local break_width = par.break_width + if break_type > unhyphenated_code then + local disc_width = par.disc_width + local checked_expansion = par.checked_expansion + local line_break_dir = par.line_break_dir + local break_size = break_width.size + disc_width.size + local break_adjust_stretch = break_width.adjust_stretch + disc_width.adjust_stretch + local break_adjust_shrink = break_width.adjust_shrink + disc_width.adjust_shrink + local replace = p.replace + if replace then + local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace) + break_size = break_size - size + break_adjust_stretch = break_adjust_stretch - adjust_stretch + break_adjust_shrink = break_adjust_shrink - adjust_shrink + end + local post = p.post + if post then + local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,post) + break_size = break_size + size + break_adjust_stretch = break_adjust_stretch + adjust_stretch + break_adjust_shrink 
= break_adjust_shrink + adjust_shrink + end + break_width.size = break_size + break_width.adjust_stretch = break_adjust_stretch + break_width.adjust_shrink = break_adjust_shrink + if not post then + p = p.next + else + return + end + end + while p do -- skip spacing etc + local id = p.id + if id == glyph_code then + return -- happens often + elseif id == glue_code then + local spec = p.spec + local order = stretch_orders[spec.stretch_order] + break_width.size = break_width.size - spec.width + break_width[order] = break_width[order] - spec.stretch + break_width.shrink = break_width.shrink - spec.shrink + elseif id == penalty_code then + -- do nothing + elseif id == kern_code then + if p.subtype == userkern_code then + break_width.size = break_width.size - p.kern + else + return + end + elseif id == math_code then + break_width.size = break_width.size - p.surround + else + return + end + p = p.next + end +end + +local function append_to_vlist(par, b) + local prev_depth = par.prev_depth + if prev_depth > par.ignored_dimen then + if b.id == hlist_code then + local d = par.baseline_skip.width - prev_depth - b.height -- deficiency of space between baselines + local s = d < par.line_skip_limit and new_lineskip(tex.lineskip) or new_baselineskip(d) + -- local s = d < par.line_skip_limit + -- if s then + -- s = new_lineskip() + -- s.spec = tex.lineskip + -- else + -- s = new_baselineskip(d) + -- end + local head_field = par.head_field + if head_field then + local n = slide_nodes(head_field) + n.next, s.prev = s, n + else + par.head_field = s + end + end + end + local head_field = par.head_field + if head_field then + local n = slide_nodes(head_field) + n.next, b.prev = b, n + else + par.head_field = b + end + if b.id == hlist_code then + local pd = b.depth + par.prev_depth = pd + texnest[texnest.ptr].prevdepth = pd + end +end + +local function append_list(par, b) + local head_field = par.head_field + if head_field then + local n = slide_nodes(head_field) + n.next, b.prev = b, n + else + par.head_field = b + end +end + +-- We can actually make par local to this module as we never break inside a break call and that way the +-- array is reused. At some point the information will be part of the paragraph spec as passed. + +local function initialize_line_break(head,display) + + local hang_indent = tex.hangindent or 0 + local hsize = tex.hsize or 0 + local hang_after = tex.hangafter or 0 + local par_shape_ptr = tex.parshape + local left_skip = tex.leftskip -- nodes + local right_skip = tex.rightskip -- nodes + local pretolerance = tex.pretolerance + local tolerance = tex.tolerance + local adjust_spacing = tex.pdfadjustspacing + local protrude_chars = tex.pdfprotrudechars + local last_line_fit = tex.lastlinefit + + local newhead = new_temp() + newhead.next = head + + local adjust_spacing_status = adjust_spacing > 1 and -1 or 0 + + -- metatables + + local par = { + head = newhead, + head_field = nil, + display = display, + font_in_short_display = 0, + no_shrink_error_yet = true, -- have we complained about infinite shrinkage? + second_pass = false, -- is this our second attempt to break this paragraph? + final_pass = false, -- is this our final attempt to break this paragraph? 
+ threshold = 0, -- maximum badness on feasible lines + + passive = nil, -- most recent node on passive list + printed_node = head, -- most recent node that has been printed + pass_number = 0, -- the number of passive nodes allocated on this pass + auto_breaking = 0, -- make auto_breaking accessible out of line_break + + active_width = { size = 0, stretch = 0, fi = 0, fil = 0, fill = 0, filll = 0, shrink = 0, adjust_stretch = 0, adjust_shrink = 0 }, + break_width = { size = 0, stretch = 0, fi = 0, fil = 0, fill = 0, filll = 0, shrink = 0, adjust_stretch = 0, adjust_shrink = 0 }, + disc_width = { size = 0, adjust_stretch = 0, adjust_shrink = 0 }, + fill_width = { stretch = 0, fi = 0, fil = 0, fill = 0, filll = 0, shrink = 0 }, + background = { size = 0, stretch = 0, fi = 0, fil = 0, fill = 0, filll = 0, shrink = 0 }, + + hang_indent = hang_indent, + hsize = hsize, + hang_after = hang_after, + par_shape_ptr = par_shape_ptr, + left_skip = left_skip, + right_skip = right_skip, + pretolerance = pretolerance, + tolerance = tolerance, + + protrude_chars = protrude_chars, + adjust_spacing = adjust_spacing, + max_stretch_ratio = adjust_spacing_status, + max_shrink_ratio = adjust_spacing_status, + cur_font_step = adjust_spacing_status, + checked_expansion = false, + tracing_paragraphs = tex.tracingparagraphs > 0, + + emergency_stretch = tex.emergencystretch or 0, + looseness = tex.looseness or 0, + line_penalty = tex.linepenalty or 0, + hyphen_penalty = tex.hyphenpenalty or 0, + broken_penalty = tex.brokenpenalty or 0, + inter_line_penalty = tex.interlinepenalty or 0, + club_penalty = tex.clubpenalty or 0, + widow_penalty = tex.widowpenalty or 0, + display_widow_penalty = tex.displaywidowpenalty or 0, + ex_hyphen_penalty = tex.exhyphenpenalty or 0, + + adj_demerits = tex.adjdemerits or 0, + double_hyphen_demerits = tex.doublehyphendemerits or 0, + final_hyphen_demerits = tex.finalhyphendemerits or 0, + + first_line = 0, -- tex.nest.modeline, -- 0, -- cur_list.pg_field + + each_line_height = tex.pdfeachlineheight or 0, -- this will go away + each_line_depth = tex.pdfeachlinedepth or 0, -- this will go away + first_line_height = tex.pdffirstlineheight or 0, -- this will go away + last_line_depth = tex.pdflastlinedepth or 0, -- this will go away + ignored_dimen = tex.pdfignoreddimen or 0, -- this will go away + + baseline_skip = tex.baselineskip or 0, + line_skip_limit = tex.lineskiplimit or 0, + + prev_depth = texnest[texnest.ptr].prevdepth, + + final_par_glue = slide_nodes(head), -- todo: we know tail already, slow + + par_break_dir = tex.pardir, + line_break_dir = tex.pardir, + + internal_pen_inter = 0, -- running localinterlinepenalty + internal_pen_broken = 0, -- running localbrokenpenalty + internal_left_box = nil, -- running localleftbox + internal_left_box_width = 0, -- running localleftbox width + init_internal_left_box = nil, -- running localleftbox + init_internal_left_box_width = 0, -- running localleftbox width + internal_right_box = nil, -- running localrightbox + internal_right_box_width = 0, -- running localrightbox width + + best_place = { }, -- how to achieve minimal_demerits + best_pl_line = { }, -- corresponding line number + easy_line = 0, -- line numbers easy_line are equivalent in break nodes + last_special_line = 0, -- line numbers last_special_line all have the same width + first_width = 0, -- the width of all lines last_special_line, if no parshape has been specified + second_width = 0, -- the width of all lines last_special_line + first_indent = 0, -- left margin to go with 
first_width + second_indent = 0, -- left margin to go with second_width + + best_bet = nil, -- use this passive node and its predecessors + fewest_demerits = 0, -- the demerits associated with best_bet + best_line = 0, -- line number following the last line of the new paragraph + line_diff = 0, -- the difference between the current line number and the optimum best_line + + -- not yet used + + best_pl_short = { }, -- shortfall corresponding to minimal_demerits + best_pl_glue = { }, -- corresponding glue stretch or shrink + do_last_line_fit = false, + last_line_fit = last_line_fit, + + minimum_demerits = awful_badness, + + minimal_demerits = { + + [fit_very_loose_class] = awful_badness, + [fit_loose_class] = awful_badness, + [fit_decent_class] = awful_badness, + [fit_tight_class] = awful_badness, + + }, + + prev_char_p = nil, + + font_steps = { }, -- mine + + statistics = { + + noflines = 0, + nofprotrudedlines = 0, + nofadjustedlines = 0, + + } + + } + + if adjust_spacing > 1 then + local checked_expansion = { par = par } + setmetatableindex(checked_expansion,check_expand_pars) + par.checked_expansion = checked_expansion + end + + -- we need par for the error message + + local background = par.background + + local l = check_shrinkage(par,left_skip) + local r = check_shrinkage(par,right_skip) + local l_order = stretch_orders[l.stretch_order] + local r_order = stretch_orders[r.stretch_order] + + background.size = l.width + r.width + background.shrink = l.shrink + r.shrink + background[l_order] = l.stretch + background[r_order] = r.stretch + background[r_order] + + -- this will move up so that we can assign the whole par table + + if not par_shape_ptr then + if hang_indent == 0 then + par.second_width = hsize + par.second_indent = 0 + else + local abs_hang_after = hang_after >0 and hang_after or -hang_after + local abs_hang_indent = hang_indent>0 and hang_indent or -hang_indent + par.last_special_line = abs_hang_after + if hang_after < 0 then + par.first_width = hsize - abs_hang_indent + if hang_indent >= 0 then + par.first_indent = hang_indent + else + par.first_indent = 0 + end + par.second_width = hsize + par.second_indent = 0 + else + par.first_width = hsize + par.first_indent = 0 + par.second_width = hsize - abs_hang_indent + if hang_indent >= 0 then + par.second_indent = hang_indent + else + par.second_indent = 0 + end + end + end + else + local last_special_line = #par_shape_ptr + par.last_special_line = last_special_line + local ptr = par_shape_ptr[last_special_line] + par.second_width = ptr[2] + par.second_indent = ptr[1] + end + + if par.looseness == 0 then + par.easy_line = par.last_special_line + else + par.easy_line = max_halfword + end + + if pretolerance >= 0 then + par.threshold = pretolerance + par.second_pass = false + par.final_pass = false + else + par.threshold = tolerance + par.second_pass = true + par.final_pass = par.emergency_stretch <= 0 + if trace_basic then + if par.final_pass then + report_parbuilders("enabling second and final pass") + else + report_parbuilders("enabling second pass") + end + end + end + + if last_line_fit > 0 then + local spec = par.final_par_glue.spec + local stretch = spec.stretch + local stretch_order = spec.stretch_order + if stretch > 0 and stretch_order > 0 and background.fi == 0 and background.fil == 0 and background.fill == 0 and background.filll == 0 then + par.do_last_line_fit = true + local si = stretch_orders[stretch_order] + if trace_lastlinefit or trace_basic then + report_parbuilders("enabling last line fit, stretch order %a set to 
%a, linefit is %a",si,stretch,last_line_fit) + end + par.fill_width[si] = stretch + end + end + + return par +end + +local function post_line_break(par) + + local prevgraf = texnest[texnest.ptr].prevgraf + local cur_line = prevgraf + 1 -- the current line number being justified + local cur_p = nil + + local adjust_spacing = par.adjust_spacing + local protrude_chars = par.protrude_chars + local statistics = par.statistics + + local p, s, k, w -- check when local + + local q = par.best_bet.break_node + repeat -- goto first breakpoint + local r = q + q = q.prev_break + r.prev_break = cur_p + cur_p = r + until not q + + local stack = new_dir_stack() + + repeat + + inject_dirs_at_begin_of_line(stack,par.head) + + local q = nil + local r = cur_p.cur_break + + local disc_break = false + local post_disc_break = false + local glue_break = false + + if not r then + r = slide_nodes(par.head) + if r == par.final_par_glue then + q = r -- q refers to the last node of the line (and paragraph) + r = r.prev -- r refers to the node after which the dir nodes should be closed + end + else + local id = r.id + if id == glue_code then + -- r is normal skip + r = replace_node(r,new_rightskip(par.right_skip)) + glue_break = true + q = r -- q refers to the last node of the line + r = r.prev -- r refers to the node after which the dir nodes should be closed + elseif id == disc_code then + -- todo: use insert_before/after + local prev_r = r.prev + local next_r = r.next + local subtype = r.subtype + local pre = r.pre + local post = r.post + local replace = r.replace + if subtype == second_disc_code then + if not (prev_r.id == disc_code and prev_r.subtype == first_disc_code) then + report_parbuilders('unsupported disc at location %a',3) + end + if pre then + flush_node_list(pre) + r.pre = nil + pre = nil -- signal + end + if replace then + local n = slide_nodes(replace) + prev_r.next = replace + replace.prev = prev_r + n.next = r + r.prev = n + r.replace = nil + replace = nil -- signal + end + local pre = prev_r.pre + local post = prev_r.post + local replace = prev_r.replace + if pre then + flush_node_list(pre) + prev_r.pre = nil + end + if replace then + flush_node_list(replace) + prev_r.replace = nil + end + if post then + flush_node_list(post) + prev_r.post = nil + end + elseif subtype == first_disc_code then + if not (v.id == disc_code and v.subtype == second_disc_code) then + report_parbuilders('unsupported disc at location %a',4) + end + next_r.subtype = regular_disc_code + next_r.replace = post + r.post = nil + end + if replace then + r.replace = nil -- free + flush_node_list(replace) + end + if pre then + local n = slide_nodes(pre) + prev_r.next = pre + pre.prev = prev_r + n.next = r + r.prev = n + r.pre = nil + end + if post then + local n = slide_nodes(post) + r.next = post + post.prev = r + n.next = next_r + next_r.prev = n + r.post = nil + post_disc_break = true + end + disc_break = true + elseif id == kern_code then + r.kern = 0 + elseif r.id == math_code then + r.surround = 0 + end + end + r = inject_dirs_at_end_of_line(stack,r,par.head.next,cur_p.cur_break) + local crb = cur_p.passive_right_box + if crb then + local s = copy_node(crb) + local e = r.next + r.next = s + s.prev = r + s.next = e + if e then + e.prev = s + end + r = s + end + if not q then + q = r + end + if q and q ~= par.head and protrude_chars > 0 then + local id = q.id + local c = (disc_break and (id == glyph_code or id ~= disc_code) and q) or q.prev + local p = find_protchar_right(par.head.next,c) + if p and p.id == glyph_code then + 
local w, last_rightmost_char = right_pw(p) + if last_rightmost_char and w ~= 0 then + -- so we inherit attributes, q is new pseudo head + q, c = insert_node_after(q,c,new_rightmarginkern(copy_node(last_rightmost_char),-w)) + end + end + end + if not glue_break then + local h + h, q = insert_node_after(q,q,new_rightskip(par.right_skip)) -- q moves on as pseudo head + end + r = q.next + q.next = nil + local phead = par.head + q = phead.next + phead.next = r + if r then + r.prev = phead + end + local clb = cur_p.passive_left_box + if clb then -- here we miss some prev links + local s = copy_node(cb) + s = q.next + r.next = q + q = r + if s and cur_line == (par.first_line + 1) and s.id == hlist_code and not s.list then + q = q.next + r.next = s.next + s.next = r + end + end + if protrude_chars > 0 then + local p = find_protchar_left(q) + if p and p.id == glyph_code then + local w, last_leftmost_char = left_pw(p) + if last_leftmost_char and w ~= 0 then + -- so we inherit attributes, q is pseudo head and moves back + q = insert_node_before(q,q,new_leftmarginkern(copy_node(last_leftmost_char),-w)) + end + end + end + local ls = par.left_skip + if ls and (ls.width ~= 0 or ls.stretch ~= 0 or ls.shrink ~= 0) then + q = insert_node_before(q,q,new_leftskip(ls)) + end + local curwidth, cur_indent + if cur_line > par.last_special_line then + cur_indent = par.second_indent + cur_width = par.second_width + else + local psp = par.par_shape_ptr + if psp then + cur_indent = psp[cur_line][1] + cur_width = psp[cur_line][2] + else + cur_indent = par.first_indent + cur_width = par.first_width + end + end + statistics.noflines = statistics.noflines + 1 + if adjust_spacing > 0 then + statistics.nofadjustedlines = statistics.nofadjustedlines + 1 + just_box = xpack_nodes(q,cur_width,"cal_expand_ratio",par.par_break_dir) -- ,cur_p.analysis) + else + just_box = xpack_nodes(q,cur_width,"exactly",par.par_break_dir) -- ,cur_p.analysis) + end + if protrude_chars > 0 then + statistics.nofprotrudedlines = statistics.nofprotrudedlines + 1 + end + -- wrong: + local adjust_head = texlists.adjust_head + local pre_adjust_head = texlists.pre_adjust_head + -- + just_box.shift = cur_indent + if par.each_line_height ~= par.ignored_dimen then + just_box.height = par.each_line_height + end + if par.each_line_depth ~= par.ignored_dimen then + just_box.depth = par.each_line_depth + end + if par.first_line_height ~= par.ignored_dimen and (cur_line == par.first_line + 1) then + just_box.height = par.first_line_height + end + if par.last_line_depth ~= par.ignored_dimen and cur_line + 1 == par.best_line then + just_box.depth = par.last_line_depth + end + if texlists.pre_adjust_head ~= pre_adjust_head then + append_list(par, texlists.pre_adjust_head) + texlists.pre_adjust_head = pre_adjust_head + end + append_to_vlist(par, just_box) + if texlists.adjust_head ~= adjust_head then + append_list(par, texlists.adjust_head) + texlists.adjust_head = adjust_head + end + local pen + if cur_line + 1 ~= par.best_line then + if cur_p.passive_pen_inter then + pen = cur_p.passive_pen_inter + else + pen = par.inter_line_penalty + end + if cur_line == prevgraf + 1 then + pen = pen + par.club_penalty + end + if cur_line + 2 == par.best_line then + if par.display then + pen = pen + par.display_widow_penalty + else + pen = pen + par.widow_penalty + end + end + if disc_break then + if cur_p.passive_pen_broken ~= 0 then + pen = pen + cur_p.passive_pen_broken + else + pen = pen + par.broken_penalty + end + end + if pen ~= 0 then + 
append_to_vlist(par,new_penalty(pen)) + end + end + cur_line = cur_line + 1 + cur_p = cur_p.prev_break + if cur_p and not post_disc_break then + local phead = par.head + local r = phead + while true do + q = r.next + if q == cur_p.cur_break or q.id == glyph_code then + break + end + local id = q.id + if not (id == whatsit_code and q.subtype == localpar_code) then + if id < math_code or (id == kern_code and q.subtype ~= userkern_code) then + break + end + end + r = q + end + if r ~= phead then + r.next = nil + flush_node_list(phead.next) + phead.next = q + if q then + q.prev = phead + end + end + end + until not cur_p + if cur_line ~= par.best_line then -- or not par.head.next then + report_parbuilders("line breaking") + end + if par.head then -- added +-- flush_node(par.head) -- the localpar_code whatsit + par.head = nil + end + cur_line = cur_line - 1 + if trace_basic then + report_parbuilders("paragraph broken into %a lines",cur_line) + end + texnest[texnest.ptr].prevgraf = cur_line +end + +local function wrap_up(par) + if par.tracing_paragraphs then + diagnostics.stop() + end + if par.do_last_line_fit then + local best_bet = par.best_bet + local active_short = best_bet.active_short + local active_glue = best_bet.active_glue + if active_short == 0 then + if trace_lastlinefit then + report_parbuilders("disabling last line fit, no active_short") + end + par.do_last_line_fit = false + else + local glue = par.final_par_glue + local spec = copy_node(glue.spec) + spec.width = spec.width + active_short - active_glue + spec.stretch = 0 + -- flush_node(glue.spec) -- brrr, when we do this we can get an "invalid id stretch message", maybe dec refcount + glue.spec = spec + if trace_lastlinefit then + report_parbuilders("applying last line fit, short %a, glue %p",active_short,active_glue) + end + end + end + -- we have a bunch of glue and and temp nodes not freed + local head = par.head + if head.id == temp_code then + par.head = head.next + flush_node(head) + end + post_line_break(par) + reset_meta(par) + register_statistics(par) + return par.head_field +end + +-- we could do active nodes differently ... 
table instead of linked list or a list +-- with prev nodes + +local function deactivate_node(par,prev_prev_r,prev_r,r,cur_active_width,checked_expansion) -- no need for adjust if disabled + local active = par.active + local active_width = par.active_width + prev_r.next = r.next + -- removes r + -- r = nil + if prev_r == active then + r = active.next + if r.id == delta_code then + local aw = active_width.size + r.size active_width.size = aw cur_active_width.size = aw + local aw = active_width.stretch + r.stretch active_width.stretch = aw cur_active_width.stretch = aw + local aw = active_width.fi + r.fi active_width.fi = aw cur_active_width.fi = aw + local aw = active_width.fil + r.fil active_width.fil = aw cur_active_width.fil = aw + local aw = active_width.fill + r.fill active_width.fill = aw cur_active_width.fill = aw + local aw = active_width.filll + r.filll active_width.filll = aw cur_active_width.filll = aw + local aw = active_width.shrink + r.shrink active_width.shrink = aw cur_active_width.shrink = aw + if checked_expansion then + local aw = active_width.adjust_stretch + r.adjust_stretch active_width.adjust_stretch = aw cur_active_width.adjust_stretch = aw + local aw = active_width.adjust_shrink + r.adjust_shrink active_width.adjust_shrink = aw cur_active_width.adjust_shrink = aw + end + active.next = r.next + -- removes r + -- r = nil + end + elseif prev_r.id == delta_code then + r = prev_r.next + if r == active then + cur_active_width.size = cur_active_width.size - prev_r.size + cur_active_width.stretch = cur_active_width.stretch - prev_r.stretch + cur_active_width.fi = cur_active_width.fi - prev_r.fi + cur_active_width.fil = cur_active_width.fil - prev_r.fil + cur_active_width.fill = cur_active_width.fill - prev_r.fill + cur_active_width.filll = cur_active_width.filll - prev_r.filll + cur_active_width.shrink = cur_active_width.shrink - prev_r.shrink + if checked_expansion then + cur_active_width.adjust_stretch = cur_active_width.adjust_stretch - prev_r.adjust_stretch + cur_active_width.adjust_shrink = cur_active_width.adjust_shrink - prev_r.adjust_shrink + end + prev_prev_r.next = active + -- removes prev_r + -- prev_r = nil + prev_r = prev_prev_r + elseif r.id == delta_code then + local rn = r.size cur_active_width.size = cur_active_width.size + rn prev_r.size = prev_r.size + rn + local rn = r.stretch cur_active_width.stretch = cur_active_width.stretch + rn prev_r.stretch = prev_r.stretch + rn + local rn = r.fi cur_active_width.fi = cur_active_width.fi + rn prev_r.fi = prev_r.fi + rn + local rn = r.fil cur_active_width.fil = cur_active_width.fil + rn prev_r.fil = prev_r.fil + rn + local rn = r.fill cur_active_width.fill = cur_active_width.fill + rn prev_r.fill = prev_r.fill + rn + local rn = r.filll cur_active_width.filll = cur_active_width.filll + rn prev_r.filll = prev_r.fill + rn + local rn = r.shrink cur_active_width.shrink = cur_active_width.shrink + rn prev_r.shrink = prev_r.shrink + rn + if checked_expansion then + local rn = r.adjust_stretch cur_active_width.adjust_stretch = cur_active_width.adjust_stretch + rn prev_r.adjust_stretch = prev_r.adjust_stretch + rn + local rn = r.adjust_shrink cur_active_width.adjust_shrink = cur_active_width.adjust_shrink + rn prev_r.adjust_shrink = prev_r.adjust_shrink + rn + end + prev_r.next = r.next + -- removes r + -- r = nil + end + end + return prev_r, r +end + +local function lastlinecrap(shortfall,active_short,active_glue,cur_active_width,fill_width,last_line_fit) + if active_short == 0 or active_glue <= 0 then + return false, 0, 
fit_decent_class, 0, 0 + end + if cur_active_width.fi ~= fill_width.fi or cur_active_width.fil ~= fill_width.fil or cur_active_width.fill ~= fill_width.fill or cur_active_width.filll ~= fill_width.filll then + return false, 0, fit_decent_class, 0, 0 + end + local adjustment = active_short > 0 and cur_active_width.stretch or cur_active_width.shrink + if adjustment <= 0 then + return false, 0, fit_decent_class, adjustment, 0 + end + adjustment = calculate_fraction(adjustment,active_short,active_glue,maxdimen) + if last_line_fit < 1000 then + adjustment = calculate_fraction(adjustment,last_line_fit,1000,maxdimen) -- uses previous adjustment + end + local fit_class = fit_decent_class + if adjustment > 0 then + local stretch = cur_active_width.stretch + if adjustment > shortfall then + adjustment = shortfall + end + if adjustment > 7230584 and stretch < 1663497 then + return true, fit_very_loose_class, shortfall, adjustment, infinite_badness + end + -- if adjustment == 0 then -- badness = 0 + -- return true, shortfall, fit_decent_class, 0, 0 + -- elseif stretch <= 0 then -- badness = 10000 + -- return true, shortfall, fit_very_loose_class, adjustment, 10000 + -- end + -- local badness = (adjustment == 0 and 0) or (stretch <= 0 and 10000) or calculate_badness(adjustment,stretch) + local badness = calculate_badness(adjustment,stretch) + if badness > 99 then + return true, shortfall, fit_very_loose_class, adjustment, badness + elseif badness > 12 then + return true, shortfall, fit_loose_class, adjustment, badness + else + return true, shortfall, fit_decent_class, adjustment, badness + end + elseif adjustment < 0 then + local shrink = cur_active_width.shrink + if -adjustment > shrink then + adjustment = -shrink + end + local badness = calculate_badness(-adjustment,shrink) + if badness > 12 then + return true, shortfall, fit_tight_class, adjustment, badness + else + return true, shortfall, fit_decent_class, adjustment, badness + end + else + return false, 0, fit_decent_class, 0, 0 + end +end + +local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion) + + if pi >= infinite_penalty then + return -- this breakpoint is inhibited by infinite penalty + elseif pi <= -infinite_penalty then + pi = eject_penalty -- this breakpoint will be forced + end + + local prev_prev_r = nil -- a step behind prev_r, if type(prev_r)=delta_code + local prev_r = par.active -- stays a step behind r + local r = nil -- runs through the active list + local no_break_yet = true -- have we found a feasible break at cur_p? + local node_r_stays_active = false -- should node r remain in the active list? 
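-- [editorial sketch] lastlinecrap above and the feasibility test further down both
-- classify a line by its badness, which in TeX is roughly 100*(excess/available)^3,
-- capped at 10000; the thresholds 12 and 99 then split lines into decent, loose and
-- very loose (when stretching) or decent and tight (when shrinking). Shown commented
-- out, as it only restates that rule; "approximate_badness" is a stand-in for the
-- real calculate_badness used in this file.
--
-- local function approximate_badness(excess,available)
--     if excess == 0 then
--         return 0
--     elseif available <= 0 then
--         return 10000 -- infinite badness
--     else
--         local ratio = excess / available
--         local b = 100 * ratio * ratio * ratio
--         return b > 10000 and 10000 or math.floor(b + 0.5)
--     end
-- end
--
-- local function classify(shortfall,stretch,shrink)
--     if shortfall > 0 then
--         local b = approximate_badness(shortfall,stretch)
--         return b > 99 and fit_very_loose_class or b > 12 and fit_loose_class or fit_decent_class, b
--     elseif shortfall < 0 then
--         local b = approximate_badness(-shortfall,shrink)
--         return b > 12 and fit_tight_class or fit_decent_class, b
--     else
--         return fit_decent_class, 0
--     end
-- end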
+    local line_width = 0 -- the current line will be justified to this width
+    local line_number = 0 -- line number of current active node
+    local old_line_number = 0 -- maximum line number in current equivalence class of lines
+
+    local protrude_chars = par.protrude_chars
+    local checked_expansion = par.checked_expansion
+    local break_width = par.break_width
+    local active_width = par.active_width
+    local background = par.background
+    local minimal_demerits = par.minimal_demerits
+    local best_place = par.best_place
+    local best_pl_line = par.best_pl_line
+    local best_pl_short = par.best_pl_short
+    local best_pl_glue = par.best_pl_glue
+    local do_last_line_fit = par.do_last_line_fit
+    local final_pass = par.final_pass
+    local tracing_paragraphs = par.tracing_paragraphs
+ -- local par_active = par.active
+
+    local cur_active_width = checked_expansion and { -- distance from current active node
+        size = active_width.size,
+        stretch = active_width.stretch,
+        fi = active_width.fi,
+        fil = active_width.fil,
+        fill = active_width.fill,
+        filll = active_width.filll,
+        shrink = active_width.shrink,
+        adjust_stretch = active_width.adjust_stretch,
+        adjust_shrink = active_width.adjust_shrink,
+    } or {
+        size = active_width.size,
+        stretch = active_width.stretch,
+        fi = active_width.fi,
+        fil = active_width.fil,
+        fill = active_width.fill,
+        filll = active_width.filll,
+        shrink = active_width.shrink,
+    }
+
+    while true do
+        r = prev_r.next
+        if r.id == delta_code then
+            cur_active_width.size = cur_active_width.size + r.size
+            cur_active_width.stretch = cur_active_width.stretch + r.stretch
+            cur_active_width.fi = cur_active_width.fi + r.fi
+            cur_active_width.fil = cur_active_width.fil + r.fil
+            cur_active_width.fill = cur_active_width.fill + r.fill
+            cur_active_width.filll = cur_active_width.filll + r.filll
+            cur_active_width.shrink = cur_active_width.shrink + r.shrink
+            if checked_expansion then
+                cur_active_width.adjust_stretch = cur_active_width.adjust_stretch + r.adjust_stretch
+                cur_active_width.adjust_shrink = cur_active_width.adjust_shrink + r.adjust_shrink
+            end
+            prev_prev_r = prev_r
+            prev_r = r
+        else
+            line_number = r.line_number
+            if line_number > old_line_number then
+                local minimum_demerits = par.minimum_demerits
+                if minimum_demerits < awful_badness and (old_line_number ~= par.easy_line or r == par.active) then
+                    if no_break_yet then
+                        no_break_yet = false
+                        break_width.size = background.size
+                        break_width.stretch = background.stretch
+                        break_width.fi = background.fi
+                        break_width.fil = background.fil
+                        break_width.fill = background.fill
+                        break_width.filll = background.filll
+                        break_width.shrink = background.shrink
+                        if checked_expansion then
+                            break_width.adjust_stretch = 0
+                            break_width.adjust_shrink = 0
+                        end
+                        if cur_p then
+                            compute_break_width(par,break_type,cur_p)
+                        end
+                    end
+                    if prev_r.id == delta_code then
+                        prev_r.size = prev_r.size - cur_active_width.size + break_width.size
+                        prev_r.stretch = prev_r.stretch - cur_active_width.stretch + break_width.stretch
+                        prev_r.fi = prev_r.fi - cur_active_width.fi + break_width.fi
+                        prev_r.fil = prev_r.fil - cur_active_width.fil + break_width.fil
+                        prev_r.fill = prev_r.fill - cur_active_width.fill + break_width.fill
+                        prev_r.filll = prev_r.filll - cur_active_width.filll + break_width.filll
+                        prev_r.shrink = prev_r.shrink - cur_active_width.shrink + break_width.shrink
+                        if checked_expansion then
+                            prev_r.adjust_stretch = prev_r.adjust_stretch - cur_active_width.adjust_stretch + break_width.adjust_stretch
+                            prev_r.adjust_shrink =
prev_r.adjust_shrink - cur_active_width.adjust_shrink + break_width.adjust_shrink + end + elseif prev_r == par.active then + active_width.size = break_width.size + active_width.stretch = break_width.stretch + active_width.fi = break_width.fi + active_width.fil = break_width.fil + active_width.fill = break_width.fill + active_width.filll = break_width.filll + active_width.shrink = break_width.shrink + if checked_expansion then + active_width.adjust_stretch = break_width.adjust_stretch + active_width.adjust_shrink = break_width.adjust_shrink + end + else + local q = checked_expansion and { + id = delta_code, + subtype = nosubtype_code, + next = r, + size = break_width.size - cur_active_width.size, + stretch = break_width.stretch - cur_active_width.stretch, + fi = break_width.fi - cur_active_width.fi, + fil = break_width.fil - cur_active_width.fil, + fill = break_width.fill - cur_active_width.fill, + filll = break_width.filll - cur_active_width.filll, + shrink = break_width.shrink - cur_active_width.shrink, + adjust_stretch = break_width.adjust_stretch - cur_active_width.adjust_stretch, + adjust_shrink = break_width.adjust_shrink - cur_active_width.adjust_shrink, + } or { + id = delta_code, + subtype = nosubtype_code, + next = r, + size = break_width.size - cur_active_width.size, + stretch = break_width.stretch - cur_active_width.stretch, + fi = break_width.fi - cur_active_width.fi, + fil = break_width.fil - cur_active_width.fil, + fill = break_width.fill - cur_active_width.fill, + filll = break_width.filll - cur_active_width.filll, + shrink = break_width.shrink - cur_active_width.shrink, + } + prev_r.next = q + prev_prev_r = prev_r + prev_r = q + end + local adj_demerits = par.adj_demerits + local abs_adj_demerits = adj_demerits > 0 and adj_demerits or -adj_demerits + if abs_adj_demerits >= awful_badness - minimum_demerits then + minimum_demerits = awful_badness - 1 + else + minimum_demerits = minimum_demerits + abs_adj_demerits + end + for fit_class = fit_very_loose_class, fit_tight_class do + if minimal_demerits[fit_class] <= minimum_demerits then + -- insert a new active node from best_place[fit_class] to cur_p + par.pass_number = par.pass_number + 1 + local prev_break = best_place[fit_class] + local passive = { + id = passive_code, + subtype = nosubtype_code, + next = par.passive, + cur_break = cur_p, + serial = par.pass_number, + prev_break = prev_break, + passive_pen_inter = par.internal_pen_inter, + passive_pen_broken = par.internal_pen_broken, + passive_last_left_box = par.internal_left_box, + passive_last_left_box_width = par.internal_left_box_width, + passive_left_box = prev_break and prev_break.passive_last_left_box or par.init_internal_left_box, + passive_left_box_width = prev_break and prev_break.passive_last_left_box_width or par.init_internal_left_box_width, + passive_right_box = par.internal_right_box, + passive_right_box_width = par.internal_right_box_width, +-- analysis = table.fastcopy(cur_active_width), + } + par.passive = passive + local q = { + id = break_type, + subtype = fit_class, + break_node = passive, + line_number = best_pl_line[fit_class] + 1, + total_demerits = minimal_demerits[fit_class], -- or 0, + next = r, + } + if do_last_line_fit then + local active_short = best_pl_short[fit_class] + local active_glue = best_pl_glue[fit_class] + q.active_short = active_short + q.active_glue = active_glue + if trace_lastlinefit then + report_parbuilders("setting short to %i and glue to %p using class %a",active_short,active_glue,fit_class) + end + end + -- q.next = r -- 
already done + prev_r.next = q + prev_r = q + if tracing_paragraphs then + diagnostics.break_node(par,q,fit_class,break_type,cur_p) + end + end + minimal_demerits[fit_class] = awful_badness + end + par.minimum_demerits = awful_badness + if r ~= par.active then + local q = checked_expansion and { + id = delta_code, + subtype = nosubtype_code, + next = r, + size = cur_active_width.size - break_width.size, + stretch = cur_active_width.stretch - break_width.stretch, + fi = cur_active_width.fi - break_width.fi, + fil = cur_active_width.fil - break_width.fil, + fill = cur_active_width.fill - break_width.fill, + filll = cur_active_width.filll - break_width.filll, + shrink = cur_active_width.shrink - break_width.shrink, + adjust_stretch = cur_active_width.adjust_stretch - break_width.adjust_stretch, + adjust_shrink = cur_active_width.adjust_shrink - break_width.adjust_shrink, + } or { + id = delta_code, + subtype = nosubtype_code, + next = r, + size = cur_active_width.size - break_width.size, + stretch = cur_active_width.stretch - break_width.stretch, + fi = cur_active_width.fi - break_width.fi, + fil = cur_active_width.fil - break_width.fil, + fill = cur_active_width.fill - break_width.fill, + filll = cur_active_width.filll - break_width.filll, + shrink = cur_active_width.shrink - break_width.shrink, + } + -- q.next = r -- already done + prev_r.next = q + prev_prev_r = prev_r + prev_r = q + end + end + if r == par.active then + return + end + if line_number > par.easy_line then + old_line_number = max_halfword - 1 + line_width = par.second_width + else + old_line_number = line_number + if line_number > par.last_special_line then + line_width = par.second_width + elseif par.par_shape_ptr then + line_width = par.par_shape_ptr[line_number][2] + else + line_width = par.first_width + end + end + end + local artificial_demerits = false -- has d been forced to zero + local shortfall = line_width - cur_active_width.size - par.internal_right_box_width -- used in badness calculations + if not r.break_node then + shortfall = shortfall - par.init_internal_left_box_width + else + shortfall = shortfall - (r.break_node.passive_last_left_box_width or 0) + end + local pw, lp, rp -- used later on + if protrude_chars > 1 then + -- this is quite time consuming + local b = r.break_node + local l = b and b.cur_break or first_p + local o = cur_p and cur_p.prev + if cur_p and cur_p.id == disc_code and cur_p.pre then + o = slide_nodes(cur_p.pre) + else + o = find_protchar_right(l,o) + end + if o and o.id == glyph_code then + pw, rp = right_pw(o) + shortfall = shortfall + pw + end + local id = l.id + if id == glyph_code then + -- ok ? 
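-- [editorial sketch] left_pw/right_pw return the amount by which the edge glyph may
-- protrude into the margin; in the hz model that amount is a per-mille fraction of
-- the glyph width taken from the font's protrusion parameters, and it is simply added
-- to the shortfall so the line content can be set wider. A minimal model of such a
-- lookup (the table layout here is hypothetical, not the one this file uses):
--
-- local function sketch_right_protrusion(glyph,protrusionfactors)
--     local factor = protrusionfactors and protrusionfactors[glyph.char]
--     if factor then
--         return (glyph.width * factor) / 1000 -- per-mille of the glyph width
--     else
--         return 0
--     end
-- end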
+ elseif id == disc_code and l.post then + l = l.post -- TODO: first char could be a disc + else + l = find_protchar_left(l) + end + if l and l.id == glyph_code then + pw, lp = left_pw(l) + shortfall = shortfall + pw + end + end + if checked_expansion and shortfall ~= 0 then + local margin_kern_stretch = 0 + local margin_kern_shrink = 0 + if protrude_chars > 1 then + if lp then +-- margin_kern_stretch, margin_kern_shrink = cal_margin_kern_var(lp) +local data = expansions[lp.font][lp.char] +if data then + margin_kern_stretch, margin_kern_shrink = data.glyphstretch, data.glyphshrink +end + end + if rp then +-- local mka, mkb = cal_margin_kern_var(rp) +-- margin_kern_stretch = margin_kern_stretch + mka +-- margin_kern_shrink = margin_kern_shrink + mkb +local data = expansions[lp.font][lp.char] +if data then + margin_kern_stretch = margin_kern_stretch + data.glyphstretch + margin_kern_shrink = margin_kern_shrink + data.glyphshrink +end + end + end + local total = cur_active_width.adjust_stretch + margin_kern_stretch + if shortfall > 0 and total > 0 then + if total > shortfall then + shortfall = total / (par.max_stretch_ratio / par.cur_font_step) / 2 + else + shortfall = shortfall - total + end + else + total = cur_active_width.adjust_shrink + margin_kern_shrink + if shortfall < 0 and total > 0 then + if total > - shortfall then + shortfall = - total / (par.max_shrink_ratio / par.cur_font_step) / 2 + else + shortfall = shortfall + total + end + end + end + par.font_steps[line_number] = par.cur_font_step -- mine + else + par.font_steps[line_number] = 0 -- mine + end + local b = 0 + local g = 0 + local fit_class = fit_decent_class + local found = false + if shortfall > 0 then + if cur_active_width.fi ~= 0 or cur_active_width.fil ~= 0 or cur_active_width.fill ~= 0 or cur_active_width.filll ~= 0 then + if not do_last_line_fit then + -- okay + elseif not cur_p then + found, shortfall, fit_class, g, b = lastlinecrap(shortfall,r.active_short,r.active_glue,cur_active_width,par.fill_width,par.last_line_fit) + else + shortfall = 0 + end + else + local stretch = cur_active_width.stretch + if shortfall > 7230584 and stretch < 1663497 then + b = infinite_badness + fit_class = fit_very_loose_class + else + b = calculate_badness(shortfall,stretch) + if b > 99 then + fit_class = fit_very_loose_class + elseif b > 12 then + fit_class = fit_loose_class + else + fit_class = fit_decent_class + end + end + end + else + local shrink = cur_active_width.shrink + if -shortfall > shrink then + b = infinite_badness + 1 + else + b = calculate_badness(-shortfall,shrink) + end + if b > 12 then + fit_class = fit_tight_class + else + fit_class = fit_decent_class + end + end + if do_last_line_fit and not found then + if not cur_p then + -- g = 0 + shortfall = 0 + elseif shortfall > 0 then + g = cur_active_width.stretch + elseif shortfall < 0 then + g = cur_active_width.shrink + else + g = 0 + end + end + -- ::FOUND:: + local continue_only = false -- brrr + if b > infinite_badness or pi == eject_penalty then + if final_pass and par.minimum_demerits == awful_badness and r.next == par.active and prev_r == par.active then + artificial_demerits = true -- set demerits zero, this break is forced + node_r_stays_active = false + elseif b > par.threshold then + prev_r, r = deactivate_node(par,prev_prev_r,prev_r,r,cur_active_width,checked_expansion) + continue_only = true + else + node_r_stays_active = false + end + else + prev_r = r + if b > par.threshold then + continue_only = true + else + node_r_stays_active = true + end + end + if 
not continue_only then + local d = 0 + if not artificial_demerits then + d = par.line_penalty + b + if (d >= 0 and d or -d) >= 10000 then -- abs(d) + d = 100000000 + else + d = d * d + end + if pi == 0 then + -- nothing + elseif pi > 0 then + d = d + pi * pi + elseif pi > eject_penalty then + d = d - pi * pi + end + if break_type == hyphenated_code and r.id == hyphenated_code then + if cur_p then + d = d + par.double_hyphen_demerits + else + d = d + par.final_hyphen_demerits + end + end + local delta = fit_class - r.subtype + if (delta >= 0 and delta or -delta) > 1 then -- abs(delta) + d = d + par.adj_demerits + end + end + if tracing_paragraphs then + diagnostics.feasible_break(par,cur_p,r,b,pi,d,artificial_demerits) + end + d = d + r.total_demerits -- this is the minimum total demerits from the beginning to cur_p via r + if d <= minimal_demerits[fit_class] then + minimal_demerits[fit_class] = d + best_place [fit_class] = r.break_node + best_pl_line [fit_class] = line_number + if do_last_line_fit then + best_pl_short[fit_class] = shortfall + best_pl_glue [fit_class] = g + if trace_lastlinefit then + report_parbuilders("storing last line fit short %a and glue %p in class %a",shortfall,g,fit_class) + end + end + if d < par.minimum_demerits then + par.minimum_demerits = d + end + end + if not node_r_stays_active then + prev_r, r = deactivate_node(par,prev_prev_r,prev_r,r,cur_active_width,checked_expansion) + end + end + end + end +end + +local function kern_break(par, cur_p, first_p, checked_expansion) -- move inline if needed + local v = cur_p.next + if par.auto_breaking and v.id == glue_code then + try_break(0, unhyphenated_code, par, first_p, cur_p, checked_expansion) + end + local active_width = par.active_width + if cur_p.id ~= math_code then + active_width.size = active_width.size + cur_p.kern + else + active_width.size = active_width.size + cur_p.surround + end +end + +-- we can call the normal one for simple box building in the otr so we need +-- frequent enabling/disabling + +local temp_head = new_temp() + +function constructors.methods.basic(head,d) + if trace_basic then + report_parbuilders("starting at %a",head) + end + + local par = initialize_line_break(head,d) + + local checked_expansion = par.checked_expansion + local active_width = par.active_width + local disc_width = par.disc_width + local background = par.background + local tracing_paragraphs = par.tracing_paragraphs + + local dirstack = new_dir_stack() + + if tracing_paragraphs then + diagnostics.start() + if par.pretolerance >= 0 then + diagnostics.current_pass(par,"firstpass") + end + end + + while true do + reset_meta(par) + if par.threshold > infinite_badness then + par.threshold = infinite_badness + end + par.active.next = { + id = unhyphenated_code, + subtype = fit_decent_class, + next = par.active, + break_node = nil, + line_number = par.first_line + 1, + total_demerits = 0, + active_short = 0, + active_glue = 0, + } + active_width.size = background.size + active_width.stretch = background.stretch + active_width.fi = background.fi + active_width.fil = background.fil + active_width.fill = background.fill + active_width.filll = background.filll + active_width.shrink = background.shrink + + if checked_expansion then + active_width.adjust_stretch = 0 + active_width.adjust_shrink = 0 + end + + par.passive = nil -- = 0 + par.printed_node = temp_head -- only when tracing, shared + par.printed_node.next = head + par.pass_number = 0 + par.auto_breaking = true + + local cur_p = head + local first_p = cur_p + + 
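-- [editorial sketch] try_break above charges each feasible break with Knuth-style
-- demerits: (line_penalty + badness)^2, adjusted by the break penalty and by extra
-- demerits for two hyphenated lines in a row (the final-hyphen case is analogous)
-- and for adjacent lines whose fitness classes differ by more than one. This helper
-- is never called; it just restates that rule in one place, with names local to the
-- sketch:

local function sketch_demerits(par,badness,penalty,consecutive_hyphens,fitness_jump)
    local d = par.line_penalty + badness
    if (d >= 0 and d or -d) >= 10000 then -- abs(d)
        d = 100000000 -- same cap as used above
    else
        d = d * d
    end
    if penalty == 0 then
        -- a free break contributes nothing extra
    elseif penalty > 0 then
        d = d + penalty * penalty
    elseif penalty > eject_penalty then
        d = d - penalty * penalty -- negative but not forced: a bonus
    end
    if consecutive_hyphens then
        d = d + par.double_hyphen_demerits
    end
    if fitness_jump > 1 then
        d = d + par.adj_demerits
    end
    return d
end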
par.font_in_short_display = 0 + + if cur_p and cur_p.id == whatsit_code and cur_p.subtype == localpar_code then + par.init_internal_left_box = cur_p.box_left + par.init_internal_left_box_width = cur_p.box_left_width + par.internal_pen_inter = cur_p.pen_inter + par.internal_pen_broken = cur_p.pen_broken + par.internal_left_box = par.init_internal_left_box + par.internal_left_box_width = par.init_internal_left_box_width + par.internal_right_box = cur_p.box_right + par.internal_right_box_width = cur_p.box_right_width + end + + -- all passes are combined in this loop so maybe we should split this into + -- three function calls; we then also need to do the wrap_up elsewhere + + -- split into normal and expansion loop + + -- use an active local + + local fontexp, lastfont -- we can pass fontexp to calculate width if needed + + while cur_p and par.active.next ~= par.active do + while cur_p and cur_p.id == glyph_code do + if is_rotated[par.line_break_dir] then + active_width.size = active_width.size + cur_p.height + cur_p.depth + else + active_width.size = active_width.size + cur_p.width + end + if checked_expansion then + local data= checked_expansion[cur_p.font] + if data then + local currentfont = cur_p.font + if currentfont ~= lastfont then + fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer + lastfont = currentfont + end + if fontexps then + local expansion = fontexps[cur_p.char] + if expansion then + active_width.adjust_stretch = active_width.adjust_stretch + expansion.glyphstretch + active_width.adjust_shrink = active_width.adjust_shrink + expansion.glyphshrink + end + end + end + end + cur_p = cur_p.next + end + if not cur_p then -- TODO + report_parbuilders("problems with linebreak_tail") + os.exit() + end + local id = cur_p.id + if id == hlist_code or id == vlist_code then + if is_parallel[cur_p.dir][par.line_break_dir] then + active_width.size = active_width.size + cur_p.width + else + active_width.size = active_width.size + cur_p.depth + cur_p.height + end + elseif id == glue_code then + if par.auto_breaking then + local prev_p = cur_p.prev + if prev_p and prev_p ~= temp_head then + local id = prev_p.id + if id == glyph_code or + (id < math_code and (id ~= whatsit_code or prev_p.subtype ~= dir_code)) or -- was: precedes_break(prev_p) + (id == kern_code and prev_p.subtype ~= userkern_code) then + try_break(0, unhyphenated_code, par, first_p, cur_p, checked_expansion) + end + end + end + local spec = check_shrinkage(par,cur_p.spec) + local order = stretch_orders[spec.stretch_order] + cur_p.spec = spec + active_width.size = active_width.size + spec.width + active_width[order] = active_width[order] + spec.stretch + active_width.shrink = active_width.shrink + spec.shrink + elseif id == disc_code then + local subtype = cur_p.subtype + if subtype ~= second_disc_code then -- are there still second_disc_code in luatex + local line_break_dir = par.line_break_dir + if par.second_pass then -- todo: make second pass local + local actual_pen = subtype == automatic_disc_code and par.ex_hyphen_penalty or par.hyphen_penalty + local pre = cur_p.pre + if not pre then -- trivial pre-break + disc_width.size = 0 + if checked_expansion then + disc_width.adjust_stretch = 0 + disc_width.adjust_shrink = 0 + end + try_break(actual_pen, hyphenated_code, par, first_p, cur_p, checked_expansion) + else + local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre) + disc_width.size = size + active_width.size = active_width.size + size + if 
checked_expansion then + disc_width.adjust_stretch = adjust_stretch + disc_width.adjust_shrink = adjust_shrink + active_width.adjust_stretch = active_width.adjust_stretch + adjust_stretch + active_width.adjust_shrink = active_width.adjust_shrink + adjust_shrink + else + -- disc_width.adjust_stretch = 0 + -- disc_width.adjust_shrink = 0 + end + try_break(actual_pen, hyphenated_code, par, first_p, cur_p, checked_expansion) + if subtype == first_disc_code then + local cur_p_next = cur_p.next + if cur_p_next.id ~= disc_code or cur_p_next.subtype ~= second_disc_code then + report_parbuilders("unsupported disc at location %a",1) + else + local pre = cur_p_next.pre + if pre then + local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre) + disc_width.size = disc_width.size + size + if checked_expansion then + disc_width.adjust_stretch = disc_width.adjust_stretch + adjust_stretch + disc_width.adjust_shrink = disc_width.adjust_shrink + adjust_shrink + end + try_break(actual_pen, hyphenated_code, par, first_p, cur_p_next, checked_expansion) + -- + -- I will look into this some day ... comment in linebreak.w says that this fails, + -- maybe this is what Taco means with his comment in the luatex manual. + -- + -- do_one_seven_eight(sub_disc_width_from_active_width); + -- do_one_seven_eight(reset_disc_width); + -- s = vlink_no_break(vlink(cur_p)); + -- add_to_widths(s, line_break_dir, pdf_adjust_spacing,disc_width); + -- ext_try_break(...,first_p,vlink(cur_p)); + -- + else + report_parbuilders("unsupported disc at location %a",2) + end + end + end + -- beware, we cannot restore to a saved value as the try_break adapts active_width + active_width.size = active_width.size - disc_width.size + if checked_expansion then + active_width.adjust_stretch = active_width.adjust_stretch - disc_width.adjust_stretch + active_width.adjust_shrink = active_width.adjust_shrink - disc_width.adjust_shrink + end + end + end + local replace = cur_p.replace + if replace then + local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace) + active_width.size = active_width.size + size + if checked_expansion then + active_width.adjust_stretch = active_width.adjust_stretch + adjust_stretch + active_width.adjust_shrink = active_width.adjust_shrink + adjust_shrink + end + end + end + elseif id == kern_code then + if cur_p.subtype == userkern_code then + kern_break(par,cur_p,first_p, checked_expansion) + else + active_width.size = active_width.size + cur_p.kern + if checked_expansion and cur_p.subtype == kerning_code then + local d = cur_p.kern + if d ~= 0 then + local stretch, shrink = kern_stretch_shrink(cur_p,d) + active_width.adjust_stretch = active_width.adjust_stretch + stretch + active_width.adjust_shrink = active_width.adjust_shrink + shrink + end + end + end + elseif id == math_code then + par.auto_breaking = cur_p.subtype == endmath_code + kern_break(par,cur_p, first_p, checked_expansion) + elseif id == rule_code then + active_width.size = active_width.size + cur_p.width + elseif id == penalty_code then + try_break(cur_p.penalty, unhyphenated_code, par, first_p, cur_p, checked_expansion) + elseif id == whatsit_code then + local subtype = cur_p.subtype + if subtype == localpar_code then + par.internal_pen_inter = cur_p.pen_inter + par.internal_pen_broken = cur_p.pen_broken + par.internal_left_box = cur_p.box_left + par.internal_left_box_width = cur_p.box_left_width + par.internal_right_box = cur_p.box_right + par.internal_right_box_width = 
cur_p.box_right_width + elseif subtype == dir_code then + par.line_break_dir = checked_line_dir(dirstack) or par.line_break_dir + else + local get_width = get_whatsit_width[subtype] + if get_width then + active_width.size = active_width.size + get_width(cur_p) + end + end + elseif id == mark_code or id == ins_code or id == adjust_code then + -- skip + else + report_parbuilders("node of type %a found in paragraph",type(id)) + end + cur_p = cur_p.next + end + if not cur_p then + try_break(eject_penalty, hyphenated_code, par, first_p, cur_p, checked_expansion) + local p_active = par.active + local n_active = p_active.next + if n_active ~= p_active then + local r = n_active + par.fewest_demerits = awful_badness + repeat -- use local d + if r.id ~= delta_code and r.total_demerits < par.fewest_demerits then + par.fewest_demerits = r.total_demerits + par.best_bet = r + end + r = r.next + until r == p_active + par.best_line = par.best_bet.line_number + local asked_looseness = par.looseness + if asked_looseness == 0 then + return wrap_up(par) + end + local r = n_active + local actual_looseness = 0 + -- minimize assignments to par but happens seldom + repeat + if r.id ~= delta_code then + local line_diff = r.line_number - par.best_line + par.line_diff = line_diff + if (line_diff < actual_looseness and asked_looseness <= line_diff) or + (line_diff > actual_looseness and asked_looseness >= line_diff) then + par.best_bet = r + actual_looseness = line_diff + par.fewest_demerits = r.total_demerits + elseif line_diff == actual_looseness and r.total_demerits < par.fewest_demerits then + par.best_bet = r + par.fewest_demerits = r.total_demerits + end + end + r = r.next + until r == p_active -- weird, loop list? + par.best_line = par.best_bet.line_number + if actual_looseness == asked_looseness or par.final_pass then + return wrap_up(par) + end + end + end + reset_meta(par) -- clean up the memory by removing the break nodes + if not par.second_pass then + if tracing_paragraphs then + diagnostics.current_pass(par,"secondpass") + end + par.threshold = par.tolerance + par.second_pass = true + par.final_pass = par.emergency_stretch <= 0 + else + if tracing_paragraphs then + diagnostics.current_pass(par,"emergencypass") + end + par.background.stretch = par.background.stretch + par.emergency_stretch + par.final_pass = true + end + end + return wrap_up(par) +end + +-- standard tex logging .. will be adapted .. missing font names and to many [] + +local function write_esc(cs) + local esc = tex.escapechar + if esc then + write("log",utfchar(esc),cs) + else + write("log",cs) + end +end + +function diagnostics.start() +end + +function diagnostics.stop() + write_nl("log",'') +end + +function diagnostics.current_pass(par,what) + write_nl("log",format("@%s",what)) +end + +local function short_display(a,font_in_short_display) + while a do + local id = a.id + if id == glyph_code then + local font = a.font + if font ~= font_in_short_display then + write("log",tex.fontidentifier(font) .. 
' ') + font_in_short_display = font + end + if a.subtype == ligature_code then + font_in_short_display = short_display(a.components,font_in_short_display) + else + write("log",utfchar(a.char)) + end +-- elseif id == rule_code then +-- write("log","|") +-- elseif id == glue_code then +-- if a.spec.writable then +-- write("log"," ") +-- end +-- elseif id == math_code then +-- write("log","$") + elseif id == disc_code then + font_in_short_display = short_display(a.pre,font_in_short_display) + font_in_short_display = short_display(a.post,font_in_short_display) + else -- no explicit checking + write("log",format("[%s]",nodecodes[id])) + end + a = a.next + end + return font_in_short_display +end + +diagnostics.short_display = short_display + +function diagnostics.break_node(par, q, fit_class, break_type, cur_p) -- %d ? + local passive = par.passive + local typ_ind = break_type == hyphenated_code and '-' or "" + if par.do_last_line_fit then + local s = number.toscaled(q.active_short) + local g = number.toscaled(q.active_glue) + if cur_p then + write_nl("log",format("@@%d: line %d.%d%s t=%s s=%s g=%s", + passive.serial or 0,q.line_number-1,fit_class,typ_ind,q.total_demerits,s,g)) + else + write_nl("log",format("@@%d: line %d.%d%s t=%s s=%s a=%s", + passive.serial or 0,q.line_number-1,fit_class,typ_ind,q.total_demerits,s,g)) + end + else + write_nl("log",format("@@%d: line %d.%d%s t=%s", + passive.serial or 0,q.line_number-1,fit_class,typ_ind,q.total_demerits)) + end + if not passive.prev_break then + write("log"," -> @0") + else + write("log",format(" -> @%d", passive.prev_break.serial or 0)) + end +end + +function diagnostics.feasible_break(par, cur_p, r, b, pi, d, artificial_demerits) + local printed_node = par.printed_node + if printed_node ~= cur_p then + write_nl("log","") + if not cur_p then + par.font_in_short_display = short_display(printed_node.next,par.font_in_short_display) + else + local save_link = cur_p.next + cur_p.next = nil + write_nl("log","") + par.font_in_short_display = short_display(printed_node.next,par.font_in_short_display) + cur_p.next = save_link + end + par.printed_node = cur_p + end + write_nl("log","@") + if not cur_p then + write_esc("par") + else + local id = cur_p.id + if id == glue_code then + -- print nothing + elseif id == penalty_code then + write_esc("penalty") + elseif id == disc_code then + write_esc("discretionary") + elseif id == kern_code then + write_esc("kern") + elseif id == math_code then + write_esc("math") + else + write_esc("unknown") + end + end + local via, badness, demerits = 0, '*', '*' + if r.break_node then + via = r.break_node.serial or 0 + end + if b <= infinite_badness then + badness = tonumber(d) -- format("%d", b) + end + if not artificial_demerits then + demerits = tonumber(d) -- format("%d", d) + end + write("log",format(" via @%d b=%s p=%s d=%s", via, badness, pi, demerits)) +end + +-- reporting -- + +statistics.register("alternative parbuilders", function() + if nofpars > 0 then + return format("%s paragraphs, %s lines (%s protruded, %s adjusted)", nofpars, noflines, nofprotrudedlines, nofadjustedlines) + end +end) + +-- actually scaling kerns is not such a good idea and it will become +-- configureable + +-- This is no way a replacement for the built in (fast) packer +-- it's just an alternative for special (testing) purposes. +-- +-- We could use two hpacks: one to be used in the par builder +-- and one to be used for other purposes. 
The one in the par +-- builder is much more simple as it does not need the expansion +-- code but only need to register the effective expansion factor +-- with the glyph. + +local function glyph_width_height_depth(curdir,pdir,p) + if is_rotated[curdir] then + if is_parallel[curdir][pdir] then + local half = (p.height + p.depth) / 2 + return p.width, half, half + else + local half = p.width / 2 + return p.height + p.depth, half, half + end + elseif is_rotated[pdir] then + if is_parallel[curdir][pdir] then + local half = (p.height + p.depth) / 2 + return p.width, half, half + else + return p.height + p.depth, p.width, 0 -- weird + end + else + if glyphdir_is_equal[curdir][pdir] then + return p.width, p.height, p.depth + elseif is_opposite[curdir][pdir] then + return p.width, p.depth, p.height + else -- can this happen? + return p.height + p.depth, p.width, 0 -- weird + end + end +end + +local function pack_width_height_depth(curdir,pdir,p) + if is_rotated[curdir] then + if is_parallel[curdir][pdir] then + local half = (p.height + p.depth) / 2 + return p.width, half, half + else -- can this happen? + local half = p.width / 2 + return p.height + p.depth, half, half + end + else + if pardir_is_equal[curdir][pdir] then + return p.width, p.height, p.depth + elseif is_opposite[curdir][pdir] then + return p.width, p.depth, p.height + else -- weird dimensions, can this happen? + -- return p.width, p.depth, p.height + return p.height + p.depth, p.width, 0 + end + end +end + +-- local function xpack(head,width,method,direction,analysis) +-- +-- -- inspect(analysis) +-- +-- local expansion = method == "cal_expand_ratio" +-- local natural = analysis.size +-- local font_stretch = analysis.adjust_stretch +-- local font_shrink = analysis.adjust_shrink +-- local font_expand_ratio = 0 +-- local delta = width - natural +-- +-- local hlist = new_node("hlist") +-- +-- hlist.list = head +-- hlist.dir = direction or tex.textdir +-- hlist.width = width +-- hlist.height = height +-- hlist.depth = depth +-- +-- if delta == 0 then +-- +-- hlist.glue_sign = 0 +-- hlist.glue_order = 0 +-- hlist.glue_set = 0 +-- +-- else +-- +-- local order = analysis.filll ~= 0 and fillcodes.filll or +-- analysis.fill ~= 0 and fillcodes.fill or +-- analysis.fil ~= 0 and fillcodes.fil or +-- analysis.fi ~= 0 and fillcodes.fi or 0 +-- +-- if delta > 0 then +-- +-- if expansion and order == 0 and font_stretch > 0 then +-- font_expand_ratio = (delta/font_stretch) * 1000 +-- else +-- local stretch = analysis.stretch +-- if stretch ~= 0 then +-- hlist.glue_sign = 1 -- stretch +-- hlist.glue_order = order +-- hlist.glue_set = delta/stretch +-- else +-- hlist.glue_sign = 0 -- nothing +-- hlist.glue_order = order +-- hlist.glue_set = 0 +-- end +-- end +-- print("stretch",hlist.glue_sign,hlist.glue_order,hlist.glue_set) +-- +-- else +-- +-- if expansion and order == 0 and font_shrink > 0 then +-- font_expand_ratio = (delta/font_shrink) * 1000 +-- else +-- local shrink = analysis.shrink +-- if shrink ~= 0 then +-- hlist.glue_sign = 2 -- shrink +-- hlist.glue_order = order +-- hlist.glue_set = - delta/shrink +-- else +-- hlist.glue_sign = 0 -- nothing +-- hlist.glue_order = order +-- hlist.glue_set = 0 +-- end +-- end +-- print("shrink",hlist.glue_sign,hlist.glue_order,hlist.glue_set) +-- +-- end +-- +-- end +-- +-- if not expansion or font_expand_ratio == 0 then +-- -- nothing +-- elseif font_expand_ratio > 0 then +-- if font_expand_ratio > 1000 then +-- font_expand_ratio = 1000 +-- end +-- local current = head +-- while current do +-- local id = 
current.id +-- if id == glyph_code then +-- local stretch, shrink = char_stretch_shrink(current) -- get only one +-- if stretch then +-- if trace_expansion then +-- setnodecolor(g,"hz:positive") +-- end +-- current.expansion_factor = font_expand_ratio * stretch +-- end +-- elseif id == kern_code then +-- local kern = current.kern +-- if kern ~= 0 and current.subtype == kerning_code then +-- current.kern = font_expand_ratio * current.kern +-- end +-- end +-- current = current.next +-- end +-- elseif font_expand_ratio < 0 then +-- if font_expand_ratio < -1000 then +-- font_expand_ratio = -1000 +-- end +-- local current = head +-- while current do +-- local id = current.id +-- if id == glyph_code then +-- local stretch, shrink = char_stretch_shrink(current) -- get only one +-- if shrink then +-- if trace_expansion then +-- setnodecolor(g,"hz:negative") +-- end +-- current.expansion_factor = font_expand_ratio * shrink +-- end +-- elseif id == kern_code then +-- local kern = current.kern +-- if kern ~= 0 and current.subtype == kerning_code then +-- current.kern = font_expand_ratio * current.kern +-- end +-- end +-- current = current.next +-- end +-- end +-- return hlist, 0 +-- end + +-- local expansion_stack = { } -- no dealloc + +local function hpack(head,width,method,direction) -- fast version when head = nil + + -- we can pass the adjust_width and adjust_height so that we don't need to recalculate them but + -- with the glue mess it's less trivial as we lack detail + + local hlist = new_node("hlist") + + if head == nil then + return hlist, 0 + end + + local cal_expand_ratio = method == "cal_expand_ratio" -- "subst_ex_font" -- is gone + + direction = direction or tex.textdir + + local pack_begin_line = 0 + local line = 0 + + local height = 0 + local depth = 0 + local natural = 0 + local font_stretch = 0 + local font_shrink = 0 + local font_expand_ratio = 0 + local last_badness = 0 + local disc_stack = { } + local disc_level = 0 + local expansion_stack = cal_expand_ratio and { } -- todo: optionally pass this + local expansion_index = 0 + local total_stretch = { [0] = 0, 0, 0, 0, 0 } + local total_shrink = { [0] = 0, 0, 0, 0, 0 } + + local hpack_dir = direction + + local adjust_head = texlists.adjust_head + local pre_adjust_head = texlists.pre_adjust_head + local adjust_tail = adjust_head and slide_nodes(adjust_head) + local pre_adjust_tail = pre_adjust_head and slide_nodes(pre_adjust_head) + + hlist.list = head + hlist.dir = hpack_dir + + new_dir_stack(hpack_dir) + + local checked_expansion = false + + if cal_expand_ratio then + checked_expansion = { } + setmetatableindex(checked_expansion,check_expand_lines) + end + + -- this one also needs to check the font, so in the end indeed we might end up with two variants + + local fontexps, lastfont + + local current = head + + while current do + local id = current.id + if id == glyph_code then + if cal_expand_ratio then + local currentfont = current.font + if currentfont ~= lastfont then + fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer + lastfont = currentfont + end + if fontexps then + local expansion = fontexps[current.char] + if expansion then + font_stretch = font_stretch + expansion.glyphstretch + font_shrink = font_shrink + expansion.glyphshrink + expansion_index = expansion_index + 1 + expansion_stack[expansion_index] = current + end + end + end + -- use inline if no expansion + local wd, ht, dp = glyph_width_height_depth(hpack_dir,"TLT",current) -- was TRT ? 
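-- [editorial sketch] further down, once the natural size and the per-order stretch
-- and shrink totals are known, the packer picks the highest glue order that has
-- anything to give and sets the glue ratio to delta/total. Kept commented out here,
-- since we are inside the packing loop; names are local to the sketch and "totals"
-- stands for total_stretch when delta > 0 and total_shrink when delta < 0:
--
-- local function sketch_set_glue(hlist,delta,totals) -- totals indexed 0..4 by glue order
--     if delta == 0 then
--         hlist.glue_sign, hlist.glue_order, hlist.glue_set = 0, 0, 0
--         return
--     end
--     local order = (totals[4] ~= 0 and 4) or (totals[3] ~= 0 and 3)
--                or (totals[2] ~= 0 and 2) or (totals[1] ~= 0 and 1) or 0
--     local total = totals[order]
--     if total == 0 then
--         hlist.glue_sign, hlist.glue_order, hlist.glue_set = 0, order, 0
--     else
--         hlist.glue_sign  = delta > 0 and 1 or 2 -- 1 = stretching, 2 = shrinking
--         hlist.glue_order = order
--         hlist.glue_set   = (delta > 0 and delta or -delta) / total
--     end
-- end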
+ natural = natural + wd + if ht > height then + height = ht + end + if dp > depth then + depth = dp + end + current = current.next + elseif id == kern_code then + local kern = current.kern + if kern == 0 then + -- no kern + elseif current.subtype == kerning_code then -- check p.kern + if cal_expand_ratio then + local stretch, shrink = kern_stretch_shrink(current,kern) + font_stretch = font_stretch + stretch + font_shrink = font_shrink + shrink + expansion_index = expansion_index + 1 + expansion_stack[expansion_index] = current + end + natural = natural + kern + else + natural = natural + kern + end + current = current.next + elseif id == disc_code then + if current.subtype ~= second_disc_code then + -- todo : local stretch, shrink = char_stretch_shrink(s) + local replace = current.replace + if replace then + disc_level = disc_level + 1 + disc_stack[disc_level] = current.next + current = replace + else + current = current.next + end + else + -- -- pre post replace + -- + -- local stretch, shrink = char_stretch_shrink(current.pre) + -- font_stretch = font_stretch + stretch + -- font_shrink = font_shrink + shrink + -- expansion_index = expansion_index + 1 + -- expansion_stack[expansion_index] = current.pre + -- + current = current.next + end + elseif id == glue_code then + local spec = current.spec + natural = natural + spec.width + local op = spec.stretch_order + local om = spec.shrink_order + total_stretch[op] = total_stretch[op] + spec.stretch + total_shrink [om] = total_shrink [om] + spec.shrink + if current.subtype >= leaders_code then + local leader = current.leader + local ht = leader.height + local dp = leader.depth + if ht > height then + height = ht + end + if dp > depth then + depth = dp + end + end + current = current.next + elseif id == hlist_code or id == vlist_code then + local sh = current.shift + local wd, ht, dp = pack_width_height_depth(hpack_dir,current.dir or hpack_dir,current) -- added: or pack_dir + local hs, ds = ht - sh, dp + sh + natural = natural + wd + if hs > height then + height = hs + end + if ds > depth then + depth = ds + end + current = current.next + elseif id == rule_code then + local wd = current.width + local ht = current.height + local dp = current.depth + natural = natural + wd + if ht > height then + height = ht + end + if dp > depth then + depth = dp + end + current = current.next + elseif id == math_code then + natural = natural + current.surround + current = current.next + elseif id == unset_code then + local wd = current.width + local ht = current.height + local dp = current.depth + local sh = current.shift + local hs = ht - sh + local ds = dp + sh + natural = natural + wd + if hs > height then + height = hs + end + if ds > depth then + depth = ds + end + current = current.next + elseif id == ins_code or id == mark_code then + local prev = current.prev + local next = current.next + if adjust_tail then -- todo + if next then + next.prev = prev + end + if prev then + prev.next = next + end + current.prev = adjust_tail + current.next = nil + adjust_tail.next = current + adjust_tail = current + else + adjust_head = current + adjust_tail = current + current.prev = nil + current.next = nil + end + current = next + elseif id == adjust_code then + local list = current.list + if adjust_tail then + adjust_tail.next = list + adjust_tail = slide_nodes(list) + else + adjust_head = list + adjust_tail = slide_nodes(list) + end + current = current.next + elseif id == whatsit_code then + local subtype = current.subtype + if subtype == dir_code then + hpack_dir = 
checked_line_dir(stack,current) or hpack_dir + else + local get_dimensions = get_whatsit_dimensions[subtype] + if get_dimensions then + local wd, ht, dp = get_dimensions(current) + natural = natural + wd + if ht > height then + height = ht + end + if dp > depth then + depth = dp + end + end + end + current = current.next + elseif id == marginkern_code then + if cal_expand_ratio then + local glyph = current.glyph + local char_pw = current.subtype == leftmargin_code and left_pw or right_pw + font_stretch = font_stretch - current.width - char_pw(glyph) + font_shrink = font_shrink - current.width - char_pw(glyph) + expansion_index = expansion_index + 1 + expansion_stack[expansion_index] = glyph + end + natural = natural + current.width + current = current.next + else + current = current.next + end + if not current and disc_level > 0 then + current = disc_stack[disc_level] + disc_level = disc_level - 1 + end + end + if adjust_tail then + adjust_tail.next = nil -- todo + end + if pre_adjust_tail then + pre_adjust_tail.next = nil -- todo + end + if mode == "additional" then + width = width + natural + end + hlist.width = width + hlist.height = height + hlist.depth = depth + local delta = width - natural + if delta == 0 then + hlist.glue_sign = 0 + hlist.glue_order = 0 + hlist.glue_set = 0 + elseif delta > 0 then + local order = (total_stretch[4] ~= 0 and 4 or total_stretch[3] ~= 0 and 3) or + (total_stretch[2] ~= 0 and 2 or total_stretch[1] ~= 0 and 1) or 0 + if cal_expand_ratio and order == 0 and font_stretch > 0 then + font_expand_ratio = (delta/font_stretch) * 1000 -- round(delta/font_stretch * 1000) + else + local tso = total_stretch[order] + if tso ~= 0 then + hlist.glue_sign = 1 + hlist.glue_order = order + hlist.glue_set = delta/tso + else + hlist.glue_sign = 0 + hlist.glue_order = order + hlist.glue_set = 0 + end +-- print("stretch",hlist.glue_sign,hlist.glue_order,hlist.glue_set) + if order == 0 and hlist.list then + last_badness = calculate_badness(delta,total_stretch[0]) + if last_badness > tex.hbadness then + if last_badness > 100 then + diagnostics.underfull_hbox(hlist,pack_begin_line,line,last_badness) + else + diagnostics.loose_hbox(hlist,pack_begin_line,line,last_badness) + end + end + end + end + else + local order = total_shrink[4] ~= 0 and 4 or total_shrink[3] ~= 0 and 3 + or total_shrink[2] ~= 0 and 2 or total_shrink[1] ~= 0 and 1 or 0 + if cal_expand_ratio and order == 0 and font_shrink > 0 then + font_expand_ratio = (delta/font_shrink) * 1000 -- round(delta/font_shrink * 1000) + else -- why was this else commented + local tso = total_shrink[order] + if tso ~= 0 then + hlist.glue_sign = 2 + hlist.glue_order = order + hlist.glue_set = -delta/tso + else + hlist.glue_sign = 0 + hlist.glue_order = order + hlist.glue_set = 0 + end +-- print("shrink",hlist.glue_sign,hlist.glue_order,hlist.glue_set) + if total_shrink[order] < -delta and order == 0 and hlist.list then + last_badness = 1000000 + hlist.glue_set = 1 + local fuzz = - delta - total_shrink[0] + local hfuzz = tex.hfuzz + if fuzz > hfuzz or tex.hbadness < 100 then + local overfullrule = tex.overfullrule + if fuzz > hfuzz and overfullrule > 0 then + -- weird, is always called and no rules shows up + slide_nodes(list).next = new_rule(overfullrule,nil,nil,hlist.dir) + end + diagnostics.overfull_hbox(hlist,pack_begin_line,line,-delta) -- - added + end + elseif order == 0 and hlist.list and last_badness > tex.hbadness then + diagnostics.bad_hbox(hlist,pack_begin_line,line,last_badness) + end + end + end + if cal_expand_ratio and 
font_expand_ratio ~= 0 then + -- if font_expand_ratio > 1000 then + -- font_expand_ratio = 1000 + -- elseif font_expand_ratio < -1000 then + -- font_expand_ratio = -1000 + -- end + + local fontexps, lastfont + + if font_expand_ratio > 0 then + for i=1,expansion_index do + local g = expansion_stack[i] + if g.id == glyph_code then + local currentfont = g.font + if currentfont ~= lastfont then + fontexps = expansions[currentfont] + lastfont = currentfont + end + local data = fontexps[g.char] + if trace_expansion then + setnodecolor(g,"hz:positive") + end + g.expansion_factor = font_expand_ratio * data.glyphstretch + else + local stretch, shrink = kern_stretch_shrink(g,g.kern) + g.expansion_factor = font_expand_ratio * stretch + end + end + else + for i=1,expansion_index do + local g = expansion_stack[i] + if g.id == glyph_code then + local currentfont = g.font + if currentfont ~= lastfont then + fontexps = expansions[currentfont] + lastfont = currentfont + end + local data = fontexps[g.char] + if trace_expansion then + setnodecolor(g,"hz:negative") + end + g.expansion_factor = font_expand_ratio * data.glyphshrink + else + local stretch, shrink = kern_stretch_shrink(g,g.kern) + g.expansion_factor = font_expand_ratio * shrink + end + end + end + + end + return hlist, last_badness +end + +nodes.hpack = hpack +hpack_nodes = hpack -- comment this for old fashioned expansion +xpack_nodes = hpack -- comment this for old fashioned expansion + +local function common_message(hlist,pack_begin_line,line,str) + write_nl("") + if status.output_active then -- unset + write(str," has occurred while \\output is active") + end + if pack_begin_line > 0 then + write(str," in paragraph at lines ",pack_begin_line,"--",line) + elseif pack_begin_line < 0 then + write(str," in alignment at lines ",-pack_begin_line,"--",line) + else + write(str," detected at line ",line) + end + write_nl("") + diagnostics.short_display(hlist.list,false) + write_nl("") + -- diagnostics.start() + -- show_box(hlist.list) + -- diagnostics.stop() +end + +function diagnostics.overfull_hbox(hlist,pack_begin_line,line,d) + common_message(hlist,pack_begin_line,line,format("Overfull \\hbox (%spt too wide)",number.toscaled(d))) +end + +function diagnostics.bad_hbox(hlist,pack_begin_line,line,b) + common_message(hlist,pack_begin_line,line,format("Tight \\hbox (badness %i)",b)) +end + +function diagnostics.underfull_hbox(hlist,pack_begin_line,line,b) + common_message(hlist,pack_begin_line,line,format("Underfull \\hbox (badness %i)",b)) +end + +function diagnostics.loose_hbox(hlist,pack_begin_line,line,b) + common_message(hlist,pack_begin_line,line,format("Loose \\hbox (badness %i)",b)) +end + +-- for the moment here: + +local utfchar = utf.char +local concat = table.concat + +local nodecodes = nodes.nodecodes +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glyph_code = nodecodes.glyph +local kern_code = nodecodes.kern +local setnodecolor = nodes.tracers.colors.set +local parameters = fonts.hashes.parameters +local basepoints = number.basepoints + +-- definecolor[hz:positive] [r=0.6] +-- definecolor[hz:negative] [g=0.6] +-- definecolor[hz:zero] [b=0.6] + +-- scale = multiplier + ef/multiplier + +local trace_verbose = false trackers.register("builders.paragraphs.expansion.verbose", function(v) trace_verbose = v end) + +local report_verbose = logs.reporter("fonts","expansion") + +local function colorize(n) + local size, font, ef, width, scale, list, flush, length + if trace_verbose then + width = 0 + length = 0 + list = 
{ } + flush = function() + if length > 0 then + report_verbose("%0.3f : %10s %10s %s",scale,basepoints(width),basepoints(width*scale),concat(list,"",1,length)) + width = 0 + length = 0 + end + end + else + length = 0 + end + -- tricky: the built-in method creates dummy fonts and the last line normally has the + -- original font and that one then has ex.auto set + while n do + local id = n.id + if id == glyph_code then + local ne = n.expansion_factor + if ne == 0 then + if length > 0 then flush() end + else + local f = n.font + if f ~= font then + if length > 0 then + flush() + end + local pf = parameters[f] + local ex = pf.expansion + if ex and ex.auto then + size = pf.size + font = f -- save lookups + else + size = false + end + end + if size then + if ne ~= ef then + if length > 0 then + flush() + end + ef = ne + end + -- scale = 1.0 + ef / 1000 / 1000 / 1000 + scale = 1.0 + ef / 1000000000 + if scale > 1 then + setnodecolor(n,"hz:positive") + elseif scale < 1 then + setnodecolor(n,"hz:negative") + else + setnodecolor(n,"hz:zero") + end + if report_verbose then + length = length + 1 + list[length] = utfchar(n.char) + width = width + n.width -- no kerning yet + end + end + end + elseif id == hlist_code or id == vlist_code then + if length > 0 then + flush() + end + colorize(n.list,flush) + else -- nothing to show on kerns + if length > 0 then + flush() + end + end + n = n.next + end + if length > 0 then + flush() + end +end + +builders.paragraphs.expansion = builders.paragraphs.expansion or { } + +function builders.paragraphs.expansion.trace(head) + colorize(head,true) + return head +end + +local tasks = nodes.tasks + +tasks.prependaction("shipouts","normalizers","builders.paragraphs.expansion.trace") +tasks.disableaction("shipouts","builders.paragraphs.expansion.trace") + +trackers.register("builders.paragraphs.expansion.verbose", function(v) + if v then + tasks.enableaction("shipouts","builders.paragraphs.expansion.trace") + else + tasks.disableaction("shipouts","builders.paragraphs.expansion.trace") + end +end) diff --git a/tex/context/base/node-met.lua b/tex/context/base/node-met.lua new file mode 100644 index 000000000..d6b3df213 --- /dev/null +++ b/tex/context/base/node-met.lua @@ -0,0 +1,669 @@ +if not modules then modules = { } end modules ['node-nut'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This is an experimental module. Don't use nuts for generic code, at least not till +-- the regular code is proven stable. No support otherwise. + +-- luatex: todo: copylist should return h, t +-- todo: see if using insert_before and insert_after makes sense here + +-- This file is a side effect of the \LUATEX\ speed optimization project of Luigi +-- Scarso and me. As \CONTEXT\ spends over half its time in \LUA, we though that +-- using \LUAJIT\ could improve performance. We've published some of our experiences +-- elsewhere, but to summarize: \LUAJITTEX\ benefits a lot from the faster virtual +-- machine, but when jit is turned of we loose some again. We experimented with +-- ffi (without messing up the \CONTEXT\ code too much) but there we also lost more +-- than we gained (mostly due to lack of compatible userdata support: it's all or +-- nothing). 
This made us decide to look into the \LUA||\TEX\ interfacing and by +-- profiling and carefully looking at the (of course then still beta) source code we +-- could come up with some improvements. The first showed up in 0.75 and we've more +-- on the agenda for 0.80. Although some interfaces could be sped up significantly, +-- in practice we're only talking of 5||10\% on a \CONTEXT\ run and maybe more when +-- complex and extensive node list manipulations happen (we're talking of hundreds +-- of millions of cross boundary calls then for documents of hundreds of pages). One of the +-- changes in the \CONTEXT\ code base is that we went from indexed access to nodes to +-- function calls (in principle faster, were it not that the accessors need to do more +-- checking, which makes them slower) and from there to optimizing these calls as well +-- as providing fast variants for well defined situations. At first optimizations were +-- put in a separate \type {node.fast} table although some improvements could be +-- ported to the main node functions. Because we got the feeling that more gain was +-- possible (especially when using more complex fonts and \MKIV\ functionality) we +-- eventually abandoned this approach and dropped the \type {fast} table in favour of +-- another hack. In the process we had done lots of profiling and testing so we knew +-- where time was wasted. +-- +-- As lots of testing and experimenting was part of this project, I could not have +-- done without stacks of new \CD s and \DVD s. This time Porcupine Tree, No-Man +-- and Archive came to the rescue. + +local type, select = type, select +local setmetatableindex = table.setmetatableindex + +-- First we get the metatable of a node: + +local metatable = nil + +do + local glyph = node.new("glyph",0) + metatable = getmetatable(glyph) + node.free(glyph) +end + +-- statistics.tracefunction(node, "node", "getfield","setfield") +-- statistics.tracefunction(node.direct,"node.direct","getfield","setfield") + +-- We start with some helpers and provide all relevant basic functions in the +-- node namespace as well.
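+
+-- A minimal sketch of the difference between the two access styles mentioned
+-- above; the helper is hypothetical and only meant as illustration, the
+-- accessors themselves are defined below:
+--
+-- local function glyphwidths_indexed(head)
+--     local w = 0
+--     while head do
+--         if head.id == nodecodes.glyph then
+--             w = w + head.width
+--         end
+--         head = head.next
+--     end
+--     return w
+-- end
+--
+-- local function glyphwidths_accessors(head) -- same result, function call based
+--     local w = 0
+--     while head do
+--         if getid(head) == nodecodes.glyph then
+--             w = w + getfield(head,"width")
+--         end
+--         head = getnext(head)
+--     end
+--     return w
+-- end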
+ +local gonuts = type(node.direct) == "table" +-- local gonuts = false + +nodes = nodes or { } +local nodes = nodes + +nodes.gonuts = gonuts + +local nodecodes = nodes.codes +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist + +nodes.tostring = node.tostring or tostring +nodes.copy = node.copy +nodes.copy_list = node.copy_list +nodes.delete = node.delete +nodes.dimensions = node.dimensions +nodes.end_of_math = node.end_of_math +nodes.flush_list = node.flush_list +nodes.flush_node = node.flush_node +nodes.free = node.free +nodes.insert_after = node.insert_after +nodes.insert_before = node.insert_before +nodes.hpack = node.hpack +nodes.new = node.new +nodes.tail = node.tail +nodes.traverse = node.traverse +nodes.traverse_id = node.traverse_id +nodes.slide = node.slide +nodes.vpack = node.vpack +nodes.fields = node.fields + +nodes.first_glyph = node.first_glyph +nodes.first_character = node.first_character +nodes.has_glyph = node.has_glyph or node.first_glyph + +nodes.current_attr = node.current_attr +nodes.do_ligature_n = node.do_ligature_n +nodes.has_field = node.has_field +nodes.last_node = node.last_node +nodes.usedlist = node.usedlist +nodes.protrusion_skippable = node.protrusion_skippable +nodes.write = node.write + +nodes.has_attribute = node.has_attribute +nodes.set_attribute = node.set_attribute +nodes.unset_attribute = node.unset_attribute + +nodes.protect_glyphs = node.protect_glyphs +nodes.unprotect_glyphs = node.unprotect_glyphs +nodes.kerning = node.kerning +nodes.ligaturing = node.ligaturing +nodes.mlist_to_hlist = node.mlist_to_hlist + +if not gonuts or not node.getfield then + node.getfield = metatable.__index + node.setfield = metatable.__newindex +end + +-- if gonuts then + nodes.tonode = function(n) return n end + nodes.tonut = function(n) return n end +-- end + +local getfield = node.getfield +local setfield = node.setfield + +local getattr = getfield +local setattr = setfield + +local getnext = node.getnext or function(n) return getfield(n,"next") end +local getprev = node.getprev or function(n) return getfield(n,"prev") end +local getid = node.getid or function(n) return getfield(n,"id") end +local getchar = node.getchar or function(n) return getfield(n,"char") end +local getfont = node.getfont or function(n) return getfield(n,"font") end +local getsubtype = node.getsubtype or function(n) return getfield(n,"subtype") end +local getlist = node.getlist or function(n) return getfield(n,"list") end +local getleader = node.getleader or function(n) return getfield(n,"leader") end + +nodes.getfield = getfield +nodes.getattr = getattr + +nodes.setfield = setfield +nodes.setattr = setattr + +nodes.getnext = getnext +nodes.getprev = getprev +nodes.getid = getid +nodes.getchar = getchar +nodes.getfont = getfont +nodes.getsubtype = getsubtype +nodes.getlist = getlist +nodes.getleader = getleader + +nodes.getbox = node.getbox or tex.getbox +nodes.setbox = node.setbox or tex.setbox +nodes.getskip = node.getskip or tex.get + +local n_new_node = nodes.new +local n_free_node = nodes.free +local n_setfield = nodes.setfield +local n_getfield = nodes.getfield +local n_getnext = nodes.getnext +local n_getprev = nodes.getprev +local n_getid = nodes.getid +local n_getlist = nodes.getlist +local n_copy_node = nodes.copy +local n_copy_list = nodes.copy_list +local n_find_tail = nodes.tail +local n_insert_after = nodes.insert_after +local n_insert_before = nodes.insert_before +local n_slide = nodes.slide + +local n_remove_node = node.remove -- not yet nodes.remove + +-- if 
t.id == glue_code then +-- local s = t.spec +-- if s and s.writable then +-- free_node(s) +-- end +-- t.spec = nil +-- end + +local function remove(head,current,free_too) + local t = current + head, current = n_remove_node(head,current) + if t then + if free_too then + n_free_node(t) + t = nil + else + n_setfield(t,"next",nil) + n_setfield(t,"prev",nil) + end + end + return head, current, t +end + +nodes.remove = remove + +function nodes.delete(head,current) + return remove(head,current,true) +end + +-- local h, c = nodes.replace(head,current,new) +-- local c = nodes.replace(false,current,new) +-- local c = nodes.replace(current,new) +-- +-- todo: check for new.next and find tail + +function nodes.replace(head,current,new) -- no head returned if false + if not new then + head, current, new = false, head, current +-- current, new = head, current + end + local prev = n_getprev(current) + local next = n_getnext(current) + if next then + n_setfield(new,"next",next) + n_setfield(next,"prev",new) + end + if prev then + n_setfield(new,"prev",prev) + n_setfield(prev,"next",new) + end + if head then + if head == current then + head = new + end + n_free_node(current) + return head, new + else + n_free_node(current) + return new + end +end + +local function count(stack,flat) + local n = 0 + while stack do + local id = n_getid(stack) + if not flat and id == hlist_code or id == vlist_code then + local list = n_getlist(stack) + if list then + n = n + 1 + count(list) -- self counts too + else + n = n + 1 + end + else + n = n + 1 + end + stack = n_getnext(stack) + end + return n +end + +nodes.count = count + +function nodes.append(head,current,...) + for i=1,select("#",...) do + head, current = n_insert_after(head,current,(select(i,...))) + end + return head, current +end + +function nodes.prepend(head,current,...) + for i=1,select("#",...) do + head, current = n_insert_before(head,current,(select(i,...))) + end + return head, current +end + +function nodes.linked(...) + local head, last + for i=1,select("#",...) do + local next = select(i,...) + if next then + if head then + n_setfield(last,"next",next) + n_setfield(next,"prev",last) + else + head = next + end + last = n_find_tail(next) -- we could skip the last one + end + end + return head +end + +function nodes.concat(list) -- consider tail instead of slide + local head, tail + for i=1,#list do + local li = list[i] + if li then + if head then + n_setfield(tail,"next",li) + n_setfield(li,"prev",tail) + else + head = li + end + tail = n_slide(li) + end + end + return head, tail +end + +--[[ +

At some point we ran into a problem that the glue specification +of the zeropoint dimension was overwritten when adapting a glue spec +node. This is a side effect of glue specs being shared. After a +couple of hours of tracing and debugging Taco and I came to the +conclusion that it made no sense to complicate the spec allocator +and settled on a writable flag. This all is a side effect of the +fact that some glues use reserved memory slots (with the zeropoint +glue being a noticeable one). So, next we wrap this into a function +and hide it from the user. And yes, LuaTeX now gives a warning as +well.
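+
+A minimal usage sketch (the glue node g is hypothetical); the function itself
+follows right after this comment:
+
+  local spec = nodes.writable_spec(g) -- copies the spec unless it is writable
+  spec.width = 2*65536                -- safe now: the shared spec stays untouched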

+]]-- + +function nodes.writable_spec(n) -- not pool + local spec = n_getfield(n,"spec") + if not spec then + spec = n_copy_node(glue_spec) + n_setfield(n,"spec",spec) + elseif not n_getfield(spec,"writable") then + spec = n_copy_node(spec) + n_setfield(n,"spec",spec) + end + return spec +end + +if gonuts then + + function nodes.reference(n) + return n and tonut(n) or "" + end + +else + + local left, space = lpeg.P("<"), lpeg.P(" ") + + local reference = left * (1-left)^0 * left * space^0 * lpeg.C((1-space)^0) + + function nodes.reference(n) + return n and lpegmatch(reference,tostring(n)) or "" + end + +end + +-- Here starts an experiment with metatables. Of course this only works with nodes +-- wrapped in userdata with a metatable. +-- +-- Nodes are kind of special in the sense that you need to keep an eye on creation +-- and destruction. This is quite natural if you consider that changing the content +-- of a node would also change any copy (or alias). As there are too many pitfalls +-- we don't have this kind of support built in \LUATEX, which means that macro +-- packages are free to provide their own. One can even use local variants. +-- +-- n1 .. n2 : append nodes, no copies +-- n1 * 5 : append 4 copies of nodes +-- 5 + n1 : strip first 5 nodes +-- n1 - 5 : strip last 5 nodes +-- n1 + n2 : inject n2 after first of n1 +-- n1 - n2 : inject n2 before last of n1 +-- n1^2 : two copies of nodes (keep orginal) +-- - n1 : reverse nodes +-- n1/f : apply function to nodes + +-- local s = nodes.typesetters.tonodes +-- +-- local function w(lst) +-- context.dontleavehmode() +-- context(lst) +-- context.par() +-- end +-- +-- local n1 = s("a") +-- local n2 = s("b") +-- local n3 = s("c") +-- local n4 = s("d") +-- local n5 = s("e") +-- local n6 = s("f") +-- local n7 = s("g") +-- +-- local n0 = n1 .. (n2 * 10).. n3 .. (5 * n4) .. n5 .. ( 5 * n6 ) .. n7 / function(n) n.char = string.byte("!") return n end +-- +-- w(#n0) +-- +-- w(n0) +-- +-- local n1 = s("a") * 10 +-- local n2 = s("b") * 10 +-- +-- local n0 = ((5 + n1) .. (n2 - 5) ) +-- local n0 = - n0 +-- +-- local n0 = nil .. n0^3 .. 
nil +-- +-- w(n0) +-- +-- w ( s("a") + s("b") ) w ( s("a") + 4*s("b") ) w ( 4*s("a") + s("b") ) w ( 4*s("a") + 4*s("b") ) +-- w ( s("a") - s("b") ) w ( s("a") - 4*s("b") ) w ( 4*s("a") - s("b") ) w ( 4*s("a") - 4*s("b") ) + +local n_remove_node = nodes.remove + +metatable.__concat = function(n1,n2) -- todo: accept nut on one end + if not n1 then + return n2 + elseif not n2 then + return n1 + elseif n1 == n2 then + -- or abort + return n2 -- or n2 * 2 + else + local tail = n_find_tail(n1) + n_setfield(tail,"next",n2) + n_setfield(n2,"prev",tail) + return n1 + end +end + +metatable.__mul = function(n,multiplier) + if type(multiplier) ~= "number" then + n, multiplier = multiplier, n + end + if multiplier <= 1 then + return n + elseif n_getnext(n) then + local head + for i=2,multiplier do + local h = n_copy_list(n) + if head then + local t = n_find_tail(h) + n_setfield(t,"next",head) + n_setfield(head,"prev",t) + end + head = h + end + local t = n_find_tail(n) + n_setfield(t,"next",head) + n_setfield(head,"prev",t) + else + local head + for i=2,multiplier do + local c = n_copy_node(n) + if head then + n_setfield(c,"next",head) + n_setfield(head,"prev",c) + end + head = c + end + n_setfield(n,"next",head) + n_setfield(head,"prev",n) + end + return n +end + +metatable.__sub = function(first,second) + if type(second) == "number" then + local tail = n_find_tail(first) + for i=1,second do + local prev = n_getfield(tail,"prev") + n_free_node(tail) -- can become flushlist/flushnode + if prev then + tail = prev + else + return nil + end + end + if tail then + n_setfield(tail,"next",nil) + return first + else + return nil + end + else + -- aaaaa - bbb => aaaabbba + local firsttail = n_find_tail(first) + local prev = n_getprev(firsttail) + if prev then + local secondtail = n_find_tail(second) + n_setfield(secondtail,"next",firsttail) + n_setfield(firsttail,"prev",ltail) + n_setfield(prev,"next",second) + n_setfield(second,"prev",prev) + return first + else + local secondtail = n_find_tail(second) + n_setfield(secondtail,"next",first) + n_setfield(first,"prev",ltail) + return second + end + end +end + +metatable.__add = function(first,second) + if type(first) == "number" then + local head = second + for i=1,first do + local second = n_getnext(head) + n_free_node(head) -- can become flushlist/flushnode + if second then + head = second + else + return nil + end + end + if head then + n_setfield(head,"prev",nil) + return head + else + return nil + end + else + -- aaaaa + bbb => abbbaaaa + local next = n_getnext(first) + if next then + local secondtail = n_find_tail(second) + n_setfield(first,"next",second) + n_setfield(second,"prev",first) + n_setfield(secondtail,"next",next) + n_setfield(next,"prev",secondtail) + else + n_setfield(first,"next",second) + n_setfield(second,"prev",first) + end + return first + end +end + +metatable.__len = function(current) + local length = 0 + while current do + current = n_getnext(current) + length = length + 1 + end + return length +end + +metatable.__div = function(list,action) + return action(list) or list -- always a value +end + +metatable.__pow = function(n,multiplier) + local tail = n + local head = nil + if getnext(n) then + if multiplier == 1 then + head = n_copy_list(n) + else + for i=1,multiplier do + local h = n_copy_list(n) + if head then + local t = n_find_tail(h) + n_setfield(t,"next",head) + n_setfield(head,"prev",t) + end + head = h + end + end + else + if multiplier == 1 then + head = n_copy_node(n) + else + for i=2,multiplier do + local c = n_copy_node(n) 
+ if head then + n_setfield(head,"next",c) + n_setfield(c,"prev",head) + end + head = c + end + end + end + -- todo: tracing + return head +end + +metatable.__unm = function(head) + local last = head + local first = head + local current = n_getnext(head) + while current do + local next = n_getnext(current) + n_setfield(first,"prev",current) + n_setfield(current,"next",first) + first = current + current = next + end + n_setfield(first,"prev",nil) + n_setfield(last,"next",nil) + return first +end + +-- see node-nut.lua for more info on going nuts + +if not gonuts then + + local nuts = { } + nodes.nuts = nuts + + local function dummy(f) return f end + + nodes.vianuts = dummy + nodes.vianodes = dummy + + for k, v in next, nodes do + if type(v) == "function" then + nuts[k] = v + end + end + +end + +-- also handy + +local tonode = nodes.tonode +local whatsit_code = nodecodes.whatsit +local getfields = node.fields +local sort = table.sort +local whatsitkeys = { } +local keys = { whatsit = whatsitkeys } +local messyhack = table.tohash { -- temporary solution + nodecodes.attributelist, + nodecodes.attribute, + nodecodes.gluespec, + nodecodes.action, +} + +table.setmetatableindex(keys,function(t,k) + local v = getfields(k) + if messyhack[k] then + for i=1,#v do + if v[i] == "subtype" then + remove(v,i) + break + end + end + end + if v[ 0] then v[#v+1] = "next" v[ 0] = nil end + if v[-1] then v[#v+1] = "prev" v[-1] = nil end + sort(v) + t[k] = v + return v +end) + +table.setmetatableindex(whatsitkeys,function(t,k) + local v = getfields(whatsit_code,k) + if v[ 0] then v[#v+1] = "next" v[ 0] = nil end + if v[-1] then v[#v+1] = "prev" v[-1] = nil end + sort(v) + t[k] = v + return v +end) + +local function nodefields(n) + n = tonode(n) + local id = n.id + if id == whatsit_code then + return whatsitkeys[n.subtype] + else + return keys[id] + end +end + +nodes.keys = keys -- [id][subtype] +nodes.fields = nodefields -- (n) diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua index 60f2d8a72..aa6692d7b 100644 --- a/tex/context/base/node-pro.lua +++ b/tex/context/base/node-pro.lua @@ -66,12 +66,12 @@ processors.tracer = tracer processors.enabled = true -- this will become a proper state (like trackers) -function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction) +function processors.pre_linebreak_filter(head,groupcode) -- ,size,packtype,direction local first, found = first_glyph(head) -- they really need to be glyphs if found then if trace_callbacks then local before = nodes.count(head,true) - local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first + local head, done = actions(head,groupcode) -- ,size,packtype,direction local after = nodes.count(head,true) if done then tracer("pre_linebreak","changed",head,groupcode,before,after,true) @@ -80,7 +80,7 @@ function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction) end return done and head or true else - local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first + local head, done = actions(head,groupcode) -- ,size,packtype,direction return done and head or true end elseif trace_callbacks then @@ -94,6 +94,9 @@ local enabled = true function processors.hpack_filter(head,groupcode,size,packtype,direction) if enabled then + -- if not head.next and head.id ~= glyph_code then -- happens often but not faster + -- return true + -- end local first, found = first_glyph(head) -- they really need to be glyphs if found then if trace_callbacks then diff 
--git a/tex/context/base/node-ref.lua b/tex/context/base/node-ref.lua index 09e066434..aa864fb1c 100644 --- a/tex/context/base/node-ref.lua +++ b/tex/context/base/node-ref.lua @@ -16,61 +16,60 @@ if not modules then modules = { } end modules ['node-ref'] = { -- is grouplevel still used? -local format = string.format - -local allocate, mark = utilities.storage.allocate, utilities.storage.mark +local attributes, nodes, node = attributes, nodes, node -local cleanupreferences, cleanupdestinations = false, true +local allocate = utilities.storage.allocate, utilities.storage.mark +local mark = utilities.storage.allocate, utilities.storage.mark -local attributes, nodes, node = attributes, nodes, node -local nodeinjections = backends.nodeinjections -local codeinjections = backends.codeinjections +local nodeinjections = backends.nodeinjections +local codeinjections = backends.codeinjections -local transparencies = attributes.transparencies -local colors = attributes.colors -local references = structures.references -local tasks = nodes.tasks +local cleanupreferences = false +local cleanupdestinations = true -local hpack_list = node.hpack -local list_dimensions = node.dimensions +local transparencies = attributes.transparencies +local colors = attributes.colors +local references = structures.references +local tasks = nodes.tasks --- current.glue_set current.glue_sign +local hpack_list = node.hpack +local list_dimensions = node.dimensions -local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end) -local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end) -local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end) +local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end) +local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end) +local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end) -local report_reference = logs.reporter("backend","references") -local report_destination = logs.reporter("backend","destinations") -local report_area = logs.reporter("backend","areas") +local report_reference = logs.reporter("backend","references") +local report_destination = logs.reporter("backend","destinations") +local report_area = logs.reporter("backend","areas") -local nodecodes = nodes.nodecodes -local skipcodes = nodes.skipcodes -local whatcodes = nodes.whatcodes -local listcodes = nodes.listcodes +local nodecodes = nodes.nodecodes +local skipcodes = nodes.skipcodes +local whatcodes = nodes.whatcodes +local listcodes = nodes.listcodes -local hlist_code = nodecodes.hlist -local vlist_code = nodecodes.vlist -local glue_code = nodecodes.glue -local whatsit_code = nodecodes.whatsit +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local glue_code = nodecodes.glue +local whatsit_code = nodecodes.whatsit -local leftskip_code = skipcodes.leftskip -local rightskip_code = skipcodes.rightskip -local parfillskip_code = skipcodes.parfillskip +local leftskip_code = skipcodes.leftskip +local rightskip_code = skipcodes.rightskip +local parfillskip_code = skipcodes.parfillskip -local localpar_code = whatcodes.localpar -local dir_code = whatcodes.dir +local localpar_code = whatcodes.localpar +local dir_code = whatcodes.dir -local line_code = listcodes.line +local line_code = listcodes.line -local nodepool = nodes.pool 
+local nodepool = nodes.pool -local new_kern = nodepool.kern +local new_kern = nodepool.kern -local traverse = node.traverse -local find_node_tail = node.tail or node.slide -local tosequence = nodes.tosequence +local traverse = node.traverse +local find_node_tail = node.tail or node.slide +local tosequence = nodes.tosequence -- local function dimensions(parent,start,stop) -- stop = stop and stop.next @@ -362,20 +361,19 @@ local function colorize(width,height,depth,n,reference,what) end end -local nodepool = nodes.pool - -local new_kern = nodepool.kern +-- references: -local texattribute = tex.attribute -local texcount = tex.count +local nodepool = nodes.pool +local new_kern = nodepool.kern --- references: +local texsetattribute = tex.setattribute +local texsetcount = tex.setcount -local stack = { } -local done = { } -local attribute = attributes.private('reference') -local nofreferences = 0 -local topofstack = 0 +local stack = { } +local done = { } +local attribute = attributes.private('reference') +local nofreferences = 0 +local topofstack = 0 nodes.references = { attribute = attribute, @@ -390,8 +388,8 @@ local function setreference(h,d,r) -- the preroll permits us to determine samepage (but delayed also has some advantages) -- so some part of the backend work is already done here stack[topofstack] = { r, h, d, codeinjections.prerollreference(r) } - -- texattribute[attribute] = topofstack -- todo -> at tex end - texcount.lastreferenceattribute = topofstack + -- texsetattribute(attribute,topofstack) -- todo -> at tex end + texsetcount("lastreferenceattribute",topofstack) end function references.get(n) -- not public so functionality can change @@ -500,13 +498,16 @@ local function makedestination(width,height,depth,reference) nofdestinations = nofdestinations + 1 for n=1,#name do local annot = nodeinjections.destination(width,height,depth,name[n],view) - if not result then - result = annot - else - current.next = annot - annot.prev = current + if annot then + -- probably duplicate + if not result then + result = annot + else + current.next = annot + annot.prev = current + end + current = find_node_tail(annot) end - current = find_node_tail(annot) end if result then -- some internal error @@ -540,7 +541,7 @@ function references.inject(prefix,reference,h,d,highlight,newwindow,layer) -- to -- unknown ref, just don't set it and issue an error else -- check - set.highlight, set.newwindow,set.layer = highlight, newwindow, layer + set.highlight, set.newwindow, set.layer = highlight, newwindow, layer setreference(h,d,set) -- sets attribute / todo: for set[*].error end end @@ -573,7 +574,7 @@ end statistics.register("interactive elements", function() if nofreferences > 0 or nofdestinations > 0 then - return format("%s references, %s destinations",nofreferences,nofdestinations) + return string.format("%s references, %s destinations",nofreferences,nofdestinations) else return nil end diff --git a/tex/context/base/node-res.lua b/tex/context/base/node-res.lua index 768aac404..ca9d67f91 100644 --- a/tex/context/base/node-res.lua +++ b/tex/context/base/node-res.lua @@ -35,6 +35,9 @@ local glyph_code = nodecodes.glyph local allocate = utilities.storage.allocate +local texgetbox = tex.getbox +local texgetcount = tex.getcount + local reserved, nofreserved = { }, 0 local function register_node(n) @@ -57,11 +60,10 @@ function pool.cleanup(nofboxes) -- todo -- end end if nofboxes then - local tb = tex.box for i=0,nofboxes do - local l = tb[i] + local l = texgetbox(i) if l then - free_node(tb[i]) + free_node(l) 
-- also list ? nl = nl + 1 end end @@ -293,12 +295,38 @@ function pool.noad() return copy_node(noad) end -function pool.hlist() - return copy_node(hlist) +function pool.hlist(list,width,height,depth) + local n = copy_node(hlist) + if list then + n.list = list + end + if width then + n.width = width + end + if height then + n.height = height + end + if depth then + n.depth = depth + end + return n end -function pool.vlist() - return copy_node(vlist) +function pool.vlist(list,width,height,depth) + local n = copy_node(vlist) + if list then + n.list = list + end + if width then + n.width = width + end + if height then + n.height = height + end + if depth then + n.depth = depth + end + return n end --[[ @@ -396,7 +424,7 @@ function pool.special(str) end statistics.register("cleaned up reserved nodes", function() - return format("%s nodes, %s lists of %s", pool.cleanup(tex.count["c_syst_last_allocated_box"])) + return format("%s nodes, %s lists of %s", pool.cleanup(texgetcount("c_syst_last_allocated_box"))) end) -- \topofboxstack statistics.register("node memory usage", function() -- comes after cleanup ! diff --git a/tex/context/base/node-rul.lua b/tex/context/base/node-rul.lua index 953beb186..96d6bdf41 100644 --- a/tex/context/base/node-rul.lua +++ b/tex/context/base/node-rul.lua @@ -85,6 +85,9 @@ local variables = interfaces.variables local dimenfactor = fonts.helpers.dimenfactor local splitdimen = number.splitdimen +local v_yes = variables.yes +local v_foreground = variables.foreground + local nodecodes = nodes.nodecodes local skipcodes = nodes.skipcodes local whatcodes = nodes.whatcodes @@ -150,7 +153,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi else -- possible extensions: when in same class then keep spanning local newlevel, newclass = floor(aa/1000), aa%1000 ---~ strip = not continue or level == 1 -- 0 + -- strip = not continue or level == 1 -- 0 if f then if class == newclass then -- and newlevel > level then head, done = flush(head,f,l,d,level,parent,false), true @@ -161,7 +164,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi f, l, a = n, n, aa level, class = newlevel, newclass d = data[class] - continue = d.continue == variables.yes + continue = d.continue == v_yes end else if f then @@ -169,8 +172,16 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi end f, l, a = nil, nil, nil end - elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then - l = n +-- elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then +-- l = n + elseif id == disc_code then + if f then + l = n + end + elseif id == kern_code and n.subtype == kerning_code then + if f then + l = n + end elseif id == hlist_code or id == vlist_code then if f then head, done = flush(head,f,l,d,level,parent,strip), true @@ -188,13 +199,12 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi if continue then if id == penalty_code then l = n - elseif id == kern_code then - l = n + -- elseif id == kern_code then + -- l = n elseif id == glue_code then -- catch \underbar{a} \underbar{a} (subtype test is needed) local subtype = n.subtype - if continue and n[attribute] and - (subtype == userskip_code or subtype == spaceskip_code or subskip == xspaceskip_code) then + if n[attribute] and (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then l = n else head, done = flush(head,f,l,d,level,parent,strip), true 
@@ -261,9 +271,9 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a local colorspace = ma > 0 and ma or f[a_colorspace] or 1 local color = ca > 0 and ca or f[a_color] local transparency = ta > 0 and ta or f[a_transparency] - local foreground = order == variables.foreground + local foreground = order == v_foreground - local e = dimenfactor(unit,fontdata[f.font]) -- what if no glyph node + local e = dimenfactor(unit,f.font) -- what if no glyph node local rt = tonumber(rulethickness) if rt then diff --git a/tex/context/base/node-rul.mkiv b/tex/context/base/node-rul.mkiv index 640cc54fe..2d2e61134 100644 --- a/tex/context/base/node-rul.mkiv +++ b/tex/context/base/node-rul.mkiv @@ -185,9 +185,14 @@ % \definebar[touchbar] [\c!method=0,\c!dy=-0.4,\c!offset=-0.0] % \definebar[touchbars] [touchbar] [\c!continue=\v!yes] -\definebar[\v!overstrike][\c!method=0,\c!dy=0.4,\c!offset=0.5,\c!continue=\v!yes] -\definebar[\v!underbar] [\c!method=1,\c!dy=-0.4,\c!offset=-0.3,\c!continue=\v!yes] +\let\normalmathoverbar \overbar +\let\normalmathunderbar \underbar +\let\normalmathoverstrike \overstrike +\let\normalmathunderstrike\understrike + \definebar[\v!overbar] [\c!method=1,\c!dy=0.4,\c!offset=1.8,\c!continue=\v!yes] +\definebar[\v!underbar] [\c!method=1,\c!dy=-0.4,\c!offset=-0.3,\c!continue=\v!yes] +\definebar[\v!overstrike][\c!method=0,\c!dy=0.4,\c!offset=0.5,\c!continue=\v!yes] \definebar [\v!understrike] @@ -198,16 +203,41 @@ \c!order=\v!background, \c!color=lightgray] -\definebar[\v!overstrikes] [\v!overstrike] [\c!continue=\v!no] -\definebar[\v!underbars] [\v!underbar] [\c!continue=\v!no] \definebar[\v!overbars] [\v!overbar] [\c!continue=\v!no] +\definebar[\v!underbars] [\v!underbar] [\c!continue=\v!no] +\definebar[\v!overstrikes] [\v!overstrike] [\c!continue=\v!no] \definebar[\v!understrikes][\v!understrike][\c!continue=\v!no] % we want these always so ... 
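+% (the \mathortext variants below make \underbar, \overbar, \understrike and
+% \overstrike usable in both modes: in math mode they expand to the saved
+% \normalmath... commands, in text mode to the bars defined above)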
-\expandafter\let\expandafter\overstrike \csname\v!overstrike \endcsname -\expandafter\let\expandafter\underbar \csname\v!underbar \endcsname -\expandafter\let\expandafter\overbar \csname\v!overbar \endcsname +\ifdefined\normalmathunderbar + \expandafter\let\expandafter\normaltextunderbar\csname\v!underbar\endcsname + \unexpanded\def\underbar{\mathortext\normalmathunderbar\normaltextunderbar} +\else + \expandafter\let\expandafter\underbar\csname\v!underbar\endcsname +\fi + +\ifdefined\normalmathoverbar + \expandafter\let\expandafter\normaltextoverbar\csname\v!overbar\endcsname + \unexpanded\def\overbar{\mathortext\normalmathoverbar\normaltextoverbar} +\else + \expandafter\let\expandafter\overbar\csname\v!overbar\endcsname +\fi + +\ifdefined\normalmathunderstrike + \expandafter\let\expandafter\normaltextunderstrike\csname\v!understrike\endcsname + \unexpanded\def\understrike{\mathortext\normalmathunderstrike\normaltextunderstrike} +\else + \expandafter\let\expandafter\understrike\csname\v!understrike\endcsname +\fi + +\ifdefined\normalmathoverstrike + \expandafter\let\expandafter\normaltextoverstrike\csname\v!overstrike\endcsname + \unexpanded\def\overstrike{\mathortext\normalmathoverstrike \normaltextoverstrike} +\else + \expandafter\let\expandafter\overstrike\csname\v!overstrike\endcsname +\fi + \expandafter\let\expandafter\overstrikes\csname\v!overstrikes\endcsname \expandafter\let\expandafter\underbars \csname\v!underbars \endcsname \expandafter\let\expandafter\overbars \csname\v!overbars \endcsname diff --git a/tex/context/base/node-ser.lua b/tex/context/base/node-ser.lua index b0a6e9952..d7593cec7 100644 --- a/tex/context/base/node-ser.lua +++ b/tex/context/base/node-ser.lua @@ -9,24 +9,37 @@ if not modules then modules = { } end modules ['node-ser'] = { -- beware, some field names will change in a next releases -- of luatex; this is pretty old code that needs an overhaul -local type, format, rep = type, string.format, string.rep +local type = type local concat, tohash, sortedkeys, printtable = table.concat, table.tohash, table.sortedkeys, table.print +local formatters, format, rep = string.formatters, string.format, string.rep local allocate = utilities.storage.allocate -local nodes, node = nodes, node +local context = context +local nodes = nodes +local node = node -local traverse = node.traverse -local is_node = node.is_node +local traverse = nodes.traverse +local is_node = nodes.is_node local nodecodes = nodes.nodecodes +local subtcodes = nodes.codes local noadcodes = nodes.noadcodes -local nodefields = nodes.fields +local getfields = nodes.fields + +local tonode = nodes.tonode local hlist_code = nodecodes.hlist local vlist_code = nodecodes.vlist +----- utfchar = utf.char +local f_char = formatters["%U"] +----- fontchars = { } table.setmetatableindex(fontchars,function(t,k) fontchars = fonts.hashes.characters return fontchars[k] end) + +----- f_char = utilities.strings.chkuni -- formatters["%!chkuni!"] + local expand = allocate ( tohash { + -- text: "list", -- list_ptr & ins_ptr & adjust_ptr "pre", -- "post", -- @@ -42,6 +55,23 @@ local expand = allocate ( tohash { "action", -- action_ptr "value", -- user_defined nodes with subtype 'a' en 'n' "head", + -- math: + "nucleus", + "sup", + "sub", + "list", + "num", + "denom", + "left", + "right", + "display", + "text", + "script", + "scriptscript", + "delim", + "degree", + "accent", + "bot_accent", } ) -- page_insert: "height", "last_ins_ptr", "best_ins_ptr" @@ -72,8 +102,9 @@ nodes.ignorablefields = ignore -- not ok yet: -local 
function astable(n,sparse) -- not yet ok - local f, t = nodefields(n), { } +local function astable(n,sparse) -- not yet ok, might get obsolete anyway + n = tonode(n) + local f, t = getfields(n), { } for i=1,#f do local v = f[i] local d = n[v] @@ -101,10 +132,9 @@ setinspector(function(v) if is_node(v) then printtable(astable(v),tostring(v)) r -- under construction: -local function totable(n,flat,verbose,noattributes) - -- todo: no local function +local function totable(n,flat,verbose,noattributes) -- nicest: n,true,true,true local function to_table(n,flat,verbose,noattributes) -- no need to pass - local f = nodefields(n) + local f = getfields(n) local tt = { } for k=1,#f do local v = f[k] @@ -118,7 +148,7 @@ local function totable(n,flat,verbose,noattributes) if type(nv) == "number" or type(nv) == "string" then tt[v] = nv else - tt[v] = totable(nv,flat,verbose) + tt[v] = totable(nv,flat,verbose,noattributes) end elseif type(nv) == "table" then tt[v] = nv -- totable(nv,flat,verbose) -- data @@ -128,7 +158,27 @@ local function totable(n,flat,verbose,noattributes) end end if verbose then - tt.type = nodecodes[tt.id] + local subtype = tt.subtype + local id = tt.id + local nodename = nodecodes[id] + tt.id = nodename + local subtypes = subtcodes[nodename] + if subtypes then + tt.subtype = subtypes[subtype] + elseif subtype == 0 then + tt.subtype = nil + else + -- we need a table + end + if tt.char then + tt.char = f_char(tt.char) + end + if tt.small_char then + tt.small_char = f_char(tt.small_char) + end + if tt.large_char then + tt.large_char = f_char(tt.large_char) + end end return tt end @@ -137,14 +187,18 @@ local function totable(n,flat,verbose,noattributes) local t, tn = { }, 0 while n do tn = tn + 1 - t[tn] = to_table(n,flat,verbose,noattributes) + local nt = to_table(n,flat,verbose,noattributes) + t[tn] = nt + nt.next = nil + nt.prev = nil n = n.next end return t else - local t = to_table(n) - if n.next then - t.next = totable(n.next,flat,verbose,noattributes) + local t = to_table(n,flat,verbose,noattributes) + local n = n.next + if n then + t.next = totable(n,flat,verbose,noattributes) end return t end @@ -153,7 +207,8 @@ local function totable(n,flat,verbose,noattributes) end end -nodes.totable = totable +nodes.totable = function(n,...) return totable(tonode(n),...) end +nodes.totree = function(n) return totable(tonode(n),true,true,true) end -- no attributes, todo: attributes in k,v list local function key(k) return ((type(k) == "number") and "["..k.."]") or k @@ -161,7 +216,7 @@ end -- not ok yet; this will become a module --- todo: adapt to nodecodes etc +-- todo: adapt to nodecodes etc .. use formatters local function serialize(root,name,handle,depth,m,noattributes) handle = handle or print @@ -186,12 +241,12 @@ local function serialize(root,name,handle,depth,m,noattributes) if root then local fld if root.id then - fld = nodefields(root) -- we can cache these (todo) + fld = getfields(root) -- we can cache these (todo) else fld = sortedkeys(root) end if type(root) == 'table' and root['type'] then -- userdata or table - handle(format("%s %s=%q,",depth,'type',root['type'])) + handle(format("%s type=%q,",depth,root['type'])) end for f=1,#fld do local k = fld[f] @@ -241,7 +296,7 @@ function nodes.serialize(root,name,noattributes) n = n + 1 t[n] = s end - serialize(root,name,flush,nil,0,noattributes) + serialize(tonode(root),name,flush,nil,0,noattributes) return concat(t,"\n") end @@ -258,6 +313,7 @@ function nodes.visualizebox(...) -- to be checked .. 
will move to module anyway end function nodes.list(head,n) -- name might change to nodes.type -- to be checked .. will move to module anyway + head = tonode(head) if not n then context.starttyping(true) end @@ -275,6 +331,7 @@ function nodes.list(head,n) -- name might change to nodes.type -- to be checked end function nodes.print(head,n) + head = tonode(head) while head do local id = head.id logs.writer(string.formatters["%w%S"],n or 0,head) diff --git a/tex/context/base/node-shp.lua b/tex/context/base/node-shp.lua index 8f7a411a7..6ebfd767f 100644 --- a/tex/context/base/node-shp.lua +++ b/tex/context/base/node-shp.lua @@ -14,6 +14,7 @@ local concat, sortedpairs = table.concat, table.sortedpairs local setmetatableindex = table.setmetatableindex local nodecodes = nodes.nodecodes +local whatsitcodes = nodes.whatsitcodes local tasks = nodes.tasks local handlers = nodes.handlers @@ -23,28 +24,52 @@ local disc_code = nodecodes.disc local mark_code = nodecodes.mark local kern_code = nodecodes.kern local glue_code = nodecodes.glue +local whatsit_code = nodecodes.whatsit -local texbox = tex.box +local texgetbox = tex.getbox local free_node = node.free local remove_node = node.remove local traverse_nodes = node.traverse -local function cleanup(head) -- rough +local removables = { + [whatsitcodes.open] = true, + [whatsitcodes.close] = true, + [whatsitcodes.write] = true, + [whatsitcodes.pdfdest] = true, + [whatsitcodes.pdfsavepos] = true, + [whatsitcodes.latelua] = true, +} + +local function cleanup_redundant(head) local start = head while start do local id = start.id - if id == disc_code or (id == glue_code and not start.writable) or (id == kern_code and start.kern == 0) or id == mark_code then - head, start, tmp = remove_node(head,start) - free_node(tmp) + if id == disc_code then + head, start = remove_node(head,start,true) + -- elseif id == glue_code then + -- if start.writable then + -- start = start.next + -- elseif some_complex_check_on_glue_spec then + -- head, start = remove_node(head,start,true) + -- else + -- start = start.next + -- end + elseif id == kern_code then + if start.kern == 0 then + head, start = remove_node(head,start,true) + else + start = start.next + end + elseif id == mark_code then + head, start = remove_node(head,start,true) elseif id == hlist_code or id == vlist_code then local sl = start.list if sl then - start.list = cleanup(sl) + start.list = cleanup_redundant(sl) start = start.next else - head, start, tmp = remove_node(head,start) - free_node(tmp) + head, start = remove_node(head,start,true) end else start = start.next @@ -53,27 +78,56 @@ local function cleanup(head) -- rough return head end -directives.register("backend.cleanup", function() - tasks.enableaction("shipouts","nodes.handlers.cleanuppage") -end) +local function cleanup_flushed(head) -- rough + local start = head + while start do + local id = start.id + if id == whatsit_code and removables[start.subtype] then + head, start = remove_node(head,start,true) + elseif id == hlist_code or id == vlist_code then + local sl = start.list + if sl then + start.list = cleanup_flushed(sl) + start = start.next + else + head, start = remove_node(head,start,true) + end + else + start = start.next + end + end + return head +end function handlers.cleanuppage(head) -- about 10% of the nodes make no sense for the backend - return cleanup(head), true + return cleanup_redundant(head), true end +function handlers.cleanupbox(head) + return cleanup_flushed(head), true +end + +directives.register("backend.cleanup", function() + 
tasks.enableaction("shipouts","nodes.handlers.cleanuppage") +end) + local actions = tasks.actions("shipouts") -- no extra arguments function handlers.finalize(head) -- problem, attr loaded before node, todo ... return actions(head) end +function commands.cleanupbox(n) + cleanup_flushed(texgetbox(n)) +end + -- handlers.finalize = actions -- interface function commands.finalizebox(n) - actions(texbox[n]) + actions(texgetbox(n)) end -- just in case we want to optimize lookups: @@ -129,8 +183,8 @@ trackers.register("nodes.frequencies",function(v) if type(v) == "string" then frequencies.filename = v end - handlers.frequencies_shipouts_before = register("shipouts", "begin") - handlers.frequencies_shipouts_after = register("shipouts", "end") + handlers.frequencies_shipouts_before = register("shipouts", "begin") + handlers.frequencies_shipouts_after = register("shipouts", "end") handlers.frequencies_processors_before = register("processors", "begin") handlers.frequencies_processors_after = register("processors", "end") tasks.prependaction("shipouts", "before", "nodes.handlers.frequencies_shipouts_before") diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua index 916b2143d..9fe979e93 100644 --- a/tex/context/base/node-tra.lua +++ b/tex/context/base/node-tra.lua @@ -18,10 +18,10 @@ local clock = os.gettimeofday or os.clock -- should go in environment local report_nodes = logs.reporter("nodes","tracing") -nodes = nodes or { } - local nodes, node, context = nodes, node, context +local texgetattribute = tex.getattribute + local tracers = nodes.tracers or { } nodes.tracers = tracers @@ -51,12 +51,13 @@ local glue_code = nodecodes.glue local kern_code = nodecodes.kern local rule_code = nodecodes.rule local whatsit_code = nodecodes.whatsit -local spec_code = nodecodes.glue_spec +local gluespec_code = nodecodes.gluespec local localpar_code = whatcodes.localpar local dir_code = whatcodes.dir local nodepool = nodes.pool +local new_rule = nodepool.rule local dimenfactors = number.dimenfactors local formatters = string.formatters @@ -257,16 +258,19 @@ local function listtoutf(h,joiner,textonly,last) while h do local id = h.id if id == glyph_code then -- always true - w[#w+1] = utfchar(h.char) + local c = h.char + w[#w+1] = c >= 0 and utfchar(c) or formatters["<%i>"](c) if joiner then w[#w+1] = joiner end elseif id == disc_code then - local pre, rep, pos = h.pre, h.replace, h.post + local pre = h.pre + local pos = h.post + local rep = h.replace w[#w+1] = formatters["[%s|%s|%s]"] ( pre and listtoutf(pre,joiner,textonly) or "", - rep and listtoutf(rep,joiner,textonly) or "", - mid and listtoutf(mid,joiner,textonly) or "" + pos and listtoutf(pos,joiner,textonly) or "", + rep and listtoutf(rep,joiner,textonly) or "" ) elseif textonly then if id == glue_code and h.spec and h.spec.width > 0 then @@ -342,7 +346,7 @@ local function numbertodimen(d,unit,fmt,strip) local str = formatters[fmt](d*dimenfactors[unit],unit) return strip and lpegmatch(stripper,str) or str end - local id = node.id + local id = d.id if id == kern_code then local str = formatters[fmt](d.width*dimenfactors[unit],unit) return strip and lpegmatch(stripper,str) or str @@ -350,7 +354,7 @@ local function numbertodimen(d,unit,fmt,strip) if id == glue_code then d = d.spec end - if not d or not d.id == spec_code then + if not d or not d.id == gluespec_code then local str = formatters[fmt](0,unit) return strip and lpegmatch(stripper,str) or str end @@ -522,8 +526,63 @@ end -- for the moment here -nodes.visualizers = { } +local 
visualizers = nodes.visualizers or { } +nodes.visualizers = visualizers -function nodes.visualizers.handler(head) +function visualizers.handler(head) return head, false end + +-- we could cache attribute lists and set attr (copy will increment count) .. todo .. +-- although tracers are used seldom + +local function setproperties(n,c,s) + local mm = texgetattribute(a_colormodel) + n[a_colormodel] = mm > 0 and mm or 1 + n[a_color] = m_color[c] + n[a_transparency] = m_transparency[c] + return n +end + +tracers.setproperties = setproperties + +function tracers.setlistv(n,c,s) + local f = n + local mc = m_color[c] + local mt = m_transparency[c] + local mm = texgetattribute(a_colormodel) + if mm <= 0 then + mm = 1 + end + while n do + n[a_colormodel] = mm + n[a_color] = mc + n[a_transparency] = mt + n = n.next + end + return f +end + +function tracers.resetproperties(n) + n[a_color] = unsetvalue + n[a_transparency] = unsetvalue + return n +end + +function tracers.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup) + return setproperties(new_rule(w,h,d),c,s) +end + +-- only nodes + +local nodestracerpool = { } + +tracers.pool = { + nodes = nodestracerpool, +} + +function nodestracerpool.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup) + return setproperties(new_rule(w,h,d),c,s) +end + +tracers.rule = nodestracerpool.rule -- for a while diff --git a/tex/context/base/node-tsk.lua b/tex/context/base/node-tsk.lua index 596ac765a..dfa570b24 100644 --- a/tex/context/base/node-tsk.lua +++ b/tex/context/base/node-tsk.lua @@ -18,10 +18,11 @@ local report_tasks = logs.reporter("tasks") local allocate = utilities.storage.allocate +local context = context local nodes = nodes -nodes.tasks = nodes.tasks or { } -local tasks = nodes.tasks +local tasks = nodes.tasks or { } +nodes.tasks = tasks local tasksdata = { } -- no longer public @@ -116,6 +117,14 @@ function tasks.disableaction(name,action) end end +function tasks.setaction(name,action,value) + if value then + tasks.enableaction(name,action) + else + tasks.disableaction(name,action) + end +end + function tasks.enablegroup(name,group) local data = validgroup(name,"enable group") if data then @@ -317,7 +326,8 @@ end tasks.new { name = "processors", - arguments = 4, + arguments = 5, -- often only the first is used, and the last three are only passed in hpack filter +-- arguments = 2, processor = nodeprocessor, sequence = { "before", -- for users diff --git a/tex/context/base/node-typ.lua b/tex/context/base/node-typ.lua index 6e1a31643..4a2ef8d49 100644 --- a/tex/context/base/node-typ.lua +++ b/tex/context/base/node-typ.lua @@ -6,21 +6,23 @@ if not modules then modules = { } end modules ['node-typ'] = { license = "see context related readme files" } -local utfvalues = utf.values +-- code has been moved to blob-ini.lua -local currentfont = font.current -local fontparameters = fonts.hashes.parameters +local typesetters = nodes.typesetters or { } +nodes.typesetters = typesetters -local hpack = node.hpack -local vpack = node.vpack -local fast_hpack = nodes.fasthpack +local hpack_node_list = nodes.hpack +local vpack_node_list = nodes.vpack +local fast_hpack_list = nodes.fasthpack -local nodepool = nodes.pool +local nodepool = nodes.pool +local new_glyph = nodepool.glyph +local new_glue = nodepool.glue -local newglyph = nodepool.glyph -local newglue = nodepool.glue +local utfvalues = utf.values -typesetters = typesetters or { } +local currentfont = font.current +local fontparameters = fonts.hashes.parameters local function 
tonodes(str,fontid,spacing) -- quick and dirty local head, prev = nil, nil @@ -39,11 +41,11 @@ local function tonodes(str,fontid,spacing) -- quick and dirty local next if c == 32 then if not spacedone then - next = newglue(s,p,m) + next = new_glue(s,p,m) spacedone = true end else - next = newglyph(fontid or 1,c) + next = new_glyph(fontid or 1,c) spacedone = false end if not next then @@ -59,21 +61,33 @@ local function tonodes(str,fontid,spacing) -- quick and dirty return head end -typesetters.tonodes = tonodes - -function typesetters.hpack(str,fontid,spacing) - return hpack(tonodes(str,fontid,spacing),"exactly") +local function tohpack(str,fontid,spacing) + return hpack_node_list(tonodes(str,fontid,spacing),"exactly") end -function typesetters.fast_hpack(str,fontid,spacing) - return fast_hpack(tonodes(str,fontid,spacing),"exactly") +local function tohpackfast(str,fontid,spacing) + return fast_hpack_list(tonodes(str,fontid,spacing),"exactly") end -function typesetters.vpack(str,fontid,spacing) +local function tovpack(str,fontid,spacing) -- vpack is just a hack, and a proper implentation is on the agenda -- as it needs more info etc than currently available - return vpack(tonodes(str,fontid,spacing)) + return vpack_node_list(tonodes(str,fontid,spacing)) end ---~ node.write(typesetters.hpack("Hello World!")) ---~ node.write(typesetters.hpack("Hello World!",1,100*1024*10)) +local tovpackfast = tovpack + +typesetters.tonodes = tonodes +typesetters.tohpack = tohpack +typesetters.tohpackfast = tohpackfast +typesetters.tovpack = tovpack +typesetters.tovpackfast = tovpackfast + +typesetters.hpack = tohpack +typesetters.fast_hpack = tohpackfast +typesetters.vpack = tovpack + +-- node.write(nodes.typestters.hpack("Hello World!")) +-- node.write(nodes.typestters.hpack("Hello World!",1,100*1024*10)) + +string.tonodes = tonodes -- quite convenient diff --git a/tex/context/base/norm-ltx.mkii b/tex/context/base/norm-ltx.mkii index cd02cffec..9a8f7ba57 100644 --- a/tex/context/base/norm-ltx.mkii +++ b/tex/context/base/norm-ltx.mkii @@ -155,7 +155,7 @@ \let \normallatelua = \latelua \let \normalluaescapestring = \luaescapestring \let \normalluastartup = \luastartup -\let \normalluatexdatestamp = \luatexdatestamp +%let \normalluatexdatestamp = \luatexdatestamp \let \normalluatexrevision = \luatexrevision \let \normalluatexversion = \luatexversion \let \normalnokerns = \nokerns diff --git a/tex/context/base/pack-com.mkiv b/tex/context/base/pack-com.mkiv index 6c1363148..2c28d6b20 100644 --- a/tex/context/base/pack-com.mkiv +++ b/tex/context/base/pack-com.mkiv @@ -267,10 +267,13 @@ \edef\p_pack_combinations_alternative{\combinationparameter\c!alternative}% \to \everydefinecombination -\def\pack_combinations_pickup % we want to add struts but still ignore an empty box +\def\pack_combinations_pickup {\dostarttagged\t!combinationpair\empty \dostarttagged\t!combinationcontent\empty - \dowithnextboxcs\pack_combinations_pickup_content\hbox} + \assumelongusagecs\pack_combinations_pickup_content_indeed} + +\def\pack_combinations_pickup_content_indeed + {\dowithnextboxcs\pack_combinations_pickup_content\hbox} \def\pack_combinations_pickup_content % we want to add struts but still ignore an empty box {\dostoptagged @@ -279,15 +282,21 @@ \expandnamespacemacro\??combinationalternative\p_pack_combinations_alternative\v!text} \setvalue{\??combinationalternative\v!text}% + {\assumelongusagecs\pack_combinations_alternative_text_indeed} + +\setvalue{\??combinationalternative\v!label}% + 
{\assumelongusagecs\pack_combinations_alternative_label_indeed} + +\def\pack_combinations_alternative_text_indeed {\dowithnextboxcs\pack_combinations_pickup_caption\vtop\bgroup \afterassignment\pack_combinations_caption_first \let\nexttoken=} -\setvalue{\??combinationalternative\v!label}% +\def\pack_combinations_alternative_label_indeed {\dowithnextboxcs\pack_combinations_pickup_caption\vtop\bgroup \hsize\wd\b_pack_combinations_content \ifx\p_align\empty\else\setupalign[\p_align]\fi - \usecombinationstyleandcolor\c!style\c!color % but label style wins, so independent configuration + \usecombinationstyleandcolor\c!style\c!color \begstrut \normalexpanded{\strc_labels_command{\v!combination\ifx\currentcombination\empty\else:\currentcombination\fi}}% \endstrut @@ -644,7 +653,7 @@ % \global\setsystemmode{pairedbox}% \pack_pairedboxes_before - \dogotopar\pack_pairedboxes_first_pickup} + \assumelongusagecs\pack_pairedboxes_first_pickup} \def\pack_pairedboxes_first_pickup {\dowithnextboxcs\pack_pairedboxes_first\hbox @@ -653,7 +662,7 @@ \def\pack_pairedboxes_first {\pack_pairedboxes_between - \dogotopar\pack_pairedboxes_second_pickup} + \assumelongusagecs\pack_pairedboxes_second_pickup} \def\pack_pairedboxes_second_pickup {\dowithnextboxcs\pack_pairedboxes_second\vbox @@ -782,19 +791,32 @@ \setrigidcolumnhsize\hsize{\pairedboxparameter\c!distance}\p_n \fi} +% \def\pack_pairedboxes_between_horizontal +% {\setlocalhsize +% \hsize\wd\b_pack_pairedboxes_first % trick +% \hsize\pairedboxparameter\c!width % can be \hsize +% \scratchdimen\dimexpr\wd\b_pack_pairedboxes_first+\pairedboxparameter\c!distance\relax +% \ifdim\dimexpr\hsize+\scratchdimen\relax>\pairedboxparameter\c!maxwidth\relax +% \hsize\dimexpr\pairedboxparameter\c!maxwidth-\scratchdimen\relax +% \fi} + \def\pack_pairedboxes_between_horizontal - {\hsize\wd\b_pack_pairedboxes_first % trick - \hsize\pairedboxparameter\c!width % can be \hsize - \scratchdimen\dimexpr\wd\b_pack_pairedboxes_first+\pairedboxparameter\c!distance\relax - \ifdim\dimexpr\hsize+\scratchdimen\relax>\pairedboxparameter\c!maxwidth\relax - \hsize\dimexpr\pairedboxparameter\c!maxwidth-\scratchdimen\relax + {\scratchdistance\pairedboxparameter\c!distance + \scratchwidth\pairedboxparameter\c!maxwidth\relax + \setlocalhsize + \hsize\dimexpr\availablehsize-\wd\b_pack_pairedboxes_first-\scratchdistance\relax + \hsize\pairedboxparameter\c!width\relax % can be \hsize + \scratchdimen\dimexpr\wd\b_pack_pairedboxes_first+\scratchdistance\relax + \ifdim\dimexpr\hsize+\scratchdimen\relax>\scratchwidth + \hsize\dimexpr\scratchwidth-\scratchdimen\relax \fi} \def\pack_pairedboxes_between_vertical - {\hsize\wd\b_pack_pairedboxes_first - \hsize\pairedboxparameter\c!width % can be \hsize - \ifdim\hsize>\pairedboxparameter\c!maxwidth\relax - \hsize\pairedboxparameter\c!maxwidth % can be \hsize + {\scratchwidth\pairedboxparameter\c!maxwidth\relax + \hsize\wd\b_pack_pairedboxes_first + \hsize\pairedboxparameter\c!width\relax % can be \hsize + \ifdim\hsize>\scratchwidth\relax + \hsize\scratchwidth \fi} \def\pack_pairedboxes_after @@ -811,7 +833,7 @@ \fi} \def\pack_pairedboxes_pack_horizontal - {\hbox\bgroup + {\dontleavehmode\hbox\bgroup \forgetall \s_pack_pairedboxes_size\ht \ifdim\ht\b_pack_pairedboxes_first>\ht\b_pack_pairedboxes_second @@ -832,7 +854,7 @@ \egroup} \def\pack_pairedboxes_pack_vertical - {\vbox\bgroup + {\dontleavehmode\vbox\bgroup \forgetall \s_pack_pairedboxes_size\wd \ifdim\wd\b_pack_pairedboxes_first>\wd\b_pack_pairedboxes_second diff --git a/tex/context/base/pack-obj.lua 
b/tex/context/base/pack-obj.lua index 1e4e0f59e..70876a346 100644 --- a/tex/context/base/pack-obj.lua +++ b/tex/context/base/pack-obj.lua @@ -13,8 +13,7 @@ reusable components.
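
Earlier in this patch node-typ.lua keeps the old typesetters.hpack entry points as aliases while exposing the string-to-nodes helpers as nodes.typesetters.tonodes/tohpack and string.tonodes. A rough usage sketch, not part of the patch itself, assuming font.current() returns a valid font id:

    -- sketch only: build a node list from a string and ship it out
    local typesetters = nodes.typesetters
    local head = typesetters.tonodes("Hello World!",font.current()) -- glyphs and glue
    node.write(typesetters.tohpack("Hello World!",font.current())) -- hpacked variant
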

local commands, context = commands, context -local texcount = tex.count -local allocate = utilities.storage.allocate +local allocate = utilities.storage.allocate local collected = allocate() local tobesaved = allocate() diff --git a/tex/context/base/pack-rul.lua b/tex/context/base/pack-rul.lua index a990936e7..909c0c168 100644 --- a/tex/context/base/pack-rul.lua +++ b/tex/context/base/pack-rul.lua @@ -10,16 +10,21 @@ if not modules then modules = { } end modules ['pack-rul'] = {

An explanation is given in the history document mk.
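
A recurring pattern in this patch, visible in the pack-rul.lua hunk below as well as in page-flt.lua, page-lin.lua, page-mix.lua and page-str.lua: direct indexing of tex.box, tex.dimen and tex.skip is replaced by accessor functions. A small before/after sketch, with register 0 and the names from the hunks used purely as examples:

    -- old style: table indexing
    local b = tex.box[0]
    -- new style: accessor functions, numeric and named registers as in the hunks
    local b = tex.getbox(0)
    local strutht = tex.getdimen("globalbodyfontstrutheight")
    tex.setbox("floatbox",nil) -- clear a named box register
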

--ldx]]-- -local texsetdimen, texsetcount, texbox = tex.setdimen, tex.setcount, tex.box -local hpack, free, copy, traverse_id = node.hpack, node.free, node.copy_list, node.traverse_id -local texdimen, texcount = tex.dimen, tex.count +local hpack = node.hpack +local free = node.free +local copy = node.copy_list +local traverse_id = node.traverse_id +local node_dimensions = node.dimensions local hlist_code = nodes.nodecodes.hlist local box_code = nodes.listcodes.box -local node_dimensions = node.dimensions + +local texsetdimen = tex.setdimen +local texsetcount = tex.setcount +local texgetbox = tex.getbox function commands.doreshapeframedbox(n) - local box = texbox[n] + local box = texgetbox(n) local noflines = 0 local firstheight = nil local lastdepth = nil @@ -76,17 +81,16 @@ function commands.doreshapeframedbox(n) end end end - -- print("reshape", noflines, firstheight or 0, lastdepth or 0) - texsetcount("global","framednoflines", noflines) - texsetdimen("global","framedfirstheight", firstheight or 0) - texsetdimen("global","framedlastdepth", lastdepth or 0) - texsetdimen("global","framedminwidth", minwidth) - texsetdimen("global","framedmaxwidth", maxwidth) - texsetdimen("global","framedaveragewidth", noflines > 0 and totalwidth/noflines or 0) + texsetcount("global","framednoflines",noflines) + texsetdimen("global","framedfirstheight",firstheight or 0) + texsetdimen("global","framedlastdepth",lastdepth or 0) + texsetdimen("global","framedminwidth",minwidth) + texsetdimen("global","framedmaxwidth",maxwidth) + texsetdimen("global","framedaveragewidth",noflines > 0 and totalwidth/noflines or 0) end function commands.doanalyzeframedbox(n) - local box = texbox[n] + local box = texgetbox(n) local noflines = 0 local firstheight = nil local lastdepth = nil @@ -102,8 +106,7 @@ function commands.doanalyzeframedbox(n) end end end - -- print("analyze", noflines, firstheight or 0, lastdepth or 0) - texsetcount("global","framednoflines", noflines) - texsetdimen("global","framedfirstheight", firstheight or 0) - texsetdimen("global","framedlastdepth", lastdepth or 0) + texsetcount("global","framednoflines",noflines) + texsetdimen("global","framedfirstheight",firstheight or 0) + texsetdimen("global","framedlastdepth",lastdepth or 0) end diff --git a/tex/context/base/pack-rul.mkiv b/tex/context/base/pack-rul.mkiv index f2b332b14..bff8b4a0f 100644 --- a/tex/context/base/pack-rul.mkiv +++ b/tex/context/base/pack-rul.mkiv @@ -100,11 +100,11 @@ \expandafter\noexpand\csname do#1rootparameter\endcsname \expandafter\noexpand\csname setupcurrent#1\endcsname \expandafter\noexpand\csname inherited#1framed\endcsname - \noexpand\??framed}} % if needed we can have a variant + \expandafter\noexpand\csname inherited#1framedbox\endcsname}} % new \unexpanded\def\doinstallinheritedframed#1#2#3#4#5#6#7#8#9% {\def#5##1##2{\ifx##1\relax#6{##2}\else#4{##1}{##2}\fi}% - \def#6##1{\ifcsname#9:##1\endcsname#9:##1\else\s!empty\fi}% root + \def#6##1{\ifcsname\??framed:##1\endcsname\??framed:##1\else\s!empty\fi}% root \unexpanded\def#8% {\bgroup \bgroup @@ -113,7 +113,15 @@ \let\framedparameter #2% \let\framedparameterhash#3% \let\setupcurrentframed #7% - \pack_framed_process_indeed}} + \pack_framed_process_indeed}% + \unexpanded\def#9% + {\bgroup + \inframedtrue + \let\currentframed #1% + \let\framedparameter #2% + \let\framedparameterhash#3% + \let\setupcurrentframed #7% + \pack_framed_process_box_indeed}} \unexpanded\def\installframedcommandhandler#1#2#3% {\installcommandhandler{#1}{#2}{#3}% @@ -880,11 +888,8 @@ %D The next macro 
uses a box and takes its natural width and height so these %D can better be correct. -\unexpanded\def\localbackgroundframed#1#2#3% namespace component box - {\bgroup - \edef\currentframed{#1}% - \pack_framed_initialize - \setbox\b_framed_normal\box#3% +\unexpanded\def\pack_framed_process_box_indeed#1#2% component box (assumes parameters set and grouped usage) + {\setbox\b_framed_normal\box#2% could actually be \let\b_framed_normal#2 \edef\p_framed_region{\framedparameter\c!region}% \ifx\p_framed_region\v!yes % maybe later named \pack_framed_add_region @@ -907,12 +912,18 @@ \else \p_framed_backgroundoffset \fi - \edef\p_framed_component{#2}% + \edef\p_framed_component{#1}% \pack_framed_add_background \fi \box\b_framed_normal \egroup} +\unexpanded\def\localbackgroundframed#1% namespace component box + {\bgroup + \edef\currentframed{#1}% + \pack_framed_initialize + \pack_framed_process_box_indeed} % group ends here + \let\postprocessframebox\relax %D A nice example by Aditya: @@ -1638,27 +1649,41 @@ % \framed[width=12cm,height=3cm,orientation=-180]{\input ward\relax} % \framed[width=12cm,height=3cm,orientation=-270]{\input ward\relax} +% \def\pack_framed_start_orientation +% {\ifcase\p_framed_orientation +% \let\pack_framed_stop_orientation\relax +% \else +% \scratchcounter\p_framed_orientation % weird .. why +% \divide\scratchcounter\plustwo +% \ifodd\scratchcounter +% \let\pack_framed_stop_orientation\pack_framed_stop_orientation_odd +% \else +% \let\pack_framed_stop_orientation\pack_framed_stop_orientation_even +% \fi +% \fi} +% +% \def\pack_framed_stop_orientation_odd +% {\swapmacros\framedwidth\framedheight +% \swapmacros\localwidth\localheight +% \swapdimens\d_framed_height\d_framed_width +% \pack_framed_stop_orientation_even} +% +% \def\pack_framed_stop_orientation_even +% {\setbox\b_framed_normal\hbox{\dorotatebox\p_framed_orientation\hbox{\box\b_framed_normal}}% +% \d_framed_height\ht\b_framed_normal +% \d_framed_width \wd\b_framed_normal} + \def\pack_framed_start_orientation {\ifcase\p_framed_orientation \let\pack_framed_stop_orientation\relax \else - \scratchcounter\p_framed_orientation % weird .. why - \divide\scratchcounter\plustwo - \ifodd\scratchcounter - \let\pack_framed_stop_orientation\pack_framed_stop_orientation_odd - \else - \let\pack_framed_stop_orientation\pack_framed_stop_orientation_even - \fi + \let\pack_framed_stop_orientation\pack_framed_stop_orientation_indeed \fi} -\def\pack_framed_stop_orientation_odd - {\swapmacros\framedwidth\framedheight - \swapmacros\localwidth\localheight - \swapdimens\d_framed_height\d_framed_width - \pack_framed_stop_orientation_even} - -\def\pack_framed_stop_orientation_even - {\setbox\b_framed_normal\hbox{\dorotatebox\p_framed_orientation\hbox{\box\b_framed_normal}}} +\def\pack_framed_stop_orientation_indeed + {\setbox\b_framed_normal\hbox{\dorotatebox\p_framed_orientation\hbox{\box\b_framed_normal}}% + \d_framed_height\ht\b_framed_normal + \d_framed_width \wd\b_framed_normal} %D The last conditional takes care of the special situation of in||line \inframed %D [height=3cm] {framed} boxes. 
Such boxes have to be \inframed {aligned} with the @@ -1738,17 +1763,45 @@ \edef\currentmathframed{#1}% \dosingleempty\pack_framed_mathframed_indeed} +% \def\pack_framed_mathframed_indeed[#1]#2% no fancy nesting supported here +% {\iffirstargument +% \setupcurrentmathframed[#1]% +% \fi +% \c_framed_mstyle\mathstyle +% \doifnot{\mathframedparameter\c!location}\v!low{\let\normalstrut\pack_framed_math_strut}% +% \inheritedmathframedframed{\Ustartmath\triggermathstyle\c_framed_mstyle#2\Ustopmath}% +% \endgroup} + +\newcount\c_pack_framed_mc + +\def\pack_framed_math_pos + {\global\advance\c_pack_framed_mc\plusone + \xdef\pack_framed_mc_one{mcf:1:\number\c_pack_framed_mc}% + \xdef\pack_framed_mc_two{mcf:2:\number\c_pack_framed_mc}% + \xypos\pack_framed_mc_two} + \def\pack_framed_mathframed_indeed[#1]#2% no fancy nesting supported here {\iffirstargument \setupcurrentmathframed[#1]% \fi \c_framed_mstyle\mathstyle - \doifnot{\mathframedparameter\c!location}\v!low{\let\normalstrut\pack_framed_math_strut}% + \edef\m_framed_location{\mathframedparameter\c!location}% + \ifx\m_framed_location\v!mathematics + \let\normalstrut\pack_framed_math_pos + \else\ifx\m_framed_location\v!low\else + \let\normalstrut\pack_framed_math_strut + \fi\fi \inheritedmathframedframed{\Ustartmath\triggermathstyle\c_framed_mstyle#2\Ustopmath}% \endgroup} +\installframedlocator \v!mathematics + {} + {\lower\dimexpr\MPy\pack_framed_mc_two-\MPy\pack_framed_mc_one\relax + \hbox{\xypos\pack_framed_mc_one\box\b_framed_normal}} + \definemathframed[mframed] \definemathframed[inmframed][\c!location=\v!low] +\definemathframed[mcframed] [\c!location=\v!mathematics] %D So instead of the rather versatile \type {\framed}, we use \type {\mframed}: %D @@ -1953,6 +2006,7 @@ \raggedcommand \pack_framed_do_top \bgroup +\synchronizeinlinedirection \localbegstrut \aftergroup\localendstrut \aftergroup\pack_framed_do_bottom @@ -1974,6 +2028,7 @@ \raggedcenter \vss \bgroup +\synchronizeinlinedirection \localbegstrut \aftergroup\localendstrut \aftergroup\vss @@ -1996,6 +2051,7 @@ \aftergroup\localendstrut \aftergroup\vss \aftergroup\egroup +\synchronizeinlinedirection \localbegstrut \doformatonelinerbox} @@ -2013,6 +2069,7 @@ \raggedcommand \pack_framed_do_top \bgroup +\synchronizeinlinedirection \localbegstrut \aftergroup\localendstrut \aftergroup\pack_framed_do_bottom @@ -2036,6 +2093,7 @@ \hbox \bgroup \aftergroup\egroup +\synchronizeinlinedirection \localstrut \doformatonelinerbox} @@ -2049,6 +2107,7 @@ \fi \pack_framed_do_setups \hss +\synchronizeinlinedirection \localstrut \bgroup \aftergroup\hss @@ -2063,6 +2122,7 @@ \fi \let\postprocessframebox\relax \pack_framed_do_setups +\synchronizeinlinedirection \localstrut \doformatonelinerbox} diff --git a/tex/context/base/page-app.mkiv b/tex/context/base/page-app.mkiv index 5f1c2f297..e4858d48f 100644 --- a/tex/context/base/page-app.mkiv +++ b/tex/context/base/page-app.mkiv @@ -106,7 +106,7 @@ \fi \d_page_fitting_width \wd\b_page_fitting \d_page_fitting_height\ht\b_page_fitting - \startlocallayout % still valid? + \startlocallayout % hm, we change the papersize so we still need it \let\checkcurrentlayout\relax % else interference with odd/even layout \processaction [\fittingpageparameter\c!paper] @@ -117,7 +117,7 @@ \startmakeup[fittingpage]% \box\b_page_fitting \stopmakeup - \stoplocallayout % still valid? 
+ \stoplocallayout % we need to get rid of this and use the built-in \egroup \autostoptext} diff --git a/tex/context/base/page-brk.mkiv b/tex/context/base/page-brk.mkiv index ac1fffd6b..d1520b6ce 100644 --- a/tex/context/base/page-brk.mkiv +++ b/tex/context/base/page-brk.mkiv @@ -228,7 +228,7 @@ \installpagebreakmethod \v!quadruple % not yet ok inside columnsets {\ifdoublesided - \ifnum\numexpr\realpageno/4\relax=\numexpr\realpageno/2\relax\else + \ifnum\numexpr\realpageno/\plusfour\relax=\numexpr\realpageno/\plustwo\relax\else \page_breaks_handle_direct\v!yes \page_breaks_handle_direct\v!empty \page_breaks_handle_direct\v!empty diff --git a/tex/context/base/page-flt.lua b/tex/context/base/page-flt.lua index ab7a534eb..11aa2be21 100644 --- a/tex/context/base/page-flt.lua +++ b/tex/context/base/page-flt.lua @@ -11,9 +11,6 @@ if not modules then modules = { } end modules ['page-flt'] = { local insert, remove = table.insert, table.remove local find = string.find -local setdimen, setcount, texbox = tex.setdimen, tex.setcount, tex.box - -local copy_node_list = node.copy_list local trace_floats = false trackers.register("graphics.floats", function(v) trace_floats = v end) -- name might change @@ -24,10 +21,20 @@ local C, S, P, lpegmatch = lpeg.C, lpeg.S, lpeg.P, lpeg.match -- we use floatbox, floatwidth, floatheight -- text page leftpage rightpage (todo: top, bottom, margin, order) -floats = floats or { } -local floats = floats +local copy_node_list = node.copy_list + +local setdimen = tex.setdimen +local setcount = tex.setcount +local texgetbox = tex.getbox +local texsetbox = tex.setbox + +floats = floats or { } +local floats = floats -local noffloats, last, default, pushed = 0, nil, "text", { } +local noffloats = 0 +local last = nil +local default = "text" +local pushed = { } local function initialize() return { @@ -98,7 +105,7 @@ end function floats.save(which,data) which = which or default - local b = texbox.floatbox + local b = texgetbox("floatbox") if b then local stack = stacks[which] noffloats = noffloats + 1 @@ -108,7 +115,7 @@ function floats.save(which,data) data = data or { }, box = copy_node_list(b), } - texbox.floatbox = nil + texsetbox("floatbox",nil) insert(stack,t) setcount("global","savednoffloats",#stacks[default]) if trace_floats then @@ -125,10 +132,10 @@ function floats.resave(which) if last then which = which or default local stack = stacks[which] - local b = texbox.floatbox + local b = texgetbox("floatbox") local w, h, d = b.width, b.height, b.depth last.box = copy_node_list(b) - texbox.floatbox = nil + texsetbox("floatbox",nil) insert(stack,1,last) setcount("global","savednoffloats",#stacks[default]) if trace_floats then @@ -152,7 +159,7 @@ function floats.flush(which,n,bylabel) else interfaces.showmessage("floatblocks",3,t.n) end - texbox.floatbox = b + texsetbox("floatbox",b) last = remove(stack,n) last.box = nil setcount("global","savednoffloats",#stacks[default]) -- default? 
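
So the float carrier now goes through the same accessors: floats.save copies whatever sits in the floatbox register onto a named stack and clears the register, and floats.flush later copies a saved float back into it. A rough sketch of that cycle, with the data table passed through untouched:

    -- sketch only: push the current \floatbox onto the "text" stack ...
    floats.save("text", { })
    -- ... and later put the first saved float back into \floatbox
    floats.flush("text", 1)
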
diff --git a/tex/context/base/page-imp.mkiv b/tex/context/base/page-imp.mkiv index c22e9e646..cfa535ab2 100644 --- a/tex/context/base/page-imp.mkiv +++ b/tex/context/base/page-imp.mkiv @@ -293,7 +293,7 @@ \fi \fi} -\def\installpagearrangement #1 % will lchange, no space +\def\installpagearrangement #1 % will change, no space {\setgvalue{\??layoutarranger#1}} \def\checkinstalledpagearrangement#1% can be empty: aaa,,bbb diff --git a/tex/context/base/page-inj.lua b/tex/context/base/page-inj.lua index 5b450d60e..56e5a234e 100644 --- a/tex/context/base/page-inj.lua +++ b/tex/context/base/page-inj.lua @@ -8,6 +8,8 @@ if not modules then modules = { } end modules ["page-inj"] = { -- Adapted a bit by HH: numbered states, tracking, delayed, order, etc. +local type, tonumber = type, tonumber + local injections = pagebuilders.injections or { } pagebuilders.injections = injections @@ -16,6 +18,11 @@ local trace = false trackers.register("pagebuilder.injections",func local variables = interfaces.variables +local context = context +local commands = commands + +local texsetcount = tex.setcount + local v_yes = variables.yes local v_previous = variables.previous local v_next = variables.next @@ -31,7 +38,7 @@ function injections.save(specification) -- maybe not public, just commands.* state = tonumber(specification.state) or specification.state, parameters = specification.userdata, } - tex.setcount("global","c_page_boxes_flush_n",#cache) + texsetcount("global","c_page_boxes_flush_n",#cache) end function injections.flushbefore() -- maybe not public, just commands.* @@ -62,7 +69,7 @@ function injections.flushbefore() -- maybe not public, just commands.* end context.unprotect() cache = delayed - tex.setcount("global","c_page_boxes_flush_n",#cache) + texsetcount("global","c_page_boxes_flush_n",#cache) end end @@ -92,7 +99,7 @@ function injections.flushafter() -- maybe not public, just commands.* end context.protect() cache = delayed - tex.setcount("global","c_page_boxes_flush_n",#cache) + texsetcount("global","c_page_boxes_flush_n",#cache) end end diff --git a/tex/context/base/page-lay.mkiv b/tex/context/base/page-lay.mkiv index c0d897522..81eb0423c 100644 --- a/tex/context/base/page-lay.mkiv +++ b/tex/context/base/page-lay.mkiv @@ -597,7 +597,7 @@ \let\p_page_layouts_height\empty \def\page_layouts_synchronize - {\setups[\layoutparameter\c!preset]% + {\setups[\layoutparameter\c!preset]\relax \global\leftmarginwidth \layoutparameter\c!leftmargin \global\rightmarginwidth\layoutparameter\c!rightmargin \global\leftedgewidth \layoutparameter\c!leftedge @@ -987,14 +987,13 @@ \global\let\page_adepts_pop\page_adepts_pop_indeed} \def\page_adapts_check + {\csname\??pageadaptations\the\ifcsname\??pageadaptations\the\realpageno\endcsname\realpageno\else\zerocount\fi\endcsname} + +\def\page_adapts_reset {\ifcsname\??pageadaptations\the\realpageno\endcsname - \page_adapts_check_indeed + \global\undefinevalue{\??pageadaptations\the\realpageno}% \fi} -\def\page_adapts_check_indeed - {\getvalue{\??pageadaptations\the\realpageno}% - \letvalue{\??pageadaptations\the\realpageno}\relax} - \def\page_adepts_push_indeed {\global\d_page_adepts_pushed_text_height \textheight \global\d_page_adepts_pushed_footer_height\footerheight} @@ -1006,6 +1005,9 @@ \global\let\page_adepts_push\page_adepts_push_indeed \global\let\page_adepts_pop\relax} +\appendtoks \page_adapts_check \to \everystarttext +\appendtoks \page_adapts_reset \to \everyshipout + \let\page_adepts_pop \relax \let\page_adepts_push\page_adepts_push_indeed diff --git 
a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua index e6b500e8b..7e8e9ad8a 100644 --- a/tex/context/base/page-lin.lua +++ b/tex/context/base/page-lin.lua @@ -12,8 +12,6 @@ local trace_numbers = false trackers.register("lines.numbers", function(v) tra local report_lines = logs.reporter("lines") -local texbox = tex.box - local attributes, nodes, node, context = attributes, nodes, node, context nodes.lines = nodes.lines or { } @@ -23,6 +21,8 @@ lines.data = lines.data or { } -- start step tag local data = lines.data local last = #data +local texgetbox = tex.getbox + lines.scratchbox = lines.scratchbox or 0 local leftmarginwidth = nodes.leftmarginwidth @@ -208,7 +208,7 @@ local function identify(list) end function boxed.stage_zero(n) - return identify(texbox[n].list) + return identify(texgetbox(n).list) end -- reset ranges per page @@ -217,9 +217,9 @@ end function boxed.stage_one(n,nested) current_list = { } - local head = texbox[n] - if head then - local list = head.list + local box = texgetbox(n) + if box then + local list = box.list if nested then list = identify(list) end @@ -268,7 +268,7 @@ function boxed.stage_two(n,m) if #current_list > 0 then m = m or lines.scratchbox local t, tn = { }, 0 - for l in traverse_id(hlist_code,texbox[m].list) do + for l in traverse_id(hlist_code,texgetbox(m).list) do tn = tn + 1 t[tn] = copy_node(l) end diff --git a/tex/context/base/page-lin.mkiv b/tex/context/base/page-lin.mkiv index 0f8b78398..876d2e781 100644 --- a/tex/context/base/page-lin.mkiv +++ b/tex/context/base/page-lin.mkiv @@ -156,28 +156,28 @@ % todo: text -\installcorenamespace{linennumberinglocation} -\installcorenamespace{linennumberingalternative} - -\expandafter\let\csname\??linennumberinglocation\v!middle \endcsname \zerocount -\expandafter\let\csname\??linennumberinglocation\v!left \endcsname \plusone -\expandafter\let\csname\??linennumberinglocation\v!margin \endcsname \plusone -\expandafter\let\csname\??linennumberinglocation\v!inmargin \endcsname \plusone -\expandafter\let\csname\??linennumberinglocation\v!inleft \endcsname \plusone -\expandafter\let\csname\??linennumberinglocation\v!right \endcsname \plustwo -\expandafter\let\csname\??linennumberinglocation\v!inright \endcsname \plustwo -\expandafter\let\csname\??linennumberinglocation\v!inner \endcsname \plusthree -\expandafter\let\csname\??linennumberinglocation\v!outer \endcsname \plusfour -\expandafter\let\csname\??linennumberinglocation\v!text \endcsname \plusfive -\expandafter\let\csname\??linennumberinglocation\v!begin \endcsname \plussix -\expandafter\let\csname\??linennumberinglocation\v!end \endcsname \plusseven - -\expandafter\let\csname\??linennumberingalternative\v!middle \endcsname \zerocount -\expandafter\let\csname\??linennumberingalternative\v!right \endcsname \plusone -\expandafter\let\csname\??linennumberingalternative\v!flushleft \endcsname \plusone -\expandafter\let\csname\??linennumberingalternative\v!left \endcsname \plustwo -\expandafter\let\csname\??linennumberingalternative\v!flushright\endcsname \plustwo -\expandafter\let\csname\??linennumberingalternative\v!auto \endcsname \plusfive +\installcorenamespace{linenumberinglocation} +\installcorenamespace{linenumberingalternative} + +\expandafter\let\csname\??linenumberinglocation\v!middle \endcsname \zerocount +\expandafter\let\csname\??linenumberinglocation\v!left \endcsname \plusone +\expandafter\let\csname\??linenumberinglocation\v!margin \endcsname \plusone +\expandafter\let\csname\??linenumberinglocation\v!inmargin \endcsname \plusone 
+\expandafter\let\csname\??linenumberinglocation\v!inleft \endcsname \plusone +\expandafter\let\csname\??linenumberinglocation\v!right \endcsname \plustwo +\expandafter\let\csname\??linenumberinglocation\v!inright \endcsname \plustwo +\expandafter\let\csname\??linenumberinglocation\v!inner \endcsname \plusthree +\expandafter\let\csname\??linenumberinglocation\v!outer \endcsname \plusfour +\expandafter\let\csname\??linenumberinglocation\v!text \endcsname \plusfive +\expandafter\let\csname\??linenumberinglocation\v!begin \endcsname \plussix +\expandafter\let\csname\??linenumberinglocation\v!end \endcsname \plusseven + +\expandafter\let\csname\??linenumberingalternative\v!middle \endcsname \zerocount +\expandafter\let\csname\??linenumberingalternative\v!right \endcsname \plusone +\expandafter\let\csname\??linenumberingalternative\v!flushleft \endcsname \plusone +\expandafter\let\csname\??linenumberingalternative\v!left \endcsname \plustwo +\expandafter\let\csname\??linenumberingalternative\v!flushright\endcsname \plustwo +\expandafter\let\csname\??linenumberingalternative\v!auto \endcsname \plusfive % \startlinenumbering[|continue|settings|name] % \startlinenumbering[name][|continue|settings] @@ -339,8 +339,8 @@ \else \setfalse\c_page_lines_fake_number \fi - \c_page_lines_location \executeifdefined{\??linennumberinglocation \linenumberingparameter\c!location}\plusone \relax % left - \c_page_lines_alignment\executeifdefined{\??linennumberingalternative\linenumberingparameter\c!align }\plusfive\relax % auto + \c_page_lines_location \executeifdefined{\??linenumberinglocation \linenumberingparameter\c!location}\plusone \relax % left + \c_page_lines_alignment\executeifdefined{\??linenumberingalternative\linenumberingparameter\c!align }\plusfive\relax % auto \ifcase\c_page_lines_last_column\relax \settrue \c_page_lines_fake_number \or diff --git a/tex/context/base/page-mix.lua b/tex/context/base/page-mix.lua index cf0094787..7a97655d2 100644 --- a/tex/context/base/page-mix.lua +++ b/tex/context/base/page-mix.lua @@ -34,11 +34,14 @@ local new_glue = nodepool.glue local hpack = node.hpack local vpack = node.vpack local freenode = node.free +local concatnodes = nodes.concat + +local texgetbox = tex.getbox +local texsetbox = tex.setbox +local texgetskip = tex.getskip -local texbox = tex.box -local texskip = tex.skip -local texdimen = tex.dimen local points = number.points + local settings_to_hash = utilities.parsers.settings_to_hash local variables = interfaces.variables @@ -79,7 +82,7 @@ local function collectinserts(result,nxt,nxtid) if not c then c = { } inserts[s] = c - local width = texskip[s].width + local width = texgetskip(s).width if not result.inserts[s] then currentskips = currentskips + width end @@ -133,15 +136,10 @@ local function discardtopglue(current,discarded) if current.penalty == forcedbreak then discarded[#discarded+1] = current current = current.next - while current do - local id = current.id - if id == glue_code then - size = size + current.spec.width - discarded[#discarded+1] = current - current = current.next - else - break - end + while current and current.id == glue_code do + size = size + current.spec.width + discarded[#discarded+1] = current + current = current.next end else discarded[#discarded+1] = current @@ -199,7 +197,7 @@ local function setsplit(specification) -- a rather large function report_state("fatal error, no box") return end - local list = texbox[box] + local list = texgetbox(box) if not list then report_state("fatal error, no list") return @@ -218,6 +216,10 
@@ local function setsplit(specification) -- a rather large function local height = 0 local depth = 0 local skip = 0 + local splitmethod = specification.splitmethod or false + if splitmethod == v_none then + splitmethod = false + end local options = settings_to_hash(specification.option or "") local stripbottom = specification.alternative == v_local local cycle = specification.cycle or 1 @@ -253,12 +255,35 @@ local function setsplit(specification) -- a rather large function delta = 0, } end + local column = 1 local line = 0 - local result = results[column] + local result = results[1] local lasthead = nil local rest = nil + + if trace_state then + report_state("setting collector to column %s",column) + end + + local lastlocked = nil + local lastcurrent = nil + + local backtracked = false + local function gotonext() + if lastcurrent then + if current ~= lastcurrent then + if trace_state then + report_state("backtracking to preferred break in column %s",column) + end + -- todo: also remember height/depth + current = lastcurrent + backtracked = true + end + lastcurrent = nil + lastlocked = nil + end if head == lasthead then if trace_state then report_state("empty column %s, needs more work",column) @@ -282,19 +307,21 @@ local function setsplit(specification) -- a rather large function if column == nofcolumns then column = 0 -- nicer in trace rest = head - -- lasthead = head return false, 0 else local skipped column = column + 1 result = results[column] + if trace_state then + report_state("setting collector to column %s",column) + end current, skipped = discardtopglue(current,discarded) head = current - -- lasthead = head return true, skipped end end - local function checked(advance,where) + + local function checked(advance,where,locked) local total = skip + height + depth + advance local delta = total - target local state = "same" @@ -316,47 +343,34 @@ local function setsplit(specification) -- a rather large function end return state, skipped end + current, skipped = discardtopglue(current,discarded) if trace_detail and skipped ~= 0 then report_state("check > column 1, discarded %p",skipped) end + + -- problem: when we cannot break after a list (and we only can expect same-page situations as we don't + -- care too much about weighted breaks here) we should sort of look ahead or otherwise be able to push + -- back inserts and so + -- + -- ok, we could use vsplit but we don't have that one opened up yet .. maybe i should look into the c-code + -- .. 
something that i try to avoid so let's experiment more before we entry dirty trick mode + head = current - while current do - local id = current.id - local nxt = current.next -local lastcolumn = column - if id == hlist_code or id == vlist_code then - line = line + 1 - local nxtid = nxt and nxt.id - local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0 - local advance = current.height -- + current.depth - if nxt and (nxtid == insert_code or nxtid == mark_code) then - nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid) - end - local state, skipped = checked(advance+inserttotal+currentskips,"line") - if trace_state then - report_state("%-7s > column %s, state %a, line %s, advance %p, insert %p, height %p","line",column,state,line,advance,inserttotal,height) - if skipped ~= 0 then - report_state("%-7s > column %s, discarded %p","line",column,skipped) - end - end - if state == "quit" then - break - else - height = height + depth + skip + advance + inserttotal - if state == "next" then - height = height + nextskips - else - height = height + currentskips + + local function process_skip(current,nxt) + local advance = current.spec.width + local prv = current.prev + if prv.id == penalty_code then + local penalty = prv.penalty + if penalty < 4000 then + lastlocked = nil + lastcurrent = nil end + elseif current.subtype ~= lineskip_code then + lastlocked = nil + lastcurrent = nil end - depth = current.depth - skip = 0 - if inserts then - appendinserts(result.inserts,inserts) - end - elseif id == glue_code then - local advance = current.spec.width if advance ~= 0 then local state, skipped = checked(advance,"glue") if trace_state then @@ -366,60 +380,222 @@ local lastcolumn = column end end if state == "quit" then - break + return true end height = height + depth + skip depth = 0 skip = height > 0 and advance or 0 - end - elseif id == kern_code then - local advance = current.kern - if advance ~= 0 then - local state, skipped = checked(advance,"kern") if trace_state then - report_state("%-7s > column %s, state %a, advance %p, height %p, state %a","kern",column,state,advance,height) - if skipped ~= 0 then - report_state("%-7s > column %s, discarded %p","kern",column,skipped) - end + report_state("%-7s > column %s, height %p, depth %p, skip %p","glue",column,height,depth,skip) end - if state == "quit" then - break + else + -- what else? ignore? treat as valid as usual? 
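
For each glue, kern, rule or line the split loop adds the advance to a running height/depth/skip total and compares it against the column target: "same" keeps filling, "next" moves on to the next column, "quit" ends the pass (or the page when no column is left). A much simplified sketch of that bookkeeping, ignoring inserts, grid snapping and the locked-break logic:

    -- sketch, not the real checked(): does this advance still fit the current column?
    local function fits(height,depth,skip,advance,target)
        local total = skip + height + depth + advance
        if total <= target then
            return "same", total - target
        else
            return "next", total - target -- caller switches columns, or quits when none is left
        end
    end
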
+ end + end + + local function process_kern(current,nxt) + local advance = current.kern + if advance ~= 0 then + local state, skipped = checked(advance,"kern") + if trace_state then + report_state("%-7s > column %s, state %a, advance %p, height %p, state %a","kern",column,state,advance,height) + if skipped ~= 0 then + report_state("%-7s > column %s, discarded %p","kern",column,skipped) end - height = height + depth + skip + advance - depth = 0 - skip = 0 end - elseif id == penalty_code then - local penalty = current.penalty - if penalty == 0 then - -- don't bother - elseif penalty == forcedbreak then - local okay, skipped = gotonext() - if okay then - if trace_state then - report_state("cycle: %s, forced column break (same page)",cycle) - if skipped ~= 0 then - report_state("%-7s > column %s, discarded %p","penalty",column,skipped) - end - end - else - if trace_state then - report_state("cycle: %s, forced column break (next page)",cycle) - if skipped ~= 0 then - report_state("%-7s > column %s, discarded %p","penalty",column,skipped) - end + if state == "quit" then + return true + end + height = height + depth + skip + advance + depth = 0 + skip = 0 + if trace_state then + report_state("%-7s > column %s, height %p, depth %p, skip %p","kern",column,height,depth,skip) + end + end + end + + local function process_rule(current,nxt) + -- simple variant of h|vlist + local advance = current.height -- + current.depth + local state, skipped = checked(advance+currentskips,"rule") + if trace_state then + report_state("%-7s > column %s, state %a, rule, advance %p, height %p","line",column,state,advance,inserttotal,height) + if skipped ~= 0 then + report_state("%-7s > column %s, discarded %p","rule",column,skipped) + end + end + if state == "quit" then + return true + end + height = height + depth + skip + advance + if state == "next" then + height = height + nextskips + else + height = height + currentskips + end + depth = current.depth + skip = 0 + end + + -- okay, here we could do some badness like magic but we want something + -- predictable and even better: strategies .. 
so eventually this will + -- become installable + -- + -- [chapter] [penalty] [section] [penalty] [first line] + -- + -- we need some nice use cases so the next is just for me to play with + + -- todo: presets: + -- + -- fixed : criterium=4000 check=no + -- large : criterium=4000 check=more + -- auto : criterium=0 check=more + + local lockcriterium = 4000 + + local function prevprev(current) + local p = current.prev + return p and p.prev + end + + local function reassess(current,penalty) + if splitmethod == v_fixed then + -- quite ok, a magic number: used as samepage (in sectioning) + if penalty >= lockcriterium then + if not lastlocked then + lastcurrent = prevprev(current) + lastlocked = lastcurrent and penalty + end + return + end + elseif splitmethod == v_more then + -- experiment, might change + if penalty >= lockcriterium then + if not lastlocked or penalty >= lastlocked then + lastcurrent = prevprev(current) + lastlocked = lastcurrent and penalty + end + return + end + elseif splitmethod == v_auto then + if penalty > 0 then + if not lastlocked or penalty > lastlocked then + lastcurrent = prevprev(current) + lastlocked = lastcurrent and penalty + end + return + end + end + lastlocked = nil + lastcurrent = nil + end + + local function process_penalty(current,nxt) + local penalty = current.penalty + if penalty == 0 then + lastlocked = nil + lastcurrent = nil + elseif penalty == forcedbreak then + lastlocked = nil + lastcurrent = nil + local okay, skipped = gotonext() + if okay then + if trace_state then + report_state("cycle: %s, forced column break, same page",cycle) + if skipped ~= 0 then + report_state("%-7s > column %s, discarded %p","penalty",column,skipped) end - break end else - -- todo: nobreak etc ... we might need to backtrack so we need to remember - -- the last acceptable break - -- club and widow and such i.e. resulting penalties (if we care) + if trace_state then + report_state("cycle: %s, forced column break, next page",cycle) + if skipped ~= 0 then + report_state("%-7s > column %s, discarded %p","penalty",column,skipped) + end + end + return true end + elseif penalty < 0 then + -- we don't care too much + lastlocked = nil + lastcurrent = nil + elseif splitmethod then + reassess(current,penalty) + else + lastlocked = nil + lastcurrent = nil end -if lastcolumn == column then - nxt = current.next -- can have changed -end + end + + local function process_list(current,nxt) + local nxtid = nxt and nxt.id + line = line + 1 + local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0 + local advance = current.height -- + current.depth + if trace_state then + report_state("%-7s > column %s, content: %s","line",column,listtoutf(current.list,true,true)) + end + if nxt and (nxtid == insert_code or nxtid == mark_code) then + nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid) + end + local state, skipped = checked(advance+inserttotal+currentskips,"line",lastlocked) + if trace_state then + report_state("%-7s > column %s, state %a, line %s, advance %p, insert %p, height %p","line",column,state,line,advance,inserttotal,height) + if skipped ~= 0 then + report_state("%-7s > column %s, discarded %p","line",column,skipped) + end + end + if state == "quit" then + return true + end + height = height + depth + skip + advance + inserttotal + if state == "next" then + height = height + nextskips + else + height = height + currentskips + end + depth = current.depth + skip = 0 + if inserts then + -- so we already collect them ... 
makes backtracking tricky ... alternatively + -- we can do that in a separate loop ... no big deal either + appendinserts(result.inserts,inserts) + end + if trace_state then + report_state("%-7s > column %s, height %p, depth %p, skip %p","line",column,height,depth,skip) + end + end + + while current do + + local id = current.id + local nxt = current.next + + backtracked = false + + -- print("process",nodetostring(current)) + + if id == hlist_code or id == vlist_code then + if process_list(current,nxt) then break end + elseif id == glue_code then + if process_skip(current,nxt) then break end + elseif id == kern_code then + if process_kern(current,nxt) then break end + elseif id == penalty_code then + if process_penalty(current,nxt) then break end + elseif id == rule_code then + if process_rule(current,nxt) then break end + else + end + + if backtracked then + -- print("pickup",nodetostring(current)) + nxt = current + else + -- print("move on",nodetostring(current)) + end + if nxt then current = nxt elseif head == lasthead then @@ -437,6 +613,7 @@ end break end end + if not current then if trace_state then report_state("nilling rest") @@ -466,7 +643,7 @@ end specification.overflow = overflow specification.discarded = discarded - texbox[specification.box].head = nil + texgetbox(specification.box).list = nil return specification end @@ -556,9 +733,6 @@ function mixedcolumns.setsplit(specification) end end -local topskip_code = gluecodes.topskip -local baselineskip_code = gluecodes.baselineskip - function mixedcolumns.getsplit(result,n) if not result then report_state("flush, column %s, no result",n) @@ -615,9 +789,10 @@ function mixedcolumns.getsplit(result,n) end for c, list in next, r.inserts do - -- tex.setbox("global",c,vpack(nodes.concat(list))) - -- tex.setbox(c,vpack(nodes.concat(list))) - texbox[c] = vpack(nodes.concat(list)) + local l = concatnodes(list) + local b = vpack(l) -- multiple arguments, todo: fastvpack + -- texsetbox("global",c,b) + texsetbox(c,b) r.inserts[c] = nil end diff --git a/tex/context/base/page-mix.mkiv b/tex/context/base/page-mix.mkiv index abdcced1f..a40418d58 100644 --- a/tex/context/base/page-mix.mkiv +++ b/tex/context/base/page-mix.mkiv @@ -28,6 +28,7 @@ % top and bottom inserts % wide floats % move floats +% offsets (inner ones, so we change the hsize ... needed with backgrounds % luatex buglet: % @@ -55,7 +56,11 @@ \setupmixedcolumns [\c!distance=1.5\bodyfontsize, \c!n=\plustwo, - %\c!rule=\v!none, + %\c!align=, % inherit + %\c!before=, + %\c!after=, + %\c!separator=\v!none, + %\c!setups=, \c!frame=\v!off, \c!strut=\v!no, \c!offset=\v!overlay, @@ -64,6 +69,7 @@ \c!maxwidth=\makeupwidth, \c!grid=\v!tolerant, \c!step=.25\lineheight, % needs some experimenting + %\c!splitmethod=\v!fixed, % will be default \c!method=\ifinner\s!box\else\s!otr\fi] % automatic as suggested by WS \let\startmixedcolumns\relax % defined later @@ -158,7 +164,8 @@ \definemixedcolumns [\s!itemgroupcolumns] [\c!n=\itemgroupparameter\c!n, - \c!rule=\v!off, + \c!separator=\v!none, + \c!splitmethod=\v!none, \c!balance=\v!yes] \unexpanded\def\strc_itemgroups_start_columns @@ -219,7 +226,7 @@ \unexpanded\def\page_mix_command_set_hsize {\hsize\d_page_mix_column_width - \textwidth\d_page_mul_used_width} % needs thinking ... grouping etc + \textwidth\d_page_mix_column_width} %D When setting the vsize we make sure that we collect a few more lines than needed %D so that we have enough to split over the columns. 
Collecting too much is somewhat @@ -289,7 +296,7 @@ \unexpanded\def\page_mix_command_inject_separator {\bgroup \hss - \csname\??mixedcolumnsseparator\mixedcolumnsparameter\c!separator\endcsname + \csname\??mixedcolumnsseparator\mixedcolumnsparameter\c!separator\endcsname % was \c!rule \hss \egroup} @@ -478,12 +485,21 @@ % \page_mix_command_set_vsize \page_mix_command_set_hsize - \fi} + \fi + \usealignparameter\mixedcolumnsparameter + \usesetupsparameter\mixedcolumnsparameter} + +% \setvalue{\??mixedcolumnsstop\s!otr}% +% {\par +% \ifcase\c_page_mix_otr_nesting\or +% \c_page_mix_routine\c_page_mix_routine_balance +% \page_otr_trigger_output_routine +% \fi} \setvalue{\??mixedcolumnsstop\s!otr}% {\par \ifcase\c_page_mix_otr_nesting\or - \c_page_mix_routine\c_page_mix_routine_balance + \doif{\mixedcolumnsparameter\c!balance}\v!yes{\c_page_mix_routine\c_page_mix_routine_balance}% \page_otr_trigger_output_routine \fi} @@ -511,6 +527,7 @@ strutht = \number\strutht, strutdp = \number\strutdp, threshold = \number\d_page_mix_threshold, + splitmethod = "\mixedcolumnsparameter\c!splitmethod", balance = "#1", alternative = "\mixedcolumnsparameter\c!alternative", grid = \ifgridsnapping true\else false\fi, @@ -527,10 +544,14 @@ \fi \hskip\d_page_mix_leftskip \page_mix_hbox to \d_page_mix_max_width \bgroup - \letmixedcolumnsparameter\c!strut\v!no - % maybe use \c_page_mix_used_of_columns - \dorecurse\c_page_mix_n_of_columns {% - \inheritedmixedcolumnsframed{\page_mix_command_package_column}% + \dorecurse\c_page_mix_n_of_columns{% + % needs packaging anyway + \setbox\scratchbox\page_mix_command_package_column + % for the moment a quick and dirty patch .. we need to go into the box (hence the \plusone) .. a slowdowner + \page_lines_add_numbers_to_box\scratchbox\recurselevel\c_page_mix_n_of_columns\plusone + % the framed needs a reset of strut, align, setups etc + \inheritedmixedcolumnsframedbox\currentmixedcolumns\scratchbox + % optional \ifnum\recurselevel<\c_page_mix_n_of_columns \page_mix_command_inject_separator \fi diff --git a/tex/context/base/page-mrk.mkiv b/tex/context/base/page-mrk.mkiv index 5f8d332c5..6d0a5af94 100644 --- a/tex/context/base/page-mrk.mkiv +++ b/tex/context/base/page-mrk.mkiv @@ -173,6 +173,7 @@ \settrue\c_page_marks_add_more_number} \appendtoks + \setfalse\c_page_marks_add_page_lines \setfalse\c_page_marks_add_more_color \setfalse\c_page_marks_add_more_marking \setfalse\c_page_marks_add_more_lines diff --git a/tex/context/base/page-mul.mkiv b/tex/context/base/page-mul.mkiv index 9e0861af9..24286106a 100644 --- a/tex/context/base/page-mul.mkiv +++ b/tex/context/base/page-mul.mkiv @@ -1158,26 +1158,28 @@ \def\setlocalcolumnfloats {\settrue\onlylocalcolumnfloats \everypar\everylocalcolumnfloatspar - \let\page_mul_flush_float\doflushcolumnfloat + \let\page_mul_flush_float \doflushcolumnfloat \let\page_mul_flush_floats\doflushcolumnfloats} \def\setglobalcolumnfloats {\setfalse\onlylocalcolumnfloats \reseteverypar - \let\page_mul_flush_float\relax + \let\page_mul_flush_float \relax \let\page_mul_flush_floats\noflushcolumnfloats} - \def\noflushcolumnfloats - {\bgroup - \xdef\localsavednoffloats{\the\savednoffloats}% - \global\savednoffloats\globalsavednoffloats - \page_otr_command_flush_top_insertions - \xdef\globalsavenoffloats{\the\savednoffloats}% - \ifnum\globalsavednoffloats=\zerocount - \setlocalcolumnfloats - \fi - \global\savednoffloats\localsavednoffloats - \egroup} + % \def\noflushcolumnfloats + % {\bgroup + % \xdef\localsavednoffloats{\the\savednoffloats}% + % 
\global\savednoffloats\globalsavednoffloats + % \page_otr_command_flush_top_insertions + % \xdef\globalsavenoffloats{\the\savednoffloats}% + % \ifnum\globalsavednoffloats=\zerocount + % \setlocalcolumnfloats + % \fi + % \global\savednoffloats\localsavednoffloats + % \egroup} + % + \def\noflushcolumnfloats{\doflushcolumnfloats} % not yet redone %D We need to calculate the amount of free space in a columns. When there is not %D enough room, we migrate the float to the next column. These macro's are diff --git a/tex/context/base/page-one.mkiv b/tex/context/base/page-one.mkiv index 6261938b6..941828688 100644 --- a/tex/context/base/page-one.mkiv +++ b/tex/context/base/page-one.mkiv @@ -1,4 +1,4 @@ -%D \module + %D \module %D [ file=page-one, %D version=2000.10.20, %D title=\CONTEXT\ Page Macros, diff --git a/tex/context/base/page-pst.lua b/tex/context/base/page-pst.lua index 8586830cf..50580ae33 100644 --- a/tex/context/base/page-pst.lua +++ b/tex/context/base/page-pst.lua @@ -8,15 +8,23 @@ if not modules then modules = { } end modules ['page-pst'] = { -- todo: adapt message +local tonumber, next = tonumber, next local format, validstring = string.format, string.valid local sortedkeys = table.sortedkeys +local context = context +local commands = commands + +local texgetcount = tex.getcount +local texsetcount = tex.setcount + local cache = { } local function flush(page) local c = cache[page] if c then for i=1,#c do + -- characters.showstring(c[i]) context.viafile(c[i],format("page.%s",validstring(page,"nopage"))) end cache[page] = nil @@ -32,14 +40,14 @@ local function setnextpage() elseif n > 0 then -- upcoming page (realpageno) end - tex.setcount("global","c_page_postponed_blocks_next_page",n) + texsetcount("global","c_page_postponed_blocks_next_page",n) end function commands.flushpostponedblocks(page) -- we need to flush previously pending pages as well and the zero -- slot is the generic one so that one is always flushed local t = sortedkeys(cache) - local p = tonumber(page) or tex.count.realpageno or 0 + local p = tonumber(page) or texgetcount("realpageno") or 0 for i=1,#t do local ti = t[i] if ti <= p then @@ -54,7 +62,7 @@ end function commands.registerpostponedblock(page) if type(page) == "string" then if string.find(page,"^+") then - page = tex.count.realpageno + (tonumber(page) or 1) -- future delta page + page = texgetcount("realpageno") + (tonumber(page) or 1) -- future delta page else page = tonumber(page) or 0 -- preferred page or otherwise first possible occasion end diff --git a/tex/context/base/page-pst.mkiv b/tex/context/base/page-pst.mkiv index 7f8a39ca6..704289246 100644 --- a/tex/context/base/page-pst.mkiv +++ b/tex/context/base/page-pst.mkiv @@ -78,6 +78,7 @@ %\flushrestfloats \page_floats_flush_page_floats \setnormalcatcodes % postponing in verbatim + \uncatcodespacetokens % postponing in startlines \restoreglobalbodyfont % otherwise problems inside split verbatim \ctxcommand{flushpostponedblocks()}% \relax diff --git a/tex/context/base/page-str.lua b/tex/context/base/page-str.lua index f6314657f..35ce85609 100644 --- a/tex/context/base/page-str.lua +++ b/tex/context/base/page-str.lua @@ -12,18 +12,25 @@ if not modules then modules = { } end modules ['page-str'] = { local concat, insert, remove = table.concat, table.insert, table.remove -local find_tail, write_node, free_node, copy_nodelist = node.slide, node.write, node.free, node.copy_list -local vpack_nodelist, hpack_nodelist = node.vpack, node.hpack -local texdimen, texbox = tex.dimen, tex.box -local settings_to_array 
= utilities.parsers.settings_to_array - local nodes, node = nodes, node -local nodepool = nodes.pool -local tasks = nodes.tasks +local nodepool = nodes.pool +local tasks = nodes.tasks + +local new_kern = nodepool.kern +local new_glyph = nodepool.glyph + +local find_tail = node.slide +local write_node = node.write +local free_node = node.free +local copy_nodelist = node.copy_list +local vpack_nodelist = node.vpack +local hpack_nodelist = node.hpack + +local settings_to_array = utilities.parsers.settings_to_array -local new_kern = nodepool.kern -local new_glyph = nodepool.glyph +local texgetdimen = tex.getdimen +local texgetbox = tex.getbox local trace_collecting = false trackers.register("streams.collecting", function(v) trace_collecting = v end) local trace_flushing = false trackers.register("streams.flushing", function(v) trace_flushing = v end) @@ -175,7 +182,8 @@ function streams.synchronize(list) -- this is an experiment ! if trace_flushing then report_streams("slot %s has max height %p and max depth %p",m,height,depth) end - local strutht, strutdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth + local strutht = texgetdimen("globalbodyfontstrutheight") + local strutdp = texgetdimen("globalbodyfontstrutdepth") local struthtdp = strutht + strutdp for i=1,#list do local name = list[i] @@ -198,7 +206,7 @@ function streams.synchronize(list) -- this is an experiment ! local n, delta = 0, delta_height -- for tracing while delta > 0 do -- we need to add some interline penalties - local line = copy_nodelist(tex.box.strutbox) + local line = copy_nodelist(texgetbox("strutbox")) line.height, line.depth = strutht, strutdp if tail then tail.next, line.prev = line, tail diff --git a/tex/context/base/page-txt.mkvi b/tex/context/base/page-txt.mkvi index 707af25e9..240f0e00b 100644 --- a/tex/context/base/page-txt.mkvi +++ b/tex/context/base/page-txt.mkvi @@ -269,7 +269,7 @@ \let\m_page_layouts_element_content\empty \unexpanded\def\page_layouts_process_element_single#style#color#width#content% - {\edef\m_page_layouts_element_content{\detokenize{#content}}% + {\edef\m_page_layouts_element_content{\detokenize{#content}}% so no \v!xxx \ifx\m_page_layouts_element_content\empty % should not happen too often \else diff --git a/tex/context/base/phys-dim.lua b/tex/context/base/phys-dim.lua index 45a99978d..e40d1eabb 100644 --- a/tex/context/base/phys-dim.lua +++ b/tex/context/base/phys-dim.lua @@ -53,6 +53,9 @@ local variables = interfaces.variables local v_reverse = variables.reverse local allocate = utilities.storage.allocate +local context = context +local commands = commands + local trace_units = false local report_units = logs.reporter("units") @@ -381,13 +384,15 @@ local long_operators = { local long_suffixes = { - Linear = "linear", - Square = "square", - Cubic = "cubic", - Inverse = "inverse", - ILinear = "ilinear", - ISquare = "isquare", - ICubic = "icubic", + Linear = "linear", + Square = "square", + Cubic = "cubic", + Quadratic = "quadratic", + Inverse = "inverse", + ILinear = "ilinear", + ISquare = "isquare", + ICubic = "icubic", + IQuadratic = "iquadratic", } @@ -454,23 +459,29 @@ local short_suffixes = { -- maybe just raw digit match ["1"] = "linear", ["2"] = "square", ["3"] = "cubic", + ["4"] = "quadratic", ["+1"] = "linear", ["+2"] = "square", ["+3"] = "cubic", + ["+4"] = "quadratic", ["-1"] = "inverse", ["-1"] = "ilinear", ["-2"] = "isquare", ["-3"] = "icubic", + ["-4"] = "iquadratic", ["^1"] = "linear", ["^2"] = "square", ["^3"] = "cubic", + ["^4"] = "quadratic", 
["^+1"] = "linear", ["^+2"] = "square", ["^+3"] = "cubic", + ["^+4"] = "quadratic", ["^-1"] = "inverse", ["^-1"] = "ilinear", ["^-2"] = "isquare", ["^-3"] = "icubic", + ["^-4"] = "iquadratic", } local symbol_units = { @@ -575,7 +586,7 @@ labels.units = allocate { lumen = { labels = { en = [[lm]] } }, lux = { labels = { en = [[lx]] } }, bequerel = { labels = { en = [[Bq]] } }, - gray = { labels = { en = [[Gr]] } }, + gray = { labels = { en = [[Gy]] } }, sievert = { labels = { en = [[Sv]] } }, katal = { labels = { en = [[kat]] } }, minute = { labels = { en = [[min]] } }, @@ -635,13 +646,15 @@ labels.operators = allocate { } labels.suffixes = allocate { - linear = { labels = { en = [[1]] } }, - square = { labels = { en = [[2]] } }, - cubic = { labels = { en = [[3]] } }, - inverse = { labels = { en = [[-1]] } }, - ilinear = { labels = { en = [[-1]] } }, - isquare = { labels = { en = [[-2]] } }, - icubic = { labels = { en = [[-3]] } }, + linear = { labels = { en = [[1]] } }, + square = { labels = { en = [[2]] } }, + cubic = { labels = { en = [[3]] } }, + quadratic = { labels = { en = [[4]] } }, + inverse = { labels = { en = [[-1]] } }, + ilinear = { labels = { en = [[-1]] } }, + isquare = { labels = { en = [[-2]] } }, + icubic = { labels = { en = [[-3]] } }, + iquadratic = { labels = { en = [[-4]] } }, } local function dimpus(p,u,s) diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex index 026f2ea09..7d8064b29 100644 --- a/tex/context/base/s-abr-01.tex +++ b/tex/context/base/s-abr-01.tex @@ -232,6 +232,7 @@ \logo [SQL] {sql} \logo [SSD] {ssd} \logo [SVG] {svg} +\logo [STIX] {Stix} \logo [SWIG] {swig} \logo [SWIGLIB] {SwigLib} \logo [TABLE] {\TaBlE} @@ -299,6 +300,7 @@ \logo [XFDF] {xfdf} \logo [XHTML] {xhtml} \logo [XINDY] {Xindy} +\logo [XITS] {Xits} \logo [XML] {xml} \logo [XPATH] {xpath} \logo [XMLTOOLS] {xmltools} diff --git a/tex/context/base/s-abr-04.tex b/tex/context/base/s-abr-04.tex index dcd93c6f1..23940b526 100644 --- a/tex/context/base/s-abr-04.tex +++ b/tex/context/base/s-abr-04.tex @@ -1,8 +1,8 @@ %D \module -%D [ file=s-abr-01, +%D [ file=s-abr-04, %D version=1996.01.01, %D title=\CONTEXT\ Style File, -%D subtitle=General Abbreviations 1, +%D subtitle=General Abbreviations 2, %D author=Hans Hagen, %D date=\currentdate, %D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] @@ -11,314 +11,14 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. 
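
The unit suffix tables above gain fourth powers (quadratic and iquadratic), and the label for the gray becomes the proper SI symbol Gy. A tiny sketch of how an exponent written as ^4 ends up at its label, using the local tables from this hunk:

    -- sketch: the short suffix resolves to a key, the key to a rendered label
    local key   = short_suffixes["^4"]           -- "quadratic"
    local label = labels.suffixes[key].labels.en -- typeset as "4"
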
-\unprotect - -% \setupsorting[logo][\c!style=\v!capital] - -% \setupcapitals[\c!title=\v!no] - -\protect - -\logo [MKI] {MkI} -\logo [MKII] {MkII} -\logo [MKIII] {MkIII} -\logo [MKIV] {MkIV} +\usemodule[abr-01] -%logo [FGA] {fga} -%logo [FGBBS] {fgbbs} -\logo [ACROBAT] {Acro\-bat} -\logo [AFM] {afm} -\logo [API] {api} -\logo [ALEPH] {Aleph} % {\mathematics{\aleph}} -\logo [ALGOL] {ALGOL} -\logo [AMS] {ams} -\logo [AMSLATEX] {AmS\LATEX} -\logo [AMSTEX] {AmS\TeX} -\logo [ANSI] {ansi} -\logo [ARABTEX] {Arab\TeX} -\logo [ASCII] {ascii} -\logo [ASCIITEX] {ascii\TeX} -\logo [BACHOTEX] {Bacho\TeX} -\logo [BIBTEX] {bib\TeX} -\logo [BLUESKY] {BlueSky} -\logo [BMP] {bmp} -\logo [BSD] {bsd} -\logo [CCODE] {c} -\logo [CALCMATH] {CalcMath} -\logo [CD] {cd} -\logo [CLD] {cld} -\logo [CPU] {cpu} -\logo [CDROM] {cdrom} -\logo [CID] {cid} -\logo [CJK] {cjk} -\logo [CMR] {cmr} -\logo [CMYK] {cmyk} -\logo [CODHOST] {CodHost} -\logo [CONTEXT] {Con{\TeX}t} -\logo [CSS] {css} -\logo [CTAN] {ctan} -\logo [CTXTOOLS] {ctxtools} -\logo [CWEB] {cweb} -\logo [DANTE] {Dante} -\logo [DISTILLER] {distiller} -\logo [DRATEX] {Dra\TeX} -\logo [DSC] {dsc} -\logo [DTD] {dtd} -\logo [DTK] {dtk} -\logo [DTP] {dtp} -\logo [DVD] {dvd} -\logo [DVI] {dvi} -\logo [DVIPDFM] {dvipdfm} -\logo [DVIPDFMX] {dvipdfmx} -\logo [DVIPOS] {dvipos} -\logo [DVIPS] {dvips} -\logo [DVIPSONE] {dvipsone} -\logo [DVISCR] {dviscr} -\logo [DVIWINDO] {dviwindo} -\logo [EC] {ec} -\logo [EIFFEL] {Eiffel} -\logo [EMACS] {emacs} -\logo [EMTEX] {em\TeX} -\logo [ENCODING] {enc} -\logo [ENCTEX] {enc\TeX} -\logo [EPS] {eps} -\logo [ETEX] {\eTeX} -\logo [EUROBACHOTEX] {EuroBacho\TeX} -\logo [EUROMATH] {EuroMath} -\logo [EUROTEX] {Euro\TeX} -\logo [EXAMPLE] {eXaMpLe} -\logo [EXAMPLED] {exampled} -\logo [EXAMPLEQ] {exampleq} -\logo [EXAMPLER] {exampler} -\logo [EXAMPLET] {examplet} -\logo [EXAMPLEX] {examplex} -\logo [EXIMPLE] {eXiMpLe} -\logo [FAQ] {faq} -\logo [FDF] {fdf} -\logo [FONTFORGE] {FontForge} -\logo [FOXET] {foXet} -\logo [FPTEX] {fp\TeX} -\logo [FREEBSD] {FreeBSD} -\logo [FTP] {ftp} -\logo [GHOSTSCRIPT]{Ghost\-script} -\logo [GHOSTVIEW] {Ghost\-view} -\logo [GIF] {gif} -\logo [GNU] {gnu} -\logo [GNUPLOT] {gnuplot} -\logo [GS] {Ghost\-Script} -\logo [GUST] {Gust} -\logo [GWTEX] {gw\TeX} -\logo [HSB] {hsb} -\logo [HTML] {html} -\logo [HTTP] {http} -\logo [HZ] {hz} -\logo [IBM] {ibm} -\logo [IMAGEMAGICK]{ImageMagick} -\logo [INITEX] {ini\TeX} -\logo [INRSTEX] {inrs\TeX} -\logo [IO] {io} -\logo [IRCNET] {IRCnet} -\logo [ISO] {iso} -\logo [JAVA] {Java} -\logo [JAVASCRIPT] {Java\-Script} -\logo [JPEG] {jpeg} -\logo [JPG] {jpg} -\logo [JBIG] {jbig} -\logo [KPATHSEA] {kpathsea} -\logo [KPSE] {kpse} -\logo [KPSEWHICH] {kpsewhich} -\logo [MKTEXLSR] {mktexlsr} -\logo [LAMSTEX] {LamS\TeX} -\logo [LATEX] {La\TeX} -\logo [LATEXTE] {La\TeX2e} -\logo [LATEXTN] {La\TeX2.09} -\logo [LINUX] {linux} -\logo [LISP] {Lisp} -\logo [LPEG] {lpeg} -\logo [LUA] {Lua} -\logo [LUAJIT] {LuaJIT} -\logo [LUATEX] {Lua\TeX} -\logo [LUAJITTEX] {Luajit\TeX} -\logo [LUATOOLS] {luatools} -\logo [MACOSX] {MacOSX} -\logo [MACROTEX] {Macro\TeX} -\logo [MAKEMPY] {MakeMPY} -\logo [MAPPING] {map} -\logo [MAPS] {Maps} -\logo [MATHML] {MathML} -\logo [METAFONT] {MetaFont} -\logo [METAPOST] {MetaPost} -\logo [METATEX] {Meta\TeX} -\logo [MIKTEX] {Mik\TeX} -\logo [MLTEX] {ml\TeX} -\logo [METATYPE] {MetaType1} -\logo [MODULA] {Modula} -\logo [MOV] {mov} -\logo [MPS] {mps} -\logo [MPTOPDF] {mptopdf} -\logo [MPLIB] {MPlib} -\logo [MSDOS] {msdos} -\logo [MSWINDOWS] {MS~Windows} -\logo [MTXRUN] {mtxrun} 
-\logo [MTXTOOLS] {mtxtools} -\logo [NETPBM] {NetPBM} -\logo [NTG] {ntg} -\logo [NTS] {nts} -\logo [OFM] {ofm} -\logo [OMEGA] {Omega} -\logo [OPENMATH] {OpenMath} -\logo [OPENTYPE] {OpenType} -\logo [OPI] {opi} -\logo [OTF] {otf} -\logo [OTP] {otp} -\logo [OVF] {ovf} -\logo [PASCAL] {Pascal} -\logo [PCTEX] {pc\TeX} -\logo [PDF] {pdf} -\logo [PDFETEX] {pdfe\TeX} -\logo [PDFTEX] {pdf\TeX} -\logo [PDFTOOLS] {pdftools} -\logo [PDFTOPS] {pdftops} -\logo [PERL] {Perl} -\logo [PERLTK] {Perl/Tk} -\logo [PICTEX] {\PiCTeX} -\logo [PK] {pk} -\logo [PLAIN] {Plain} -\logo [PNG] {png} -\logo [POSIX] {posix} -\logo [POSTSCRIPT] {Post\-Script} -\logo [PPCHTEX] {\PPCHTeX} -\logo [PRAGMA] {Pragma ADE} -\logo [PRESS] {press} -\logo [PRIFIL] {prifil} -\logo [PS] {Post\-Script} -\logo [PSCHECK] {pscheck} -\logo [PSTOEDIT] {pstoedit} -\logo [PSTOPAGE] {pstopage} -\logo [PSTOPDF] {pstopdf} -\logo [PSTRICKS] {pstricks} -\logo [RAM] {ram} -\logo [READER] {Acro\-bat Reader} -\logo [RELAXNG] {Relax\kern.125emNG} -\logo [RGB] {rgb} -\logo [RLXTOOLS] {rlxtools} -\logo [RUBY] {Ruby} -\logo [SCITE] {SciTE} -\logo [SGML] {sgml} -\logo [SI] {si} -\logo [SQL] {sql} -\logo [TABLE] {\TaBlE} -\logo [TCPIP] {tcp/ip} -\logo [TDS] {tds} % no sc te -\logo [TETEX] {te\TeX} % no sc te -\logo [TEX] {\TeX} -\logo [TEXADRES] {\TeX adress} -\logo [TEXBASE] {\TeX base} -\logo [TEXEDIT] {\TeX edit} -\logo [TEXEXEC] {\TeX exec} -\logo [TEXFONT] {\TeX font} -\logo [TEXFORM] {\TeX form} -\logo [TEXLIVE] {\TeX Live} -\logo [TEXLUA] {\TeX Lua} -\logo [TEXMF] {texmf} -\logo [TEXMFSTART] {texmfstart} -\logo [TEXNL] {tex-nl} -\logo [TEXSHOW] {\TeX show} -\logo [TEXSPELL] {\TeX spell} -\logo [TEXGYRE] {\TeX\ Gyre} -\logo [TEXSYNC] {texsync} -\logo [TEXTMATE] {TextMate} -\logo [TEXTOOLS] {\TeX tools} -\logo [TEXUTIL] {\TeX util} -\logo [TEXWORK] {\TeX work} -\logo [TEXXET] {\TeX\XeT} \def\XeT{XeT} -\logo [TFM] {tfm} -\logo [TIF] {tif} -\logo [TIFF] {tiff} -\logo [TIFFINFO] {tiffinfo} -\logo [TIFFTAGS] {tifftags} -\logo [TMFTOOLS] {tmftools} -\logo [TPIC] {tpic} -\logo [TPM] {tpm} -\logo [TRUETYPE] {TrueType} -\logo [TTF] {ttf} -\logo [TUG] {tug} -\logo [TUGBOAT] {Tug\-Boat} -\logo [TUGNEWS] {Tug\-News} -\logo [TYPEONE] {Type1} -\logo [UCS] {ucs} -\logo [UNICODE] {Uni\-code} -\logo [UNIX] {Unix} -\logo [URI] {uri} -\logo [URL] {url} -\logo [USA] {usa} -\logo [USENET] {usenet} -\logo [UTF] {utf} -\logo [UTF] {utf} -\logo [VF] {vf} -\logo [WDT] {wdt} -\logo [WEB] {web} -\logo [WEBC] {web2c} -\logo [WIKI] {Wiki} -\logo [WINDOWS] {Windows} -\logo [WINNT] {WinNT} -\logo [WINNX] {Win9x} -\logo [WWW] {www} -\logo [WYSIWYG] {wysiwyg} -\logo [XDVI] {Xdvi} -\logo [XETEX] {\XeTeX} -\logo [XFDF] {xfdf} -\logo [XHTML] {xhtml} -\logo [XINDY] {Xindy} -\logo [XML] {xml} -\logo [XPATH] {xpath} -\logo [XMLTOOLS] {xmltools} -\logo [XPDFETEX] {xpdfe\TeX} -\logo [XSL] {xsl} -\logo [XSLFO] {xsl-fo} -\logo [XSLT] {xslt} -\logo [XSLTPROC] {xsltproc} -\logo [XYPIC] {XYPIC} % wrong logo -\logo [YandY] {y\&y} -\logo [ZIP] {zip} - -\def\METAFUN {\MetaFun} - -\logo [METAFUN] {\MetaFun} - -\def\SystemSpecialA#1{$\langle\it#1\rangle$} -\def\SystemSpecialB#1{{\tttf<#1>}} - -\def\CATCODE {\SystemSpecialA{catcode}} -\def\CATCODES {\SystemSpecialA{catcodes}} -\def\DIMENSION {\SystemSpecialA{dimension}} -\def\DIMENSIONS {\SystemSpecialA{dimensions}} -\def\COUNTER {\SystemSpecialA{counter}} -\def\COUNTERS {\SystemSpecialA{counters}} -\def\HBOX {\SystemSpecialA{hbox}} -\def\HBOXES {\SystemSpecialA{hboxes}} -\def\VBOX {\SystemSpecialA{vbox}} -\def\VBOXES {\SystemSpecialA{vboxes}} 
-\def\BOX {\SystemSpecialA{box}} -\def\BOXES {\SystemSpecialA{boxes}} -\def\TOKENLIST {\SystemSpecialA{token list}} -\def\TOKENLISTS {\SystemSpecialA{token lists}} -\def\NEWLINE {\SystemSpecialA{newline}} -\def\SKIP {\SystemSpecialA{skip}} -\def\SKIPS {\SystemSpecialA{skips}} -\def\MUSKIP {\SystemSpecialA{muskip}} -\def\MUSKIPS {\SystemSpecialA{muskips}} -\def\MARK {\SystemSpecialA{mark}} -\def\MARKS {\SystemSpecialA{marks}} +\unprotect -\def\SPACE {\SystemSpecialB{space}} -\def\EOF {\SystemSpecialB{eof}} -\def\TAB {\SystemSpecialB{tab}} -\def\NEWPAGE {\SystemSpecialB{newpage}} -\def\NEWLINE {\SystemSpecialB{newline}} +% \definealternativestyle [\v!mixed] [\font_style_pseudoMixedCapped] [\font_style_pseudoMixedCapped] -\def\THANH {H\`an Th\^e\llap{\raise 0.5ex\hbox{\'{}}} Th\`anh} +\setupsorting[logo][\c!style=\font_style_pseudoMixedCapped] -\def\THANH {H\`an Th\ecircumflexacute\ Th\`anh} +% \setupcapitals[\c!title=\v!no] -\endinput +\protect \endinput diff --git a/tex/context/base/s-fonts-features.mkiv b/tex/context/base/s-fonts-features.mkiv index 8982f7347..b81b53a71 100644 --- a/tex/context/base/s-fonts-features.mkiv +++ b/tex/context/base/s-fonts-features.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\startmodule[s-fonts-features] +\startmodule[fonts-features] \registerctxluafile{s-fonts-features}{} diff --git a/tex/context/base/s-fonts-goodies.mkiv b/tex/context/base/s-fonts-goodies.mkiv index f07081825..e596507af 100644 --- a/tex/context/base/s-fonts-goodies.mkiv +++ b/tex/context/base/s-fonts-goodies.mkiv @@ -13,7 +13,7 @@ %D More tables will follow here as we have many more goodies by now. -\startmodule[s-fonts-goodies] +\startmodule[fonts-goodies] \registerctxluafile{s-fonts-goodies}{} diff --git a/tex/context/base/s-fonts-missing.lua b/tex/context/base/s-fonts-missing.lua index 829fed45f..9a75676a9 100644 --- a/tex/context/base/s-fonts-missing.lua +++ b/tex/context/base/s-fonts-missing.lua @@ -14,33 +14,23 @@ local function legend(id) local privates = c.properties.privates if privates then local categories = table.swapped(fonts.loggers.category_to_placeholder) - -- context.starttabulate { "|l|c|c|l|" } - context.starttabulate { "|l|c|l|" } + context.starttabulate { "|c|l|" } context.HL() context.NC() - context.bold("name") - context.NC() context.bold("symbol") context.NC() - -- context.bold("node") - -- context.NC() - context.bold("category") + context.bold("name") context.NC() context.NR() context.HL() for k, v in table.sortedhash(privates) do local tag = characters.categorytags[categories[k]] if tag and tag ~= "" then - context.NC() - context(k) context.NC() context.dontleavehmode() context.char(v) context.NC() - -- context.dontleavehmode() - -- commands.getprivatechar(k) - -- context.NC() - context(string.lower(tag)) + context(k) context.NC() context.NR() end diff --git a/tex/context/base/s-fonts-missing.mkiv b/tex/context/base/s-fonts-missing.mkiv index 6acef819e..c566f4995 100644 --- a/tex/context/base/s-fonts-missing.mkiv +++ b/tex/context/base/s-fonts-missing.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\startmodule[s-fonts-missing] +\startmodule[fonts-missing] \registerctxluafile{s-fonts-missing}{} diff --git a/tex/context/base/s-fonts-shapes.mkiv b/tex/context/base/s-fonts-shapes.mkiv index 56e3d80a7..f8eb8ffdd 100644 --- a/tex/context/base/s-fonts-shapes.mkiv +++ b/tex/context/base/s-fonts-shapes.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. 
See mreadme.pdf for %C details. -\startmodule[s-fonts-shapes] +\startmodule[fonts-shapes] \registerctxluafile{s-fonts-shapes}{} diff --git a/tex/context/base/s-fonts-tables.mkiv b/tex/context/base/s-fonts-tables.mkiv index 98f9052ca..e962f952d 100644 --- a/tex/context/base/s-fonts-tables.mkiv +++ b/tex/context/base/s-fonts-tables.mkiv @@ -13,7 +13,7 @@ % todo: make a mtxrun --script font option -\startmodule[s-fonts-tables] +\startmodule[fonts-tables] \registerctxluafile{s-fonts-tables}{} diff --git a/tex/context/base/s-fonts-vectors.lua b/tex/context/base/s-fonts-vectors.lua index 1bac0ae8b..af8042f84 100644 --- a/tex/context/base/s-fonts-vectors.lua +++ b/tex/context/base/s-fonts-vectors.lua @@ -20,7 +20,7 @@ function moduledata.fonts.protrusions.showvector(specification) local vector = vectors[specification.name or "?"] if vector then context.blank() - context.startcolumns { n = specification.columns or 3 } + context.startcolumns { n = specification.columns or 3, balance="yes" } context.starttabulate { "|T||cw(.5em)||" } for unicode, values in table.sortedhash(vector) do NC() context("%U",unicode) @@ -65,7 +65,7 @@ function moduledata.fonts.expansions.showvector(specification) local vector = vectors[specification.name or "?"] if vector then context.blank() - context.startcolumns { n = specification.columns or 3 } + context.startcolumns { n = specification.columns or 3, balance="yes" } context.starttabulate { "|T|cw(.5em)||" } for unicode, value in table.sortedhash(vector) do NC() context("%U",unicode) diff --git a/tex/context/base/s-fonts-vectors.mkiv b/tex/context/base/s-fonts-vectors.mkiv index 371a30cc5..2605fe964 100644 --- a/tex/context/base/s-fonts-vectors.mkiv +++ b/tex/context/base/s-fonts-vectors.mkiv @@ -13,7 +13,7 @@ %D This code is used in the \MKIV\ fonts manual. -\startmodule[s-fonts-vectors] +\startmodule[fonts-vectors] \registerctxluafile{s-fonts-vectors}{} diff --git a/tex/context/base/s-math-coverage.lua b/tex/context/base/s-math-coverage.lua index 258019c9d..a74e24450 100644 --- a/tex/context/base/s-math-coverage.lua +++ b/tex/context/base/s-math-coverage.lua @@ -77,6 +77,8 @@ local chardata = characters.data local superscripts = characters.superscripts local subscripts = characters.subscripts +context.writestatus("math coverage","underline: not remapped") + function moduledata.math.coverage.showalphabets() context.starttabulate { "|lT|l|Tl|" } for i=1,#styles do diff --git a/tex/context/base/s-present-tiles.mkiv b/tex/context/base/s-present-tiles.mkiv index 80ea5249f..b68a34ef4 100644 --- a/tex/context/base/s-present-tiles.mkiv +++ b/tex/context/base/s-present-tiles.mkiv @@ -13,7 +13,7 @@ %D The Bacho\TeX\ 2013 style. 
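The s-fonts-* and s-math-coverage hunks above drive their output through the Lua command interface: a call like context.startcolumns { n = 3, balance = "yes" } stands for \startcolumns[n=3,balance=yes] at the TeX end, and plain calls such as context.NC() or context.bold("symbol") stand for \NC and \bold{symbol}. The toy below only mimics that calling convention by printing the TeX a call corresponds to; it is a sketch, not the real cldf-ini machinery, and totex is an invented helper.

    local function totex(name,arg)
        if type(arg) == "table" then
            local s = { }
            for i=1,#arg do s[#s+1] = tostring(arg[i]) end
            for k, v in pairs(arg) do
                if type(k) ~= "number" then s[#s+1] = k .. "=" .. tostring(v) end
            end
            return ("\\%s[%s]"):format(name,table.concat(s,","))
        elseif arg ~= nil then
            return ("\\%s{%s}"):format(name,tostring(arg))
        else
            return "\\" .. name
        end
    end

    local context = setmetatable({ }, {
        __index = function(t,name)
            return function(arg) print(totex(name,arg)) end
        end,
    })

    context.startcolumns { n = 3, balance = "yes" } -- \startcolumns[n=3,balance=yes] (key order may vary)
    context.starttabulate { "|c|l|" }               -- \starttabulate[|c|l|]
    context.NC() context.bold("symbol") context.NC() context.NR()
    context.stoptabulate()
    context.stopcolumns()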
-\setupbodyfont[palatino,14.4pt] +\setupbodyfont[pagella,14.4pt] \setuppapersize[S6][S6] @@ -64,8 +64,22 @@ -\topspace ] -\definemeasure [topiclistfont] [\measured{topiclistwidth}/10] -\definemeasure [titlepagefont] [2\measured{layoutunit}] +\definemeasure [topiclistfontsize] [ + \ifcase\numexpr\structurelistsize\relax 2.5 % 0 + \or1.4\or1.4\or1.4\or1.4\or1.4\or1.4\or1.4\or1.4\or1.4 % 1 - 9 + \or1.3\or1.3\or1.3\or1.3\or1.3\or1.3\or1.3 % 10 - 16 + \or1.2\or1.2\or1.2\or1.2\or1.2 % 17 - 20 + \else1 % 21 - 25 + \fi + \bodyfontsize +] + +\definemeasure [topiclistfont] [\measured{topiclistfontsize}] +\definemeasure [topictitlefont] [1.2\measured{layoutunit}] +\definemeasure [titlepagefont] [2\measured{layoutunit}] + +\predefinefont[MyTopicTitleFont][SerifBold*default at \measure{topictitlefont}] +\predefinefont[MyTopicListFont] [SerifBold*default at \measure{topiclistfont}] \defineframed [topiclistentry] @@ -73,7 +87,7 @@ height=\measure{topiclistheight}, background=color, frame=off, - foregroundstyle={\definedfont[Bold at \measure{topiclistfont}]}, + foregroundstyle=MyTopicListFont, backgroundcolor=primarycolor, foregroundcolor=white] @@ -108,8 +122,6 @@ \stoptopicmakeup \stopsetups -\predefinefont[MyTopicTitleFont][SerifBold*default at \measure{layoutunit}] - \definehead [topic] [chapter] @@ -216,6 +228,15 @@ % end of buttons +\defineframed + [conclusion] + [location=low, + width=max, + align={flushleft,lohi}, + background=color, + backgroundcolor=white, + foregroundcolor=secondarycolor] + \startsetups [document:titlepage] \definebodyfontenvironment diff --git a/tex/context/base/s-sql-tables.mkiv b/tex/context/base/s-sql-tables.mkiv index 6f507e8b5..122570874 100644 --- a/tex/context/base/s-sql-tables.mkiv +++ b/tex/context/base/s-sql-tables.mkiv @@ -13,7 +13,7 @@ % for the moment no helpers -\startmodule[s-sql-tables] +\startmodule[sql-tables] \registerctxluafile{s-sql-tables}{} diff --git a/tex/context/base/scrn-but.lua b/tex/context/base/scrn-but.lua index e49372ce9..74f6e0cd9 100644 --- a/tex/context/base/scrn-but.lua +++ b/tex/context/base/scrn-but.lua @@ -6,6 +6,9 @@ if not modules then modules = { } end modules ['scrn-but'] = { license = "see context related readme files" } +local commands = commands +local context = context + local f_two_colon = string.formatters["%s:%s"] function commands.registerbuttons(tag,register,language) diff --git a/tex/context/base/scrn-fld.lua b/tex/context/base/scrn-fld.lua index 9836cbebe..69480b887 100644 --- a/tex/context/base/scrn-fld.lua +++ b/tex/context/base/scrn-fld.lua @@ -11,6 +11,8 @@ if not modules then modules = { } end modules ['scrn-fld'] = { local variables = interfaces.variables local v_yes = variables.yes +local texsetbox = tex.setbox + local fields = { } interactions.fields = fields @@ -43,7 +45,7 @@ commands.definefieldset = defineset commands.clonefield = clone function commands.insertfield(name,specification) - tex.box["b_scrn_field_body"] = insert(name,specification) + texsetbox("b_scrn_field_body",insert(name,specification)) end -- (for the monent) only tex interface diff --git a/tex/context/base/scrn-hlp.lua b/tex/context/base/scrn-hlp.lua index 5f8368c6d..d344ce280 100644 --- a/tex/context/base/scrn-hlp.lua +++ b/tex/context/base/scrn-hlp.lua @@ -11,6 +11,9 @@ local format = string.format local help = { } interactions.help = help +local context = context +local commands = commands + local a_help = attributes.private("help") local copy_nodelist = node.copy_list @@ -18,6 +21,8 @@ local hpack_nodelist = node.hpack local register_list = 
nodes.pool.register +local texgetbox = tex.getbox + local nodecodes = nodes.nodecodes local hlist_code = nodecodes.hlist @@ -48,7 +53,7 @@ function help.register(number,name,box) interactions.javascripts.setpreamble("HelpTexts",helpscript) helpscript = false end - local b = copy_nodelist(tex.box[box]) + local b = copy_nodelist(texgetbox(box)) register_list(b) data[number] = b if name and name ~= "" then @@ -81,7 +86,7 @@ end function help.collect(box) if next(data) then - return collect(tex.box[box].list) + return collect(texgetbox(box).list) end end diff --git a/tex/context/base/scrn-wid.lua b/tex/context/base/scrn-wid.lua index 4ad46761e..5b319b07e 100644 --- a/tex/context/base/scrn-wid.lua +++ b/tex/context/base/scrn-wid.lua @@ -9,19 +9,27 @@ if not modules then modules = { } end modules ['scrn-wid'] = { interactions = interactions or { } local interactions = interactions -local attachments = { } -local comments = { } -local soundclips = { } -local renderings = { } -local linkedlists = { } +local context = context + +local allocate = utilities.storage.allocate + +local attachments = allocate() +local comments = allocate() +local soundclips = allocate() +local renderings = allocate() +local linkedlists = allocate() interactions.attachments = attachments interactions.soundclips = soundclips interactions.renderings = renderings interactions.linkedlists = linkedlists +local texsetbox = tex.setbox + local jobpasses = job.passes +local texgetcount = tex.getcount + local codeinjections = backends.codeinjections local nodeinjections = backends.nodeinjections @@ -103,7 +111,7 @@ end commands.registerattachment = attachments.register function commands.insertattachment(specification) - tex.box["b_scrn_attachment_link"] = attachments.insert(specification) + texsetbox("b_scrn_attachment_link",(attachments.insert(specification))) end -- Comment @@ -117,7 +125,7 @@ function comments.insert(specification) end function commands.insertcomment(specification) - tex.box["b_scrn_comment_link"] = comments.insert(specification) + texsetbox("b_scrn_comment_link",(comments.insert(specification))) end -- Soundclips @@ -195,7 +203,7 @@ end function commands.enhancelinkedlist(tag,n) local ll = jobpasses.gettobesaved(tag) if ll then - ll[n] = texcount.realpageno + ll[n] = texgetcount("realpageno") end end diff --git a/tex/context/base/scrn-wid.mkvi b/tex/context/base/scrn-wid.mkvi index ae5f7c556..fad451651 100644 --- a/tex/context/base/scrn-wid.mkvi +++ b/tex/context/base/scrn-wid.mkvi @@ -16,6 +16,7 @@ \registerctxluafile{scrn-wid}{1.001} % todo: expansion in comments (default is expanded) +% todo: check renderings ... 
acrobat crashes too easily on missing one \unprotect @@ -566,8 +567,8 @@ \definereference[PauseCurrentRendering] [\v!PauseRendering {\currentrendering}] \definereference[ResumeCurrentRendering][\v!ResumeRendering{\currentrendering}] -\def\useexternalrendering{\doquadrupleempty\scrn_rendering_use} -\def\setinternalrendering{\dodoubleempty \scrn_rendering_set} +\unexpanded\def\useexternalrendering{\doquadrupleempty\scrn_rendering_use} +\unexpanded\def\setinternalrendering{\dodoubleempty \scrn_rendering_set} \def\scrn_rendering_use[#tag][#mime][#file][#option]% {\ctxcommand{registerrendering{ @@ -578,7 +579,7 @@ option = "#option", }}} -\def\scrn_rendering_set[#tag][#option]% {content} +\def\scrn_rendering_set[#tag][#option]% {content} % crappy {\bgroup \dowithnextbox {\ctxcommand{registerrendering{ @@ -615,17 +616,9 @@ \unexpanded\def\placerenderingwindow {\dodoubleempty\scrn_rendering_place_window} -\def\scrn_rendering_place_window[#window][#rendering]% +\def\scrn_rendering_place_window[#window][#rendering]% do all in lua {\bgroup \edef\currentrendering{\ifsecondargument#rendering\else#window\fi}% - \doifelse{\renderingtype\currentrendering}{internal} % an object - {\getobjectdimensions{IRO}\currentrendering - \d_scrn_rendering_height\dimexpr\objectheight+\objectdepth\relax - \d_scrn_rendering_width\objectwidth\relax - \dogetobjectreferencepage{IRO}\currentrendering\m_scrn_rendering_page}% - {\d_scrn_rendering_height\vsize - \d_scrn_rendering_width\hsize - \let\m_scrn_rendering_page\realpageno}% % create fall back if needed \edef\currentrenderingwindow{\namedrenderingwindowparameter{#window}\c!width}% stupid test, we need a proper one here \ifx\currentrenderingwindow\empty @@ -634,6 +627,21 @@ \else \edef\currentrenderingwindow{#window}% \fi + \edef\currentrenderingtype{\renderingtype\currentrendering}% + \ifx\currentrenderingtype\s!internal + \getobjectdimensions{IRO}\currentrendering + \d_scrn_rendering_height\dimexpr\objectheight+\objectdepth\relax + \d_scrn_rendering_width\objectwidth\relax + \dogetobjectreferencepage{IRO}\currentrendering\m_scrn_rendering_page + \else\ifx\currentrenderingwindow\s!default + \d_scrn_rendering_height\vsize + \d_scrn_rendering_width \hsize + \let\m_scrn_rendering_page\realpageno + \else + \d_scrn_rendering_height\renderingwindowparameter\c!height + \d_scrn_rendering_width \renderingwindowparameter\c!width + \let\m_scrn_rendering_page\realpageno + \fi\fi % todo: % \handlereferenceactions{\renderingwindowparameter\c!openpageaction }\dosetuprenderingopenpageaction % \handlereferenceactions{\renderingwindowparameter\c!closepageaction}\dosetuprenderingclosepageaction diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua index 18f86475f..56422e622 100644 --- a/tex/context/base/scrp-ini.lua +++ b/tex/context/base/scrp-ini.lua @@ -11,12 +11,16 @@ if not modules then modules = { } end modules ['scrp-ini'] = { local attributes, nodes, node = attributes, nodes, node -local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end) -local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end) +local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end) +local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end) +local trace_splitting = false trackers.register("scripts.splitting", function(v) trace_splitting = v end) +local trace_splitdetail = false 
trackers.register("scripts.splitring.detail", function(v) trace_splitdetail = v end) local report_preprocessing = logs.reporter("scripts","preprocessing") +local report_splitting = logs.reporter("scripts","splitting") -local utfchar = utf.char +local utfbyte, utfsplit = utf.byte, utf.split +local gmatch = string.gmatch local first_glyph = node.first_glyph or node.first_character local traverse_id = node.traverse_id @@ -29,6 +33,9 @@ local unsetvalue = attributes.unsetvalue local glyph_code = nodecodes.glyph local glue_code = nodecodes.glue +local emwidths = fonts.hashes.emwidths +local exheights = fonts.hashes.exheights + local a_scriptinjection = attributes.private('scriptinjection') local a_scriptsplitting = attributes.private('scriptsplitting') local a_scriptstatus = attributes.private('scriptstatus') @@ -41,6 +48,14 @@ local setmetatableindex = table.setmetatableindex local enableaction = nodes.tasks.enableaction local disableaction = nodes.tasks.disableaction +local insert_node_after = node.insert_after + +local nodepool = nodes.pool +local new_glue = nodepool.glue +local new_rule = nodepool.rule +local new_penalty = nodepool.penalty +----- new_gluespec = nodepool.gluespec + scripts = scripts or { } local scripts = scripts @@ -198,7 +213,7 @@ local function provide(t,k) return v end -setmetatableindex(hash,provide) +setmetatableindex(hash,provide) -- should come from char-def scripts.hash = hash @@ -542,10 +557,289 @@ function scripts.injectors.handler(head) end end -function scripts.splitters.handler(head) - return head, false +-- kind of experimental .. might move to it's own module + +-- function scripts.splitters.handler(head) +-- return head, false +-- end + +local function addwords(tree,data) + if not tree then + tree = { } + end + for word in gmatch(data,"%S+") do + local root = tree + local list = utfsplit(word,true) + for i=1,#list do + local l = utfbyte(list[i]) + local r = root[l] + if not r then + r = { } + root[l] = r + end + if i == #list then + r.final = word -- true -- could be something else, like word in case of tracing + else + root = r + end + end + end + return tree +end + +local loaded = { } + +function splitters.load(handler,files) + local files = handler.files + local tree = handler.tree or { } + handler.tree = tree + if not files then + return + elseif type(files) == "string" then + files = { files } + handler.files = files + end + if trace_splitting then + report_splitting("loading splitter data for language/script %a",handler.name) + end + loaded[handler.name or "unknown"] = (loaded[handler.name or "unknown"] or 0) + 1 + statistics.starttiming(loaded) + for i=1,#files do + local filename = files[i] + local fullname = resolvers.findfile(filename) + if fullname == "" then + fullname = resolvers.findfile(filename .. 
".gz") + end + if fullname ~= "" then + if trace_splitting then + report_splitting("loading file %a",fullname) + end + local suffix, gzipped = gzip.suffix(fullname) + if suffix == "lua" then + local specification = table.load(fullname,gzipped and gzip.load) + if specification then + local lists = specification.lists + if lists then + for i=1,#lists do + local entry = lists[i] + local data = entry.data + if data then + if entry.compression == "zlib" then + data = zlib.decompress(data) + if entry.length and entry.length ~= #data then + report_splitting("compression error in file %a",fullname) + end + end + if data then + addwords(tree,data) + end + end + end + end + end + else + local data = gzipped and io.loadgzip(fullname) or io.loaddata(fullname) + if data then + addwords(tree,data) + end + end + else + report_splitting("unknown file %a",filename) + end + end + statistics.stoptiming(loaded) + return tree +end + +statistics.register("loaded split lists", function() + if next(loaded) then + return string.format("%s, load time: %s",table.sequenced(loaded),statistics.elapsedtime(loaded)) + end +end) + +-- function splitters.addlist(name,filename) +-- local handler = scripts.handlers[name] +-- if handler and filename then +-- local files = handler.files +-- if not files then +-- files = { } +-- elseif type(files) == "string" then +-- files = { files } +-- end +-- handler.files = files +-- if type(filename) == "string" then +-- filename = utilities.parsers.settings_to_array(filename) +-- end +-- if type(filename) == "table" then +-- for i=1,#filename do +-- files[#files+1] = filenames[i] +-- end +-- end +-- end +-- end +-- +-- commands.setscriptsplitterlist = splitters.addlist + +local categories = characters.categories or { } + +local function hit(root,head) + local current = head.next + local lastrun = false + local lastfinal = false + while current and current.id == glyph_code do + local char = current.char + local newroot = root[char] + if newroot then + local final = newroot.final + if final then + lastrun = current + lastfinal = final + end + root = newroot + elseif categories[char] == "mn" then + -- continue + else + return lastrun, lastfinal + end + current = current.next + end + if lastrun then + return lastrun, lastfinal + end end +local tree, attr, proc + +function splitters.handler(head) + local current = head + local done = false + while current do + if current.id == glyph_code then + local a = current[a_scriptsplitting] + if a then + if a ~= attr then + local handler = numbertohandler[a] + tree = handler.tree or { } + attr = a + proc = handler.splitter + end + if proc then + local root = tree[current.char] + if root then + -- we don't check for attributes in the hitter (yet) + local last, final = hit(root,current) + if last then + local next = last.next + if next and next.id == glyph_code then + local nextchar = next.char + if tree[nextchar] then + if trace_splitdetail then + if type(final) == "string" then + report_splitting("advance %s processing between <%s> and <%c>","with",final,nextchar) + else + report_splitting("advance %s processing between <%c> and <%c>","with",char,nextchar) + end + end + head, current = proc(handler,head,current,last,1) + done = true + else + if trace_splitdetail then + -- could be punctuation + if type(final) == "string" then + report_splitting("advance %s processing between <%s> and <%c>","without",final,nextchar) + else + report_splitting("advance %s processing between <%c> and <%c>","without",char,nextchar) + end + end + head, current = 
proc(handler,head,current,last,2) + done = true + end + end + end + end + end + end + end + current = current.next + end + return head, done +end + +local function marker(head,current,font,color) -- could become: nodes.tracers.marker + local ex = exheights[font] + local em = emwidths [font] + head, current = insert_node_after(head,current,new_penalty(10000)) + head, current = insert_node_after(head,current,new_glue(-0.05*em)) + head, current = insert_node_after(head,current,new_rule(0.05*em,1.5*ex,0.5*ex)) + setnodecolor(current,color) + return head, current +end + +-- local function process(handler,head,first,last) +-- dataset = numbertodataset[first[a_scriptsplitting]] +-- stretch = emwidths[first.font]*dataset.inter_word_stretch_factor +-- return insert_node_after(head,last,new_glue(0,stretch)) +-- end +-- +-- local cache = { } table.setmetatableindex(cache,function(t,k) +-- local v = new_gluespec(0,k) +-- nodepool.register(v) +-- t[k] = v +-- return v +-- end) +-- return insert_node_after(head,last,new_glue(cache[last_s])) + +local last_a, last_f, last_s, last_q + +function splitters.insertafter(handler,head,first,last,detail) + local a = first[a_scriptsplitting] + local f = first.font + if a ~= last_a or f ~= last_f then + last_s = emwidths[f] * numbertodataset[a].inter_word_stretch_factor + last_a = a + last_f = f + end + if trace_splitting then + head, last = marker(head,last,f,detail == 2 and "trace:r" or "trace:g") + end + if ignore then + return head, last + else + return insert_node_after(head,last,new_glue(0,last_s)) + end +end + +-- word-xx.lua: +-- +-- return { +-- comment = "test", +-- copyright = "not relevant", +-- language = "en", +-- timestamp = "2013-05-20 14:15:21", +-- version = "1.00", +-- lists = { +-- { +-- -- data = "we thrive information in thick worlds because of our marvelous and everyday capacity to select edit single out structure highlight group pair merge harmonize synthesize focus organize condense reduce boil down choose categorize catalog classify list abstract scan look into idealize isolate discriminate distinguish screen pigeonhole pick over sort integrate blend inspect filter lump skip smooth chunk average approximate cluster aggregate outline summarize itemize review dip into flip through browse glance into leaf through skim refine enumerate glean synopsize winnow the wheat from the chaff and separate the sheep from the goats", +-- data = "abstract aggregate and approximate average because blend boil browse capacity catalog categorize chaff choose chunk classify cluster condense dip discriminate distinguish down edit enumerate everyday filter flip focus from glance glean goats group harmonize highlight idealize in information inspect integrate into isolate itemize leaf list look lump marvelous merge of organize our out outline over pair pick pigeonhole reduce refine review scan screen select separate sheep single skim skip smooth sort structure summarize synopsize synthesize the thick thrive through to we wheat winnow worlds", +-- }, +-- }, +-- } + +scripts.installmethod { + name = "test", + splitter = splitters.insertafter, + initializer = splitters.load, + files = { + -- "scrp-imp-word-test.lua", + "word-xx.lua", + }, + datasets = { + default = { + inter_word_stretch_factor = 0.25, -- of quad + }, + }, +} + -- new plugin: local registercontext = fonts.specifiers.registercontext diff --git a/tex/context/base/scrp-ini.mkiv b/tex/context/base/scrp-ini.mkiv index fe62295bb..4a27dd8e2 100644 --- a/tex/context/base/scrp-ini.mkiv +++ 
b/tex/context/base/scrp-ini.mkiv @@ -16,6 +16,7 @@ \registerctxluafile{scrp-ini}{1.001} \registerctxluafile{scrp-cjk}{1.001} \registerctxluafile{scrp-eth}{1.001} +\registerctxluafile{scrp-tha}{1.001} \definesystemattribute[scriptinjection][public] \definesystemattribute[scriptsplitting][public] diff --git a/tex/context/base/scrp-tha.lua b/tex/context/base/scrp-tha.lua new file mode 100644 index 000000000..ec5df07c0 --- /dev/null +++ b/tex/context/base/scrp-tha.lua @@ -0,0 +1,57 @@ +if not modules then modules = { } end modules ['scrp-tha'] = { + version = 1.001, + comment = "companion to scrp-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This module needs dictionary files that looks as follows. At some point +-- we will add these files to the distribution. +-- +-- word-th.lua: +-- +-- return { +-- comment = "The data is taken from http://thailinux.gits.net.th/websvn/wsvn/software.swath by Phaisarn Charoenpornsawat and Theppitak Karoonboonyanan.", +-- copyright = "gnu general public license", +-- language = "th", +-- compiling = "mtxrun --script patterns --words --update --compress word-th.lua", +-- timestamp = "0000-00-00 00:00:00", +-- version = "1.00", +-- lists = { +-- { filename = "tdict-city.txt" }, +-- { filename = "tdict-collection.txt" }, +-- { filename = "tdict-common.txt" }, +-- { filename = "tdict-country.txt" }, +-- { filename = "tdict-district.txt" }, +-- { filename = "tdict-geo.txt" }, +-- { filename = "tdict-history.txt" }, +-- { filename = "tdict-ict.txt" }, +-- { filename = "tdict-lang-ethnic.txt" }, +-- { filename = "tdict-proper.txt" }, +-- { filename = "tdict-science.txt" }, +-- { filename = "tdict-spell.txt" }, +-- { filename = "tdict-std-compound.txt" }, +-- { filename = "tdict-std.txt" }, +-- }, +-- } + +-- Currently there is nothing additional special here, first we need a +-- ConTeXt user who uses it. It's a starting point. + +local splitters = scripts.splitters + +scripts.installmethod { + name = "thai", + splitter = splitters.insertafter, + initializer = splitters.load, + files = { + -- "scrp-imp-word-thai.lua", + "word-th.lua", + }, + datasets = { + default = { + inter_word_stretch_factor = 0.25, -- of quad + }, + }, +} diff --git a/tex/context/base/sort-lan.lua b/tex/context/base/sort-lan.lua index d2fa276d7..8efc0924a 100644 --- a/tex/context/base/sort-lan.lua +++ b/tex/context/base/sort-lan.lua @@ -9,6 +9,7 @@ if not modules then modules = { } end modules ['sort-lan'] = { -- todo: look into uts#10 (2012) ... some experiments ... something -- to finish in winter. +-- todo: U+1E9E (german SS) -- Many vectors were supplied by Wolfgang Schuster and Philipp -- Gesang. 
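The splitter experiment added to scrp-ini.lua above builds a prefix tree from dictionary word lists (addwords) and, while walking glyph nodes (hit), remembers the last node at which a complete word ended, so the longest known word wins and inter-word stretch glue is inserted after it; scrp-tha.lua merely installs that machinery for Thai with a stretch factor of 0.25 quad. Below is a self-contained sketch of the longest-match idea on plain strings; it is byte-indexed for brevity (the real code works on the UTF codepoints of glyph nodes) and the function names mirror the originals only loosely.

    local function addwords(tree,data)
        for word in data:gmatch("%S+") do
            local root = tree
            for i=1,#word do
                local b = word:byte(i)
                root[b] = root[b] or { }
                root = root[b]
            end
            root.final = word -- mark that a complete dictionary word ends here
        end
        return tree
    end

    local function longestmatch(tree,text,start)
        local root, last = tree, nil
        for i=start,#text do
            root = root[text:byte(i)]
            if not root then break end
            if root.final then last = root.final end
        end
        return last
    end

    local tree = addwords({ },"in info information")
    print(longestmatch(tree,"informative",1)) -- info        (longest dictionary prefix)
    print(longestmatch(tree,"information",1)) -- information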
However this is a quite adapted and reformatted variant diff --git a/tex/context/base/spac-ali.lua b/tex/context/base/spac-ali.lua index ceb278433..25cc6cd66 100644 --- a/tex/context/base/spac-ali.lua +++ b/tex/context/base/spac-ali.lua @@ -20,8 +20,6 @@ local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here local unsetvalue = attributes.unsetvalue -local concat_nodes = nodes.concat - local nodecodes = nodes.nodecodes local listcodes = nodes.listcodes @@ -35,8 +33,8 @@ local new_stretch = nodepool.stretch local a_realign = attributes.private("realign") -local texattribute = tex.attribute -local texcount = tex.count +local texsetattribute = tex.setattribute +local texgetcount = tex.getcount local isleftpage = layouts.status.isleftpage @@ -77,12 +75,12 @@ local function handler(head,leftpage,realpageno) action = leftpage and 2 or 1 end if action == 1 then - current.list = hpack_nodes(concat_nodes{current.list,new_stretch(3)},current.width,"exactly") + current.list = hpack_nodes(current.list .. new_stretch(3),current.width,"exactly") if trace_realign then report_realign("flushing left, align %a, page %a, realpage %a",align,pageno,realpageno) end elseif action == 2 then - current.list = hpack_nodes(concat_nodes{new_stretch(3),current.list},current.width,"exactly") + current.list = hpack_nodes(new_stretch(3) .. current.list,current.width,"exactly") if trace_realign then report_realign("flushing right. align %a, page %a, realpage %a",align,pageno,realpageno) end @@ -106,7 +104,7 @@ end function alignments.handler(head) local leftpage = isleftpage(true,false) - local realpageno = texcount.realpageno + local realpageno = texgetcount("realpageno") return handler(head,leftpage,realpageno) end @@ -120,7 +118,7 @@ function alignments.set(n) report_realign("enabled") end end - texattribute[a_realign] = texcount.realpageno * 10 + n + texsetattribute(a_realign,texgetcount("realpageno") * 10 + n) end commands.setrealign = alignments.set diff --git a/tex/context/base/spac-ali.mkiv b/tex/context/base/spac-ali.mkiv index 0f9b21186..a7ce5b971 100644 --- a/tex/context/base/spac-ali.mkiv +++ b/tex/context/base/spac-ali.mkiv @@ -149,6 +149,17 @@ inline:\ifconditional \inlinelefttoright l2r\else r2l\fi\space ]\endgroup} +\unexpanded\def\righttolefthbox#1#{\normalhbox dir TRT #1\bgroup\righttoleft\let\next} \let\rtlhbox\righttolefthbox +\unexpanded\def\lefttorighthbox#1#{\normalhbox dir TLT #1\bgroup\lefttoright\let\next} \let\ltrhbox\lefttorighthbox +\unexpanded\def\righttoleftvbox#1#{\normalvbox dir TRT #1\bgroup\righttoleft\let\next} \let\rtlvbox\righttoleftvbox +\unexpanded\def\lefttorightvbox#1#{\normalvbox dir TLT #1\bgroup\lefttoright\let\next} \let\ltrvbox\lefttorightvbox +\unexpanded\def\righttoleftvtop#1#{\normalvtop dir TRT #1\bgroup\righttoleft\let\next} \let\rtlvtop\righttoleftvtop +\unexpanded\def\lefttorightvtop#1#{\normalvtop dir TLT #1\bgroup\lefttoright\let\next} \let\ltrvtop\lefttorightvtop + +\unexpanded\def\autodirhbox#1#{\hbox#1\bgroup\synchronizeinlinedirection\let\next} +\unexpanded\def\autodirvbox#1#{\vbox#1\bgroup\synchronizeinlinedirection\let\next} % maybe also pardir or maybe just a \vbox +\unexpanded\def\autodirvtop#1#{\vtop#1\bgroup\synchronizeinlinedirection\let\next} % maybe also pardir or maybe just a \vtop + % Tolerance and hyphenation \ifdefined\lesshyphens \else \let\lesshyphens\relax \fi @@ -553,6 +564,20 @@ \fi \raggedcommand} +% experiment + +\unexpanded\def\spac_align_use_later#1% + {\begingroup + \edef\m_spac_align_asked{#1}% + 
\expandafter\let\expandafter\raggedcommand\csname\??alignmentnormalcache\m_spac_align_asked\endcsname + \ifx\raggedcommand\relax + \spac_align_add_to_cache + \fi + \endgroup} + +\unexpanded\def\spac_align_use_now#1% + {\csname\??alignmentnormalcache#1\endcsname} + % The keywords: \unexpanded\def\installalign#1#2% beware: commands must be unexpandable! diff --git a/tex/context/base/spac-cha.mkiv b/tex/context/base/spac-cha.mkiv deleted file mode 100644 index a07c8f198..000000000 --- a/tex/context/base/spac-cha.mkiv +++ /dev/null @@ -1,191 +0,0 @@ -%D \module -%D [ file=spac-cha, % was supp-ali, -%D version=2012.06.08, % 2000.04.17, -%D title=\CONTEXT\ Spacing Macros, -%D subtitle=Character Alignment, -%D author=Hans Hagen, -%D date=\currentdate, -%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. - -%D Yet undocumented but nevertheless used. - -% 0 = centered -% 1 = left in before -% 2 = right in before -% 3 = left in after -% 4 = right in after - -\unprotect - -% \starttabulate[|cg{.}|cg{,}|cg{,}|] -% \NC period \NC comma \NC comma \NC\NR -% \NG 100.000,00 \NG 100.000,00 \NG 100,00 \NC\NR -% \NG 10.000,00 \NG 10.000,00 \NG 1000,00 \NC\NR -% \NG 100,00 \NG 100,00 \NG 10,00 \NC\NR -% \NG 100,00 \NG 100,00 \NG 10,00 \NC\NR -% \NG 10\\ \NG 10\\ \NG 0,00 \NC\NR -% \NG 10 \NG 10 \NG 0,00 \NC\NR -% \NG 10 \NG 10 \NG 0,00 \NC\NR -% \stoptabulate - -% We gain not much by luafication and actually make things worse. - -\chardef\characteralignmentmode\plusfour -\chardef\characteralignmentslot\plusone - -\let\afterassignwidth \!!zeropoint -\let\beforeassignwidth\!!zeropoint - -\def\alignmentcharacter{.} - -\newdimen\d_supp_charalign_width -\newtoks \t_supp_charalign_list - -\let\alignmentclass\s!default % can be used to handle multiple mixed ones - -\installcorenamespace{characteralign} - -\unexpanded\def\supp_charalign_push - {\ifcsname\??characteralign\alignmentclass\endcsname\else - \normalexpanded{\global\t_supp_charalign_list{\the\t_supp_charalign_list\supp_charalign_do{\alignmentclass}}}% - \fi - \setxvalue{\??characteralign\alignmentclass}{\supp_charalign_do - {\afterassignwidth}{\beforeassignwidth}{\alignmentcharacter}}} - -\unexpanded\def\supp_charalign_pop_do#1#2#3% - {\def\afterassignwidth {#1}% - \def\beforeassignwidth {#2}% - \def\alignmentcharacter{#3}} - -\unexpanded\def\supp_charalign_pop - {\let\supp_charalign_do\supp_charalign_pop_do - \executeifdefined{\??characteralign\alignmentclass}\donothing} - -\unexpanded\def\supp_charalign_reset_do#1% - {\global\letbeundefined{\??characteralign#1}} % global ! 
- -\unexpanded\def\resetcharacteralign - {\let\supp_charalign_do\supp_charalign_reset_do - \the\t_supp_charalign_list - \global\t_supp_charalign_list\emptytoks} - -\unexpanded\def\supp_charalign_firstpass_one#1#2% - {\supp_charalign_pop - \let\\\empty - \setbox\scratchbox\hbox{#1}% - \d_supp_charalign_width\wd\scratchbox - \setbox\scratchbox\emptyhbox - \supp_charalign_check#2#1\relax\relax - \scratchdimen-\wd\scratchbox - \setbox\scratchbox\hbox{\ignorespaces#2\unskip}% - \advance\scratchdimen \wd\scratchbox - \ifdim\scratchdimen>\beforeassignwidth\relax - \edef\beforeassignwidth{\the\scratchdimen}% - \fi - \ifdim\scratchdimen=\zeropoint - \setbox\scratchbox\hbox{\ignorespaces#2\unskip}% - \scratchdimen\wd\scratchbox - \ifcase\characteralignmentmode - % do nothing - \else\ifnum\characteralignmentmode<\plusthree - \advance\scratchdimen\d_supp_charalign_width\relax - \ifdim\scratchdimen>\beforeassignwidth\relax - \edef\beforeassignwidth{\the\scratchdimen}% - \fi - \else - \ifdim\scratchdimen>\afterassignwidth\relax - \edef\afterassignwidth{\the\scratchdimen}% - \fi - \fi\fi - \fi - \supp_charalign_push} - -\unexpanded\def\supp_charalign_firstpass_two#1#2#3% - {\ifx#2\relax - \setbox\scratchbox\hbox{\ignorespaces#1\unskip}% - \ifdim\wd\scratchbox>\afterassignwidth - \edef\afterassignwidth{\the\wd\scratchbox}% - \fi - \else - \supp_charalign_check#2#3\relax\relax - \fi} - -\unexpanded\def\supp_charalign_secondpass_one#1#2% - {\supp_charalign_pop - \let\\\empty % beware, no grouping - \setbox\scratchbox\hbox{#1}% - \d_supp_charalign_width\wd\scratchbox - \setbox\scratchbox\emptyhbox - % new 12,34 vs 10\\ where 10 aligns on 12 if #1 = , - \ifcase\characteralignmentslot - \supp_charalign_check#2#1\relax\relax - \scratchdimen\wd\scratchbox - \setbox\scratchbox\hbox{\ignorespaces##1\unskip}% - \else - \def\\{#1}% - \normalexpanded{\supp_charalign_check#2#1\relax\relax}% - \scratchdimen\wd\scratchbox - \setbox\scratchbox\hbox{\def\\{\hphantom{#1}}\ignorespaces#2\unskip}% - \fi - \noindent - \ifdim\scratchdimen=\wd\scratchbox - \ifcase\characteralignmentmode - \box\scratchbox - \else - \hbox - {\dontcomplain - \hbox to \beforeassignwidth - {\ifcase\characteralignmentmode\or - \box\scratchbox\hss - \or - \hss\box\scratchbox\hskip\d_supp_charalign_width - \or - \hss\rlap{\box\scratchbox}% - \or - \hss\rlap{\hbox to \afterassignwidth{\hss\box\scratchbox}}% - \fi}% - \hskip\afterassignwidth}% - \fi - \else - \hbox - {\hbox to \beforeassignwidth - {\hss\box\scratchbox\hskip-\scratchdimen}% - \hskip\afterassignwidth}% - \fi} - -\unexpanded\def\supp_charalign_secondpass_two#1#2#3% - {\ifx#2\relax - \setbox\scratchbox\hbox{\ignorespaces#1\unskip}% - \else - \supp_charalign_check#2#3\relax\relax - \fi} - -\unexpanded\def\supp_charalign_firstpass#1% - {\unexpanded\def\checkalignment ##1{\supp_charalign_firstpass_one{#1}{##1}}% - \unexpanded\def\supp_charalign_check##1#1##2##3\relax{\supp_charalign_firstpass_two{##1}{##2}{##3}}} - -\unexpanded\def\supp_charalign_secondpass#1% - {\unexpanded\def\checkalignment ##1{\supp_charalign_secondpass_one{#1}{##1}}% - \unexpanded\def\supp_charalign_check##1#1##2##3\relax{\supp_charalign_secondpass_two{##1}{##2}{##3}}} - -\unexpanded\def\setfirstpasscharacteralign - {\supp_charalign_pop - \normalexpanded{\supp_charalign_firstpass{\alignmentcharacter}}} - -\unexpanded\def\setsecondpasscharacteralign - {\supp_charalign_pop - \normalexpanded{\supp_charalign_secondpass{\alignmentcharacter}}} - -\unexpanded\def\startcharacteralign#1\stopcharacteralign - {\bgroup - 
\setfirstpasscharacteralign #1% - \setsecondpasscharacteralign#1% - \egroup} - -\let\stopcharacteralign\relax - -\protect \endinput diff --git a/tex/context/base/spac-chr.lua b/tex/context/base/spac-chr.lua index 24364978a..db98b42a6 100644 --- a/tex/context/base/spac-chr.lua +++ b/tex/context/base/spac-chr.lua @@ -14,19 +14,22 @@ local byte, lower = string.byte, string.lower -- to be redone: characters will become tagged spaces instead as then we keep track of -- spaceskip etc +local next = next + trace_characters = false trackers.register("typesetters.characters", function(v) trace_characters = v end) report_characters = logs.reporter("typesetting","characters") local nodes, node = nodes, node -local insert_node_after = node.insert_after -local remove_node = nodes.remove -- ! nodes -local copy_node_list = node.copy_list +local insert_node_after = nodes.insert_after +local remove_node = nodes.remove +local copy_node_list = nodes.copy_list +local traverse_id = nodes.traverse_id -local nodepool = nodes.pool local tasks = nodes.tasks +local nodepool = nodes.pool local new_penalty = nodepool.penalty local new_glue = nodepool.glue @@ -41,7 +44,9 @@ local chardata = characters.data local typesetters = typesetters -local characters = { } +local unicodeblocks = characters.blocks + +local characters = typesetters.characters or { } -- can be predefined typesetters.characters = characters local fonthashes = fonts.hashes @@ -49,6 +54,8 @@ local fontparameters = fonthashes.parameters local fontcharacters = fonthashes.characters local fontquads = fonthashes.quads +local setmetatableindex = table.setmetatableindex + local a_character = attributes.private("characters") local a_alignstate = attributes.private("alignstate") @@ -74,7 +81,6 @@ local function inject_char_space(unicode,head,current,parent) local font = current.font local char = fontcharacters[font][parent] local glue = new_glue(char and char.width or fontparameters[font].space) - -- glue.attr = copy_node_list(current.attr) glue.attr = current.attr current.attr = nil glue[a_character] = unicode @@ -86,30 +92,78 @@ local function inject_nobreak_space(unicode,head,current,space,spacestretch,spac local attr = current.attr local glue = new_glue(space,spacestretch,spaceshrink) local penalty = new_penalty(10000) - -- glue.attr = copy_node_list(attr) glue.attr = attr current.attr = nil - -- penalty.attr = attr glue[a_character] = unicode head, current = insert_node_after(head,current,penalty) head, current = insert_node_after(head,current,glue) return head, current end +local function nbsp(head,current) + local para = fontparameters[current.font] + if current[a_alignstate] == 1 then -- flushright + head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0) + current.subtype = space_skip_code + else + head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink) + end + return head, current +end + +-- assumes nuts or nodes, depending on callers .. so no tonuts here + +function characters.replacenbsp(head,original) + local head, current = nbsp(head,original) + head = remove_node(head,original,true) + return head, current +end + +function characters.replacenbspaces(head) + for current in traverse_id(glyph_code,head) do + if current.char == 0x00A0 then + local h = nbsp(head,current) + if h then + head = remove_node(h,current,true) + end + end + end + return head +end + +-- This initialization might move someplace else if we need more of it. 
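The nbsphash table defined a bit further on is filled lazily: its first lookup runs an index handler that populates the hash from the Devanagari and Kannada block ranges and then removes the handler, so every later lookup is a plain table access and nothing is computed when the feature is never used. A plain-Lua sketch of that trick follows, with setmetatable standing in for ConTeXt's table.setmetatableindex helper and a locally invented blocks table.

    local blocks = {
        devanagari = { first = 0x0900, last = 0x097F },
        kannada    = { first = 0x0C80, last = 0x0CFF },
    }

    local lazyhash = setmetatable({ }, {
        __index = function(t,k)
            for _, b in pairs(blocks) do          -- populate everything on the first miss
                for i=b.first,b.last do rawset(t,i,true) end
            end
            setmetatable(t,nil)                   -- drop the handler: lookups are now plain
            return rawget(t,k)                    -- nil for codepoints outside the ranges
        end,
    })

    print(lazyhash[0x0915]) -- true (Devanagari ka)
    print(lazyhash[0x0041]) -- nil  (Latin A)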
The problem is that +-- this module depends on fonts so we have an order problem. + +local nbsphash = { } setmetatableindex(nbsphash,function(t,k) + for i=unicodeblocks.devanagari.first,unicodeblocks.devanagari.last do nbsphash[i] = true end + for i=unicodeblocks.kannada .first,unicodeblocks.kannada .last do nbsphash[i] = true end + setmetatableindex(nbsphash,nil) + return nbsphash[k] +end) + local methods = { -- The next one uses an attribute assigned to the character but still we -- don't have the 'local' value. [0x00A0] = function(head,current) -- nbsp - local para = fontparameters[current.font] - if current[a_alignstate] == 1 then -- flushright - head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0) - current.subtype = space_skip_code - else - head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink) + local next = current.next + if next and next.id == glyph_code then + local char = next.char + if char == 0x200C or char == 0x200D then -- nzwj zwj + next = next.next + if next and nbsphash[next.char] then + return false + end + elseif nbsphash[char] then + return false + end + end + local prev = current.prev + if prev and prev.id == glyph_code and nbsphash[prev.char] then + return false -- kannada end - return head, current + return nbsp(head,current) end, [0x2000] = function(head,current) -- enquad @@ -187,8 +241,10 @@ function characters.handler(head) if trace_characters then report_characters("replacing character %C, description %a",char,lower(chardata[char].description)) end - head = method(head,current) - head = remove_node(head,current,true) + local h = method(head,current) + if h then + head = remove_node(h,current,true) + end done = true end current = next diff --git a/tex/context/base/spac-chr.mkiv b/tex/context/base/spac-chr.mkiv index 0b6ebe0a9..54a25be34 100644 --- a/tex/context/base/spac-chr.mkiv +++ b/tex/context/base/spac-chr.mkiv @@ -68,13 +68,16 @@ \edef\breakablethinspace {\normalUchar"2009} % quad/8 \edef\hairspace {\normalUchar"200A} % quad/8 \edef\zerowidthspace {\normalUchar"200B} % 0 -\edef\zwnj {\normalUchar"200C} % 0 -\edef\zwj {\normalUchar"200D} % 0 +\edef\zerowidthnonjoiner {\normalUchar"200C} % 0 +\edef\zerowidthjoiner {\normalUchar"200D} % 0 \edef\narrownobreakspace {\normalUchar"202F} % quad/8 % % "205F % space/8 (math) % \zerowidthnobreakspace {\normalUchar"FEFF} \udef\zerowidthnobreakspace {\penalty\plustenthousand\kern\zeropoint} +\let\zwnj\zerowidthnonjoiner +\let\zwj \zerowidthjoiner + % Shortcuts: % unexpanded as otherwise we need to intercept / cleanup a lot diff --git a/tex/context/base/spac-hor.lua b/tex/context/base/spac-hor.lua index 09920bd46..c9d6e2b15 100644 --- a/tex/context/base/spac-hor.lua +++ b/tex/context/base/spac-hor.lua @@ -8,6 +8,10 @@ if not modules then modules = { } end modules ['spac-hor'] = { local match = string.match local utfbyte = utf.byte + +local context = context +local commands = commands + local chardata = characters.data local can_have_space = table.tohash { diff --git a/tex/context/base/spac-hor.mkiv b/tex/context/base/spac-hor.mkiv index 2ce502fb5..73e4771fe 100644 --- a/tex/context/base/spac-hor.mkiv +++ b/tex/context/base/spac-hor.mkiv @@ -486,17 +486,17 @@ \ifdefined\thinspace \else - \def\thinspace {\kern .16667\emwidth} - \def\negthinspace{\kern-.16667\emwidth} - \def\enspace {\kern .5\emwidth} + \unexpanded\def\thinspace {\kern .16667\emwidth} + \unexpanded\def\negthinspace{\kern-.16667\emwidth} + \unexpanded\def\enspace {\kern .5\emwidth} \fi 
\ifdefined\quad \else - \def\enskip{\hskip.5\emwidth} - \def\quad {\hskip \emwidth} - \def\qquad {\hskip 2\emwidth} + \unexpanded\def\enskip{\hskip.5\emwidth} + \unexpanded\def\quad {\hskip \emwidth} + \unexpanded\def\qquad {\hskip 2\emwidth} \fi @@ -613,14 +613,51 @@ {\begingroup \dodoubleempty\spac_narrower_start_named} -\def\spac_narrower_start_named[#1][#2]% - {\edef\currentnarrower{#1}% - \ifsecondargument - \spac_narrower_start_apply{#2}% +% \def\spac_narrower_start_named[#1][#2]% +% {\edef\currentnarrower{#1}% +% \ifsecondargument +% \spac_narrower_start_apply{#2}% +% \else +% \spac_narrower_start_apply{\narrowerparameter\v!default}% +% \fi} + +\def\spac_narrower_start_named + {\ifsecondargument + \expandafter\spac_narrower_start_named_two \else - \spac_narrower_start_apply{\narrowerparameter\v!default}% + \expandafter\spac_narrower_start_named_one \fi} +\def\spac_narrower_start_named_one[#1]% + {\doifassignmentelse{#1}\spac_narrower_start_named_one_yes\spac_narrower_start_named_one_nop[#1]} + +\def\spac_narrower_start_named_one_yes[#1][#2]% [settings] [] + {\setupcurrentnarrower[#1]% + \spac_narrower_start_apply{\narrowerparameter\v!default}} + +\def\spac_narrower_start_named_one_nop[#1][#2]% [tag] [] + {\edef\currentnarrower{#1}% + \spac_narrower_start_apply{\narrowerparameter\v!default}} + +\def\spac_narrower_start_named_two[#1]% + {\doifassignmentelse{#1}\spac_narrower_start_named_settings_how\spac_narrower_start_named_tag_unknown[#1]} + +\def\spac_narrower_start_named_settings_how[#1][#2]% [settings] [how] + {\setupcurrentnarrower[#1]% + \spac_narrower_start_apply{#2}} + +\def\spac_narrower_start_named_tag_unknown[#1][#2]% [tag] [...] + {\doifassignmentelse{#2}\spac_narrower_start_named_tag_settings\spac_narrower_start_named_tag_how[#1][#2]} + +\def\spac_narrower_start_named_tag_settings[#1][#2]% [tag] [settings] + {\edef\currentnarrower{#1}% + \setupcurrentnarrower[#2]% + \spac_narrower_start_apply{\narrowerparameter\v!default}} + +\def\spac_narrower_start_named_tag_how[#1][#2]% [tag] [how] + {\edef\currentnarrower{#1}% + \spac_narrower_start_apply{#2}} + \let\stopnarrow\spac_narrower_stop \newdimen\d_spac_effective_hsize \def\effectivehsize {\hsize} diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua index 7d030ab1a..7bca7e8a1 100644 --- a/tex/context/base/spac-ver.lua +++ b/tex/context/base/spac-ver.lua @@ -23,8 +23,7 @@ if not modules then modules = { } end modules ['spac-ver'] = { local next, type, tonumber = next, type, tonumber local gmatch, concat = string.gmatch, table.concat -local ceil, floor, max, min, round, abs = math.ceil, math.floor, math.max, math.min, math.round, math.abs -local texlists, texdimen, texbox = tex.lists, tex.dimen, tex.box +local ceil, floor = math.ceil, math.floor local lpegmatch = lpeg.match local unpack = unpack or table.unpack local allocate = utilities.storage.allocate @@ -33,12 +32,13 @@ local formatters = string.formatters local P, C, R, S, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc -local nodes, node, trackers, attributes, context = nodes, node, trackers, attributes, context +local nodes, node, trackers, attributes, context, commands, tex = nodes, node, trackers, attributes, context, commands, tex -local variables = interfaces.variables +----- texlists = tex.lists +local texgetdimen = tex.getdimen +local texgetbox = tex.getbox -local starttiming = statistics.starttiming -local stoptiming = statistics.stoptiming +local variables = interfaces.variables -- vertical space handler @@ -87,7 +87,6 @@ local new_gluespec = 
nodepool.gluespec local nodecodes = nodes.nodecodes local skipcodes = nodes.skipcodes -local fillcodes = nodes.fillcodes local penalty_code = nodecodes.penalty local kern_code = nodecodes.kern @@ -96,7 +95,7 @@ local hlist_code = nodecodes.hlist local vlist_code = nodecodes.vlist local whatsit_code = nodecodes.whatsit -local userskip_code = skipcodes.userskip +local texnest = tex.nest local vspacing = builders.vspacing or { } builders.vspacing = vspacing @@ -288,20 +287,24 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is local snapht, snapdp if method["local"] then -- snapping is done immediately here - snapht, snapdp = texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth + snapht = texgetdimen("bodyfontstrutheight") + snapdp = texgetdimen("bodyfontstrutdepth") if t then t[#t+1] = formatters["local: snapht %p snapdp %p"](snapht,snapdp) end elseif method["global"] then - snapht, snapdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth + snapht = texgetdimen("globalbodyfontstrutheight") + snapdp = texgetdimen("globalbodyfontstrutdepth") if t then t[#t+1] = formatters["global: snapht %p snapdp %p"](snapht,snapdp) end else -- maybe autolocal -- snapping might happen later in the otr - snapht, snapdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth - local lsnapht, lsnapdp = texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth + snapht = texgetdimen("globalbodyfontstrutheight") + snapdp = texgetdimen("globalbodyfontstrutdepth") + local lsnapht = texgetdimen("bodyfontstrutheight") + local lsnapdp = texgetdimen("bodyfontstrutdepth") if snapht ~= lsnapht and snapdp ~= lsnapdp then snapht, snapdp = lsnapht, lsnapdp end @@ -342,13 +345,16 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is id = thebox and thebox.id end if thebox and id == vlist_code then - local list, lh, ld = thebox.list + local list = thebox.list + local lh, ld for n in traverse_nodes_id(hlist_code,list) do - lh, ld = n.height, n.depth + lh = n.height + ld = n.depth break end if lh then - local ht, dp = thebox.height, thebox.depth + local ht = thebox.height + local dp = thebox.depth if t then t[#t+1] = formatters["first line: height %p depth %p"](lh,ld) t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp) @@ -379,10 +385,12 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is if thebox and id == vlist_code then local list, lh, ld = thebox.list for n in traverse_nodes_id(hlist_code,list) do - lh, ld = n.height, n.depth + lh = n.height + ld = n.depth end if lh then - local ht, dp = thebox.height, thebox.depth + local ht = thebox.height + local dp = thebox.depth if t then t[#t+1] = formatters["last line: height %p depth %p" ](lh,ld) t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp) @@ -479,7 +487,7 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is local lines = (ch+cd)/snaphtdp if t then local original = (h+d)/snaphtdp - local whatever = (ch+cd)/(texdimen.globalbodyfontstrutheight + texdimen.globalbodyfontstrutdepth) + local whatever = (ch+cd)/(texgetdimen("globalbodyfontstrutheight") + texgetdimen("globalbodyfontstrutdepth")) t[#t+1] = formatters["final lines: %s -> %s (%s)"](original,lines,whatever) t[#t+1] = formatters["final height: %p -> %p"](h,ch) t[#t+1] = formatters["final depth: %p -> %p"](d,cd) @@ -495,7 +503,8 @@ local function snap_topskip(current,method) local w = spec.width local wd = w if spec.writable then - 
spec.width, wd = 0, 0 + spec.width = 0 + wd = 0 end return w, wd end @@ -740,21 +749,11 @@ local topskip_code = skipcodes.topskip local splittopskip_code = skipcodes.splittopskip local free_glue_node = free_node -local discard, largest, force, penalty, add, disable, nowhite, goback, together = 0, 1, 2, 3, 4, 5, 6, 7, 8 - --- local function free_glue_node(n) --- -- free_node(n.spec) --- print("before",n) --- logs.flush() --- free_node(n) --- print("after") --- logs.flush() --- end function vspacing.snapbox(n,how) local sv = snapmethods[how] if sv then - local box = texbox[n] + local box = texgetbox(n) local list = box.list if list then local s = list[a_snapmethod] @@ -763,7 +762,8 @@ function vspacing.snapbox(n,how) -- report_snapper("box list not snapped, already done") end else - local ht, dp = box.height, box.depth + local ht = box.height + local dp = box.depth if false then -- todo: already_done -- assume that the box is already snapped if trace_vsnapping then @@ -772,7 +772,8 @@ function vspacing.snapbox(n,how) end else local h, d, ch, cd, lines = snap_hlist("box",box,sv,ht,dp) - box.height, box.depth = ch, cd + box.height= ch + box.depth = cd if trace_vsnapping then report_snapper("box list snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s", h,d,ch,cd,sv.name,sv.specification,"direct",lines,listtoutf(list)) @@ -785,8 +786,17 @@ function vspacing.snapbox(n,how) end end +-- I need to figure out how to deal with the prevdepth that crosses pages. In fact, +-- prevdepth is often quite interfering (even over a next paragraph) so I need to +-- figure out a trick. + local function forced_skip(head,current,width,where,trace) - if where == "after" then + if head == current and head.subtype == baselineskip_code then + width = width - head.spec.width + end + if width == 0 then + -- do nothing + elseif where == "after" then head, current = insert_node_after(head,current,new_rule(0,0,0)) head, current = insert_node_after(head,current,new_kern(width)) head, current = insert_node_after(head,current,new_rule(0,0,0)) @@ -805,6 +815,8 @@ end -- penalty only works well when before skip +local discard, largest, force, penalty, add, disable, nowhite, goback, together = 0, 1, 2, 3, 4, 5, 6, 7, 8 -- move into function when upvalue 60 issue + local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also pass tail if trace then reset_tracing(head) @@ -823,11 +835,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also head = insert_node_before(head,current,p) end if glue_data then + local spec = glue_data.spec if force_glue then if trace then trace_done("flushed due to " .. why,glue_data) end - head = forced_skip(head,current,glue_data.spec.width,"before",trace) + head = forced_skip(head,current,spec.width,"before",trace) free_glue_node(glue_data) - elseif glue_data.spec.writable then + elseif spec.writable then if trace then trace_done("flushed due to " .. 
why,glue_data) end head = insert_node_before(head,current,glue_data) else @@ -841,8 +854,9 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also end if trace_vsnapping then report_snapper("global ht/dp = %p/%p, local ht/dp = %p/%p", - texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth, - texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth) + texgetdimen("globalbodyfontstrutheight"), texgetdimen("globalbodyfontstrutdepth"), + texgetdimen("bodyfontstrutheight"), texgetdimen("bodyfontstrutdepth") + ) end if trace then trace_info("start analyzing",where,what) end while current do @@ -865,7 +879,8 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also if sv then -- check if already snapped if list and already_done(id,list,a_snapmethod) then - local ht, dp = current.height, current.depth + local ht = current.height + local dp = current.depth -- assume that the box is already snapped if trace_vsnapping then report_snapper("mvl list already snapped at (%p,%p): %s",ht,dp,listtoutf(list)) @@ -936,9 +951,9 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disappear if trace then trace_natural("removed",current) end head, current = remove_node(head, current, true) - -- current = previous + -- current = previous if trace then trace_natural("collapsed",previous) end - -- current = current.next + -- current = current.next else if trace then trace_natural("filler",current) end current = current.next @@ -1075,7 +1090,8 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also if trace then trace_natural("ignored parskip",current) end head, current = remove_node(head, current, true) elseif glue_data then - local ps, gs = current.spec, glue_data.spec + local ps = current.spec + local gs = glue_data.spec if ps.writable and gs.writable and ps.width > gs.width then glue_data.spec = copy_node(ps) if trace then trace_natural("taking parskip",current) end @@ -1131,11 +1147,14 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also current = current.next -- else -- other glue - if snap and trace_vsnapping and current.spec.writable and current.spec.width ~= 0 then - report_snapper("glue %p of type %a kept",current.spec.width,skipcodes[subtype]) - --~ current.spec.width = 0 + if snap and trace_vsnapping then + local spec = current.spec + if spec.writable and spec.width ~= 0 then + report_snapper("glue %p of type %a kept",current.spec.width,skipcodes[subtype]) + -- spec.width = 0 + end end - if trace then trace_skip(formatted["glue of type %a"](subtype),sc,so,sp,current) end + if trace then trace_skip(formatter["glue of type %a"](subtype),sc,so,sp,current) end flush("some glue") current = current.next end @@ -1167,6 +1186,7 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also else head, tail = insert_node_after(head,tail,glue_data) end +texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevdepth handler end if trace then if glue_data or penalty_data then @@ -1227,11 +1247,11 @@ function vspacing.pagehandler(newhead,where) if stackhack then stackhack = false if trace_collect_vspacing then report("processing %s nodes: %s",newhead) end ---~ texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod) + -- texlists.contrib_head = 
collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod) newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod) else if trace_collect_vspacing then report("flushing %s nodes: %s",newhead) end ---~ texlists.contrib_head = newhead + -- texlists.contrib_head = newhead end else if stackhead then @@ -1258,97 +1278,30 @@ local ignore = table.tohash { function vspacing.vboxhandler(head,where) if head and not ignore[where] and head.next then - -- starttiming(vspacing) head = collapser(head,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper - -- stoptiming(vspacing) end return head end function vspacing.collapsevbox(n) -- for boxes but using global a_snapmethod - local list = texbox[n].list - if list then - -- starttiming(vspacing) - texbox[n].list = vpack_node(collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod)) - -- stoptiming(vspacing) - end -end - --- We will split this module so a few locals are repeated. Also this will be --- rewritten. - -nodes.builders = nodes.builder or { } -local builders = nodes.builders - -local actions = nodes.tasks.actions("vboxbuilders") - -function builders.vpack_filter(head,groupcode,size,packtype,maxdepth,direction) - local done = false - if head then - starttiming(builders) - if trace_vpacking then - local before = nodes.count(head) - head, done = actions(head,groupcode,size,packtype,maxdepth,direction) - local after = nodes.count(head) - if done then - nodes.processors.tracer("vpack","changed",head,groupcode,before,after,true) - else - nodes.processors.tracer("vpack","unchanged",head,groupcode,before,after,true) - end - else - head, done = actions(head,groupcode) + local box = texgetbox(n) + if box then + local list = box.list + if list then + box.list = vpack_node(collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod)) end - stoptiming(builders) end - return head, done end --- This one is special in the sense that it has no head and we operate on the mlv. Also, --- we need to do the vspacing last as it removes items from the mvl. - -local actions = nodes.tasks.actions("mvlbuilders") +-- This one is needed to prevent bleeding of prevdepth to the next page +-- which doesn't work well with forced skips. -local function report(groupcode,head) - report_page_builder("trigger: %s",groupcode) - report_page_builder(" vsize : %p",tex.vsize) - report_page_builder(" pagegoal : %p",tex.pagegoal) - report_page_builder(" pagetotal: %p",tex.pagetotal) - report_page_builder(" list : %s",head and nodeidstostring(head) or "") -end +local outer = texnest[0] -function builders.buildpage_filter(groupcode) - local head, done = texlists.contrib_head, false - -- if head and head.next and head.next.id == hlist_code and head.next.width == 1 then - -- report_page_builder("trigger otr calculations") - -- free_node_list(head) - -- head = nil - -- end - if head then - starttiming(builders) - if trace_page_builder then - report(groupcode,head) - end - head, done = actions(head,groupcode) - stoptiming(builders) - -- -- doesn't work here (not passed on?) 
- -- tex.pagegoal = tex.vsize - tex.dimen.d_page_floats_inserted_top - tex.dimen.d_page_floats_inserted_bottom - texlists.contrib_head = head - return done and head or true - else - if trace_page_builder then - report(groupcode) - end - return nil, false - end +function vspacing.resetprevdepth() + outer.prevdepth = 0 end -callbacks.register('vpack_filter', builders.vpack_filter, "vertical spacing etc") -callbacks.register('buildpage_filter', builders.buildpage_filter, "vertical spacing etc (mvl)") - -statistics.register("v-node processing time", function() - return statistics.elapsedseconds(builders) -end) - -- interface commands.vspacing = vspacing.analyze @@ -1356,3 +1309,4 @@ commands.vspacingsetamount = vspacing.setskip commands.vspacingdefine = vspacing.setmap commands.vspacingcollapse = vspacing.collapsevbox commands.vspacingsnap = vspacing.snapbox +commands.resetprevdepth = vspacing.resetprevdepth diff --git a/tex/context/base/spac-ver.mkiv b/tex/context/base/spac-ver.mkiv index ee78d8964..f535a59f6 100644 --- a/tex/context/base/spac-ver.mkiv +++ b/tex/context/base/spac-ver.mkiv @@ -715,6 +715,10 @@ \unexpanded\def\setmaxdepth {\maxdepth\systemmaxdepthfactor\globalbodyfontsize} +\let\normalbaselineskip \relax \newskip \normalbaselineskip % these got lost in the transition to mkiv due +\let\normallineskip \relax \newskip \normallineskip % to auto-\normal* definitions and registers +\let\normallineskiplimit\relax \newdimen\normallineskiplimit % being protected + \unexpanded\def\normalbaselines {\baselineskip \normalbaselineskip \lineskip \normallineskip @@ -913,7 +917,7 @@ \s!depth \strutdp}} \def\spac_struts_set_vide - {\setbox\strutbox\hbox + {\setbox\strutbox\hbox % at some time this extra wrapping was needed {\spac_struts_vide_hbox to \zeropoint {% \hss % new, will be option \vrule @@ -939,8 +943,8 @@ \unexpanded\def\strut % still callbacks for \hbox{\strut} {\relax - \dontleavehmode - \copy\strutbox} + \dontleavehmode + \copy\strutbox} \let\normalstrut\strut @@ -984,7 +988,7 @@ \setcharstrut\m_strut \fi} -\unexpanded\def\showstruts +\unexpanded\def\showstruts % adapts .. 
is wrong
 {\setteststrut
  \settestcrlf}
@@ -2073,4 +2077,42 @@
 %
 % \def\shapefill{\vskip\zeropoint\s!plus\lineheight\s!minus\lineheight\relax}
+%D Nasty:
+
+% \writestatus{1}{\the\prevdepth} \blank[force,5*big] { \writestatus{1}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+% \writestatus{2}{\the\prevdepth} \blank[force,5*big] { \writestatus{2}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+% \writestatus{3}{\the\prevdepth} \blank[force,5*big] { \writestatus{3}{\the\prevdepth} \baselineskip5cm xxxxxxxxx \par } \page
+% \writestatus{4}{\the\prevdepth} \input tufte \page
+% \writestatus{5}{\the\prevdepth} \input tufte \page
+% \writestatus{6}{\the\prevdepth} \blank[force,5*big] { \writestatus{6}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+
+% \writestatus{1}{\the\prevdepth} \null\vskip4cm { \writestatus{1}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+% \writestatus{2}{\the\prevdepth} \null\vskip4cm { \writestatus{2}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+% \writestatus{3}{\the\prevdepth} \null\vskip4cm { \writestatus{3}{\the\prevdepth} \baselineskip5cm xxxxxxxxx \par } \page
+% \writestatus{4}{\the\prevdepth} \input tufte \page
+% \writestatus{5}{\the\prevdepth} \input tufte \page
+% \writestatus{6}{\the\prevdepth} \null\vskip4cm { \writestatus{6}{\the\prevdepth} \baselineskip1cm xxxxxxxxx \par } \page
+
+\appendtoks
+ \ifvmode\prevdepth\zeropoint\fi % consistent, else first page -1000pt .. needed for fixed,3*big first/successive pages consistency
+\to \everystarttext
+
+\prevdepth\zeropoint
+
+% not ok, so we need to figure out another way to fix this messy prevdepth-across-page issue
+% as encountered in forced blank skips (see lua code)
+%
+% \appendtoks
+%     \ifvmode\ctxcommand{resetprevdepth()}\fi
+% \to \everyafteroutput
+%
+% this should only happen when there is nothing left over (how to determine that) ..
testcase: +% +% \dorecurse{41}{line\par} +% \starttyping +% line 1 +% line 2 +% line 3 +% \stoptyping + \protect \endinput diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf index 097fe5a2d..e2c3f3255 100644 Binary files a/tex/context/base/status-files.pdf and b/tex/context/base/status-files.pdf differ diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf index f727ca843..82a45e683 100644 Binary files a/tex/context/base/status-lua.pdf and b/tex/context/base/status-lua.pdf differ diff --git a/tex/context/base/status-mkiv.lua b/tex/context/base/status-mkiv.lua index 443eee60e..caa7dc16c 100644 --- a/tex/context/base/status-mkiv.lua +++ b/tex/context/base/status-mkiv.lua @@ -1259,6 +1259,12 @@ return { loading = "always", status = "okay", }, + { + category = "mkiv", + filename = "typo-tal", + loading = "always", + status = "okay", + }, { category = "mkiv", comment = "somewhat weird", @@ -1490,12 +1496,6 @@ return { loading = "always", status = "okay", }, - { - category = "mkiv", - filename = "spac-cha", - loading = "always", - status = "okay", - }, { category = "mkiv", comment = "work in progress", @@ -1586,8 +1586,13 @@ return { }, { category = "mkiv", - comment = "might get extended", - filename = "typo-par", + filename = "typo-drp", + loading = "always", + status = "okay", + }, + { + category = "mkiv", + filename = "typo-fln", loading = "always", status = "okay", }, @@ -1679,6 +1684,12 @@ return { loading = "experimental", status = "okay", }, + { + category = "mkiv", + filename = "meta-fnt", + loading = "always", + status = "okay", + }, { category = "mkiv", filename = "meta-tex", @@ -1817,6 +1828,12 @@ return { loading = "always", status = "okay", }, + { + category = "mkvi", + filename = "math-acc", + loading = "always", + status = "okay", + }, { category = "mkiv", comment = "at least for the moment", @@ -1849,6 +1866,12 @@ return { loading = "always", status = "okay", }, + { + category = "mkiv", + filename = "math-rad", + loading = "always", + status = "okay", + }, { category = "mkiv", comment = "code might move to here", @@ -3714,7 +3737,12 @@ return { { category = "lua", filename = "m-database", - status = "todo", + status = "okay", + }, + { + category = "lua", + filename = "m-nodechart", + status = "okay", }, { category = "lua", @@ -3836,6 +3864,12 @@ return { loading = "meta-lua", status = "okay", }, + { + category = "lua", + filename = "meta-fnt", + loading = "meta-fnt", + status = "okay", + }, { category = "lua", comment = "could be done nicer nowadays but who needs it", @@ -4742,13 +4776,40 @@ return { { category = "lua", filename = "typo-dir", + loading = "typo-dir", + status = "okay", + }, + { + category = "lua", + comment = "work in progress", + filename = "typo-dha", + loading = "typo-dir", status = "todo", }, + { + category = "lua", + filename = "typo-dua", + loading = "typo-dir", + status = "okay", + }, + { + category = "lua", + comment = "work in progress", + filename = "typo-dub", + loading = "typo-dir", + status = "okay", + }, { category = "lua", filename = "typo-ini", status = "todo", }, + { + category = "mkiv", + filename = "typo-tal", + loading = "typo-tal", + status = "okay", + }, { category = "lua", filename = "typo-itc", @@ -4771,8 +4832,13 @@ return { }, { category = "lua", - filename = "typo-par", - status = "todo", + filename = "typo-drp", + status = "okay", + }, + { + category = "lua", + filename = "typo-fln", + status = "okay", }, { category = "lua", @@ -5090,7 +5156,12 @@ return { { category = "mkiv", 
filename = "m-database", - status = "todo", + status = "okay", + }, + { + category = "mkiv", + filename = "m-nodechart", + status = "okay", }, { category = "tex", diff --git a/tex/context/base/strc-blk.lua b/tex/context/base/strc-blk.lua index 791f8f99b..935b6c061 100644 --- a/tex/context/base/strc-blk.lua +++ b/tex/context/base/strc-blk.lua @@ -13,7 +13,10 @@ local find, format, validstring = string.find, string.format, string.valid local settings_to_set, settings_to_array = utilities.parsers.settings_to_set, utilities.parsers.settings_to_array local allocate = utilities.storage.allocate -local structures, context = structures, context +local context = context +local commands = commands + +local structures = structures structures.blocks = structures.blocks or { } diff --git a/tex/context/base/strc-con.mkvi b/tex/context/base/strc-con.mkvi index 11cd31983..75519b8ce 100644 --- a/tex/context/base/strc-con.mkvi +++ b/tex/context/base/strc-con.mkvi @@ -246,6 +246,24 @@ \newconditional\c_strc_constructions_distance_none +\def\strc_constructions_set_width_and_distance + {\assignwidth + \p_strc_constructions_width + \constructionsheadwidth + {\unhcopy\constructionheadbox} + \constructionsheaddistance} + +\def\strc_constructions_preroll_head#content% + {\setbox\constructionheadbox\hbox + {\forgetall + \dontcomplain + \settrialtypesetting + \csname\??constructionmainhandler\currentconstructionhandler\endcsname#content}} + +\def\strc_constructions_ignore_head + {\constructionsheaddistance\zeropoint + \constructionsheadwidth \zeropoint} + \unexpanded\setvalue{\??constructionstarthandler\v!construction}% this will be redone (reorganized) .. too much boxing {\dostarttagged\t!construction\currentconstruction \dotagsetconstruction @@ -283,21 +301,34 @@ \fi % inefficient and not always needed, for instance not with margins so we will make checkers % per alternative some day (especially in labels this is unwanted overhead) - \setbox\constructionheadbox\hbox - {\forgetall - \dontcomplain - \settrialtypesetting - \edef\p_strc_constructions_sample{\constructionparameter\c!sample}% - \ifx\p_strc_constructions_sample\empty - \csname\??constructionmainhandler\currentconstructionhandler\endcsname\currentconstructiontext - \else - \csname\??constructionmainhandler\currentconstructionhandler\endcsname\currentconstructionsample - \fi}% - \assignwidth - \p_strc_constructions_width - \constructionsheadwidth - {\unhcopy\constructionheadbox} - \constructionsheaddistance + % + % maybe we should have an option as i cannot oversee the consequences now + % + % \edef\p_strc_constructions_sample{\constructionparameter\c!sample}% + % \ifx\p_strc_constructions_sample\empty + % \strc_constructions_preroll_head\currentconstructiontext + % \else + % \strc_constructions_preroll_head\currentconstructiontextsample + % \fi + % \strc_constructions_set_width_and_distance + % + \strc_constructions_preroll_head\currentconstructiontext + \ifzeropt\wd\constructionheadbox + \strc_constructions_ignore_head + \else + \edef\p_strc_constructions_sample{\constructionparameter\c!sample}% + \ifx\p_strc_constructions_sample\empty + \strc_constructions_set_width_and_distance + \else + \strc_constructions_preroll_head\currentconstructionsample + \ifzeropt\wd\constructionheadbox + \strc_constructions_ignore_head + \else + \strc_constructions_set_width_and_distance + \fi + \fi + \fi + % \dostarttagged\t!constructiontag\empty % todo \setbox\constructionheadbox\hbox {\forgetall @@ -709,7 +740,8 @@ \startsetups[\??constructionrenderings:\v!serried:\v!fit] 
\let\\=\crlf \noindent - \ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox + \ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox % why copy? leftover? + \penalty\plustenthousand % new \hskip\constructionsheaddistance\relax \useconstructionstyleandcolor\c!style\c!color \ignorespaces @@ -718,8 +750,9 @@ \startsetups[\??constructionrenderings:\v!serried:\v!broad] \let\\=\crlf \noindent - \ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox + \ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox % why copy? leftover? \ifconditional\c_strc_constructions_distance_none \else + \penalty\plustenthousand % new \hskip\constructionsheaddistance \!!plus .5\constructionsheaddistance \!!minus .25\constructionsheaddistance\relax \fi \useconstructionstyleandcolor\c!style\c!color diff --git a/tex/context/base/strc-doc.lua b/tex/context/base/strc-doc.lua index 50a9e67a0..5be5727f5 100644 --- a/tex/context/base/strc-doc.lua +++ b/tex/context/base/strc-doc.lua @@ -41,9 +41,10 @@ local trace_detail = false trackers.register("structures.detail", fu local report_structure = logs.reporter("structure","sectioning") -local structures = structures local context = context +local commands = commands +local structures = structures local helpers = structures.helpers local documents = structures.documents local sections = structures.sections @@ -736,13 +737,13 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref applyprocessor(connector) end else -if groupsuffix and kind ~= "prefix" then - if result then - result[#result+1] = strippedprocessor(groupsuffix) - else - applyprocessor(groupsuffix) - end -end + if groupsuffix and kind ~= "prefix" then + if result then + result[#result+1] = strippedprocessor(groupsuffix) + else + applyprocessor(groupsuffix) + end + end if stopper then if result then result[#result+1] = strippedprocessor(stopper) @@ -768,94 +769,104 @@ end function sections.findnumber(depth,what) -- needs checking (looks wrong and slow too) local data = data.status[depth or data.depth] - if data then - local index = data.references.section - local collected = sections.collected - local sectiondata = collected[index] - if sectiondata and sectiondata.hidenumber ~= true then -- can be nil - local quit = what == v_previous or what == v_next - if what == v_first or what == v_previous then - for i=index,1,-1 do - local s = collected[i] - if s then - local n = s.numbers - if #n == depth and n[depth] and n[depth] ~= 0 then - sectiondata = s - if quit then - break - end - elseif #n < depth then + if not data then + return + end + local references = data.references + if not references then + return + end + local index = references.section + local collected = sections.collected + local sectiondata = collected[index] + if sectiondata and sectiondata.hidenumber ~= true then -- can be nil + local quit = what == v_previous or what == v_next + if what == v_first or what == v_previous then + for i=index,1,-1 do + local s = collected[i] + if s then + local n = s.numbers + if #n == depth and n[depth] and n[depth] ~= 0 then + sectiondata = s + if quit then break end + elseif #n < depth then + break end end - elseif what == v_last or what == v_next then - for i=index,#collected do - local s = collected[i] - if s then - local n = s.numbers - if #n == depth and n[depth] and n[depth] ~= 0 then - sectiondata = s - if quit then - break - end - elseif #n < depth then + end + elseif what == v_last or what == v_next then + for i=index,#collected 
do + local s = collected[i] + if s then + local n = s.numbers + if #n == depth and n[depth] and n[depth] ~= 0 then + sectiondata = s + if quit then break end + elseif #n < depth then + break end end end - return sectiondata end + return sectiondata end end function sections.finddata(depth,what) local data = data.status[depth or data.depth] - if data then - -- if sectiondata and sectiondata.hidenumber ~= true then -- can be nil - local index = data.references.listindex - if index then - local collected = structures.lists.collected - local quit = what == v_previous or what == v_next - if what == v_first or what == v_previous then - for i=index-1,1,-1 do - local s = collected[i] - if not s then + if not data then + return + end + local references = data.references + if not references then + return + end + local index = references.listindex + if not index then + return + end + local collected = structures.lists.collected + local quit = what == v_previous or what == v_next + if what == v_first or what == v_previous then + for i=index-1,1,-1 do + local s = collected[i] + if not s then + break + elseif s.metadata.kind == "section" then -- maybe check on name + local n = s.numberdata.numbers + if #n == depth and n[depth] and n[depth] ~= 0 then + data = s + if quit then break - elseif s.metadata.kind == "section" then -- maybe check on name - local n = s.numberdata.numbers - if #n == depth and n[depth] and n[depth] ~= 0 then - data = s - if quit then - break - end - elseif #n < depth then - break - end end + elseif #n < depth then + break end - elseif what == v_last or what == v_next then - for i=index+1,#collected do - local s = collected[i] - if not s then + end + end + elseif what == v_last or what == v_next then + for i=index+1,#collected do + local s = collected[i] + if not s then + break + elseif s.metadata.kind == "section" then -- maybe check on name + local n = s.numberdata.numbers + if #n == depth and n[depth] and n[depth] ~= 0 then + data = s + if quit then break - elseif s.metadata.kind == "section" then -- maybe check on name - local n = s.numberdata.numbers - if #n == depth and n[depth] and n[depth] ~= 0 then - data = s - if quit then - break - end - elseif #n < depth then - break - end end + elseif #n < depth then + break end end end - return data end + return data end function sections.internalreference(sectionname,what) -- to be used in pagebuilder (no marks used) diff --git a/tex/context/base/strc-flt.mkvi b/tex/context/base/strc-flt.mkvi index 8122b953e..065513db8 100644 --- a/tex/context/base/strc-flt.mkvi +++ b/tex/context/base/strc-flt.mkvi @@ -983,6 +983,8 @@ {\let\extrafloatlocation#rightpagelocation}% {\let\extrafloatlocation#leftpagelocation}} +\let\extrafloatlocation\empty + \installcorenamespace{extrafloataction} \setvalue{\??extrafloataction \v!inner}#1{\strc_floats_set_extra_action\v!left \v!right} @@ -1007,6 +1009,7 @@ \processcommacommand[\floatlocation]\strc_floats_check_extra_actions_step \ifx\extrafloatlocation\empty \else \edef\floatlocation{\extrafloatlocation,\floatlocation}% + \setfloatmethodvariables\floatlocation \fi}} \def\strc_floats_check_extra_actions_step#step% diff --git a/tex/context/base/strc-ini.lua b/tex/context/base/strc-ini.lua index fd7c10f79..09ed79288 100644 --- a/tex/context/base/strc-ini.lua +++ b/tex/context/base/strc-ini.lua @@ -20,12 +20,13 @@ but it does not make sense to store all processdata. 
]]-- -local formatters = string.formatters local lpegmatch = lpeg.match -local count = tex.count local type, next, tonumber, select = type, next, tonumber, select -local settings_to_array, settings_to_hash = utilities.parsers.settings_to_array, utilities.parsers.settings_to_hash -local allocate = utilities.storage.allocate + +local formatters = string.formatters +local settings_to_array = utilities.parsers.settings_to_array +local settings_to_hash = utilities.parsers.settings_to_hash +local allocate = utilities.storage.allocate local catcodenumbers = catcodes.numbers -- better use the context(...) way to switch @@ -34,7 +35,8 @@ local xmlcatcodes = catcodenumbers.xmlcatcodes local notcatcodes = catcodenumbers.notcatcodes local txtcatcodes = catcodenumbers.txtcatcodes -local context, commands = context, commands +local context = context +local commands = commands local pushcatcodes = context.pushcatcodes local popcatcodes = context.popcatcodes diff --git a/tex/context/base/strc-itm.lua b/tex/context/base/strc-itm.lua index 8a745f356..4945c282f 100644 --- a/tex/context/base/strc-itm.lua +++ b/tex/context/base/strc-itm.lua @@ -10,19 +10,19 @@ local structures = structures local itemgroups = structures.itemgroups local jobpasses = job.passes -local setfield = jobpasses.save -local getfield = jobpasses.getfield +local setvariable = jobpasses.save +local getvariable = jobpasses.getfield function itemgroups.register(name,nofitems,maxwidth) - setfield("itemgroup", { nofitems, maxwidth }) + setvariable("itemgroup", { nofitems, maxwidth }) end function itemgroups.nofitems(name,index) - return getfield("itemgroup", index, 1, 0) + return getvariable("itemgroup", index, 1, 0) end function itemgroups.maxwidth(name,index) - return getfield("itemgroup", index, 2, 0) + return getvariable("itemgroup", index, 2, 0) end -- interface (might become counter/dimension) @@ -30,9 +30,9 @@ end commands.registeritemgroup = itemgroups.register function commands.nofitems(name,index) - context(getfield("itemgroup", index, 1, 0)) + context(getvariable("itemgroup", index, 1, 0)) end function commands.maxitemwidth(name,index) - context(getfield("itemgroup", index, 2, 0)) + context(getvariable("itemgroup", index, 2, 0)) end diff --git a/tex/context/base/strc-itm.mkvi b/tex/context/base/strc-itm.mkvi index 85ec4bc45..8259fa38d 100644 --- a/tex/context/base/strc-itm.mkvi +++ b/tex/context/base/strc-itm.mkvi @@ -408,7 +408,6 @@ \settrue\c_strc_itemgroups_inline \settrue\c_strc_itemgroups_joined \strc_itemgroups_process_set_option_pack} -\setvalue{\??itemgroupkeyword\v!columns }{\strc_itemgroups_process_set_option_pack} \setvalue{\??itemgroupkeyword\v!before }{\settrue\c_strc_itemgroups_before} \setvalue{\??itemgroupkeyword\v!after }{\settrue\c_strc_itemgroups_after} \setvalue{\??itemgroupkeyword\v!nowhite }{\settrue\c_strc_itemgroups_nowhite} @@ -1205,7 +1204,8 @@ \def\strc_itemgroups_handle_lapped_item_positive {\llap - {\hbox to \d_strc_itemgroups_list_width + {\dontcomplain + \hbox to \d_strc_itemgroups_list_width {\ifconditional\c_strc_itemgroups_sub \llap{+\enspace}% \fi @@ -1246,7 +1246,7 @@ \strc_itemgroups_start_head} \def\strc_itemgroups_make_symbol_box - {\setbox\b_strc_itemgroups\hbox + {\setbox\b_strc_itemgroups\autodirhbox {\ifconditional\c_strc_itemgroups_head \ifconditional\c_strc_itemgroups_symbol \strc_itemgroups_insert_extra_reference @@ -1444,6 +1444,19 @@ \fi +\relaxvalueifundefined \v!item +\relaxvalueifundefined \v!sub +\relaxvalueifundefined \v!sym +\relaxvalueifundefined \v!ran +\relaxvalueifundefined 
\v!head +\relaxvalueifundefined \v!its +\relaxvalueifundefined \v!mar +\relaxvalueifundefined \v!txt +\relaxvalueifundefined {\e!start\v!item} +\relaxvalueifundefined {\e!stop \v!item} +\relaxvalueifundefined {\e!start\v!head} +\relaxvalueifundefined {\e!stop \v!head} + %D A nice example of a plugin: %D %D \startbuffer diff --git a/tex/context/base/strc-lev.lua b/tex/context/base/strc-lev.lua index 50a63c938..947889e1e 100644 --- a/tex/context/base/strc-lev.lua +++ b/tex/context/base/strc-lev.lua @@ -8,6 +8,9 @@ if not modules then modules = { } end modules ['strc-lev'] = { local insert, remove = table.insert, table.remove +local context = context +local commands = commands + local sections = structures.sections local default = interfaces.variables.default diff --git a/tex/context/base/strc-lnt.mkvi b/tex/context/base/strc-lnt.mkvi index 4a2cd1cc0..f713ee20b 100644 --- a/tex/context/base/strc-lnt.mkvi +++ b/tex/context/base/strc-lnt.mkvi @@ -11,6 +11,8 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. +% todo: mkvi # + \writestatus{loading}{ConTeXt Structure Macros / Line Notes} %D This module loads on top of the footnote and line numbering macros. @@ -79,6 +81,8 @@ \letvalue\??linenotespreviousfrom\empty \letvalue\??linenotespreviousto \empty +% maybe do this in lua + \def\page_lines_in_from{\in[lr:b:\currentlinenotereference]} \def\page_lines_in_to {\in[lr:e:\currentlinenotereference]} @@ -113,14 +117,11 @@ \notationparameter\c!compressseparator \else \page_lines_in_from + \ifx\m_page_lines_current_from\m_page_lines_current_to\else\endash\page_lines_in_to\fi \fi \else \page_lines_in_from - \ifx\m_page_lines_current_from\m_page_lines_current_to - \else - \endash - \page_lines_in_to - \fi + \ifx\m_page_lines_current_from\m_page_lines_current_to\else\endash\page_lines_in_to\fi \fi \else \page_lines_in_from diff --git a/tex/context/base/strc-lst.lua b/tex/context/base/strc-lst.lua index 305b6a6fa..f6a355707 100644 --- a/tex/context/base/strc-lst.lua +++ b/tex/context/base/strc-lst.lua @@ -17,7 +17,6 @@ if not modules then modules = { } end modules ['strc-lst'] = { local format, gmatch, gsub = string.format, string.gmatch, string.gsub local tonumber = tonumber -local texcount = tex.count local concat, insert, remove = table.concat, table.insert, table.remove local lpegmatch = lpeg.match local simple_hash_to_string, settings_to_hash = utilities.parsers.simple_hash_to_string, utilities.parsers.settings_to_hash @@ -27,6 +26,11 @@ local trace_lists = false trackers.register("structures.lists", function( local report_lists = logs.reporter("structure","lists") +local context = context +local commands = commands + +local texgetcount = tex.getcount + local structures = structures local lists = structures.lists local sections = structures.sections @@ -122,6 +126,8 @@ function lists.groupindex(name,group) return groupindex and groupindex[group] or 0 end +-- we could use t (as hash key) in order to check for dup entries + function lists.addto(t) local m = t.metadata local u = t.userdata @@ -130,12 +136,13 @@ function lists.addto(t) end local numberdata = t.numberdata local group = numberdata and numberdata.group + local name = m.name if not group then -- forget about it elseif group == "" then group, numberdata.group = nil, nil else - local groupindex = groupindices[m.name][group] + local groupindex = groupindices[name][group] if groupindex then numberdata.numbers = cached[groupindex].numberdata.numbers end @@ -154,7 +161,10 @@ function lists.addto(t) setcomponent(t) -- might move 
to the tex end end if group then - groupindices[m.name][group] = p + groupindices[name][group] = p + end + if trace_lists then + report_lists("added %a, internal %a",name,p) end return p end @@ -181,10 +191,17 @@ end -- this is the main pagenumber enhancer +local enhanced = { } + function lists.enhance(n) - -- todo: symbolic names for counters local l = cached[n] - if l then + if not l then + report_lists("enhancing %a, unknown internal",n) + elseif enhanced[n] then + if trace_lists then + report_lists("enhancing %a, name %a, duplicate ignored",n,name) + end + else local metadata = l.metadata local references = l.references -- @@ -192,23 +209,27 @@ function lists.enhance(n) -- save in the right order (happens at shipout) lists.tobesaved[#lists.tobesaved+1] = l -- default enhancer (cross referencing) - references.realpage = texcount.realpageno + references.realpage = texgetcount("realpageno") -- tags local kind = metadata.kind local name = metadata.name + if trace_lists then + report_lists("enhancing %a, name %a",n,name) + end if references then -- is this used ? local tag = tags.getid(kind,name) if tag and tag ~= "?" then references.tag = tag end - --~ references.listindex = n end -- specific enhancer (kind of obsolete) local enhancer = kind and lists.enhancers[kind] if enhancer then enhancer(l) end + -- + enhanced[n] = true return l end end diff --git a/tex/context/base/strc-lst.mkvi b/tex/context/base/strc-lst.mkvi index 15a499c8b..7cef924b7 100644 --- a/tex/context/base/strc-lst.mkvi +++ b/tex/context/base/strc-lst.mkvi @@ -1262,6 +1262,8 @@ % \resetinteractionparameter\c!contrastcolor \fi} +\let\strc_lists_set_style_color\strc_lists_set_style_color_normal + %D A helper: \def\strc_lists_limitated_text#text% diff --git a/tex/context/base/strc-mar.lua b/tex/context/base/strc-mar.lua index 7b3ac11e1..02d676fb9 100644 --- a/tex/context/base/strc-mar.lua +++ b/tex/context/base/strc-mar.lua @@ -12,7 +12,9 @@ if not modules then modules = { } end modules ['strc-mar'] = { local insert, concat = table.insert, table.concat local tostring, next, rawget = tostring, next, rawget local lpegmatch = lpeg.match -local match = string.match + +local context = context +local commands = commands local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex @@ -22,9 +24,10 @@ local glyph_code = nodecodes.glyph local hlist_code = nodecodes.hlist local vlist_code = nodecodes.vlist -local traversenodes = node.traverse +local traversenodes = nodes.traverse + local texsetattribute = tex.setattribute -local texbox = tex.box +local texgetbox = tex.getbox local a_marks = attributes.private("structure","marks") @@ -121,7 +124,7 @@ local function sweep(head,first,last) end local list = n.list if list then - first, last = sweep(list, first, last) + first, last = sweep(list,first,last) end end end @@ -135,7 +138,7 @@ setmetatableindex(classes, function(t,k) local s = settings_to_array(k) t[k] = s local lasts = { } function marks.synchronize(class,n,option) - local box = texbox[n] + local box = texgetbox(n) if box then local first, last = sweep(box.list,0,0) if option == v_keep and first == 0 and last == 0 then @@ -151,11 +154,16 @@ function marks.synchronize(class,n,option) for i=1,#classlist do local class = classlist[i] local range = ranges[class] - if not range then - range = { } + if range then + range.first = first + range.last = last + else + range = { + first = first, + last = last, + } ranges[class] = range end - range.first, range.last = first, last if trace_marks_get or 
trace_marks_set then report_marks("action %a, class %a, first %a, last %a","synchronize",class,range.first,range.last) end @@ -659,8 +667,10 @@ function marks.fetchallmarks(name,range) fetchallmarks(name,range ) -- here we have a few helpers .. will become commands.* +local pattern = lpeg.afterprefix("li::") + function marks.title(tag,n) - local listindex = match(n,"^li::(.-)$") + local listindex = lpegmatch(pattern,n) if listindex then commands.savedlisttitle(tag,listindex,"marking") else @@ -669,7 +679,7 @@ function marks.title(tag,n) end function marks.number(tag,n) -- no spec - local listindex = match(n,"^li::(.-)$") + local listindex = lpegmatch(pattern,n) if listindex then commands.savedlistnumber(tag,listindex) else diff --git a/tex/context/base/strc-not.lua b/tex/context/base/strc-not.lua index 882e00a44..40b78d59f 100644 --- a/tex/context/base/strc-not.lua +++ b/tex/context/base/strc-not.lua @@ -8,7 +8,6 @@ if not modules then modules = { } end modules ['strc-not'] = { local format = string.format local next = next -local texcount = tex.count local trace_notes = false trackers.register("structures.notes", function(v) trace_notes = v end) local trace_references = false trackers.register("structures.notes.references", function(v) trace_references = v end) @@ -24,6 +23,9 @@ local notes = structures.notes local references = structures.references local counterspecials = counters.specials +local texgetcount = tex.getcount +local texgetbox = tex.getbox + notes.states = notes.states or { } lists.enhancers = lists.enhancers or { } @@ -91,10 +93,10 @@ end local function getn(tag) local nd = notedata[tag] - return (nd and #nd) or 0 + return nd and #nd or 0 end -notes.get = get +notes.get = get notes.getn = getn -- we could make a special enhancer @@ -189,7 +191,7 @@ local function hascontent(tag) local ok = notestates[tag] if ok then if ok.kind == "insert" then - ok = tex.box[ok.number] + ok = texgetbox(ok.number) if ok then ok = tbs.list ok = lst and lst.next @@ -257,7 +259,7 @@ function notes.checkpagechange(tag) -- called before increment ! 
end elseif current then -- we need to locate the next one, best guess - if texcount.realpageno > current.pagenumber.number then + if texgetcount("realpageno") > current.pagenumber.number then counters.reset(tag) end end @@ -280,7 +282,7 @@ commands.postponenotes = notes.postpone function notes.setsymbolpage(tag,n,l) local l = l or listindex(tag,n) if l then - local p = texcount.realpageno + local p = texgetcount("realpageno") if trace_notes or trace_references then report_notes("note %a of %a with list index %a gets symbol page %a",n,tag,l,p) end @@ -382,7 +384,7 @@ function commands.flushnotes(tag,whatkind,how) -- store and postpone local rp = get(tag,i) rp = rp and rp.references rp = rp and rp.symbolpage or 0 - if rp > texcount.realpageno then + if rp > texgetcount("realpageno") then state.start = i return end diff --git a/tex/context/base/strc-not.mkvi b/tex/context/base/strc-not.mkvi index 76816d035..cf1735da8 100644 --- a/tex/context/base/strc-not.mkvi +++ b/tex/context/base/strc-not.mkvi @@ -622,8 +622,15 @@ \unexpanded\def\strc_notes_inject_symbol_nop {\strc_notes_inject_symbol_indeed\conditionalfalse} -\unexpanded\def\strc_notes_inject_symbol_snc - {\currentconstructionsynchronize} % this flushes the data to the list +% % this flushes the data to the list +% +% \unexpanded\def\strc_notes_inject_symbol_snc +% {\currentconstructionsynchronize} +% +% but instead we need to do this with the content + +\unexpanded\def\strc_notes_inject_symbol_snc % so this will go away probably + {} \unexpanded\def\strc_notes_inject_symbol_indeed#synchronize% {\removeunwantedspaces @@ -1170,16 +1177,17 @@ \usesetupsparameter\noteparameter % experimental \doifelse{\noteparameter\c!paragraph}\v!yes {\nointerlineskip + \vboxtohboxslack.5\emwidth % we can instead use \hboxestohboxslack later \startvboxtohbox - \handlenoteitself{#tag}{#id}% - % add some slack + \handlenoteitself{#tag}{#id}% + \strc_notes_between_paragraphs \stopvboxtohbox} {\handlenoteitself{#tag}{#id}}% \egroup \the\everyafternoteinsert \endgroup} -\unexpanded\def\betweennoteitself#tag% +\unexpanded\def\betweennoteitself#tag% used ? {\edef\currentnote{#tag}% \doif{\noteparameter\c!paragraph}\v!yes\strc_notes_between_paragraphs} @@ -1192,7 +1200,7 @@ % as we can have collected notes (e.g. in tables) we need to recover % \currentdescriptionattribute and \currentdescriptionsynchronize % - %\reinstateconstructionnumberentry\currentconstructionlistentry % we could store the number in the entry + \reinstateconstructionnumberentry\currentconstructionlistentry % we could store the number in the entry (e.g. 
needed when local notes in table) % \dontcomplain % \begingroup @@ -1338,18 +1346,11 @@ \def\strc_notes_between_paragraphs_first {\glet\strc_notes_between_paragraphs_indeed\strc_notes_between_paragraphs}% shape works reverse -\def\strc_notes_flush_global % will be done in lua instead +\def\strc_notes_flush_global {\doifelse{\noteparameter\c!paragraph}\v!yes - {\vbox - {\beginofshapebox - \iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteinsertionnumber - \endofshapebox - \let\strc_notes_between_paragraphs_indeed\strc_notes_between_paragraphs_first % shape works reverse - \doreshapebox - {\hbox{\unhbox\shapebox\strc_notes_between_paragraphs_indeed}} - \donothing \donothing \donothing % get rid of penalties etc - \innerflushshapebox - \convertvboxtohbox}} + {\vbox\starthboxestohbox + \iftrialtypesetting\unvcopy\else\unvbox\fi\currentnoteinsertionnumber + \stophboxestohbox} {\iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteinsertionnumber}} %D Supporting end notes is surprisingly easy. Even better, we diff --git a/tex/context/base/strc-num.lua b/tex/context/base/strc-num.lua index b0eae6b78..67e9b1734 100644 --- a/tex/context/base/strc-num.lua +++ b/tex/context/base/strc-num.lua @@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['strc-num'] = { local format = string.format local next, type = next, type local min, max = math.min, math.max -local texcount, texsetcount = tex.count, tex.setcount +local texsetcount = tex.setcount -- Counters are managed here. They can have multiple levels which makes it easier to synchronize -- them. Synchronization is sort of special anyway, as it relates to document structuring. @@ -147,9 +147,9 @@ local function dummyconstructor(t,name,i) end setmetatableindex(constructor,function(t,k) - if trace_counters then - report_counters("unknown constructor %a",k) - end + -- if trace_counters then + -- report_counters("unknown constructor %a",k) + -- end return dummyconstructor end) diff --git a/tex/context/base/strc-pag.lua b/tex/context/base/strc-pag.lua index f70d37d63..02ed5610f 100644 --- a/tex/context/base/strc-pag.lua +++ b/tex/context/base/strc-pag.lua @@ -6,8 +6,6 @@ if not modules then modules = { } end modules ['strc-pag'] = { license = "see context related readme files" } -local texcount = tex.count - local allocate, mark = utilities.storage.allocate, utilities.storage.mark local trace_pages = false trackers.register("structures.pages", function(v) trace_pages = v end) @@ -26,12 +24,16 @@ local counterdata = counters.data local variables = interfaces.variables local context = context +local commands = commands local processors = typesetters.processors local applyprocessor = processors.apply local startapplyprocessor = processors.startapply local stopapplyprocessor = processors.stopapply +local texsetcount = tex.setcount +local texgetcount = tex.getcount + -- storage local collected, tobesaved = allocate(), allocate() @@ -49,7 +51,8 @@ job.register('structures.pages.collected', tobesaved, initializer) local specification = { } -- to be checked function pages.save(prefixdata,numberdata) - local realpage, userpage = texcount.realpageno, texcount.userpageno + local realpage = texgetcount("realpageno") + local userpage = texgetcount("userpageno") if realpage > 0 then if trace_pages then report_pages("saving page %s.%s",realpage,userpage) @@ -73,24 +76,24 @@ end -- builder we have to make sure it starts at least at 1. 
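-- a minimal sketch of the accessor idiom applied throughout the lua changes in this
-- commit: the count/dimen/box pseudotables (tex.count.userpageno and alike) give way
-- to the function based accessors; the helper name below is only an illustration and
-- is not defined anywhere in context

local texgetcount = tex.getcount -- instead of indexing tex.count
local texsetcount = tex.setcount -- instead of assigning to tex.count

local function bumpuserpage() -- hypothetical helper, for illustration only
    local userpage = texgetcount("userpageno")        -- was: tex.count.userpageno
    texsetcount("global","userpageno",userpage + 1)   -- was: tex.count.userpageno = userpage + 1
    return userpage + 1
end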
function counters.specials.userpage() - local r = texcount.realpageno + local r = texgetcount("realpageno") if r > 0 then local t = tobesaved[r] if t then - t.number = texcount.userpageno + t.number = texgetcount("userpageno") if trace_pages then report_pages("forcing pagenumber of realpage %s to %s",r,t.number) end return end end - local u = texcount.userpageno + local u = texgetcount("userpageno") if u == 0 then if trace_pages then report_pages("forcing pagenumber of realpage %s to %s (probably a bug)",r,1) end counters.setvalue("userpage",1) - texcount.userpageno = 1 + texsetcount("userpageno",1) -- not global ? end end @@ -252,8 +255,8 @@ function helpers.prefix(data,prefixspec) end function pages.is_odd(n) - n = n or texcount.realpageno - if texcount.pagenoshift % 2 == 0 then + n = n or texgetcount("realpageno") + if texgetcount("pagenoshift") % 2 == 0 then return n % 2 == 0 else return n % 2 ~= 0 diff --git a/tex/context/base/strc-ref.lua b/tex/context/base/strc-ref.lua index 284418c48..9b4ec6ab7 100644 --- a/tex/context/base/strc-ref.lua +++ b/tex/context/base/strc-ref.lua @@ -15,10 +15,10 @@ if not modules then modules = { } end modules ['strc-ref'] = { -- todo: autoload components when ::: local format, find, gmatch, match, concat = string.format, string.find, string.gmatch, string.match, table.concat -local texcount, texsetcount = tex.count, tex.setcount +local floor = math.floor local rawget, tonumber = rawget, tonumber local lpegmatch = lpeg.match -local copytable = table.copy +local insert, remove, copytable = table.insert, table.remove, table.copy local formatters = string.formatters local allocate = utilities.storage.allocate @@ -46,16 +46,17 @@ local report_empty = logs.reporter("references","empty") local variables = interfaces.variables local constants = interfaces.constants local context = context +local commands = commands + +local texgetcount = tex.getcount +local texsetcount = tex.setcount +local texconditionals = tex.conditionals local v_default = variables.default local v_url = variables.url local v_file = variables.file local v_unknown = variables.unknown local v_yes = variables.yes - -local texcount = tex.count -local texconditionals = tex.conditionals - local productcomponent = resolvers.jobs.productcomponent local justacomponent = resolvers.jobs.justacomponent @@ -140,16 +141,10 @@ job.register('structures.references.collected', tobesaved, initializer, finalize local maxreferred = 1 local nofreferred = 0 --- local function initializer() -- can we use a tobesaved as metatable for collected? --- tobereferred = references.tobereferred --- referred = references.referred --- nofreferred = #referred --- end - local function initializer() -- can we use a tobesaved as metatable for collected? tobereferred = references.tobereferred referred = references.referred - setmetatableindex(referred,get) -- hm, what is get ? 
+ nofreferred = #referred end -- We make the array sparse (maybe a finalizer should optionally return a table) because @@ -216,9 +211,11 @@ local function referredpage(n) end end -- fallback - return texcount.realpageno + return texgetcount("realpageno") end +-- setmetatableindex(referred,function(t,k) return referredpage(k) end ) + references.referredpage = referredpage function references.registerpage(n) -- called in the backend code @@ -226,7 +223,7 @@ function references.registerpage(n) -- called in the backend code if n > maxreferred then maxreferred = n end - tobereferred[n] = texcount.realpageno + tobereferred[n] = texgetcount("realpageno") end end @@ -252,7 +249,7 @@ references.setnextorder = setnextorder function references.setnextinternal(kind,name) setnextorder(kind,name) -- always incremented with internal - local n = texcount.locationcount + 1 + local n = texgetcount("locationcount") + 1 texsetcount("global","locationcount",n) return n end @@ -311,7 +308,7 @@ end function references.enhance(prefix,tag) local l = tobesaved[prefix][tag] if l then - l.references.realpage = texcount.realpageno + l.references.realpage = texgetcount("realpageno") end end @@ -1384,25 +1381,36 @@ local function identify_inner_or_outer(set,var,i) return v end -local components = job.structure.components + -- these get auto prefixes but are loaded in the document so they are + -- internal .. we also set the realpage (for samepage analysis) -if components then - for i=1,#components do - local component = components[i] - local data = collected[component] - local vi = data and data[inner] - if vi then - var.outer = component - var.i = vi - var.kind = "outer with inner" - set.external = true - if trace_identifying then - report_identify_outer(set,var,i,"4x") - end - return var - end + local components = job.structure.components + if components then + for i=1,#components do + local component = components[i] + local data = collected[component] + local vi = data and data[inner] + if vi then +-- var = copytable(var) +-- var.kind = "inner" +-- var.i = vi +-- var.p = component +-- runners.inner(var.r = vi.references.realpage +-- if trace_identifying then +-- report_identify_outer(set,var,i,"4x") +-- end +-- return var +local v = identify_inner(set,copytable(var),component,collected) -- is copy needed ? +if v.i and not v.error then + v.kind = "inner" + if trace_identifying then + report_identify_outer(set,var,i,"4x") end + return v end + end + end + end local componentreferences = productdata.componentreferences local productreferences = productdata.productreferences @@ -1547,7 +1555,7 @@ local function identify(prefix,reference) end local set = resolve(prefix,reference) local bug = false - texcount.referencehastexstate = set.has_tex and 1 or 0 + texsetcount("referencehastexstate",set.has_tex and 1 or 0) nofidentified = nofidentified + 1 set.n = nofidentified for i=1,#set do @@ -1675,14 +1683,14 @@ function references.setinternalreference(prefix,tag,internal,view) -- needs chec t[tn] = "aut:" .. 
internal end local destination = references.mark(t,nil,nil,view) -- returns an attribute - texcount.lastdestinationattribute = destination + texsetcount("lastdestinationattribute",destination) return destination end end function references.setandgetattribute(kind,prefix,tag,data,view) -- maybe do internal automatically here local attr = references.set(kind,prefix,tag,data) and references.setinternalreference(prefix,tag,nil,view) or unsetvalue - texcount.lastdestinationattribute = attr + texsetcount("lastdestinationattribute",attr) return attr end @@ -1801,7 +1809,7 @@ function filters.generic.number(data,what,prefixspec) -- todo: spec and then no sections.typesetnumber(numberdata,"number",numberdata) else local useddata = data.useddata - if useddata and useddsta.number then + if useddata and useddata.number then context(useddata.number) end end @@ -1931,7 +1939,8 @@ local specials = references.testspecials -- real page to determine if we need contrastlocation as that is more lightweight. local function checkedpagestate(n,page) - local r, p = referredpage(n), tonumber(page) + local r = referredpage(n) + local p = tonumber(page) if not p then return 0 elseif p > r then @@ -1944,7 +1953,9 @@ local function checkedpagestate(n,page) end local function setreferencerealpage(actions) - actions = actions or references.currentset + if not actions then + actions = references.currentset + end if not actions then return 0 else @@ -1976,7 +1987,9 @@ end -- normally such an analysis happens in the backend code function references.analyze(actions) - actions = actions or references.currentset + if not actions then + actions = references.currentset + end if not actions then actions = { realpage = 0, pagestate = 0 } elseif actions.pagestate then @@ -1995,12 +2008,15 @@ function references.analyze(actions) end function commands.referencepagestate(actions) - actions = actions or references.currentset + if not actions then + actions = references.currentset + end if not actions then context(0) else if not actions.pagestate then references.analyze(actions) -- delayed unless explicitly asked for +-- print("NO STATE",actions.reference,actions.pagestate) end context(actions.pagestate) end @@ -2019,7 +2035,10 @@ local function realpageofpage(p) -- the last one counts ! nofrealpages = #pages plist = { } for rp=1,nofrealpages do - plist[pages[rp].number] = rp + local page = pages[rp] + if page then + plist[page.number] = rp + end end references.nofrealpages = nofrealpages end @@ -2033,7 +2052,7 @@ function references.checkedrealpage(r) realpageofpage(r) -- just initialize end if not r then - return texcount.realpageno + return texgetcount("realpageno") elseif r < 1 then return 1 elseif r > nofrealpages then @@ -2126,7 +2145,7 @@ end function specials.deltapage(var,actions) local p = tonumber(var.operation) if p then - p = references.checkedrealpage(p + texcount.realpageno) + p = references.checkedrealpage(p + texgetcount("realpageno")) var.r = p actions.realpage = actions.realpage or p -- first wins end @@ -2156,3 +2175,23 @@ function references.import(usedname) end function references.load (usedname) end commands.exportreferences = references.export + +-- better done here .... 
we don't insert/remove, just use a pointer + +local prefixstack = { "" } +local prefixlevel = 1 + +function commands.pushreferenceprefix(prefix) + prefixlevel = prefixlevel + 1 + prefixstack[prefixlevel] = prefix + context(prefix) +end + +function commands.popreferenceprefix() + prefixlevel = prefixlevel - 1 + if prefixlevel > 0 then + context(prefixstack[prefixlevel]) + else + report_references("unable to pop referenceprefix") + end +end diff --git a/tex/context/base/strc-ref.mkvi b/tex/context/base/strc-ref.mkvi index c82a09d20..69ad4629e 100644 --- a/tex/context/base/strc-ref.mkvi +++ b/tex/context/base/strc-ref.mkvi @@ -75,6 +75,8 @@ %D document). By setting the \type{interaction} variable, one %D can influences the way interactive references are set. +\let\referenceprefix\empty + \installcorenamespace{referencing} \installdirectcommandhandler \??referencing {referencing} % \??referencing @@ -230,6 +232,7 @@ \dontleavehmode\hbox attr \destinationattribute\lastdestinationattribute\bgroup \strc_references_flush_destination_nodes \egroup + \prewordbreak % new \fi} \def\strc_references_set_page_only_destination_attribute#labels% could in fact be fully expandable @@ -771,12 +774,24 @@ \unexpanded\def\setupglobalreferenceprefix[#prefix]% {\xdef\referenceprefix{#prefix}} +% \unexpanded\def\pushreferenceprefix#prefix% +% {\pushmacro\referenceprefix +% \xdef\referenceprefix{#prefix}} % global + +% \unexpanded\def\popreferenceprefix +% {\popmacro\referenceprefix} + +\unexpanded\def\globalpushreferenceprefix#prefix% + {\xdef\referenceprefix{\ctxcommand{pushreferenceprefix("#prefix")}}} + +\unexpanded\def\globalpopreferenceprefix + {\xdef\referenceprefix{\ctxcommand{popreferenceprefix()}}} + \unexpanded\def\pushreferenceprefix#prefix% - {\pushmacro\referenceprefix - \xdef\referenceprefix{#prefix}} + {\edef\referenceprefix{\ctxcommand{pushreferenceprefix("#prefix")}}} \unexpanded\def\popreferenceprefix - {\popmacro\referenceprefix} + {\edef\referenceprefix{\ctxcommand{popreferenceprefix()}}} \def\m_strc_references_prefix_yes{+} \def\m_strc_references_prefix_nop{-} @@ -799,7 +814,7 @@ \fi\fi\fi\fi} \appendtoks - \setupreferenceprefix[\referencingparameter\c!prefix] + \setupreferenceprefix[\referencingparameter\c!prefix] \to \everysetupreferencing %D We can typeset a reference using \type{\in}, \type{\at} and diff --git a/tex/context/base/strc-reg.lua b/tex/context/base/strc-reg.lua index 40cd3455b..b0d8a8a25 100644 --- a/tex/context/base/strc-reg.lua +++ b/tex/context/base/strc-reg.lua @@ -7,36 +7,41 @@ if not modules then modules = { } end modules ['strc-reg'] = { } local next, type = next, type -local texcount = tex.count local format, gmatch = string.format, string.gmatch local equal, concat, remove = table.are_equal, table.concat, table.remove local utfchar = utf.char local lpegmatch = lpeg.match local allocate = utilities.storage.allocate -local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end) +local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end) -local report_registers = logs.reporter("structure","registers") +local report_registers = logs.reporter("structure","registers") -local structures = structures -local registers = structures.registers -local helpers = structures.helpers -local sections = structures.sections -local documents = structures.documents -local pages = structures.pages -local references = structures.references +local structures = structures +local registers = 
structures.registers +local helpers = structures.helpers +local sections = structures.sections +local documents = structures.documents +local pages = structures.pages +local references = structures.references -local mappings = sorters.mappings -local entries = sorters.entries -local replacements = sorters.replacements +local mappings = sorters.mappings +local entries = sorters.entries +local replacements = sorters.replacements -local processors = typesetters.processors -local splitprocessor = processors.split +local processors = typesetters.processors +local splitprocessor = processors.split -local variables = interfaces.variables -local context = context +local texgetcount = tex.getcount -local matchingtilldepth, numberatdepth = sections.matchingtilldepth, sections.numberatdepth +local variables = interfaces.variables +local context = context +local commands = commands + +local matchingtilldepth = sections.matchingtilldepth +local numberatdepth = sections.numberatdepth + +local absmaxlevel = 5 -- \c_strc_registers_maxlevel -- some day we will share registers and lists (although there are some conceptual -- differences in the application of keywords) @@ -286,7 +291,7 @@ end function registers.enhance(name,n) local r = tobesaved[name].entries[n] if r then - r.references.realpage = texcount.realpageno + r.references.realpage = texgetcount("realpageno") end end @@ -298,7 +303,7 @@ function registers.extend(name,tag,rawdata) -- maybe do lastsection internally local r = tobesaved[name].entries[tag] if r then local rr = r.references - rr.lastrealpage = texcount.realpageno + rr.lastrealpage = texgetcount("realpageno") rr.lastsection = sections.currentid() if rawdata then if rawdata.entries then @@ -663,11 +668,31 @@ function registers.flush(data,options,prefixspec,pagespec) local collapse_singles = options.compress == variables.yes local collapse_ranges = options.compress == variables.all local result = data.result + local maxlevel = 0 + -- + for i=1,#result do + local data = result[i].data + for d=1,#data do + local m = #data[d].list + if m > maxlevel then + maxlevel = m + end + end + end + if maxlevel > absmaxlevel then + maxlevel = absmaxlevel + report_registers("limiting level to %a",maxlevel) + end + -- context.startregisteroutput() +local done = { } for i=1,#result do -- ranges need checking ! 
local sublist = result[i] - local done = { false, false, false, false } + -- local done = { false, false, false, false } +for i=1,maxlevel do + done[i] = false +end local data = sublist.data local d, n = 0, 0 context.startregistersection(sublist.tag) @@ -683,20 +708,28 @@ function registers.flush(data,options,prefixspec,pagespec) end end end + -- ok, this is tricky: we use e[i] delayed so we need it to be local + -- but we don't want to allocate too many entries so there we go while d < #data do d = d + 1 local entry = data[d] - local e = { false, false, false, false } + local e = { false, false, false } +for i=3,maxlevel do + e[i] = false +end local metadata = entry.metadata local kind = metadata.kind local list = entry.list - for i=1,4 do -- max 4 + for i=1,maxlevel do if list[i] then e[i] = list[i][1] end if e[i] ~= done[i] then if e[i] and e[i] ~= "" then done[i] = e[i] +for j=i+1,maxlevel do + done[j] = false +end if n == i then context.stopregisterentries() context.startregisterentries(n) @@ -713,6 +746,8 @@ function registers.flush(data,options,prefixspec,pagespec) local internal = entry.references.internal or 0 local seeparent = entry.references.seeparent or "" local processor = entry.processors and entry.processors[1] or "" + -- so, we need to keep e as is (local), or we need local title = e[i] ... which might be + -- more of a problem if metadata then context.registerentry(processor,internal,seeparent,function() helpers.title(e[i],metadata) end) else -- ? @@ -720,6 +755,9 @@ function registers.flush(data,options,prefixspec,pagespec) end else done[i] = false +for j=i+1,maxlevel do + done[j] = false +end end end end @@ -850,6 +888,7 @@ function registers.flush(data,options,prefixspec,pagespec) data.metadata.sorted = false end + function registers.analyze(class,options) context(registers.analyzed(class,options)) end diff --git a/tex/context/base/strc-reg.mkiv b/tex/context/base/strc-reg.mkiv index 8c9f040f0..febb4c0b0 100644 --- a/tex/context/base/strc-reg.mkiv +++ b/tex/context/base/strc-reg.mkiv @@ -116,6 +116,7 @@ %D \stoptyping \newconditional\c_strc_registers_defining +\setnewconstant\c_strc_registers_maxlevel \plusfive \ifdefined\Word \else \unexpanded\def\Word#1{#1} \fi @@ -131,7 +132,7 @@ \setuevalue{\e!place\currentregister}{\placeregister[\currentregister]}% \setuevalue{\e!complete\currentregister}{\completeregister[\currentregister]}% \setuevalue{\e!setup\currentregister\e!endsetup}{\setupregister[\currentregister]}% - \dorecurse\plusthree {% weird, expanded should not be needed + \dorecurse\c_strc_registers_maxlevel{% weird, expanded should not be needed \normalexpanded{\defineregister[\currentregister:\recurselevel][\currentregister]}% %\defineregister[\currentregister:\recurselevel][\currentregister]% \letregisterparameter{\c!entries:\recurselevel}\empty % needed as we use detokenize (ok, we can @@ -672,14 +673,29 @@ \dostoptagged \endgroup} +% \unexpanded\def\startregisterentries#1% depth +% {\endgraf +% \begingroup +% \dostarttagged\t!registerentries\empty +% \let\savedcurrentregister\currentregister +% \edef\currentregister{\currentregister:#1}% +% \useregisterstyleandcolor\c!textstyle\c!textcolor +% \advance\leftskip\numexpr#1-\plusone\relax\dimexpr\d_strc_registers_distance\relax +% \hangindent\registerparameter\c!distance\relax +% \hangafter\plusone +% \let\currentregister\savedcurrentregister} + \unexpanded\def\startregisterentries#1% depth {\endgraf \begingroup + \scratchcounter\ifnum#1>\c_strc_registers_maxlevel\c_strc_registers_maxlevel\else#1\fi\relax 
\dostarttagged\t!registerentries\empty \let\savedcurrentregister\currentregister - \edef\currentregister{\currentregister:#1}% + \edef\currentregister{\currentregister:\number\scratchcounter}% \useregisterstyleandcolor\c!textstyle\c!textcolor - \advance\leftskip\numexpr#1-\plusone\relax\dimexpr\d_strc_registers_distance\relax + \ifnum\scratchcounter>\plusone + \advance\leftskip\d_strc_registers_distance\relax + \fi \hangindent\registerparameter\c!distance\relax \hangafter\plusone \let\currentregister\savedcurrentregister} diff --git a/tex/context/base/strc-ren.mkiv b/tex/context/base/strc-ren.mkiv index 00c8c3cd4..fdf8fb7f4 100644 --- a/tex/context/base/strc-ren.mkiv +++ b/tex/context/base/strc-ren.mkiv @@ -26,7 +26,7 @@ % \def\doTitle#1#2{\ruledvbox{\forgetall \hsize=4cm \ruledhbox{\ruledvtop{#1}\ruledvtop{#2}}}} % \section{test test test test test test test test test test test test test test test test test} -\newtoks\everyheadstart +% \newtoks\everyheadstart % not used currently \unexpanded\def\strc_rendering_initialize_style_and_color {\ifconditional\headisdisplay @@ -625,10 +625,16 @@ \dontleavehmode % in case there is no strut, else side effects with llap \ifconditional\headshownumber \llap { + \signalrightpage \hbox { \hfill \headnumbercontent - \hskip\dimexpr\d_strc_rendering_local_leftoffset+\doifoddpageelse\leftmargindistance\rightmargindistance\relax + \doifrightpageelse{ + \scratchdistance\leftmargindistance + } { + \scratchdistance\rightmargindistance + } + \hskip\dimexpr\d_strc_rendering_local_leftoffset+\scratchdistance\relax } } \fi diff --git a/tex/context/base/strc-sec.mkiv b/tex/context/base/strc-sec.mkiv index a5ff2084e..2836b018b 100644 --- a/tex/context/base/strc-sec.mkiv +++ b/tex/context/base/strc-sec.mkiv @@ -432,6 +432,9 @@ \unexpanded\def\startnamedsection {\dotripleempty\strc_sectioning_start_named_section} +% todo: add grouping but where: before/after trickery .. probably inside because one can always add +% grouping to the before/after settings + \unexpanded\def\strc_sectioning_start_named_section[#1]% [#2][#3] {\pushmacro\currentnamedsection \edef\currentnamedsection{#1}% @@ -588,11 +591,13 @@ \headparameter\c!beforesection % beware, no users vars set yet \the\everybeforehead \strc_sectioning_handle{#1}{#2}{#3}% name -- -- -- userdata (we might move the tagged to here) + % potential: \bgroup (can be optional: grouped = yes) \headparameter\c!insidesection} \unexpanded\def\strc_sectioning_stop[#1]% !!! 
also used at lua end {\dostoptagged \dostoptagged + % potential: \egroup %\globalpopmacro\currenthead % so we do a hard recover \xdef\currenthead{#1}% recover \headparameter\c!aftersection @@ -681,8 +686,15 @@ \setfalse\headshownumber \fi} +\newtoks\everyheadsynchronization + +\appendtoks + \currentstructuresynchronize +\to \everyheadsynchronization + \unexpanded\def\theheadsynchonization - {\currentstructuresynchronize} + {\the\everyheadsynchronization + \currentstructuresynchronize} % BEWARE: \marking[section]{my text} does not work as we use list indices instead % so we need a 'keep track of raw set option' (or maybe a funny internal prefix) diff --git a/tex/context/base/supp-box.lua b/tex/context/base/supp-box.lua index c7382834a..bc0a7056e 100644 --- a/tex/context/base/supp-box.lua +++ b/tex/context/base/supp-box.lua @@ -6,12 +6,16 @@ if not modules then modules = { } end modules ['supp-box'] = { license = "see context related readme files" } --- this is preliminary code +-- this is preliminary code, use insert_before etc local report_hyphenation = logs.reporter("languages","hyphenation") -local tex, node = tex, node -local context, commands, nodes = context, commands, nodes +local tex = tex +local context = context +local commands = commands +local nodes = nodes + +local splitstring = string.split local nodecodes = nodes.nodecodes @@ -22,11 +26,17 @@ local glue_code = nodecodes.glue local glyph_code = nodecodes.glyph local new_penalty = nodes.pool.penalty +local new_hlist = nodes.pool.hlist +local new_glue = nodes.pool.glue + +local free_node = nodes.free +local copy_list = nodes.copy_list +local copy_node = nodes.copy +local find_tail = nodes.tail -local free_node = node.free -local copynodelist = node.copy_list -local copynode = node.copy -local texbox = tex.box +local texsetbox = tex.setbox +local texgetbox = tex.getbox +local texget = tex.get local function hyphenatedlist(list) while list do @@ -56,7 +66,7 @@ end local function checkedlist(list) if type(list) == "number" then - return texbox[list].list + return texgetbox(list).list else return list end @@ -73,9 +83,9 @@ local function applytochars(list,what,nested) applytochars(current.list,what,nested) context.endhbox() elseif id ~= glyph_code then - noaction(copynode(current)) + noaction(copy_node(current)) else - doaction(copynode(current)) + doaction(copy_node(current)) end current = current.next end @@ -90,10 +100,10 @@ local function applytowords(list,what,nested) local id = current.id if id == glue_code then if start then - doaction(copynodelist(start,current)) + doaction(copy_list(start,current)) start = nil end - noaction(copynode(current)) + noaction(copy_node(current)) elseif nested and (id == hlist_code or id == vlist_code) then context.beginhbox() applytowords(current.list,what,nested) @@ -104,9 +114,97 @@ local function applytowords(list,what,nested) current = current.next end if start then - doaction(copynodelist(start)) + doaction(copy_list(start)) end end commands.applytochars = applytochars commands.applytowords = applytowords + +local split_char = lpeg.Ct(lpeg.C(1)^0) +local split_word = lpeg.tsplitat(lpeg.patterns.space) +local split_line = lpeg.tsplitat(lpeg.patterns.eol) + +function commands.processsplit(str,command,how,spaced) + how = how or "word" + if how == "char" then + local words = lpeg.match(split_char,str) + for i=1,#words do + local word = words[i] + if word == " " then + if spaced then + context.space() + end + elseif command then + context[command](word) + else + context(word) + end + end + 
elseif how == "word" then + local words = lpeg.match(split_word,str) + for i=1,#words do + local word = words[i] + if spaced and i > 1 then + context.space() + end + if command then + context[command](word) + else + context(word) + end + end + elseif how == "line" then + local words = lpeg.match(split_line,str) + for i=1,#words do + local word = words[i] + if spaced and i > 1 then + context.par() + end + if command then + context[command](word) + else + context(word) + end + end + else + context(str) + end +end + +function commands.vboxlisttohbox(original,target,inbetween) + local current = texgetbox(original).list + local head = nil + local tail = nil + while current do + if current.id == hlist_code then + local list = current.list + if head then + if inbetween > 0 then + local n = new_glue(0,0,inbetween) + tail.next = n + n.prev = tail + tail = n + end + tail.next = list + list.prev = tail + else + head = list + end + tail = find_tail(list) + tail.next = nil + current.list = nil + end + current = current.next + end + local result = new_hlist() + result.list = head + texsetbox(target,result) +end + +function commands.hboxtovbox(original) + local b = texgetbox(original) + local factor = texget("baselineskip").width / texget("hsize") + b.depth = 0 + b.height = b.width * factor +end diff --git a/tex/context/base/supp-box.mkiv b/tex/context/base/supp-box.mkiv index 833096222..ad35b525b 100644 --- a/tex/context/base/supp-box.mkiv +++ b/tex/context/base/supp-box.mkiv @@ -1346,11 +1346,11 @@ %D {processisolatedwords,processisolatedchars} %D %D \startbuffer -%D \processisolatedchars{some more words} \ruledhbox \par -%D \processisolatedchars{and some $x + y = z$ math} \ruledhbox \par +%D \processisolatedchars{some more words} \ruledhbox \par +%D \processisolatedchars{and some $x + y = z$ math} \ruledhbox \par %D \processisolatedchars{and a \hbox{$x + y = z$}} \ruledhbox \par -%D \processisolatedwords{some more words} \ruledhbox \par -%D \processisolatedwords{and some $x + y = z$ math} \ruledhbox \par +%D \processisolatedwords{some more words} \ruledhbox \par +%D \processisolatedwords{and some $x + y = z$ math} \ruledhbox \par %D \processisolatedwords{and a \hbox{$x + y = z$}} \ruledhbox \par %D \stopbuffer %D @@ -1379,7 +1379,15 @@ \let\processword\relax -%D The better variant: +\unexpanded\def\applytosplitstringchar#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","char")}} +\unexpanded\def\applytosplitstringword#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","word")}} +\unexpanded\def\applytosplitstringline#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","line")}} + +\unexpanded\def\applytosplitstringcharspaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","char",true)}} +\unexpanded\def\applytosplitstringwordspaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","word",true)}} +\unexpanded\def\applytosplitstringlinespaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","line",true)}} + +%D A variant: \unexpanded\def\applytocharacters#1% {\dontleavehmode @@ -1730,35 +1738,56 @@ %D These macros are used in reformatting footnotes, so they do %D what they're meant for. 
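
A note on the new commands.processsplit helper introduced above, before we get to the vbox/hbox rework below: it splits its string argument by character, word or line and feeds every piece to the given context command, optionally separated by \space or \par, and the \applytosplitstring... macros are thin wrappers around it. The following stand-alone sketch mimics the word and line branches outside ConTeXt (plain Lua plus the lpeg module, print-style output instead of context calls, and a simplified splitter in place of the lpeg.tsplitat extension), so the dispatch logic can be tried in isolation:

local lpeg = require("lpeg")
local P, C, Ct = lpeg.P, lpeg.C, lpeg.Ct

-- crude stand-in for ConTeXt's lpeg.tsplitat: capture the pieces between separators
local function tsplitat(separator)
    local other = C((1 - separator)^0)
    return Ct(other * (separator * other)^0)
end

local split_word = tsplitat(P(" ")^1)
local split_line = tsplitat(P("\r\n") + P("\n"))

local function processsplit(str,command,how,spaced)
    local splitter = how == "line" and split_line or split_word
    local words = lpeg.match(splitter,str)
    for i=1,#words do
        if spaced and i > 1 then
            io.write(how == "line" and "\\par " or "\\space ")
        end
        io.write(command,"{",words[i],"} ") -- the real code calls context[command](word)
    end
    io.write("\n")
end

processsplit("some more words","\\bold","word",true)
-- prints: \bold{some} \space \bold{more} \space \bold{words}

The char branch of the real helper is slightly different (space characters in the string become context.space() calls themselves) and is left out of this sketch.
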
-\unexpanded\def\setvboxtohbox - {\bgroup - \ifdim\baselineskip<16pt \relax - \scratchdimen\baselineskip - \multiply\scratchdimen 1024 - \else - \message{cropping \baselineskip to 16pt}% - \scratchdimen\maxdimen - \fi - \divide\scratchdimen \hsize - \multiply\scratchdimen 64 - \xdef\vboxtohboxfactor{\withoutpt\the\scratchdimen}% - \egroup} +\newdimen\vboxtohboxslack +\newdimen\hboxestohboxslack + +% Create line and fake height of paragraph by messign with heights: +% a nice hack by DEK himself. + +%\unexpanded\def\setvboxtohbox +% {\bgroup +% \ifdim\baselineskip<16pt \relax +% \scratchdimen\baselineskip +% \multiply\scratchdimen 1024 +% \else +% \message{cropping \baselineskip to 16pt}% +% \scratchdimen\maxdimen +% \fi +% \divide\scratchdimen \hsize +% \multiply\scratchdimen 64 +% \xdef\vboxtohboxfactor{\withoutpt\the\scratchdimen}% +% \egroup} +% +% \unexpanded\def\startvboxtohbox +% {\bgroup +% \setvboxtohbox +% \setbox\scratchbox\hbox\bgroup} +% +% \unexpanded\def\stopvboxtohbox +% {\ifcase\vboxtohboxslack\else\hskip\zeropoint\!!minus\vboxtohboxslack\fi +% \egroup +% \dp\scratchbox\zeropoint +% \ht\scratchbox\vboxtohboxfactor\wd\scratchbox +% \box\scratchbox +% \egroup} + +% More modern: \unexpanded\def\startvboxtohbox - {\bgroup - \setvboxtohbox - \setbox\scratchbox\hbox\bgroup} + {\bgroup + \setbox\scratchbox\hbox\bgroup} \unexpanded\def\stopvboxtohbox - {\egroup - \dp\scratchbox\zeropoint - \ht\scratchbox\vboxtohboxfactor\wd\scratchbox + {\ifcase\vboxtohboxslack\else\hskip\zeropoint\!!minus\vboxtohboxslack\fi + \egroup + \ctxcommand{hboxtovbox(\number\scratchbox)}% \box\scratchbox \egroup} +% A possible reconstruction: + \unexpanded\def\convertvboxtohbox - {\setvboxtohbox - \makehboxofhboxes + {\makehboxofhboxes \setbox0\hbox{\unhbox0 \removehboxes}% \noindent\unhbox0\par} @@ -1776,6 +1805,42 @@ {\removehboxes}\unhbox0 \fi} +% And one special for notes: + +% \unexpanded\def\starthboxestohbox +% {\bgroup +% \beginofshapebox} +% +% \unexpanded\def\stophboxestohbox +% {\endofshapebox +% \doreshapebox +% {\hbox\bgroup +% \unhbox\shapebox +% \ifcase\hboxestohboxslack\else\hskip\zeropoint\!!minus\hboxestohboxslack\fi +% \egroup}% +% \donothing +% \donothing +% \donothing % get rid of penalties etc +% \innerflushshapebox +% \convertvboxtohbox +% \par +% \egroup} + +% More modern: + +\unexpanded\def\starthboxestohbox + {\bgroup + \setbox\scratchbox\vbox\bgroup} + +\unexpanded\def\stophboxestohbox + {\egroup + \ctxcommand{vboxlisttohbox(\number\scratchbox,\number\nextbox,\number\dimexpr\hboxestohboxslack)}% + \dontleavehmode + \unhbox\nextbox + \removeunwantedspaces + \par + \egroup} + %D \macros %D {unhhbox} %D diff --git a/tex/context/base/symb-ini.lua b/tex/context/base/symb-ini.lua index deeef667a..9586338be 100644 --- a/tex/context/base/symb-ini.lua +++ b/tex/context/base/symb-ini.lua @@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['symb-ini'] = { license = "see context related readme files" } +local context, commands = context, commands local variables = interfaces.variables diff --git a/tex/context/base/syst-aux.lua b/tex/context/base/syst-aux.lua index b0fb8483b..9ef28996a 100644 --- a/tex/context/base/syst-aux.lua +++ b/tex/context/base/syst-aux.lua @@ -16,7 +16,8 @@ local commands, context = commands, context local settings_to_array = utilities.parsers.settings_to_array local format = string.format local utfsub = utf.sub -local P, C, Carg, lpegmatch, utf8char = lpeg.P, lpeg.C, lpeg.Carg, lpeg.match, lpeg.patterns.utf8char +local P, S, C, Cc, Cs, Carg, lpegmatch, 
utf8char = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Carg, lpeg.match, lpeg.patterns.utf8char + local setvalue = context.setvalue @@ -28,6 +29,15 @@ function commands.getfirstcharacter(str) setvalue("remainingcharacters",rest) end +function commands.thefirstcharacter(str) + local first, rest = lpegmatch(pattern,str) + context(first) +end +function commands.theremainingcharacters(str) + local first, rest = lpegmatch(pattern,str) + context(rest) +end + local pattern = C(utf8char^-1) function commands.doiffirstcharelse(chr,str) @@ -78,3 +88,23 @@ end -- \gdef\setpercentdimen#1#2% -- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax} + +local spaces = P(" ")^0/"" + +local pattern = Cs( + ( P("global") / "\\global" )^0 + * spaces + * ( P("unexpanded") / "\\unexpanded" )^0 + * spaces + * Cc("\\expandafter\\") + * spaces + * ( P("expanded") / "e" )^0 + * spaces + * ( P((1-S(" #"))^1) / "def\\csname %0\\endcsname" ) + * spaces + * Cs( (P("##")/"#" + P(1))^0 ) +) + +function commands.thetexdefinition(str) + context(lpegmatch(pattern,str)) +end diff --git a/tex/context/base/syst-aux.mkiv b/tex/context/base/syst-aux.mkiv index 542b132ae..97f601cc9 100644 --- a/tex/context/base/syst-aux.mkiv +++ b/tex/context/base/syst-aux.mkiv @@ -401,7 +401,52 @@ \expandafter\m_syst_action_nop \fi} -%D This macro uses some auxiliary macros. Although we were able to program quite +%D Here's one for skipping spaces and pars, handy for: +%D +%D \starttyping +%D \hbox +%D +%D {a few lines later} +%D \stoptyping + +% \unexpanded\def\assumelongusagecs#1% +% {\let\m_syst_action#1% +% \futurelet\nexttoken\syst_helpers_ignore_par_character} +% +% \def\syst_helpers_ignore_par_character +% {\ifx\nexttoken\blankspace +% \expandafter\syst_helpers_ignore_par_character_blankspace +% \else +% \expandafter\syst_helpers_ignore_par_character_followup +% \fi} +% +% \def\syst_helpers_ignore_par_character_followup +% {\ifx\nexttoken\par +% \expandafter\syst_helpers_ignore_par_partoken +% \else +% \expandafter\m_syst_action +% \fi} +% +% \def\syst_helpers_ignore_par_partoken +% {\afterassignment\m_syst_action\let\nexttoken} + +\unexpanded\def\assumelongusagecs#1% can be relaxed when we have long support in \hbox etc + {\let\m_syst_action#1% + \futurelet\nexttoken\syst_helpers_ignore_spacing} + +\def\syst_helpers_ignore_spacing + {\ifx\nexttoken\blankspace + \singleexpandafter\syst_helpers_ignore_spacing_blankspace + \else\ifx\nexttoken\par + \doubleexpandafter\syst_helpers_ignore_spacing_partoken + \else + \doubleexpandafter\m_syst_action + \fi\fi} + +\def\syst_helpers_ignore_spacing_partoken\par + {\futurelet\nexttoken\syst_helpers_ignore_spacing} + +%D These macros use some auxiliary macros. Although we were able to program quite %D complicated things, I only understood these after rereading the \TEX book. The %D trick is in using a command with a one character name. Such commands differ from %D the longer ones in the fact that trailing spaces are {\em not} skipped. This @@ -429,6 +474,9 @@ \def\:{\syst_helpers_reinspect_next_parenthesis_character} \expandafter\def\: {\let\if_next_blank_space_token\iftrue\futurelet\nexttoken\syst_helpers_inspect_next_parenthesis_character} +\def\:{\syst_helpers_ignore_spacing_blankspace} +\expandafter\def\: {\futurelet\nexttoken\syst_helpers_ignore_spacing} + \let\:\next %D \macros @@ -1243,8 +1291,13 @@ %D complicated arguments, for instance arguments that %D consist of two or more expandable tokens. 
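
The thetexdefinition pattern added to syst-aux.lua above is what makes the simplified \starttexdefinition parser further below possible: the prefix words of the definition line are rewritten into primitive prefixes plus a \def\csname ... \endcsname head in a single lpeg pass. A quick stand-alone check of what it produces (the pattern is copied from the patch, only the lpeg locals are pulled in explicitly; the two sample calls and the expected results in the comments are mine, not part of the patch):

local lpeg = require("lpeg")
local P, S, Cc, Cs = lpeg.P, lpeg.S, lpeg.Cc, lpeg.Cs

local spaces = P(" ")^0/""

local pattern = Cs(
    ( P("global") / "\\global" )^0
  * spaces
  * ( P("unexpanded") / "\\unexpanded" )^0
  * spaces
  * Cc("\\expandafter\\")
  * spaces
  * ( P("expanded") / "e" )^0
  * spaces
  * ( P((1-S(" #"))^1) / "def\\csname %0\\endcsname" )
  * spaces
  * Cs( (P("##")/"#" + P(1))^0 )
)

print(lpeg.match(pattern,"test #1"))
-- \expandafter\def\csname test\endcsname#1
print(lpeg.match(pattern,"global unexpanded expanded test #oeps"))
-- \global\unexpanded\expandafter\edef\csname test\endcsname#oeps

(At the TeX end the parameter characters arrive doubled, ##1, which the trailing Cs maps back to #1.)
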
-\def\getfirstcharacter #1{\ctxcommand{getfirstcharacter(\!!bs#1\!!es)}} -\def\doiffirstcharelse#1#2{\ctxcommand{doiffirstcharelse(\!!bs#1\!!es,\!!bs#2\!!es)}} % chr str +\let\firstcharacter \empty +\let\remainingcharacters\empty + +\unexpanded\def\getfirstcharacter #1{\ctxcommand{getfirstcharacter(\!!bs#1\!!es)}} +\unexpanded\def\doiffirstcharelse #1#2{\ctxcommand{doiffirstcharelse(\!!bs#1\!!es,\!!bs#2\!!es)}} % chr str +\unexpanded\def\thefirstcharacter #1{\ctxcommand{thefirstcharacter(\!!bs#1\!!es)}} +\unexpanded\def\theremainingcharacters#1{\ctxcommand{theremainingcharacters(\!!bs#1\!!es)}} %D \macros %D {doifinstringelse, doifincsnameelse} @@ -2892,13 +2945,18 @@ %D %D Trivial: -\def\letempty #1{\let#1\empty} -\def\globalletempty#1{\global\let#1\empty} +\unexpanded\def\letempty #1{\let#1\empty} +\unexpanded\def\globalletempty#1{\global\let#1\empty} -\def\letvalueempty #1{\expandafter\let\csname#1\endcsname\empty} -\def\letgvalueempty#1{\global\expandafter\let\csname#1\endcsname\empty} -\def\letvaluerelax #1{\expandafter\let\csname#1\endcsname\relax} -\def\letgvalurelax #1{\global\expandafter\let\csname#1\endcsname\relax} +\unexpanded\def\letvalueempty #1{\expandafter\let\csname#1\endcsname\empty} +\unexpanded\def\letgvalueempty#1{\global\expandafter\let\csname#1\endcsname\empty} +\unexpanded\def\letvaluerelax #1{\expandafter\let\csname#1\endcsname\relax} +\unexpanded\def\letgvalurelax #1{\global\expandafter\let\csname#1\endcsname\relax} + +\unexpanded\def\relaxvalueifundefined#1% + {\ifcsname#1\endcsname \else + \expandafter\let\csname#1\endcsname\relax + \fi} %D \macros %D {wait} @@ -3122,6 +3180,63 @@ \def\s!unexpanded{unexpanded} +% \bgroup \obeylines +% +% \global\let\stoptexdefinition\relax +% +% \unexpanded\gdef\starttexdefinition% +% {\bgroup% +% \obeylines% +% \syst_helpers_start_tex_definition} +% +% \gdef\syst_helpers_start_tex_definition #1 +% {\catcode\endoflineasciicode\ignorecatcode% +% \doifinstringelse\letterhash{\detokenize{#1}}\syst_helpers_start_tex_definition_yes\syst_helpers_start_tex_definition_nop#1 +% } +% +% \gdef\syst_helpers_start_tex_definition_yes#1 #2 +% {\edef\texdefinitionname{#1}% +% \ifx\texdefinitionname\s!unexpanded% +% \expandafter\syst_helpers_start_tex_definition_yes_unexpanded% +% \else% +% \expandafter\syst_helpers_start_tex_definition_yes_normal% +% \fi% +% {#1}#2 +% } +% +% \gdef\syst_helpers_start_tex_definition_yes_unexpanded#1#2 #3 +% #4\stoptexdefinition% +% {\egroup% #1=unexpanded +% \unexpanded\expandafter\def\csname#2\endcsname#3{#4}} +% +% \gdef\syst_helpers_start_tex_definition_yes_normal#1#2 +% #3\stoptexdefinition% +% {\egroup% +% \expandafter\def\csname#1\endcsname#2{#3}} +% +% \gdef\syst_helpers_start_tex_definition_nop#1 +% {\syst_helpers_start_tex_definition_nop_indeed{#1}{}} +% +% \gdef\syst_helpers_start_tex_definition_nop_indeed#1#2#3\stoptexdefinition% +% {\egroup% +% \expandafter\def\csname#1\endcsname{#3}} +% +% \egroup + +% \starttexdefinition unexpanded test #1 +% [here #1] +% \stoptexdefinition +% +% \starttexdefinition global unexpanded test +% [here test] +% \stoptexdefinition +% +% \scratchcounter=123 +% +% \starttexdefinition global unexpanded expanded test #oeps +% [here #oeps: \the\scratchcounter] +% \stoptexdefinition + \bgroup \obeylines \global\let\stoptexdefinition\relax @@ -3129,39 +3244,15 @@ \unexpanded\gdef\starttexdefinition% {\bgroup% \obeylines% - \syst_helpers_start_tex_definition} + \syst_helpers_start_tex_definition_one} -\gdef\syst_helpers_start_tex_definition #1 
+\gdef\syst_helpers_start_tex_definition_one#1 {\catcode\endoflineasciicode\ignorecatcode% - \doifinstringelse\letterhash{\detokenize{#1}}\syst_helpers_start_tex_definition_yes\syst_helpers_start_tex_definition_nop#1 - } - -\gdef\syst_helpers_start_tex_definition_yes#1 #2 - {\edef\texdefinitionname{#1}% - \ifx\texdefinitionname\s!unexpanded% - \expandafter\syst_helpers_start_tex_definition_yes_unexpanded% - \else% - \expandafter\syst_helpers_start_tex_definition_yes_normal% - \fi% - {#1}#2 - } - -\gdef\syst_helpers_start_tex_definition_yes_unexpanded#1#2 #3 - #4\stoptexdefinition% - {\egroup% #1=unexpanded - \unexpanded\expandafter\def\csname#2\endcsname#3{#4}} - -\gdef\syst_helpers_start_tex_definition_yes_normal#1#2 - #3\stoptexdefinition% - {\egroup% - \expandafter\def\csname#1\endcsname#2{#3}} + \syst_helpers_start_tex_definition_two{#1}} -\gdef\syst_helpers_start_tex_definition_nop#1 - {\syst_helpers_start_tex_definition_nop_indeed{#1}{}} - -\gdef\syst_helpers_start_tex_definition_nop_indeed#1#2#3\stoptexdefinition% +\gdef\syst_helpers_start_tex_definition_two#1#2\stoptexdefinition% {\egroup% - \expandafter\def\csname#1\endcsname{#3}} + \ctxcommand{thetexdefinition("#1")}{#2}} \egroup @@ -4481,7 +4572,6 @@ {\let\dodogotopar#1% \redogotopar\par} - \unexpanded\def\GetPar {\expanded {\dowithpar diff --git a/tex/context/base/syst-lua.lua b/tex/context/base/syst-lua.lua index ef524c339..e47041444 100644 --- a/tex/context/base/syst-lua.lua +++ b/tex/context/base/syst-lua.lua @@ -10,9 +10,10 @@ local format, find, match, rep = string.format, string.find, string.match, strin local tonumber = tonumber local S, lpegmatch, lpegtsplitat = lpeg.S, lpeg.match, lpeg.tsplitat -local context = context +commands = commands or { } +local commands = commands -commands = commands or { } +local context = context function commands.writestatus(...) logs.status(...) 
end -- overloaded later diff --git a/tex/context/base/tabl-ntb.mkiv b/tex/context/base/tabl-ntb.mkiv index 57134cb53..242c3d6fe 100644 --- a/tex/context/base/tabl-ntb.mkiv +++ b/tex/context/base/tabl-ntb.mkiv @@ -1517,18 +1517,29 @@ {\scratchdimen\tabl_ntb_get_wid\recurselevel\relax [\recurselevel:\the\scratchdimen]}}} +% \def\tabl_ntb_char_align +% {\doifelse{\naturaltablelocalparameter\c!aligncharacter}\v!yes +% \tabl_ntb_char_align_indeed\gobbletwoarguments} + +% \def\tabl_ntb_char_align_indeed#1#2#3% row column data +% {\edef\alignmentclass{#2}% +% \edef\alignmentcharacter{\naturaltablelocalparameter\c!alignmentcharacter}% +% \ifcase\c_tabl_tbl_pass\or +% \setfirstpasscharacteralign\checkalignment{#3}% {\strut#2\unskip}% +% \fi % force hsize, so always a second +% \setsecondpasscharacteralign \checkalignment{#3}% {\strut#2\unskip}% +% \ignorespaces} + \def\tabl_ntb_char_align {\doifelse{\naturaltablelocalparameter\c!aligncharacter}\v!yes - \tabl_ntb_char_align_indeed\gobbleoneargument} + \tabl_ntb_char_align_indeed + \gobbletwoarguments} -\def\tabl_ntb_char_align_indeed#1#2% column data - {\edef\alignmentclass{#1}% - \edef\alignmentcharacter{\naturaltablelocalparameter\c!alignmentcharacter}% - \ifcase\c_tabl_tbl_pass\or - \setfirstpasscharacteralign\checkalignment{#2}% {\strut#2\unskip}% - \fi % force hsize, so always a second - \setsecondpasscharacteralign \checkalignment{#2}% {\strut#2\unskip}% - \ignorespaces} +\def\tabl_ntb_char_align_indeed#1#2% row column + {\ifcase\c_tabl_tbl_pass \or + \setcharacteralign{#2}{\naturaltablelocalparameter\c!alignmentcharacter}% + \fi + \signalcharacteralign{#2}{#1}} \unexpanded\def\tabl_ntb_cell_process_a#1#2[#3]#4% grouping added ! ! ! {\bgroup @@ -1539,7 +1550,7 @@ \tabl_ntb_set_dis{#2}{\the\scratchdimen}% \fi \setupcurrentnaturaltablelocal[#3,\c!background=,\c!frame=\v!off]% 25% faster - \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#2}{#4}\tabl_ntb_cell_stop\tabl_ntb_cell_finalize}}% + \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop\tabl_ntb_cell_finalize}}% \scratchdimen\tabl_ntb_get_wid\c_tabl_ntb_col\relax \ifdim\wd\scratchbox>\scratchdimen \ifsqueezeTBLspan @@ -1625,7 +1636,7 @@ \fi \fi \normalexpanded{\tabl_ntb_cell_process_b_c{\ifdim\scratchdimen>\zeropoint \c!width=\the\scratchdimen\fi}}% - {#1}{#2}[#3]{\tabl_ntb_char_align{#2}{#4}}} + {#1}{#2}[#3]{\tabl_ntb_char_align{#1}{#2}#4}} \unexpanded\def\tabl_ntb_cell_process_c {\tabl_ntb_cell_process_b_c{}} @@ -1634,7 +1645,7 @@ {\tabl_ntb_setup_cell{#1}{#2}% \bgroup \setupcurrentnaturaltablelocal[#3,\c!width=\d_tabl_ntb_width,\c!background=,\c!frame=\v!off]% 25% faster - \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#2}{#4}\tabl_ntb_cell_stop}% + \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop}% \egroup} \unexpanded\def\tabl_ntb_cell_process_e#1#2[#3]#4% @@ -1647,7 +1658,7 @@ \else \setupcurrentnaturaltablelocal[\c!color=,\c!width=\d_tabl_ntb_width,\c!height=\d_tabl_ntb_height]% \fi - \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#2}{#4}\tabl_ntb_cell_stop}}% + \inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop}}% \hskip\tabl_ntb_get_dis{#2}} \setupTABLE diff --git a/tex/context/base/tabl-nte.mkiv b/tex/context/base/tabl-nte.mkiv index 4a9774cb0..af74a2abe 100644 --- a/tex/context/base/tabl-nte.mkiv +++ b/tex/context/base/tabl-nte.mkiv @@ -102,9 
+102,9 @@ \unexpanded\def\startTABLEbody{\dosingleempty\tabl_nte_start_body} \let\stopTABLEbody\relax \unexpanded\def\startTABLEfoot{\dosingleempty\tabl_nte_start_foot} \let\stopTABLEfoot\relax -\def\tabl_nte_start_head[#1]#2\stopTABLEhead{\appendtoks\doTABLEsection[#1]{#2}\to\TBLhead} -\def\tabl_nte_start_next[#1]#2\stopTABLEnext{\appendtoks\doTABLEsection[#1]{#2}\to\TBLnext} -\def\tabl_nte_start_body[#1]#2\stopTABLEbody{\appendtoks\doTABLEsection[#1]{#2}\to\TBLbody} -\def\tabl_nte_start_foot[#1]#2\stopTABLEfoot{\appendtoks\doTABLEsection[#1]{#2}\to\TBLfoot} +\def\tabl_nte_start_head[#1]#2\stopTABLEhead{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_head} +\def\tabl_nte_start_next[#1]#2\stopTABLEnext{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_next} +\def\tabl_nte_start_body[#1]#2\stopTABLEbody{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_body} +\def\tabl_nte_start_foot[#1]#2\stopTABLEfoot{\appendtoks\tabl_ntb_section[#1]{#2}\to\t_tabl_ntb_foot} \protect \endinput diff --git a/tex/context/base/tabl-tbl.lua b/tex/context/base/tabl-tbl.lua index 19548e7b3..21564a472 100644 --- a/tex/context/base/tabl-tbl.lua +++ b/tex/context/base/tabl-tbl.lua @@ -15,7 +15,7 @@ local tonumber = tonumber local gsub, rep, sub, find = string.gsub, string.rep, string.sub, string.find local P, C, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.match -local settexcount = tex.setcount +local texsetcount = tex.setcount local separator = P("|") local nested = lpeg.patterns.nested @@ -31,9 +31,9 @@ function commands.presettabulate(preamble) end local t = lpegmatch(pattern,preamble) local m = #t - 2 - settexcount("global","c_tabl_tabulate_nofcolumns", m/2) - settexcount("global","c_tabl_tabulate_has_rule_spec_first", t[1] == "" and 0 or 1) - settexcount("global","c_tabl_tabulate_has_rule_spec_last", t[m+1] == "" and 0 or 1) + texsetcount("global","c_tabl_tabulate_nofcolumns", m/2) + texsetcount("global","c_tabl_tabulate_has_rule_spec_first", t[1] == "" and 0 or 1) + texsetcount("global","c_tabl_tabulate_has_rule_spec_last", t[m+1] == "" and 0 or 1) for i=1,m,2 do context.settabulateentry(t[i],t[i+1]) end diff --git a/tex/context/base/tabl-tbl.mkiv b/tex/context/base/tabl-tbl.mkiv index 2fa8c4805..fa2417712 100644 --- a/tex/context/base/tabl-tbl.mkiv +++ b/tex/context/base/tabl-tbl.mkiv @@ -48,6 +48,7 @@ % p p(dimen) of automatisch als alleen p % w column width % f font#1 +% A {alignmentoptions} % B bold % I italic % S slanted @@ -231,7 +232,6 @@ \unexpanded\def\tolerantTABLEbreaktrue {\settrue \c_tabl_tabulate_tolerant_break} % used in styles ! \unexpanded\def\handletabulatepbreakfalse{\setfalse\c_tabl_tabulate_handlepbreak } % depricated -\installcorenamespace{tabulatealign} \installcorenamespace{tabulatebox} \installcorenamespace{tabulatesetup} \installcorenamespace{tabulatehook} @@ -296,14 +296,6 @@ % [|lg{.}|] => \NG 12.34 \NC -\def\tabl_tabulate_charalign#1 % space delimited ! 
(will be redone in lua) - {\edef\alignmentclass{\the\c_tabl_tabulate_column}% - \edef\alignmentcharacter{\csname\??tabulatealign\the\c_tabl_tabulate_column\endcsname}% - \ifcase\c_tabl_tabulate_pass\or - \setfirstpasscharacteralign\checkalignment{#1}% - \fi % force hsize - \setsecondpasscharacteralign\checkalignment{#1}} - \def\tabl_tabulate_nobreak_inject_tracer {\red % maybe use the fast color switcher here \hrule\s!height.5\linewidth\s!depth.5\linewidth @@ -424,6 +416,8 @@ \let\tabl_tabulate_hook_b\donothing \let\tabl_tabulate_hook_e\donothing +\let\tabl_tabulate_hook_g\donothing + \def\tabl_tabulate_set_preamble_step#1#2% only makes sense for many tabulates {\normalexpanded{\t_tabl_tabulate_preamble{\the\t_tabl_tabulate_preamble \tabl_tabulate_check_local_vrule_thickness\constantdimenargument\d_tabl_tabulate_vrulethickness @@ -439,6 +433,7 @@ \tabl_tabulate_color_side_both \global\c_tabl_tabulate_colorspan\zerocount \global\c_tabl_tabulate_column\constantnumber\c_tabl_tabulate_columns + \tabl_tabulate_hook_g \tabl_tabulate_setups_check % unexpandable \tabl_tabulate_hook_check % unexpandable \ifzeropt\d_tabl_tabulate_width @@ -458,6 +453,9 @@ \bgroup % we cannot combine the if because a cell may have only one ## \tabl_tabulate_hook_b \c_tabl_tabulate_align\constantnumber\c_tabl_tabulate_align % needed in tag passing + \ifx\m_tabl_tabulate_alignment\empty \else + \spac_align_use_now{\m_tabl_tabulate_alignment}% + \fi \noexpand\dostarttagged\noexpand\t!tabulatecell\noexpand\empty \noexpand\dotagtabulatecell \noexpand#1% @@ -554,6 +552,7 @@ \installtabulatepreambleoption{d}{\t_tabl_tabulate_settings\expandafter{\the\t_tabl_tabulate_settings\fixedspaces}% \tabl_tabulate_set_preamble} \installtabulatepreambleoption{ }{\tabl_tabulate_set_preamble} +\installtabulatepreambleoption{A}{\tabl_tabulate_set_alignment} % We no longer deal with '~' here but map it onto 'd' instead. 
Of course % we could prefix a key with \type {\meaning} instead, which works ok (and @@ -619,10 +618,22 @@ {\setvalue{\??tabulatehook\the\c_tabl_tabulate_columns}{#1}% \tabl_tabulate_set_preamble} +% begin of character align plugin + +\newconditional\c_tabl_auto_align_mode % reset later + +\def\tabl_tabulate_hook_g % partly expanded + {\ifconditional\c_tabl_auto_align_mode + \signalcharacteralign\c_tabl_tabulate_column{\c_tabl_tabulate_noflines+\plusone}% + \fi} + \def\tabl_tabulate_set_align#1% - {\setvalue{\??tabulatealign\the\c_tabl_tabulate_columns}{#1}% + {\global\settrue\c_tabl_auto_align_mode + \setcharacteralign\c_tabl_tabulate_columns{#1}% \tabl_tabulate_set_preamble} +% end of character align plugin + \def\tabl_tabulate_set_before#1% {\t_tabl_tabulate_before{#1}% \tabl_tabulate_set_preamble} @@ -643,6 +654,11 @@ \c_tabl_tabulate_modus\zerocount \tabl_tabulate_pickup_width} +\def\tabl_tabulate_set_alignment#1% + {\edef\m_tabl_tabulate_alignment{#1}% + \spac_align_use_later\m_tabl_tabulate_alignment + \tabl_tabulate_set_preamble} + \def\tabl_tabulate_set_paragraph {\doifnextparenthesiselse {\c_tabl_tabulate_modus\plusone @@ -718,6 +734,7 @@ \installcorenamespace{tabulatecolorspec} +\setvalue{\??tabulatecolorspec C}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\zerocount} \setvalue{\??tabulatecolorspec L}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\plusone } \setvalue{\??tabulatecolorspec M}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\plustwo } \setvalue{\??tabulatecolorspec R}#1{\xdef\m_tabl_tabulate_color {#1}\global\c_tabl_tabulate_colorspan\plusthree} @@ -743,10 +760,12 @@ \t_tabl_tabulate_emath\emptytoks \t_tabl_tabulate_font\emptytoks \t_tabl_tabulate_settings\emptytoks + \global\let\m_tabl_tabulate_alignment\empty \global\let\m_tabl_tabulate_color\empty \global\let\m_tabl_tabulate_text_color\empty \global\let\m_tabl_tabulate_vrule_color\empty \global\c_tabl_tabulate_colorspan\zerocount + \global\setfalse\c_tabl_auto_align_mode \global\advance\c_tabl_tabulate_columns\plusone \expandafter\let\csname\??tabulatesetup\the\c_tabl_tabulate_columns\endcsname\donothing % here ? 
\edef\currenttabulationtrulespec{#1}% @@ -756,7 +775,7 @@ \global\d_tabl_tabulate_vrulethickness\d_tabl_tabulate_vrulethickness_default \rawprocesscommalist[#1]\tabl_tabulate_set_vrule_command \fi - \tabl_tabulate_set_preamble#2\relax\relax % permits i without n + \tabl_tabulate_set_preamble#2\relax\relax % permits i without n \ifcase\c_tabl_tabulate_modus\relax \tabl_tabulate_set_width_normal \or % fixed width @@ -833,8 +852,7 @@ \let\tabl_tabulate_hook\tabl_tabulate_hook_nop -\def\tabl_tabulate_hook_yes {\csname\??tabulatehook \the\c_tabl_tabulate_column\endcsname} -\def\tabl_tabulate_align_yes{\csname\??tabulatealign\the\c_tabl_tabulate_column\endcsname} % to be used +\def\tabl_tabulate_hook_yes{\csname\??tabulatehook\the\c_tabl_tabulate_column\endcsname} \def\tabl_tabulate_pheight_reset {\global\c_tabl_tabulate_plines_min\plusone @@ -1684,7 +1702,8 @@ \unexpanded\def\tabl_tabulate_RQ_first{\tabl_tabulate_column_equal \plusone} \unexpanded\def\tabl_tabulate_HQ_first{\tabl_tabulate_column_equal \plustwo} -\unexpanded\def\tabl_tabulate_NG_first{\NC\tabl_tabulate_charalign} +%unexpanded\def\tabl_tabulate_NG_first{\NC\tabl_tabulate_charalign} +\unexpanded\def\tabl_tabulate_NG_first{\NC} \unexpanded\def\tabl_tabulate_NN_first{\NC\tabl_tabulate_digits} % new, undocumented, test first \unexpanded\def\tabl_tabulate_ND_first{\NC\tabl_tabulate_digits} % same, for old times sake @@ -1911,7 +1930,7 @@ \tabl_tabulate_nobreak_inject \stoptabulatenoalign} -\let\tabl_tabulate_BL_second\tabl_tabulate_TL_second +\let\tabl_tabulate_BL_second_indeed\tabl_tabulate_TL_second_indeed \def\tabl_tabulate_HL_second {\csname @@ -2004,6 +2023,8 @@ \let\tabl_tabulate_flush_collected \empty \let\tabl_tabulate_flush_collected_indeed\empty +\let\v_tabl_tabulate_align\!!zerocount + \def\tabl_tabulate_set_local_hsize {\setlocalhsize \hsize\localhsize} diff --git a/tex/context/base/tabl-xtb.lua b/tex/context/base/tabl-xtb.lua index 3ffe8a219..488ef5b78 100644 --- a/tex/context/base/tabl-xtb.lua +++ b/tex/context/base/tabl-xtb.lua @@ -27,11 +27,12 @@ this mechamism will be improved so that it can replace its older cousin. 
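
A recurring change in this patch, and very visible in tabl-xtb.lua right below, is the move from the tex.count, tex.dimen and tex.box pseudo-tables to the function accessors tex.getcount, tex.setcount, tex.getdimen, tex.setdimen, tex.getbox and tex.setbox, localized at the top of each module; presumably this is both a speed matter (a plain local function call instead of a metatable lookup) and a matter of following the newer LuaTeX interface. A minimal before/after sketch; it only makes sense inside a LuaTeX/ConTeXt run where these registers exist, and the register names are simply the ones used in the surrounding code:

-- old style: indexing the pseudo tables
-- local p  = tex.count.realpageno
-- local tb = tex.box.b_tabl_x
-- tex.dimen.d_tabl_x_width = w

-- new style, as used throughout this patch: localized get/set functions
local texgetcount = tex.getcount
local texgetbox   = tex.getbox
local texsetdimen = tex.setdimen

local p  = texgetcount("realpageno")
local tb = texgetbox("b_tabl_x")
texsetdimen("d_tabl_x_width",65536) -- 65536 scaled points (1pt), just a placeholder value
-- texsetcount("global","c_tabl_tabulate_nofcolumns",0) -- the setters also take an optional "global" prefix
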
local commands, context, tex, node = commands, context, tex, node -local texdimen = tex.dimen -local texcount = tex.count -local texbox = tex.box +local texgetcount = tex.getcount local texsetcount = tex.setcount +local texgetbox = tex.getbox +local texgetdimen = tex.getdimen local texsetdimen = tex.setdimen +local texget = tex.get local format = string.format local concat = table.concat @@ -171,11 +172,11 @@ function xtables.create(settings) settings.leftmargindistance = tonumber(settings.leftmargindistance) or 0 settings.rightmargindistance = tonumber(settings.rightmargindistance) or 0 settings.options = settings_to_hash(settings.option) - settings.textwidth = tonumber(settings.textwidth) or tex.hsize - settings.lineheight = tonumber(settings.lineheight) or texdimen.lineheight + settings.textwidth = tonumber(settings.textwidth) or texget("hsize") + settings.lineheight = tonumber(settings.lineheight) or texgetdimen("lineheight") settings.maxwidth = tonumber(settings.maxwidth) or settings.textwidth/8 -- if #stack > 0 then - -- settings.textwidth = tex.hsize + -- settings.textwidth = texget("hsize") -- end data.criterium_v = 2 * data.settings.lineheight data.criterium_h = .75 * data.settings.textwidth @@ -186,10 +187,10 @@ function xtables.initialize_reflow_width(option) local r = data.currentrow local c = data.currentcolumn + 1 local drc = data.rows[r][c] - drc.nx = texcount.c_tabl_x_nx - drc.ny = texcount.c_tabl_x_ny + drc.nx = texgetcount("c_tabl_x_nx") + drc.ny = texgetcount("c_tabl_x_ny") local distances = data.distances - local distance = texdimen.d_tabl_x_distance + local distance = texgetdimen("d_tabl_x_distance") if distance > distances[c] then distances[c] = distance end @@ -214,7 +215,7 @@ function xtables.set_reflow_width() while row[c].span do -- can also be previous row ones c = c + 1 end - local tb = texbox.b_tabl_x + local tb = texgetbox("b_tabl_x") local drc = row[c] -- drc.list = true -- we don't need to keep the content around as we're in trial mode (no: copy_node_list(tb)) @@ -232,7 +233,7 @@ function xtables.set_reflow_width() depths[r] = depth end -- - local dimensionstate = texcount.frameddimensionstate + local dimensionstate = texgetcount("frameddimensionstate") local fixedcolumns = data.fixedcolumns local fixedrows = data.fixedrows if dimensionstate == 1 then @@ -294,19 +295,19 @@ function xtables.initialize_reflow_height() for x=1,drc.nx-1 do w = w + widths[c+x] end - texdimen.d_tabl_x_width = w + texsetdimen("d_tabl_x_width",w) local dimensionstate = drc.dimensionstate or 0 if dimensionstate == 1 or dimensionstate == 3 then -- width was fixed so height is known - texcount.c_tabl_x_skip_mode = 1 + texsetcount("c_tabl_x_skip_mode",1) elseif dimensionstate == 2 then -- height is enforced - texcount.c_tabl_x_skip_mode = 1 + texsetcount("c_tabl_x_skip_mode",1) elseif data.autowidths[c] then -- width has changed so we need to recalculate the height - texcount.c_tabl_x_skip_mode = 0 + texsetcount("c_tabl_x_skip_mode",0) else - texcount.c_tabl_x_skip_mode = 1 + texsetcount("c_tabl_x_skip_mode",1) end end @@ -315,10 +316,10 @@ function xtables.set_reflow_height() local c = data.currentcolumn local rows = data.rows local row = rows[r] --- while row[c].span do -- we could adapt drc.nx instead --- c = c + 1 --- end - local tb = texbox.b_tabl_x + -- while row[c].span do -- we could adapt drc.nx instead + -- c = c + 1 + -- end + local tb = texgetbox("b_tabl_x") local drc = row[c] if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2 local heights, height = data.heights, 
tb.height @@ -330,8 +331,8 @@ function xtables.set_reflow_height() depths[r] = depth end end --- c = c + drc.nx - 1 --- data.currentcolumn = c + -- c = c + drc.nx - 1 + -- data.currentcolumn = c end function xtables.initialize_construct() @@ -357,9 +358,9 @@ function xtables.initialize_construct() h = h + heights[r+y] d = d + depths[r+y] end - texdimen.d_tabl_x_width = w - texdimen.d_tabl_x_height = h + d - texdimen.d_tabl_x_depth = 0 + texsetdimen("d_tabl_x_width",w) + texsetdimen("d_tabl_x_height",h + d) + texsetdimen("d_tabl_x_depth",0) end function xtables.set_construct() @@ -367,14 +368,14 @@ function xtables.set_construct() local c = data.currentcolumn local rows = data.rows local row = rows[r] --- while row[c].span do -- can also be previous row ones --- c = c + 1 --- end + -- while row[c].span do -- can also be previous row ones + -- c = c + 1 + -- end local drc = row[c] -- this will change as soon as in luatex we can reset a box list without freeing - drc.list = copy_node_list(texbox.b_tabl_x) --- c = c + drc.nx - 1 --- data.currentcolumn = c + drc.list = copy_node_list(texgetbox("b_tabl_x")) + -- c = c + drc.nx - 1 + -- data.currentcolumn = c end local function showwidths(where,widths,autowidths) @@ -669,8 +670,8 @@ function xtables.construct() end local kern = new_kern(step) if stop then - stop.prev = kern stop.next = kern + kern.prev = stop else -- can be first spanning next row (ny=...) start = kern end @@ -724,6 +725,8 @@ function xtables.construct() end end +-- todo: join as that is as efficient as fushing multiple + local function inject(row,copy,package) local list = row[1] if copy then @@ -966,7 +969,7 @@ end function xtables.next_row() local r = data.currentrow + 1 - data.modes[r] = texcount.c_tabl_x_mode + data.modes[r] = texgetcount("c_tabl_x_mode") data.currentrow = r data.currentcolumn = 0 end @@ -986,3 +989,6 @@ commands.x_table_init_construct = xtables.initialize_construct commands.x_table_set_reflow_width = xtables.set_reflow_width commands.x_table_set_reflow_height = xtables.set_reflow_height commands.x_table_set_construct = xtables.set_construct + +commands.x_table_r = function() context(data.currentrow or 0) end +commands.x_table_c = function() context(data.currentcolumn or 0) end diff --git a/tex/context/base/tabl-xtb.mkvi b/tex/context/base/tabl-xtb.mkvi index aba4e5027..556bec5ce 100644 --- a/tex/context/base/tabl-xtb.mkvi +++ b/tex/context/base/tabl-xtb.mkvi @@ -94,9 +94,11 @@ \newdimen\d_tabl_x_final_width \newcount\c_tabl_x_nesting \newcount\c_tabl_x_skip_mode % 1 = skip - \newdimen\d_tabl_x_textwidth +\def\currentxtablerow {\ctxcommand{x_table_r()}} +\def\currentxtablecolumn{\ctxcommand{x_table_c()}} + % \setupxtable[one][parent][a=b,c=d] % \setupxtable[one] [a=b,c=d] % \setupxtable [a=b,c=d] @@ -151,6 +153,7 @@ \let\stopxtable\relax \def\tabl_x_default_buffer{x_table_\number\c_tabl_x_nesting} +\let\tabl_x_current_buffer\empty \unexpanded\def\tabl_x_start_table[#settings]% maybe two arguments: [tag][settings] | [tag] | [settings] {\bgroup diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua index 0f477cb6e..51aa550cb 100644 --- a/tex/context/base/task-ini.lua +++ b/tex/context/base/task-ini.lua @@ -12,6 +12,10 @@ if not modules then modules = { } end modules ['task-ini'] = { -- we can disable more handlers and enable then when really used (*) -- -- todo: two finalizers: real shipout (can be imposed page) and page shipout (individual page) +-- +-- todo: consider moving the kernel kerning/ligaturing functions in the main font loop 
because +-- there we know if they are needed; doesn't save time but; if we overload unh* commands to +-- not apply the font handler, we can remove all checks for subtypes 255 local tasks = nodes.tasks local appendaction = tasks.appendaction @@ -23,6 +27,7 @@ appendaction("processors", "normalizers", "typesetters.characters.handler") appendaction("processors", "normalizers", "fonts.collections.process") -- disabled appendaction("processors", "normalizers", "fonts.checkers.missing") -- disabled +appendaction("processors", "characters", "typesetters.characteralign.handler") -- disabled appendaction("processors", "characters", "scripts.autofontfeature.handler") appendaction("processors", "characters", "scripts.splitters.handler") -- disabled appendaction("processors", "characters", "typesetters.cleaners.handler") -- disabled @@ -32,7 +37,10 @@ appendaction("processors", "characters", "typesetters.breakpoints.handler") appendaction("processors", "characters", "scripts.injectors.handler") -- disabled appendaction("processors", "words", "builders.kernel.hyphenation") -- always on -appendaction("processors", "words", "languages.words.check") -- disabled +appendaction("processors", "words", "languages.words.check") -- disabled -- might move up, no disc check needed then + +appendaction("processors", "words", "typesetters.initials.handler") -- disabled -- might move up +appendaction("processors", "words", "typesetters.firstlines.handler") -- disabled -- might move up appendaction("processors", "fonts", "builders.paragraphs.solutions.splitters.split") -- experimental appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo @@ -47,7 +55,7 @@ appendaction("processors", "lists", "typesetters.spacings.handler") appendaction("processors", "lists", "typesetters.kerns.handler") -- disabled appendaction("processors", "lists", "typesetters.digits.handler") -- disabled (after otf handling) appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling) -appendaction("processors", "lists", "typesetters.paragraphs.handler") -- disabled +------------("processors", "lists", "typesetters.initials.handler") -- disabled appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled appendaction("shipouts", "normalizers", "typesetters.alignments.handler") @@ -71,6 +79,8 @@ appendaction("shipouts", "finishers", "attributes.viewerlayers.handler") --maybe integrate relocate and families +appendaction("math", "normalizers", "noads.handlers.showtree", nil, "nohead") + appendaction("math", "normalizers", "noads.handlers.unscript", nil, "nohead") -- always on (maybe disabled) appendaction("math", "normalizers", "noads.handlers.variants", nil, "nohead") -- always on appendaction("math", "normalizers", "noads.handlers.relocate", nil, "nohead") -- always on @@ -83,9 +93,11 @@ appendaction("math", "normalizers", "noads.handlers.resize", nil, "noh appendaction("math", "normalizers", "noads.handlers.check", nil, "nohead") -- always on appendaction("math", "normalizers", "noads.handlers.tags", nil, "nohead") -- disabled appendaction("math", "normalizers", "noads.handlers.italics", nil, "nohead") -- disabled +appendaction("math", "normalizers", "noads.handlers.classes", nil, "nohead") -- disabled appendaction("math", "builders", "builders.kernel.mlist_to_hlist") -- always on ------------("math", "builders", "noads.handlers.italics", nil, "nohead") -- disabled +appendaction("math", "builders", "typesetters.directions.processmath") -- disabled (has to 
happen pretty late) -- quite experimental (nodes.handlers.graphicvadjust might go away) @@ -103,11 +115,12 @@ appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler") -- experimental too -appendaction("mvlbuilders","normalizers","typesetters.checkers.handler") +appendaction("mvlbuilders", "normalizers","typesetters.checkers.handler") appendaction("vboxbuilders","normalizers","typesetters.checkers.handler") -- speedup: only kick in when used +disableaction("processors", "typesetters.characteralign.handler") disableaction("processors", "scripts.autofontfeature.handler") disableaction("processors", "scripts.splitters.handler") disableaction("processors", "scripts.injectors.handler") -- was enabled @@ -120,11 +133,12 @@ disableaction("processors", "typesetters.digits.handler") disableaction("processors", "typesetters.breakpoints.handler") disableaction("processors", "typesetters.directions.handler") disableaction("processors", "languages.words.check") +disableaction("processors", "typesetters.initials.handler") +disableaction("processors", "typesetters.firstlines.handler") disableaction("processors", "typesetters.spacings.handler") disableaction("processors", "typesetters.kerns.handler") disableaction("processors", "typesetters.italics.handler") disableaction("processors", "nodes.handlers.stripping") -disableaction("processors", "typesetters.paragraphs.handler") disableaction("shipouts", "typesetters.alignments.handler") disableaction("shipouts", "nodes.rules.handler") @@ -156,8 +170,11 @@ disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize") disableaction("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete disableaction("finalizers", "builders.paragraphs.tag") +disableaction("math", "noads.handlers.showtree") disableaction("math", "noads.handlers.tags") disableaction("math", "noads.handlers.italics") +disableaction("math", "noads.handlers.classes") +disableaction("math", "typesetters.directions.processmath") disableaction("mvlbuilders", "typesetters.checkers.handler") disableaction("vboxbuilders","typesetters.checkers.handler") diff --git a/tex/context/base/toks-ini.lua b/tex/context/base/toks-ini.lua index ef4b5406b..0f0c016f8 100644 --- a/tex/context/base/toks-ini.lua +++ b/tex/context/base/toks-ini.lua @@ -5,6 +5,7 @@ if not modules then modules = { } end modules ['toks-ini'] = { license = "see context related readme files" } +local context, commands = context, commands local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values local format, gsub = string.format, string.gsub diff --git a/tex/context/base/trac-deb.lua b/tex/context/base/trac-deb.lua index fe167c343..4cc48c4a5 100644 --- a/tex/context/base/trac-deb.lua +++ b/tex/context/base/trac-deb.lua @@ -11,22 +11,25 @@ local lpeg, status = lpeg, status local lpegmatch = lpeg.match local format, concat, match = string.format, table.concat, string.match local tonumber, tostring = tonumber, tostring -local texdimen, textoks, texcount = tex.dimen, tex.toks, tex.count -- maybe tracers -> tracers.tex (and tracers.lua for current debugger) local report_system = logs.reporter("system","tex") -tracers = tracers or { } -local tracers = tracers +tracers = tracers or { } +local tracers = tracers -tracers.lists = { } -local lists = tracers.lists +tracers.lists = { } +local lists = tracers.lists -tracers.strings = { } -local strings = tracers.strings +tracers.strings = { } +local strings = tracers.strings -strings.undefined = "undefined" +local texgetdimen = tex.getdimen +local 
texgettoks = tex.gettoks +local texgetcount = tex.getcount + +strings.undefined = "undefined" lists.scratch = { 0, 2, 4, 6, 8 @@ -71,16 +74,16 @@ function tracers.cs(csname) end function tracers.dimen(name) - local d = texdimen[name] + local d = texgetdimen(name) return d and number.topoints(d) or strings.undefined end function tracers.count(name) - return texcount[name] or strings.undefined + return texgetcount(name) or strings.undefined end function tracers.toks(name,limit) - local t = textoks[name] + local t = texgettoks(name) return t and string.limit(t,tonumber(limit) or 40) or strings.undefined end diff --git a/tex/context/base/trac-inf.lua b/tex/context/base/trac-inf.lua index eefc15a6f..79cbdba3f 100644 --- a/tex/context/base/trac-inf.lua +++ b/tex/context/base/trac-inf.lua @@ -11,20 +11,24 @@ if not modules then modules = { } end modules ['trac-inf'] = { -- get warnings about assignments. This is more efficient than using rawset -- and rawget. -local type, tonumber = type, tonumber +local type, tonumber, select = type, tonumber, select local format, lower = string.format, string.lower local concat = table.concat local clock = os.gettimeofday or os.clock -- should go in environment -statistics = statistics or { } -local statistics = statistics +local setmetatableindex = table.setmetatableindex +local serialize = table.serialize +local formatters = string.formatters -statistics.enable = true -statistics.threshold = 0.01 +statistics = statistics or { } +local statistics = statistics + +statistics.enable = true +statistics.threshold = 0.01 local statusinfo, n, registered, timers = { }, 0, { }, { } -table.setmetatableindex(timers,function(t,k) +setmetatableindex(timers,function(t,k) local v = { timing = 0, loadtime = 0 } t[k] = v return v @@ -178,6 +182,19 @@ function statistics.timed(action) report("total runtime: %s",elapsedtime("run")) end +-- goodie + +function statistics.tracefunction(base,tag,...) + for i=1,select("#",...) do + local name = select(i,...) 
+ local stat = { } + local func = base[name] + setmetatableindex(stat,function(t,k) t[k] = 0 return 0 end) + base[name] = function(n,k,v) stat[k] = stat[k] + 1 return func(n,k,v) end + statistics.register(formatters["%s.%s"](tag,name),function() return serialize(stat,"calls") end) + end +end + -- where, not really the best spot for this: commands = commands or { } diff --git a/tex/context/base/trac-jus.lua b/tex/context/base/trac-jus.lua index 9d99f059d..38220a752 100644 --- a/tex/context/base/trac-jus.lua +++ b/tex/context/base/trac-jus.lua @@ -15,19 +15,16 @@ local a_alignstate = attributes.private("alignstate") local a_justification = attributes.private("justification") local tracers = nodes.tracers -local setcolor = tracers.colors.set -local settransparency = tracers.transparencies.set +local tracedrule = tracers.rule local new_rule = nodes.pool.rule +local new_hlist = nodes.pool.hlist local new_glue = nodes.pool.glue local new_kern = nodes.pool.kern -local concat_nodes = nodes.concat -local hpack_nodes = node.hpack -local copy_node = node.copy local get_list_dimensions = node.dimensions local hlist_code = nodes.nodecodes.hlist -local tex_set_attribute = tex.setattribute +local texsetattribute = tex.setattribute local unsetvalue = attributes.unsetvalue local min_threshold = 0 @@ -36,14 +33,14 @@ local max_threshold = 0 local function set(n) nodes.tasks.enableaction("mvlbuilders", "typesetters.checkers.handler") nodes.tasks.enableaction("vboxbuilders","typesetters.checkers.handler") - tex_set_attribute(a_justification,n or 1) + texsetattribute(a_justification,n or 1) function typesetters.checkers.set(n) - tex_set_attribute(a_justification,n or 1) + texsetattribute(a_justification,n or 1) end end local function reset() - tex_set_attribute(a_justification,unsetvalue) + texsetattribute(a_justification,unsetvalue) end checkers.set = set @@ -74,58 +71,22 @@ function checkers.handler(head) if naturalwidth == 0 or delta == 0 then -- special box elseif delta >= max_threshold then - local rule = new_rule(delta,naturalheight,naturaldepth) - list = hpack_nodes(list,width,"exactly") - if list.glue_set == 1 then - setcolor(rule,"trace:dr") - settransparency(rule,"trace:dr") - else - setcolor(rule,"trace:db") - settransparency(rule,"trace:db") - end - rule = hpack_nodes(rule) - rule.width = 0 - rule.height = 0 - rule.depth = 0 - current.list = concat_nodes { list, rule } - -- current.list = concat_nodes { list, new_kern(-naturalwidth+width), rule } + local rule = tracedrule(delta,naturalheight,naturaldepth,list.glue_set == 1 and "trace:dr"or "trace:db") + current.list = list .. new_hlist(rule) elseif delta <= min_threshold then local alignstate = list[a_alignstate] if alignstate == 1 then - local rule = new_rule(-delta,naturalheight,naturaldepth) - setcolor(rule,"trace:dc") - settransparency(rule,"trace:dc") - rule = hpack_nodes(rule) - rule.height = 0 - rule.depth = 0 - rule.width = 0 - current.list = nodes.concat { rule, list } + local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dc") + current.list = new_hlist(rule) .. list elseif alignstate == 2 then - local rule = new_rule(-delta/2,naturalheight,naturaldepth) - setcolor(rule,"trace:dy") - settransparency(rule,"trace:dy") - rule = hpack_nodes(rule) - rule.width = 0 - rule.height = 0 - rule.depth = 0 - current.list = concat_nodes { copy_node(rule), list, new_kern(delta/2), rule } + local rule = tracedrule(-delta/2,naturalheight,naturaldepth,"trace:dy") + current.list = new_hlist(rule^1) .. list .. new_kern(delta/2) .. 
new_hlist(rule) elseif alignstate == 3 then - local rule = new_rule(-delta,naturalheight,naturaldepth) - setcolor(rule,"trace:dm") - settransparency(rule,"trace:dm") - rule = hpack_nodes(rule) - rule.height = 0 - rule.depth = 0 - current.list = concat_nodes { list, new_kern(delta), rule } + local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dm") + current.list = list .. new_kern(delta) .. new_hlist(rule) else - local rule = new_rule(-delta,naturalheight,naturaldepth) - setcolor(rule,"trace:dg") - settransparency(rule,"trace:dg") - rule = hpack_nodes(rule) - rule.height = 0 - rule.depth = 0 - rule.width = 0 - current.list = concat_nodes { list, new_kern(delta), rule } + local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dg") + current.list = list .. new_kern(delta) .. new_hlist(rule) end end end diff --git a/tex/context/base/trac-lmx.lua b/tex/context/base/trac-lmx.lua index 18c7f6020..41d930536 100644 --- a/tex/context/base/trac-lmx.lua +++ b/tex/context/base/trac-lmx.lua @@ -430,7 +430,7 @@ local optionalspaces = whitespace^0 local dquote = P('"') -local begincomment = P("") local beginembedxml = P(" +local pattern_1 = Cs((includexml + includecss + P(1))^0) local pattern_2 = Cs((definexml + resolvexml + definecss + resolvecss + P(1))^0) local pattern_3 = Cs((luacodexml + luacodecss + othercode)^0) diff --git a/tex/context/base/trac-log.lua b/tex/context/base/trac-log.lua index 1f2520130..0d0b66260 100644 --- a/tex/context/base/trac-log.lua +++ b/tex/context/base/trac-log.lua @@ -68,13 +68,14 @@ local write_nl, write = texio and texio.write_nl or print, texio and texio.write local format, gmatch, find = string.format, string.gmatch, string.find local concat, insert, remove = table.concat, table.insert, table.remove local topattern = string.topattern -local texcount = tex and tex.count local next, type, select = next, type, select local utfchar = utf.char local setmetatableindex = table.setmetatableindex local formatters = string.formatters +local texgetcount = tex and tex.getcount + --[[ldx--

This is a prelude to a more extensive logging module. We no longer provide XML-based logging as parsing is relatively easy anyway.
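-- A minimal usage sketch for the log hooks added further down in this file
-- (logs.setprocessor, logs.setformatters, logs.setlogfile, logs.settimedlog);
-- the file name and the pass-through processor are illustrative assumptions,
-- and setlogfile/settimedlog only do something in the non-TeX (script) branch:
--
--   logs.setlogfile("side.log",true)            -- also append each line, timestamped, to side.log
--   logs.settimedlog()                          -- prefix subsequent lines with os.localtime()
--   logs.setprocessor(function(s) return s end) -- hook that may rewrite every written line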

@@ -97,13 +98,13 @@ wiki : http://contextgarden.net -- function utilities.strings.chruni(s) return utfchar(s) .. " (U+" .. format("%05X",s) .. ")" end -- -- utilities.strings.formatters.add ( --- string.formatters, "uni", +-- string.formatters, "unichr", -- [[unichr(%s)]], -- [[local unichr = utilities.strings.unichr]] -- ) -- -- utilities.strings.formatters.add ( --- string.formatters, "chr", +-- string.formatters, "chruni", -- [[chruni(%s)]], -- [[local chruni = utilities.strings.chruni]] -- ) @@ -118,8 +119,25 @@ utilities.strings.formatters.add ( [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]] ) +-- function utilities.strings.unichk(s) return s <= 0xFFFF and ("U+" .. format("%05X",s) .. " (" .. utfchar(s) .. ")") or ("U+" .. format("%05X",s)) end +-- function utilities.strings.chkuni(s) return s <= 0xFFFF and (utfchar(s) .. " (U+" .. format("%05X",s) .. ")") or ("U+" .. format("%05X",s)) end +-- +-- utilities.strings.formatters.add ( +-- string.formatters, "unichk", +-- [[unichk(%s)]], +-- [[local unichk = utilities.strings.unichk]] +-- ) +-- +-- utilities.strings.formatters.add ( +-- string.formatters, "chkuni", +-- [[chkuni(%s)]], +-- [[local chkuni = utilities.strings.chkuni]] +-- ) +-- -- print(formatters["Missing character %!chruni! in font."](234)) -- print(formatters["Missing character %!unichr! in font."](234)) +-- print(formatters["Missing character %!chkuni! in font."](234)) +-- print(formatters["Missing character %!unichk! in font."](234)) -- basic loggers @@ -129,7 +147,7 @@ setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end) local report, subreport, status, settarget, setformats, settranslations -local direct, subdirect, writer, pushtarget, poptarget +local direct, subdirect, writer, pushtarget, poptarget, setlogfile, settimedlog, setprocessor, setformatters if tex and (tex.jobname or tex.formatname) then @@ -152,8 +170,8 @@ if tex and (tex.jobname or tex.formatname) then write_nl(target,"\n") end - local f_one = formatters["%-15s > %s\n"] - local f_two = formatters["%-15s >\n"] + local report_yes = formatters["%-15s > %s\n"] + local report_nop = formatters["%-15s >\n"] -- we can use formatters but best check for % then because for simple messages -- we con't want this overhead for single messages (not that there are that @@ -161,71 +179,71 @@ if tex and (tex.jobname or tex.formatname) then report = function(a,b,c,...) if c then - write_nl(target,f_one(translations[a],formatters[formats[b]](c,...))) + write_nl(target,report_yes(translations[a],formatters[formats[b]](c,...))) elseif b then - write_nl(target,f_one(translations[a],formats[b])) + write_nl(target,report_yes(translations[a],formats[b])) elseif a then - write_nl(target,f_two(translations[a])) + write_nl(target,report_nop(translations[a])) else write_nl(target,"\n") end end - local f_one = formatters["%-15s > %s"] - local f_two = formatters["%-15s >"] + local direct_yes = formatters["%-15s > %s"] + local direct_nop = formatters["%-15s >"] direct = function(a,b,c,...) 
if c then - return f_one(translations[a],formatters[formats[b]](c,...)) + return direct_yes(translations[a],formatters[formats[b]](c,...)) elseif b then - return f_one(translations[a],formats[b]) + return direct_yes(translations[a],formats[b]) elseif a then - return f_two(translations[a]) + return direct_nop(translations[a]) else return "" end end - local f_one = formatters["%-15s > %s > %s\n"] - local f_two = formatters["%-15s > %s >\n"] + local subreport_yes = formatters["%-15s > %s > %s\n"] + local subreport_nop = formatters["%-15s > %s >\n"] subreport = function(a,s,b,c,...) if c then - write_nl(target,f_one(translations[a],translations[s],formatters[formats[b]](c,...))) + write_nl(target,subreport_yes(translations[a],translations[s],formatters[formats[b]](c,...))) elseif b then - write_nl(target,f_one(translations[a],translations[s],formats[b])) + write_nl(target,subreport_yes(translations[a],translations[s],formats[b])) elseif a then - write_nl(target,f_two(translations[a],translations[s])) + write_nl(target,subreport_nop(translations[a],translations[s])) else write_nl(target,"\n") end end - local f_one = formatters["%-15s > %s > %s"] - local f_two = formatters["%-15s > %s >"] + local subdirect_yes = formatters["%-15s > %s > %s"] + local subdirect_nop = formatters["%-15s > %s >"] subdirect = function(a,s,b,c,...) if c then - return f_one(translations[a],translations[s],formatters[formats[b]](c,...)) + return subdirect_yes(translations[a],translations[s],formatters[formats[b]](c,...)) elseif b then - return f_one(translations[a],translations[s],formats[b]) + return subdirect_yes(translations[a],translations[s],formats[b]) elseif a then - return f_two(translations[a],translations[s]) + return subdirect_nop(translations[a],translations[s]) else return "" end end - local f_one = formatters["%-15s : %s\n"] - local f_two = formatters["%-15s :\n"] + local status_yes = formatters["%-15s : %s\n"] + local status_nop = formatters["%-15s :\n"] status = function(a,b,c,...) if c then - write_nl(target,f_one(translations[a],formatters[formats[b]](c,...))) + write_nl(target,status_yes(translations[a],formatters[formats[b]](c,...))) elseif b then - write_nl(target,f_one(translations[a],formats[b])) + write_nl(target,status_yes(translations[a],formats[b])) elseif a then - write_nl(target,f_two(translations[a])) + write_nl(target,status_nop(translations[a])) else write_nl(target,"\n") end @@ -270,56 +288,81 @@ if tex and (tex.jobname or tex.formatname) then translations = t end + setprocessor = function(f) + local writeline = write_nl + write_nl = function(target,...) + writeline(target,f(...)) + end + end + + setformatters = function(f) + report_yes = f.report_yes or report_yes + report_nop = f.report_nop or report_nop + subreport_yes = f.subreport_yes or subreport_yes + subreport_nop = f.subreport_nop or subreport_nop + direct_yes = f.direct_yes or direct_yes + direct_nop = f.direct_nop or direct_nop + subdirect_yes = f.subdirect_yes or subdirect_yes + subdirect_nop = f.subdirect_nop or subdirect_nop + status_yes = f.status_yes or status_yes + status_nop = f.status_nop or status_nop + end + + setlogfile = ignore + settimedlog = ignore + else logs.flush = ignore - writer = write_nl + writer = function(s) + write_nl(s) + end newline = function() write_nl("\n") end - local f_one = formatters["%-15s | %s"] - local f_two = formatters["%-15s |"] + local report_yes = formatters["%-15s | %s"] + local report_nop = formatters["%-15s |"] report = function(a,b,c,...) 
if c then - write_nl(f_one(a,formatters[b](c,...))) + write_nl(report_yes(a,formatters[b](c,...))) elseif b then - write_nl(f_one(a,b)) + write_nl(report_yes(a,b)) elseif a then - write_nl(f_two(a)) + write_nl(report_nop(a)) else write_nl("") end end - local f_one = formatters["%-15s | %s | %s"] - local f_two = formatters["%-15s | %s |"] + local subreport_yes = formatters["%-15s | %s | %s"] + local subreport_nop = formatters["%-15s | %s |"] subreport = function(a,sub,b,c,...) if c then - write_nl(f_one(a,sub,formatters[b](c,...))) + write_nl(subreport_yes(a,sub,formatters[b](c,...))) elseif b then - write_nl(f_one(a,sub,b)) + write_nl(subreport_yes(a,sub,b)) elseif a then - write_nl(f_two(a,sub)) + write_nl(subreport_nop(a,sub)) else write_nl("") end end - local f_one = formatters["%-15s : %s\n"] - local f_two = formatters["%-15s :\n"] + local status_yes = formatters["%-15s : %s\n"] + local status_nop = formatters["%-15s :\n"] status = function(a,b,c,...) -- not to be used in lua anyway if c then - write_nl(f_one(a,formatters[b](c,...))) + write_nl(status_yes(a,formatters[b](c,...))) elseif b then - write_nl(f_one(a,b)) -- b can have %'s + write_nl(status_yes(a,b)) -- b can have %'s elseif a then - write_nl(f_two(a)) + write_nl(status_nop(a)) else write_nl("\n") end @@ -334,6 +377,53 @@ else setformats = ignore settranslations = ignore + setprocessor = function(f) + local writeline = write_nl + write_nl = function(s) + writeline(f(s)) + end + end + + setformatters = function(f) + report_yes = f.report_yes or report_yes + report_nop = f.report_nop or report_nop + subreport_yes = f.subreport_yes or subreport_yes + subreport_nop = f.subreport_nop or subreport_nop + status_yes = f.status_yes or status_yes + status_nop = f.status_nop or status_nop + end + + setlogfile = function(name,keepopen) + if name and name ~= "" then + local localtime = os.localtime + local writeline = write_nl + if keepopen then + local f = io.open(name,"ab") + write_nl = function(s) + writeline(s) + f:write(localtime()," | ",s,"\n") + end + else + write_nl = function(s) + writeline(s) + local f = io.open(name,"ab") + f:write(localtime()," | ",s,"\n") + f:close() + end + end + end + setlogfile = ignore + end + + settimedlog = function() + local localtime = os.localtime + local writeline = write_nl + write_nl = function(s) + writeline(localtime() .. " | " .. 
s) + end + settimedlog = ignore + end + end logs.report = report @@ -345,6 +435,11 @@ logs.poptarget = poptarget logs.setformats = setformats logs.settranslations = settranslations +logs.setlogfile = setlogfile +logs.settimedlog = settimedlog +logs.setprocessor = setprocessor +logs.setformatters = setformatters + logs.direct = direct logs.subdirect = subdirect logs.writer = writer @@ -529,8 +624,9 @@ local report_pages = logs.reporter("pages") -- not needed but saves checking whe local real, user, sub function logs.start_page_number() - real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno --- real, user, sub = 0, 0, 0 + real = texgetcount("realpageno") + user = texgetcount("userpageno") + sub = texgetcount("subpageno") end local timing = false diff --git a/tex/context/base/trac-vis.lua b/tex/context/base/trac-vis.lua index df4909c3e..dc8bcc5e7 100644 --- a/tex/context/base/trac-vis.lua +++ b/tex/context/base/trac-vis.lua @@ -34,10 +34,8 @@ local formatters = string.formatters -- todo: inline concat (more efficient) local nodecodes = nodes.nodecodes -local disc_code = nodecodes.disc local kern_code = nodecodes.kern local glyph_code = nodecodes.glyph -local disc_code = nodecodes.disc local hlist_code = nodecodes.hlist local vlist_code = nodecodes.vlist local glue_code = nodecodes.glue @@ -60,11 +58,9 @@ local rightskip_code = gluecodes.rightskip local whatsitcodes = nodes.whatsitcodes -local concat_nodes = nodes.concat local hpack_nodes = node.hpack local vpack_nodes = node.vpack -local hpack_string = typesetters.hpack -local fast_hpack_string = typesetters.fast_hpack +local fast_hpack_string = nodes.typesetters.fast_hpack local copy_node = node.copy local copy_list = node.copy_list local free_node = node.free @@ -74,8 +70,9 @@ local insert_node_after = node.insert_after local fast_hpack = nodes.fasthpack local traverse_nodes = node.traverse -local tex_attribute = tex.attribute -local tex_box = tex.box +local texgetattribute = tex.getattribute +local texsetattribute = tex.setattribute +local texgetbox = tex.getbox local unsetvalue = attributes.unsetvalue local current_font = font.current @@ -158,7 +155,7 @@ local preset_all = preset_makeup + modes.fontkern + modes.whatsit + modes.gly function visualizers.setfont(id) usedfont = id or current_font() exheight = exheights[usedfont] - emwidth = emwidths[usedfont] + emwidth = emwidths[usedfont] end -- we can preset a bunch of bits @@ -204,18 +201,12 @@ local function setvisual(n,a,what) -- this will become more efficient when we ha a = preset_makeup else a = setbit(a,preset_makeup) - -- for i=1,#modes_makeup do - -- a = setvisual(modes_makeup[i],a) - -- end end elseif n == "boxes" then if not a or a == 0 or a == unsetvalue then a = preset_boxes else a = setbit(a,preset_boxes) - -- for i=1,#modes_boxes do - -- a = setvisual(modes_boxes[i],a) - -- end end elseif n == "all" then if what == false then @@ -224,9 +215,6 @@ local function setvisual(n,a,what) -- this will become more efficient when we ha a = preset_all else a = setbit(a,preset_all) - -- for i=1,#modes_all do - -- a = setvisual(modes_all[i],a) - -- end end else local m = modes[n] @@ -256,11 +244,11 @@ local function setvisual(n,a,what) -- this will become more efficient when we ha end function visualizers.setvisual(n) - tex_attribute[a_visual] = setvisual(n,tex_attribute[a_visual]) + texsetattribute(a_visual,setvisual(n,texgetattribute(a_visual))) end function visualizers.setlayer(n) - tex_attribute[a_layer] = layers[n] or unsetvalue + 
texsetattribute(a_layer,layers[n] or unsetvalue) end commands.setvisual = visualizers.setvisual @@ -271,7 +259,7 @@ function commands.visual(n) end local function set(mode,v) - tex_attribute[a_visual] = setvisual(mode,tex_attribute[a_visual],v) + texsetattribute(a_visual,setvisual(mode,texgetattribute(a_visual),v)) end for mode, value in next, modes do @@ -314,11 +302,7 @@ local function sometext(str,layer,color,textcolor) -- we can just paste verbatim if textcolor then setlistcolor(text.list,textcolor) end - local info = concat_nodes { - rule, - kern, - text, - } + local info = rule .. kern .. text setlisttransparency(info,c_zero) info = fast_hpack(info) if layer then @@ -352,10 +336,7 @@ local function fontkern(head,current) setlisttransparency(list,c_text_d) settransparency(rule,c_text_d) text.shift = -5 * exheight - info = concat_nodes { - rule, - text, - } + info = rule .. text info = fast_hpack(info) info[a_layer] = l_fontkern info.width = 0 @@ -432,12 +413,15 @@ end local b_cache = { } -local function ruledbox(head,current,vertical,layer,what,simple) +local function ruledbox(head,current,vertical,layer,what,simple,previous) local wd = current.width if wd ~= 0 then - local ht, dp = current.height, current.depth - local next, prev = current.next, current.prev - current.next, current.prev = nil, nil + local ht = current.height + local dp = current.depth + local next = current.next + local prev = previous -- current.prev ... prev can be wrong in math mode + current.next = nil + current.prev = nil local linewidth = emwidth/10 local baseline, baseskip if dp ~= 0 and ht ~= 0 then @@ -446,19 +430,16 @@ local function ruledbox(head,current,vertical,layer,what,simple) if not baseline then -- due to an optimized leader color/transparency we need to set the glue node in order -- to trigger this mechanism - local leader = concat_nodes { - new_glue(2*linewidth), -- 2.5 - new_rule(6*linewidth,linewidth,0), -- 5.0 - new_glue(2*linewidth), -- 2.5 - } + local leader = new_glue(2*linewidth) .. new_rule(6*linewidth,linewidth,0) .. new_glue(2*linewidth) -- setlisttransparency(leader,c_text) leader = fast_hpack(leader) -- setlisttransparency(leader,c_text) baseline = new_glue(0) baseline.leader = leader baseline.subtype = cleaders_code - baseline.spec.stretch = 65536 - baseline.spec.stretch_order = 2 + local spec = baseline.spec + spec.stretch = 65536 + spec.stretch_order = 2 setlisttransparency(baseline,c_text) b_cache.baseline = baseline end @@ -480,10 +461,7 @@ local function ruledbox(head,current,vertical,layer,what,simple) this = b_cache[what] if not this then local text = fast_hpack_string(what,usedfont) - this = concat_nodes { - new_kern(-text.width), - text, - } + this = new_kern(-text.width) .. text setlisttransparency(this,c_text) this = fast_hpack(this) this.width = 0 @@ -492,27 +470,24 @@ local function ruledbox(head,current,vertical,layer,what,simple) b_cache[what] = this end end - local info = concat_nodes { - this and copy_list(this) or nil, -- this also triggers the right mode (else sometimes no whatits) - new_rule(linewidth,ht,dp), - new_rule(wd-2*linewidth,-dp+linewidth,dp), - new_rule(linewidth,ht,dp), - new_kern(-wd+linewidth), - new_rule(wd-2*linewidth,ht,-ht+linewidth), - baseskip, - baseline, - } + -- we need to trigger the right mode (else sometimes no whatits) + local info = + (this and copy_list(this) or nil) .. + new_rule(linewidth,ht,dp) .. + new_rule(wd-2*linewidth,-dp+linewidth,dp) .. + new_rule(linewidth,ht,dp) .. + new_kern(-wd+linewidth) .. 
+ new_rule(wd-2*linewidth,ht,-ht+linewidth) + if baseskip then + info = info .. baseskip .. baseline + end setlisttransparency(info,c_text) info = fast_hpack(info) info.width = 0 info.height = 0 info.depth = 0 info[a_layer] = layer - local info = concat_nodes { - current, - new_kern(-wd), - info, - } + local info = current .. new_kern(-wd) .. info info = fast_hpack(info,wd) if vertical then info = vpack_nodes(info) @@ -522,12 +497,12 @@ local function ruledbox(head,current,vertical,layer,what,simple) next.prev = info end if prev then -if prev.id == gluespec_code then - -- weird, how can this happen, an inline glue-spec -else - info.prev = prev - prev.next = info -end + if prev.id == gluespec_code then + -- weird, how can this happen, an inline glue-spec + else + info.prev = prev + prev.next = info + end end if head == current then return info, info @@ -539,27 +514,30 @@ end end end -local function ruledglyph(head,current) +local function ruledglyph(head,current,previous) local wd = current.width if wd ~= 0 then - local ht, dp = current.height, current.depth - local next, prev = current.next, current.prev - current.next, current.prev = nil, nil + local ht = current.height + local dp = current.depth + local next = current.next + local prev = previous + current.next = nil + current.prev = nil local linewidth = emwidth/20 local baseline if dp ~= 0 and ht ~= 0 then baseline = new_rule(wd-2*linewidth,linewidth,0) end local doublelinewidth = 2*linewidth - local info = concat_nodes { - new_rule(linewidth,ht,dp), - new_rule(wd-doublelinewidth,-dp+linewidth,dp), - new_rule(linewidth,ht,dp), - new_kern(-wd+linewidth), - new_rule(wd-doublelinewidth,ht,-ht+linewidth), - new_kern(-wd+doublelinewidth), - baseline, - } + -- could be a pdf rule + local info = + new_rule(linewidth,ht,dp) .. + new_rule(wd-doublelinewidth,-dp+linewidth,dp) .. + new_rule(linewidth,ht,dp) .. + new_kern(-wd+linewidth) .. + new_rule(wd-doublelinewidth,ht,-ht+linewidth) .. + new_kern(-wd+doublelinewidth) .. + baseline setlistcolor(info,c_glyph) setlisttransparency(info,c_glyph_d) info = fast_hpack(info) @@ -567,11 +545,7 @@ local function ruledglyph(head,current) info.height = 0 info.depth = 0 info[a_layer] = l_glyph - local info = concat_nodes { - current, - new_kern(-wd), - info, - } + local info = current .. new_kern(-wd) .. 
info info = fast_hpack(info) info.width = wd if next then @@ -622,6 +596,8 @@ local tags = { -- false = "HS", } +-- we sometimes pass previous as we can have issues in math (not watertight for all) + local function ruledglue(head,current,vertical) local spec = current.spec local width = spec.width @@ -721,9 +697,10 @@ local function visualize(head,vertical) local trace_glyph = false local trace_simple = false local trace_user = false - local current = head + local current = head + local previous = nil + local attr = unsetvalue local prev_trace_fontkern = nil - local attr = unsetvalue while current do local id = current.id local a = current[a_visual] or unsetvalue @@ -762,7 +739,7 @@ local function visualize(head,vertical) current[a_layer] = l_strut elseif id == glyph_code then if trace_glyph then - head, current = ruledglyph(head,current) + head, current = ruledglyph(head,current,previous) end elseif id == disc_code then if trace_glyph then @@ -812,7 +789,7 @@ local function visualize(head,vertical) current.list = visualize(content,false) end if trace_hbox then - head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple) + head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple,previous) end elseif id == vlist_code then local content = current.list @@ -820,9 +797,9 @@ local function visualize(head,vertical) current.list = visualize(content,true) end if trace_vtop then - head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple) + head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple,previous) elseif trace_vbox then - head, current = ruledbox(head,current,true,l_vbox,"__V",trace_simple) + head, current = ruledbox(head,current,true,l_vbox,"__V",trace_simple,previous) end elseif id == whatsit_code then if trace_whatsit then @@ -833,7 +810,8 @@ local function visualize(head,vertical) head, current = user(head,current) end end - current = current.next + previous = current + current = current.next end return head end @@ -865,13 +843,13 @@ end function visualizers.handler(head) if usedfont then starttiming(visualizers) - -- local l = tex_attribute[a_layer] - -- local v = tex_attribute[a_visual] - -- tex_attribute[a_layer] = unsetvalue - -- tex_attribute[a_visual] = unsetvalue + -- local l = texgetattribute(a_layer) + -- local v = texgetattribute(a_visual) + -- texsetattribute(a_layer,unsetvalue) + -- texsetattribute(a_visual,unsetvalue) head = visualize(head) - -- tex_attribute[a_layer] = l - -- tex_attribute[a_visual] = v + -- texsetattribute(a_layer,l) + -- texsetattribute(a_visual,v) -- -- cleanup() stoptiming(visualizers) end @@ -879,7 +857,8 @@ function visualizers.handler(head) end function visualizers.box(n) - tex_box[n].list = visualizers.handler(tex_box[n].list) + local box = texgetbox(n) + box.list = visualizers.handler(box.list) end local last = nil @@ -911,7 +890,7 @@ end function visualizers.markfonts(list) last, used = 0, { } - markfonts(type(n) == "number" and tex_box[n].list or n) + markfonts(type(n) == "number" and texgetbox(n).list or n) end function commands.markfonts(n) diff --git a/tex/context/base/trac-vis.mkiv b/tex/context/base/trac-vis.mkiv index fbc6ad6c4..694d1b09d 100644 --- a/tex/context/base/trac-vis.mkiv +++ b/tex/context/base/trac-vis.mkiv @@ -59,6 +59,8 @@ \unexpanded\def\ruledvtop{\syst_visualizers_vtop attr \visualattribute \ctxcommand{visual("simplevtop")} } % special case \unexpanded\def\ruledtopv{\syst_visualizers_vtop attr \visualattribute \ctxcommand{visual("vtop")} } 
+\unexpanded\def\ruledmbox#1{\ruledhbox{\startimath#1\stopimath}} + \appendtoks \ifcase\c_syst_visualizers_state\else \syst_visualizers_speedup diff --git a/tex/context/base/type-imp-dejavu.mkiv b/tex/context/base/type-imp-dejavu.mkiv index 0e628c4ba..de1f7752c 100644 --- a/tex/context/base/type-imp-dejavu.mkiv +++ b/tex/context/base/type-imp-dejavu.mkiv @@ -73,6 +73,13 @@ \definetypeface [dejavu] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2] \stoptypescript + \starttypescript[dejavubidi] + \definetypeface [dejavu] [\s!rm] [\s!serif] [dejavu] [\s!default] + \definetypeface [dejavu] [\s!ss] [\s!sans] [dejavu] [\s!default] + \definetypeface [dejavu] [\s!tt] [\s!mono] [dejavu] [\s!default] + \definetypeface [dejavu] [\s!mm] [\s!math] [xitsbidi] [\s!default] [\s!rscale=1.2] + \stoptypescript + \starttypescript [\s!serif] [dejavu-condensed] [\s!name] \setups[\s!font:\s!fallback:\s!serif] \definefontsynonym [\s!Serif] [\s!name:dejavuserifcondensed] [\s!features=\s!default] diff --git a/tex/context/base/type-imp-euler.mkiv b/tex/context/base/type-imp-euler.mkiv index 60b9760f5..d3b552b56 100644 --- a/tex/context/base/type-imp-euler.mkiv +++ b/tex/context/base/type-imp-euler.mkiv @@ -12,35 +12,79 @@ %C details. \loadtypescriptfile[texgyre] +\loadtypescriptfile[dejavu] -\starttypescriptcollection[pagella-euler] +% U+0000 upto U+3100 is probably ok, but even then we don't want a mix of accented - \starttypescript [\s!math] [euler] - \definefontsynonym [EulerMath] [\s!file:euler.otf] +\resetfontfallback [euleroverpagella] +\resetfontfallback [pagellaovereuler] + +% 0x1D455 : italic h + +\definefontfallback [euleroverpagella] [\s!file:euler.otf] [0x02100-0x02BFF] [\s!check=yes,\c!force=yes] +\definefontfallback [euleroverpagella] [\s!file:euler.otf] [0x1D400-0x1D7FF] [\s!check=yes,\c!force=yes] +\definefontfallback [euleroverpagella] [texgyrepagella-math] [0x0210E] [\s!check=yes,\c!force=\v!yes] +%definefontfallback [euleroverpagella] [\s!file:euler.otf] [0x1D538-0x1D550] [\s!check=yes,\c!force=yes] + +\definefontfallback [pagellaovereuler] [texgyrepagella-math] [0x02100-0x02BFF] [\s!check=yes] +\definefontfallback [pagellaovereuler] [texgyrepagella-math] [0x1D400-0x1D7FF] [\s!check=yes] +\definefontfallback [pagellaovereuler] [texgyrepagella-math] [0x1D400-0x1D7FF] [\s!check=yes] +\definefontfallback [pagellaovereuler] [texgyrepagella-math] [0x0210E] [\s!check=yes,\c!force=\v!yes] +%definefontfallback [pagellaovereuler] [texgyrepagella-math] [0x1D538-0x1D550] [\s!check=yes] + +% example for aditya: + +% \definefontfallback [pagellaovereuler] [texgyrepagella-math] [lowercasenormal] [offset=uppercasenormal,force=yes] +% \definefontfallback [pagellaovereuler] [texgyrepagella-math] [uppercasenormal] [offset=lowercasenormal,force=yes] + +% \definefontfallback [pagellaovereuler] [texgyrepagella-math] [uppercasebolditalic] [force=yes] +% \definefontfallback [pagellaovereuler] [texgyrepagella-math] [lowercasebolditalic] [force=yes] + +\starttypescriptcollection[pagella-eulernova] + + \starttypescript [\s!serif] [eulernova] [\s!name] + \setups[\s!font:\s!fallback:\s!serif] + \definefontsynonym [\s!Serif] [\s!file:euler.otf] [\s!features=\s!default] \stoptypescript - \starttypescript [\s!math] [euler] [\s!name] - % \definefontsynonym [MathRoman] [EulerMath] [\s!features=\s!math] - \definefontsynonym [MathRoman] [EulerMath] [\s!features=\s!math\mathsizesuffix] + \starttypescript [\s!math] [eulernova] [\s!name] + \loadfontgoodies[euler-math] + \definefontsynonym [MathRoman] [\s!file:euler.otf] 
[\s!features=\s!math\mathsizesuffix] \stoptypescript - \starttypescript [pagella-euler] - \definetypeface [\typescriptone] [\s!rm] [\s!serif] [pagella] [\s!default] - % \definetypeface [\typescriptone] [\s!ss] [\s!sans] [pagella] [\s!default] - \definetypeface [\typescriptone] [\s!tt] [\s!mono] [modern] [\s!default] - \definetypeface [\typescriptone] [\s!mm] [\s!math] [euler] [\s!default] + \starttypescript [pagella-eulernova] + \definetypeface [\typescriptone] [\s!rm] [\s!serif] [pagella] [\s!default] + \definetypeface [\typescriptone] [\s!tt] [\s!mono] [dejavu] [\s!default] [\s!rscale=0.9] + \definetypeface [\typescriptone] [\s!mm] [\s!math] [eulernova] [\s!default] \quittypescriptscanning \stoptypescript - \starttypescript [\s!serif] [euler] [\s!name] - \setups[\s!font:\s!fallback:\s!serif] - \definefontsynonym [\s!Serif] [\s!file:euler.otf] [\s!features=\s!default] + \starttypescript [eulernova] + \definetypeface [\typescriptone] [\s!rm] [\s!serif] [eulernova] [\s!default] + \definetypeface [\typescriptone] [\s!tt] [\s!mono] [dejavu] [\s!default] [\s!rscale=0.9] + \definetypeface [\typescriptone] [\s!mm] [\s!math] [eulernova] [\s!default] + \quittypescriptscanning + \stoptypescript + + \starttypescript [\s!math] [euleroverpagella] [\s!name] + \definefontsynonym [MathRoman] [texgyrepagella-math] [\s!features=\s!math\mathsizesuffix,\s!fallbacks=euleroverpagella] + \stoptypescript + + \starttypescript [\s!math] [pagellaovereuler] [\s!name] + \definefontsynonym [MathRoman] [\s!file:euler.otf] [\s!features=\s!math\mathsizesuffix,\s!fallbacks=pagellaovereuler] + \stoptypescript + + \starttypescript [pagella-with-euler] + \definetypeface [\typescriptone] [\s!rm] [\s!serif] [pagella] [\s!default] + \definetypeface [\typescriptone] [\s!tt] [\s!mono] [dejavu] [\s!default] [\s!rscale=0.9] + \definetypeface [\typescriptone] [\s!mm] [\s!math] [euleroverpagella] [\s!default] + \quittypescriptscanning \stoptypescript - \starttypescript [euler] - \definetypeface [euler] [\s!rm] [\s!serif] [euler] [\s!default] - \definetypeface [euler] [\s!tt] [\s!mono] [modern] [\s!default] - \definetypeface [euler] [\s!mm] [\s!math] [euler] [\s!default] + \starttypescript [euler-with-pagella] + \definetypeface [\typescriptone] [\s!rm] [\s!serif] [eulernova] [\s!default] + \definetypeface [\typescriptone] [\s!tt] [\s!mono] [dejavu] [\s!default] [\s!rscale=0.9] + \definetypeface [\typescriptone] [\s!mm] [\s!math] [pagellaovereuler] [\s!default] \quittypescriptscanning \stoptypescript diff --git a/tex/context/base/type-imp-hgz.mkiv b/tex/context/base/type-imp-hgz.mkiv index a3c2a7841..091adeb2d 100644 --- a/tex/context/base/type-imp-hgz.mkiv +++ b/tex/context/base/type-imp-hgz.mkiv @@ -1 +1 @@ -\input type-ghz.mkiv \endinput % can be file synonym +\input type-imp-ghz.mkiv \endinput % can be file synonym diff --git a/tex/context/base/type-imp-mathdigits.mkiv b/tex/context/base/type-imp-mathdigits.mkiv new file mode 100644 index 000000000..7a7e6b764 --- /dev/null +++ b/tex/context/base/type-imp-mathdigits.mkiv @@ -0,0 +1,53 @@ +%D \module +%D [ file=type-imp-hvmath, +%D version=2007.07.30, +%D title=\CONTEXT\ Typescript Macros, +%D subtitle=Xits, +%D author=Khaled Hosny \& Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. 
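%D A brief note on the collection defined below: the arabicindic and
%D extendedarabicindic digit ranges are pulled in from dejavusansmono (bold
%D from dejavusansmonobold) as forced fallbacks, offset onto the normal and
%D bold digit slots, while MathRoman itself remains xits-math with these
%D fallbacks attached.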
+ +% \starttypescript [math] [xits] [name] +% \definefontsynonym[MathRoman][file:xits-math.otf][features=math\mathsizesuffix,goodies=xits-math,fallbacks=mathdigits] +% \stoptypescript +% +% \usetypescript [mathdigits][dejavu][arabicindic] + +% \definefontsynonym [MathRomanDigitsXitsDejavu] [file:xits-math.otf] [features=math\mathsizesuffix,goodies=xits-math,fallbacks=mathdigits] +% +% \starttypescript [math] [xits] [name] +% \definefontsynonym [MathRoman] [MathRomanDigitsXitsDejavu] +% \stoptypescript +% +% \usetypescript [mathdigits][dejavu][arabicindic] + +\starttypescriptcollection[mathdigits] + + \starttypescript [mathdigits] [dejavu] [arabicindic,extendedarabicindic] + \resetfontfallback [mathdigits] + \definefontfallback [mathdigits] [dejavusansmono] [digits\typescriptthree] [check=yes,force=yes,offset=digitsnormal] + \definefontfallback [mathdigits] [dejavusansmonobold] [digits\typescriptthree] [check=yes,force=yes,offset=digitsbold] + \stoptypescript + + \starttypescript [mathdigits] [xits-dejavu] [arabicindic,extendedarabicindic] + \usetypescript [mathdigits] [dejavu] [\typescriptthree] + \definefontsynonym[MathRoman][file:xits-math.otf][features=math\mathsizesuffix,goodies=xits-math,fallbacks=mathdigits] + \stoptypescript + +\stoptypescriptcollection + +\continueifinputfile{type-imp-mathdigits.mkiv} + +% \usetypescriptfile[mathdigits] + +\usetypescript [mathdigits] [xits-dejavu] [arabicindic] + +\setupbodyfont[dejavu] + +\starttext + $3+2=5 \quad \bf 3+2=5$ +\stoptext diff --git a/tex/context/base/type-ini.mkvi b/tex/context/base/type-ini.mkvi index 0ce14ef9f..a4d576d80 100644 --- a/tex/context/base/type-ini.mkvi +++ b/tex/context/base/type-ini.mkvi @@ -601,6 +601,8 @@ \def\font_typescripts_inherit_check_step#style{\setevalue{\??typescriptinheritances#name:#style}{#parentclass}}% \processcommalist[#styles]\font_typescripts_inherit_check_step}}} +\let\font_typescripts_inherit_check_step\relax + %D This hooks into the font mechanism with: \def\font_typescripts_inherit_check_indeed#name% called often diff --git a/tex/context/base/type-set.mkiv b/tex/context/base/type-set.mkiv index bfbf7af9e..43e561722 100644 --- a/tex/context/base/type-set.mkiv +++ b/tex/context/base/type-set.mkiv @@ -95,4 +95,10 @@ %definefilesynonym [type-imp-mac.mkiv] [type-imp-osx.mkiv] %definefilesynonym [type-imp-win.mkiv] [type-imp-mscore.mkiv] +\definefilesynonym [type-imp-eulernovum.mkiv] [type-imp-euler.mkiv] +\definefilesynonym [type-imp-eulernova.mkiv] [type-imp-euler.mkiv] + +\definefilesynonym [type-imp-euler-with-pagella] [type-imp-euler.mkiv] +\definefilesynonym [type-imp-pagella-with-euler] [type-imp-euler.mkiv] + \protect \endinput diff --git a/tex/context/base/typo-bld.lua b/tex/context/base/typo-bld.lua index ed700add7..bc9f66ee4 100644 --- a/tex/context/base/typo-bld.lua +++ b/tex/context/base/typo-bld.lua @@ -31,6 +31,7 @@ constructors.attribute = a_parbuilder local unsetvalue = attributes.unsetvalue local texsetattribute = tex.setattribute local texnest = tex.nest +local texlists = tex.lists local nodepool = nodes.pool local new_baselineskip = nodepool.baselineskip @@ -183,3 +184,72 @@ commands.stopparbuilder = constructors.stop commands.setparbuilder = constructors.set commands.enableparbuilder = constructors.enable commands.disableparbuilder = constructors.disable + +-- todo: move from nodes.builders to builders + +nodes.builders = nodes.builder or { } +local builders = nodes.builders + +local actions = nodes.tasks.actions("vboxbuilders") + +function 
builders.vpack_filter(head,groupcode,size,packtype,maxdepth,direction) + local done = false + if head then + starttiming(builders) + if trace_vpacking then + local before = nodes.count(head) + head, done = actions(head,groupcode,size,packtype,maxdepth,direction) + local after = nodes.count(head) + if done then + nodes.processors.tracer("vpack","changed",head,groupcode,before,after,true) + else + nodes.processors.tracer("vpack","unchanged",head,groupcode,before,after,true) + end + else + head, done = actions(head,groupcode) + end + stoptiming(builders) + end + return head, done +end + +-- This one is special in the sense that it has no head and we operate on the mlv. Also, +-- we need to do the vspacing last as it removes items from the mvl. + +local actions = nodes.tasks.actions("mvlbuilders") + +local function report(groupcode,head) + report_page_builder("trigger: %s",groupcode) + report_page_builder(" vsize : %p",tex.vsize) + report_page_builder(" pagegoal : %p",tex.pagegoal) + report_page_builder(" pagetotal: %p",tex.pagetotal) + report_page_builder(" list : %s",head and nodeidstostring(head) or "") +end + +function builders.buildpage_filter(groupcode) + local head, done = texlists.contrib_head, false + if head then + starttiming(builders) + if trace_page_builder then + report(groupcode,head) + end + head, done = actions(head,groupcode) + stoptiming(builders) + -- -- doesn't work here (not passed on?) + -- tex.pagegoal = tex.vsize - tex.dimen.d_page_floats_inserted_top - tex.dimen.d_page_floats_inserted_bottom + texlists.contrib_head = head + return done and head or true + else + if trace_page_builder then + report(groupcode) + end + return nil, false + end +end + +callbacks.register('vpack_filter', builders.vpack_filter, "vertical spacing etc") +callbacks.register('buildpage_filter', builders.buildpage_filter, "vertical spacing etc (mvl)") + +statistics.register("v-node processing time", function() + return statistics.elapsedseconds(builders) +end) diff --git a/tex/context/base/typo-brk.lua b/tex/context/base/typo-brk.lua index d6326ebeb..3558efa8e 100644 --- a/tex/context/base/typo-brk.lua +++ b/tex/context/base/typo-brk.lua @@ -29,7 +29,7 @@ local remove_node = nodes.remove -- ! 
nodes local tonodes = nodes.tonodes -local texattribute = tex.attribute +local texsetattribute = tex.setattribute local unsetvalue = attributes.unsetvalue local nodepool = nodes.pool @@ -61,7 +61,6 @@ breakpoints.methods = breakpoints.methods or { } local methods = breakpoints.methods local a_breakpoints = attributes.private("breakpoint") -breakpoints.attribute = a_breakpoints storage.register("typesetters/breakpoints/mapping", breakpoints.mapping, "typesetters.breakpoints.mapping") @@ -155,8 +154,8 @@ methods[5] = function(head,start,settings) -- x => p q r return head, start end -local function process(namespace,attribute,head) - local done, numbers = false, languages.numbers +function breakpoints.handler(head) + local done, numbers = false, languages.numbers local start, n = head, 0 while start do local id = start.id @@ -282,15 +281,9 @@ function breakpoints.set(n) n = n.number end end - texattribute[a_breakpoints] = n + texsetattribute(a_breakpoints,n) end -breakpoints.handler = nodes.installattributehandler { - name = "breakpoint", - namespace = breakpoints, - processor = process, -} - -- function breakpoints.enable() -- tasks.enableaction("processors","typesetters.breakpoints.handler") -- end diff --git a/tex/context/base/typo-cap.lua b/tex/context/base/typo-cap.lua index fdbf2e353..2988d5474 100644 --- a/tex/context/base/typo-cap.lua +++ b/tex/context/base/typo-cap.lua @@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['typo-cap'] = { local next, type = next, type local format, insert = string.format, table.insert -local div = math.div +local div, randomnumber = math.div, math.random local trace_casing = false trackers.register("typesetters.casing", function(v) trace_casing = v end) @@ -16,12 +16,9 @@ local report_casing = logs.reporter("typesetting","casing") local nodes, node = nodes, node -local traverse_id = node.traverse_id -local copy_node = node.copy -local end_of_math = node.end_of_math +local copy_node = nodes.copy +local end_of_math = nodes.end_of_math -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue local nodecodes = nodes.nodecodes local skipcodes = nodes.skipcodes @@ -29,6 +26,7 @@ local kerncodes = nodes.kerncodes local glyph_code = nodecodes.glyph local kern_code = nodecodes.kern +local disc_code = nodecodes.disc local math_code = nodecodes.math local kerning_code = kerncodes.kerning @@ -44,6 +42,8 @@ local variables = interfaces.variables local v_reset = variables.reset local chardata = characters.data +local texsetattribute = tex.setattribute +local unsetvalue = attributes.unsetvalue typesetters = typesetters or { } local typesetters = typesetters @@ -53,83 +53,125 @@ local cases = typesetters.cases cases.actions = { } local actions = cases.actions -cases.attribute = c_cases -- no longer needed local a_cases = attributes.private("case") -local lastfont = nil +local extract = bit32.extract +local run = 0 -- a trick to make neighbouring ranges work --- we use char(0) as placeholder for the larger font, so we need to remove it --- before it can do further harm +local function set(tag,font) + if run == 2^6 then + run = 1 + else + run = run + 1 + end + return font * 0x10000 + tag * 0x100 + run +end + +local function get(a) + local font = extract(a,16,12) -- 4000 + local tag = extract(a, 8, 8) -- 250 + local run = extract(a, 0, 8) -- 50 + return tag, font, run +end + +-- print(get(set( 1, 0))) +-- print(get(set( 1, 99))) +-- print(get(set( 2, 96))) +-- print(get(set( 30, 922))) +-- print(get(set(250,4000))) + +-- a previous 
implementation used char(0) as placeholder for the larger font, so we needed +-- to remove it before it can do further harm ... that was too tricky as we use char 0 for +-- other cases too -- --- we could do the whole glyph run here (till no more attributes match) but --- then we end up with more code .. maybe i will clean this up anyway as the --- lastfont hack is somewhat ugly .. on the other hand, we need to deal with --- cases like: +-- we could do the whole glyph run here (till no more attributes match) but then we end up +-- with more code .. maybe i will clean this up anyway as the lastfont hack is somewhat ugly +-- ... on the other hand, we need to deal with cases like: -- -- \WORD {far too \Word{many \WORD{more \word{pushed} in between} useless} words} local uccodes = characters.uccodes local lccodes = characters.lccodes -local function helper(start, codes, special, attribute, once) +-- true false true == mixed + +local function helper(start,attr,lastfont,n,codes,special,once,keepother) local char = start.char - local dc = codes[char] + local dc = codes[char] if dc then local fnt = start.font - if special then - -- will become function - if start.char == 0 then - lastfont = fnt - local prev, next = start.prev, start.next - prev.next = next - if next then - next.prev = prev - end - return prev, true - elseif lastfont and start.prev.id ~= glyph_code then - fnt = lastfont - start.font = lastfont + if keepother and dc == char then + local lfa = lastfont[n] + if lfa then + start.font = lfa + return start, true + else + return start, false end - end - local ifc = fontchar[fnt] - if type(dc) == "table" then - local ok = true - for i=1,#dc do - ok = ok and ifc[dc[i]] + else + if special then + local lfa = lastfont[n] + if lfa then + local previd = start.prev.id + if previd ~= glyph_code and previd ~= disc_code then + fnt = lfa + setfield(start,"font",lfa) + end + end end - if ok then - -- tood; use generic injector - local prev, original = start, start + local ifc = fontchar[fnt] + if type(dc) == "table" then + local ok = true for i=1,#dc do - local chr = dc[i] - prev = start - if i == 1 then - start.char = chr - else - local g = copy_node(original) - g.char = chr - local next = start.next - g.prev = start - if next then - g.next = next - start.next = g - next.prev = g + -- could be cached in font + if not ifc[dc[i]] then + ok = false + break + end + end + if ok then + -- todo: use generic injector + local prev = start + local original = start + for i=1,#dc do + local chr = dc[i] + prev = start + if i == 1 then + start.char = chr + else + local g = copy_node(original) + g.char = chr + local next = start.next + g.prev = start + if next then + g.next = next + start.next = g + next.prev = g + end + start = g end - start = g end + if once then + lastfont[n] = false + end + return prev, true + end + if once then + lastfont[n] = false end - if once then lastfont = nil end - return prev, true + return start, false + elseif ifc[dc] then + start.char = dc + if once then + lastfont[n] = false + end + return start, true end - if once then lastfont = nil end - return start, false - elseif ifc[dc] then - start.char = dc - if once then lastfont = nil end - return start, true end end - if once then lastfont = nil end + if once then + lastfont[n] = false + end return start, false end @@ -150,73 +192,91 @@ end cases.register = register -local function WORD(start,attribute) - lastfont = nil - return helper(start,uccodes) +local function WORD(start,attr,lastfont,n) + lastfont[n] = false + return 
helper(start,attr,lastfont,n,uccodes) end -local function word(start,attribute) - lastfont = nil - return helper(start,lccodes) +local function word(start,attr,lastfont,n) + lastfont[n] = false + return helper(start,attr,lastfont,n,lccodes) end -local function Word(start,attribute,attr) - lastfont = nil +local function blockrest(start) + local n = start.next + while n do + local id = n.id + if id == glyph_code or id == disc_node and n[a_cases] == attr then + n[a_cases] = unsetvalue + else + -- break -- we can have nested mess + end + n = n.next + end +end + +local function Word(start,attr,lastfont,n) -- looks quite complex + lastfont[n] = false local prev = start.prev if prev and prev.id == kern_code and prev.subtype == kerning_code then prev = prev.prev end - if not prev or prev.id ~= glyph_code then - --- only the first character is treated - for n in traverse_id(glyph_code,start.next) do - if n[attribute] == attr then - n[attribute] = unsetvalue - else - -- break -- we can have nested mess - end - end + if not prev then + blockrest(start) + return helper(start,attr,lastfont,n,uccodes) + end + local previd = prev.id + if previd ~= glyph_code and previd ~= disc_code then + -- only the first character is treated + blockrest(start) -- we could return the last in the range and save some scanning -- but why bother - return helper(start,uccodes) + return helper(start,attr,lastfont,n,uccodes) else return start, false end end -local function Words(start,attribute) - lastfont = nil +local function Words(start,attr,lastfont,n) + lastfont[n] = false local prev = start.prev if prev and prev.id == kern_code and prev.subtype == kerning_code then prev = prev.prev end - if not prev or prev.id ~= glyph_code then - return helper(start,uccodes) + if not prev then + return helper(start,attr,lastfont,n,uccodes) + end + local previd = prev.id + if previd ~= glyph_code and previd ~= disc_code then + return helper(start,attr,lastfont,n,uccodes) else return start, false end end -local function capital(start,attribute) -- 3 - return helper(start,uccodes,true,attribute,true) +local function capital(start,attr,lastfont,n) -- 3 + return helper(start,attr,lastfont,n,uccodes,true,true) end -local function Capital(start,attribute) -- 4 - return helper(start,uccodes,true,attribute,false) +local function Capital(start,attr,lastfont,n) -- 4 + return helper(start,attr,lastfont,n,uccodes,true,false) end -local function none(start) +local function mixed(start,attr,lastfont,n) + return helper(start,attr,lastfont,n,uccodes,false,false,true) +end + +local function none(start,attr,lastfont,n) return start, false end -local function random(start) - lastfont = nil - local ch = start.char - local mr = math.random - -- local tfm = fontdata[start.font].characters +local function random(start,attr,lastfont,n) + lastfont[n] = false + local ch = start.char local tfm = fontchar[start.font] if lccodes[ch] then while true do - local d = chardata[mr(1,0xFFFF)] + local d = chardata[randomnumber(1,0xFFFF)] if d then local uc = uccodes[d] if uc and tfm[uc] then -- this also intercepts tables @@ -227,7 +287,7 @@ local function random(start) end elseif uccodes[ch] then while true do - local d = chardata[mr(1,0xFFFF)] + local d = chardata[randomnumber(1,0xFFFF)] if d then local lc = lccodes[d] if lc and tfm[lc] then -- this also intercepts tables @@ -248,36 +308,67 @@ register(variables.capital, capital) -- 5 register(variables.Capital, Capital) -- 6 register(variables.none, none) -- 7 (dummy) register(variables.random, random) -- 8 
+register(variables.mixed, mixed) -- 9 register(variables.cap, variables.capital) -- clone register(variables.Cap, variables.Capital) -- clone --- node.traverse_id_attr - -local function process(namespace,attribute,head) -- not real fast but also not used on much data - lastfont = nil +function cases.handler(head) -- not real fast but also not used on much data + local lastfont = { } local lastattr = nil - local done = false - local start = head + local done = false + local start = head while start do -- while because start can jump ahead local id = start.id if id == glyph_code then - local attr = start[attribute] + local attr = start[a_cases] if attr and attr > 0 then if attr ~= lastattr then - lastfont = nil lastattr = attr end - start[attribute] = unsetvalue - local action = actions[attr%100] -- map back to low number + start[a_cases] = unsetvalue + local n, id, m = get(attr) + if lastfont[n] == nil then + lastfont[n] = id + end + local action = actions[n] -- map back to low number if action then - start, ok = action(start,attribute,attr) - done = done and ok + start, ok = action(start,attr,lastfont,n) + if ok then + done = true + end if trace_casing then - report_casing("case trigger %a, instance %a, result %a",attr%100,div(attr,100),ok) + report_casing("case trigger %a, instance %a, fontid %a, result %a",n,m,id,ok) end elseif trace_casing then - report_casing("unknown case trigger %a",attr) + report_casing("unknown case trigger %a",n) + end + end + elseif id == disc_code then + local attr = start[a_cases] + if attr and attr > 0 then + if attr ~= lastattr then + lastattr = attr + end + start[a_cases] = unsetvalue + local n, id, m = get(attr) + if lastfont[n] == nil then + lastfont[n] = id + end + local action = actions[n] -- map back to low number + if action then + local replace = start.replace + if replace then + action(replace,attr,lastfont,n) + end + local pre = start.pre + if pre then + action(pre,attr,lastfont,n) + end + local post = start.post + if post then + action(post,attr,lastfont,n) + end end end elseif id == math_code then @@ -287,13 +378,12 @@ local function process(namespace,attribute,head) -- not real fast but also not u start = start.next end end - lastfont = nil return head, done end -local m, enabled = 0, false -- a trick to make neighbouring ranges work +local enabled = false -function cases.set(n) +function cases.set(n,id) if n == v_reset then n = unsetvalue else @@ -306,26 +396,15 @@ function cases.set(n) end enabled = true end - if m == 100 then - m = 1 - else - m = m + 1 - end - n = m * 100 + n + n = set(n,id) else n = unsetvalue end end - texattribute[a_cases] = n + texsetattribute(a_cases,n) -- return n -- bonus end -cases.handler = nodes.installattributehandler { - name = "case", - namespace = cases, - processor = process, -} - -- interface commands.setcharactercasing = cases.set diff --git a/tex/context/base/typo-cap.mkiv b/tex/context/base/typo-cap.mkiv index d2f1dc0e0..c4458129f 100644 --- a/tex/context/base/typo-cap.mkiv +++ b/tex/context/base/typo-cap.mkiv @@ -22,6 +22,29 @@ \definesystemattribute[case][public] +%D \macros +%D {setupcapitals} +%D +%D By default we use pseudo small caps in titles. This can be +%D set up with: +%D +%D \showsetup{setupcapitals} + +\installcorenamespace{capitals} + +\installcommandhandler \??capitals {capitals} \??capitals + +%D Beware, these are not really defines (yet). 
+ +\definecapitals[\v!WORD] % all upper +\definecapitals[\v!capital] % one upper + font +\definecapitals[\v!Capital] % some upper + font +\definecapitals[\v!mixed] % UpperCase +\definecapitals[\v!WORD] % all lower +\definecapitals[\v!Word] % one upper + font +\definecapitals[\v!Words] % some upper +\definecapitals[\v!word][\c!style=] % nothing + %D \macros %D {Word, Words, WORD, WORDS} %D @@ -53,7 +76,7 @@ % test \Word{test TEST \TeX} test \unexpanded\def\setcharactercasing[#1]% - {\ctxcommand{setcharactercasing("#1")}} + {\ctxcommand{setcharactercasing("#1",\number\fontid\font)}} % todo: names casings @@ -141,21 +164,40 @@ % % here we keep the \groupedcommand -\unexpanded\def\pseudosmallcapped{\groupedcommand{\setcharactercasing [\v!WORD ]\signalcharacter\tx}{}} % all upper -\unexpanded\def\pseudoSmallcapped{\groupedcommand{\setcharactercasing [\v!capital]\signalcharacter\tx}{}} % one upper + font -\unexpanded\def\pseudoSmallCapped{\groupedcommand{\setcharactercasing [\v!Capital]\signalcharacter\tx}{}} % some upper + font +\def\typo_capitals_set_fake#1% + {\edef\currentcapitals{#1}% + %\setcharactercasing[\currentcapitals]% + \ctxcommand{setcharactercasing("\currentcapitals",\number\fontid\font)}% + \signalcharacter % retain current style + \usecapitalsstyleparameter\c!style} + +\def\typo_capitals_set_real#1% + {\edef\currentcapitals{#1}% + \sc + %\setcharactercasing[\currentcapitals]} + \ctxcommand{setcharactercasing("\currentcapitals",\number\fontid\font)}} -\unexpanded\def\realsmallcapped {\groupedcommand{\sc\setcharactercasing[\v!WORD ]}{}} % all lower -\unexpanded\def\realSmallcapped {\groupedcommand{\sc\setcharactercasing[\v!Word ]}{}} % one upper + font -\unexpanded\def\realSmallCapped {\groupedcommand{\sc\setcharactercasing[\v!Words ]}{}} % some upper +\unexpanded\def\pseudosmallcapped{\groupedcommand{\typo_capitals_set_fake\v!WORD }\donothing} % all upper +\unexpanded\def\pseudoSmallcapped{\groupedcommand{\typo_capitals_set_fake\v!capital}\donothing} % one upper + font +\unexpanded\def\pseudoSmallCapped{\groupedcommand{\typo_capitals_set_fake\v!Capital}\donothing} % some upper + font +\unexpanded\def\pseudoMixedCapped{\groupedcommand{\typo_capitals_set_fake\v!mixed }\donothing} % UpperCase -\unexpanded\def\font_style_pseudosmallcapped{\setcharactercasing [\v!WORD ]\signalcharacter\tx} % all upper -\unexpanded\def\font_style_pseudoSmallcapped{\setcharactercasing [\v!capital]\signalcharacter\tx} % one upper + font -\unexpanded\def\font_style_pseudoSmallCapped{\setcharactercasing [\v!Capital]\signalcharacter\tx} % some upper + font +\unexpanded\def\realsmallcapped {\groupedcommand{\typo_capitals_set_real\v!WORD }\donothing} % all lower +\unexpanded\def\realSmallcapped {\groupedcommand{\typo_capitals_set_real\v!Word }\donothing} % one upper + font +\unexpanded\def\realSmallCapped {\groupedcommand{\typo_capitals_set_real\v!Words }\donothing} % some upper -\unexpanded\def\font_style_realsmallcapped {\sc\setcharactercasing[\v!WORD ]} % all lower -\unexpanded\def\font_style_realSmallcapped {\sc\setcharactercasing[\v!Word ]} % one upper + font -\unexpanded\def\font_style_realSmallCapped {\sc\setcharactercasing[\v!Words ]} % some upper +\unexpanded\def\notsmallcapped {\groupedcommand{\typo_capitals_set_fake\v!word }\donothing} + +\unexpanded\def\font_style_pseudosmallcapped{\typo_capitals_set_fake\v!WORD } % all upper +\unexpanded\def\font_style_pseudoSmallcapped{\typo_capitals_set_fake\v!capital} % one upper + font 
+\unexpanded\def\font_style_pseudoSmallCapped{\typo_capitals_set_fake\v!Capital} % some upper + font +\unexpanded\def\font_style_pseudoMixedCapped{\typo_capitals_set_fake\v!mixed } + +\unexpanded\def\font_style_realsmallcapped {\typo_capitals_set_real\v!WORD } % all lower +\unexpanded\def\font_style_realSmallcapped {\typo_capitals_set_real\v!Word } % one upper + font +\unexpanded\def\font_style_realSmallCapped {\typo_capitals_set_real\v!Words } % some upper + +\unexpanded\def\font_style_notsmallcapped {\typo_capitals_set_fake\v!word } \unexpanded\def\typo_capitals_smallcaps {\ifconditional\c_typo_capitals_pseudo @@ -182,6 +224,8 @@ \let\kap\cap % for old times sake \let\Caps\SmallCapped % for old times sake +\let\mixedcaps\pseudoMixedCapped + \let\normalsmallcapped\smallcapped \let\normalWORD \WORD \let\normalword \word @@ -190,33 +234,21 @@ \let\font_style_normalWORD \WORD \let\font_style_normalword \word -%D As suggested by WS: - -\unexpanded\def\notsmallcapped{\groupedcommand{\setcharactercasing[\v!word]\signalcharacter}{}} - -%D \macros -%D {setupcapitals} -%D -%D By default we use pseudo small caps in titles. This can be -%D set up with: -%D -%D \showsetup{setupcapitals} - -\installcorenamespace{capitals} - -\installsetuponlycommandhandler \??capitals {capitals} +%D Further tweaks \let\normalsmallcapped\smallcapped \appendtoks - \doifelse{\directcapitalsparameter\c!title}\v!yes - {\definealternativestyle[\v!capital ][\font_style_normalsmallcapped][\font_style_normalsmallcapped]% - \definealternativestyle[\v!smallcaps][\setsmallcaps][\setsmallcaps]} - {\definealternativestyle[\v!capital ][\font_style_normalsmallcapped][\font_style_normalWORD]% - \definealternativestyle[\v!smallcaps][\setsmallcaps][\font_style_normalWORD]}% - \doifelse{\directcapitalsparameter\s!sc}\v!yes - \userealcaps - \usepseudocaps + \ifx\currentcapitals\empty + \doifelse{\directcapitalsparameter\c!title}\v!yes + {\definealternativestyle[\v!capital ][\font_style_normalsmallcapped][\font_style_normalsmallcapped]% + \definealternativestyle[\v!smallcaps][\setsmallcaps][\setsmallcaps]} + {\definealternativestyle[\v!capital ][\font_style_normalsmallcapped][\font_style_normalWORD]% + \definealternativestyle[\v!smallcaps][\setsmallcaps][\font_style_normalWORD]}% + \doifelse{\directcapitalsparameter\s!sc}\v!yes + \userealcaps + \usepseudocaps + \fi \to \everysetupcapitals \let\uppercased\normalWORD @@ -224,8 +256,30 @@ \setupcapitals [\c!title=\v!yes, + \c!style=\tx, \s!sc=\v!no] % no \c!sc any longer +% \definefont +% [MixedCaps] +% [\v_font_string_a\v_font_string_c*default cp \the\exheight] +% +% \definefont +% [MixedCaps] +% [MixedCaps*default cp \the\exheight] + +\definefontfeature + [mixeddefault] + [default] + [extend=1.2] + +\definefont + [MixedCaps] + [CurrentFont*default,mixeddefault cp \the\exheight] + +\setupcapitals + [\v!mixed] + [\c!style=MixedCaps] + % \definestartstop is not yet in available at core-spa time % % \startrandomized \input tufte \stoprandomized diff --git a/tex/context/base/typo-cln.lua b/tex/context/base/typo-cln.lua index be00ac10d..2aa05b6d1 100644 --- a/tex/context/base/typo-cln.lua +++ b/tex/context/base/typo-cln.lua @@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['typo-cln'] = { } -- This quick and dirty hack took less time than listening to a CD (In --- this case Dream Theaters' Octavium. Of course extensions will take +-- this case Dream Theaters' Octavium). Of course extensions will take -- more time. 
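-- A short descriptive note on the handler below, roughly: glyphs are scanned
-- until a resetter character shows up; the first glyph that follows and
-- carries cleaner attribute 1 (the only cleaner defined so far, set through
-- cleaners.set alias commands.setcharactercleaning) is replaced by its
-- uppercase code, table-valued uppercase mappings being left alone.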
local utfbyte = utf.byte @@ -26,7 +26,7 @@ local variables = interfaces.variables local nodecodes = nodes.nodecodes local tasks = nodes.tasks -local texattribute = tex.attribute +local texsetattribute = tex.setattribute local traverse_id = node.traverse_id @@ -46,14 +46,14 @@ local resetter = { -- this will become an entry in char-def -- the other hand we might want to apply casing afterwards. So, -- cleaning comes first. -local function process(namespace,attribute,head) +function cleaners.handler(head) local inline, done = false, false for n in traverse_id(glyph_code,head) do local char = n.char if resetter[char] then inline = false elseif not inline then - local a = n[attribute] + local a = n[a_cleaner] if a == 1 then -- currently only one cleaner so no need to be fancy local upper = uccodes[char] if type(upper) == "table" then @@ -78,7 +78,7 @@ local enabled = false function cleaners.set(n) if n == variables.reset or not tonumber(n) or n == 0 then - texattribute[a_cleaner] = unsetvalue + texsetattribute(a_cleaner,unsetvalue) else if not enabled then tasks.enableaction("processors","typesetters.cleaners.handler") @@ -87,16 +87,10 @@ function cleaners.set(n) end enabled = true end - texattribute[a_cleaner] = n + texsetattribute(a_cleaner,n) end end -cleaners.handler = nodes.installattributehandler { - name = "cleaner", - namespace = cleaners, - processor = process, -} - -- interface commands.setcharactercleaning = cleaners.set diff --git a/tex/context/base/typo-del.mkiv b/tex/context/base/typo-del.mkiv index 82cc7472d..603471f75 100644 --- a/tex/context/base/typo-del.mkiv +++ b/tex/context/base/typo-del.mkiv @@ -334,6 +334,8 @@ \def\typo_delimited_start_par {\dosingleempty\typo_delimited_start_par_indeed} +\let\typo_delimited_stop_par_indeed\endgraf + \def\typo_delimited_start_par_indeed[#1]% {\let\typo_delimited_stop\typo_delimited_stop_par \doifsomething{\delimitedtextparameter\c!spacebefore} diff --git a/tex/context/base/typo-dha.lua b/tex/context/base/typo-dha.lua new file mode 100644 index 000000000..d5ad66e7e --- /dev/null +++ b/tex/context/base/typo-dha.lua @@ -0,0 +1,398 @@ +if not modules then modules = { } end modules ['typo-dha'] = { + version = 1.001, + comment = "companion to typo-dir.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Some analysis by Idris: +-- +-- 1. Assuming the reading- vs word-order distinction (bidi-char types) is governing; +-- 2. Assuming that 'ARAB' represents an actual arabic string in raw input order, not word-order; +-- 3. 
Assuming that 'BARA' represent the correct RL word order; +-- +-- Then we have, with input: LATIN ARAB +-- +-- \textdir TLT LATIN ARAB => LATIN BARA +-- \textdir TRT LATIN ARAB => LATIN BARA +-- \textdir TRT LRO LATIN ARAB => LATIN ARAB +-- \textdir TLT LRO LATIN ARAB => LATIN ARAB +-- \textdir TLT RLO LATIN ARAB => NITAL ARAB +-- \textdir TRT RLO LATIN ARAB => NITAL ARAB + +-- elseif d == "es" then -- European Number Separator +-- elseif d == "et" then -- European Number Terminator +-- elseif d == "cs" then -- Common Number Separator +-- elseif d == "nsm" then -- Non-Spacing Mark +-- elseif d == "bn" then -- Boundary Neutral +-- elseif d == "b" then -- Paragraph Separator +-- elseif d == "s" then -- Segment Separator +-- elseif d == "ws" then -- Whitespace +-- elseif d == "on" then -- Other Neutrals + +-- todo : delayed inserts here +-- todo : get rid of local functions here +-- beware: math adds whatsits afterwards so that will mess things up +-- todo : use new dir functions +-- todo : make faster +-- todo : move dir info into nodes +-- todo : swappable tables and floats i.e. start-end overloads (probably loop in builders) +-- todo : check if we still have crashes in luatex when non-matched (used to be the case) + +-- I removed the original tracing code and now use the colorful one. If I ever want to change +-- something I will just inject prints for tracing. + +local nodes, node = nodes, node + +local trace_directions = false trackers.register("typesetters.directions.default", function(v) trace_directions = v end) + +local report_directions = logs.reporter("typesetting","text directions") + + +local insert_node_before = nodes.insert_before +local insert_node_after = nodes.insert_after +local remove_node = nodes.remove +local end_of_math = nodes.end_of_math + +local nodepool = nodes.pool + +local nodecodes = nodes.nodecodes +local whatcodes = nodes.whatcodes +local mathcodes = nodes.mathcodes + +local glyph_code = nodecodes.glyph +local whatsit_code = nodecodes.whatsit +local math_code = nodecodes.math +local penalty_code = nodecodes.penalty +local kern_code = nodecodes.kern +local glue_code = nodecodes.glue +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist + +local localpar_code = whatcodes.localpar +local dir_code = whatcodes.dir + +local new_textdir = nodepool.textdir + +local hasbit = number.hasbit +local formatters = string.formatters +local insert = table.insert + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers +local fontchar = fonthashes.characters + +local chardirections = characters.directions +local charmirrors = characters.mirrors +local charclasses = characters.textclasses + +local directions = typesetters.directions +local setcolor = directions.setcolor +local getglobal = directions.getglobal + +local a_state = attributes.private('state') +local a_directions = attributes.private('directions') + +local strip = false + +local s_isol = fonts.analyzers.states.isol + +local function stopdir(finish) + return new_textdir(finish == "TRT" and "-TRT" or "-TLT") +end + +local function startdir(finish) + return new_textdir(finish == "TRT" and "+TRT" or "+TLT") +end + +local function process(start) + + local head = start + + local current = head + local inserted = nil + local finish = nil + local autodir = 0 + local embedded = 0 + local override = 0 + local pardir = 0 + local textdir = 0 + local done = false + local finished = nil + local finidir = nil + local stack = { } + local top = 0 + local obsolete = { } + local lro = false + local 
lro = false + local prevattr = false + local fences = { } + + local function finish_auto_before() + head, inserted = insert_node_before(head,current,stopdir(finish)) + finished, finidir, autodir = inserted, finish, 0 + finish, done = nil, true + end + + local function finish_auto_after() + head, current = insert_node_after(head,current,stopdir(finish)) + finished, finidir, autodir = current, finish, 0 + finish, done = nil, true + end + + local function force_auto_left_before(direction) + if finish then + head, inserted = insert_node_before(head,current,stopdir(finish)) + finished = inserted + finidir = finish + end + if embedded >= 0 then + finish, autodir = "TLT", 1 + else + finish, autodir = "TRT", -1 + end + done = true + if finidir == finish then + head = remove_node(head,finished,true) + else + head, inserted = insert_node_before(head,current,startdir(finish)) + end + end + + local function force_auto_right_before(direction) + if finish then + head, inserted = insert_node_before(head,current,stopdir(finish)) + finished = inserted + finidir = finish + end + if embedded <= 0 then + finish, autodir, done = "TRT", -1 + else + finish, autodir, done = "TLT", 1 + end + done = true + if finidir == finish then + head = remove_node(head,finished,true) + else + head, inserted = insert_node_before(head,current,startdir(finish)) + end + end + + local function nextisright(current) + current = current.next + local id = current.id + if id == glyph_code then + local character = current.char + local direction = chardirections[character] + return direction == "r" or direction == "al" or direction == "an" + end + end + + local function previsright(current) + current = current.prev + local id = current.id + if id == glyph_code then + local char = current.char + local direction = chardirections[character] + return direction == "r" or direction == "al" or direction == "an" + end + end + + while current do + local id = current.id + if id == math_code then + current = end_of_math(current.next).next + else + local attr = current[a_directions] + if attr and attr > 0 and attr ~= prevattr then + if not getglobal(a) then + lro, rlo = false, false + end + prevattr = attr + end + if id == glyph_code then + if attr and attr > 0 then + local character = current.char + local direction = chardirections[character] + local reversed = false + if rlo or override > 0 then + if direction == "l" then + direction = "r" + reversed = true + end + elseif lro or override < 0 then + if direction == "r" or direction == "al" then + current[a_state] = s_isol + direction = "l" + reversed = true + end + end + if direction == "on" then + local mirror = charmirrors[character] + if mirror and fontchar[current.font][mirror] then + local class = charclasses[character] + if class == "open" then + if nextisright(current) then + if autodir >= 0 then + force_auto_right_before(direction) + end + current.char = mirror + done = true + elseif autodir < 0 then + current.char = mirror + done = true + else + mirror = false + end + local fencedir = autodir == 0 and textdir or autodir + fences[#fences+1] = fencedir + elseif class == "close" and #fences > 0 then + local fencedir = fences[#fences] + fences[#fences] = nil + if fencedir < 0 then + current.char = mirror + done = true + force_auto_right_before(direction) + else + mirror = false + end + elseif autodir < 0 then + current.char = mirror + done = true + else + mirror = false + end + end + if trace_directions then + setcolor(current,direction,false,mirror) + end + elseif direction == "l" then + if 
trace_directions then + setcolor(current,"l",reversed) + end + if autodir <= 0 then -- could be option + force_auto_left_before(direction) + end + elseif direction == "r" then + if trace_directions then + setcolor(current,"r",reversed) + end + if autodir >= 0 then + force_auto_right_before(direction) + end + elseif direction == "en" then -- european number + if trace_directions then + setcolor(current,"l") + end + if autodir <= 0 then -- could be option + force_auto_left_before(direction) + end + elseif direction == "al" then -- arabic number + if trace_directions then + setcolor(current,"r") + end + if autodir >= 0 then + force_auto_right_before(direction) + end + elseif direction == "an" then -- arabic number + if trace_directions then + setcolor(current,"r") + end + if autodir >= 0 then + force_auto_right_before(direction) + end + elseif direction == "lro" then -- Left-to-Right Override -> right becomes left + top = top + 1 + stack[top] = { override, embedded } + override = -1 + obsolete[#obsolete+1] = current + elseif direction == "rlo" then -- Right-to-Left Override -> left becomes right + top = top + 1 + stack[top] = { override, embedded } + override = 1 + obsolete[#obsolete+1] = current + elseif direction == "lre" then -- Left-to-Right Embedding -> TLT + top = top + 1 + stack[top] = { override, embedded } + embedded = 1 + obsolete[#obsolete+1] = current + elseif direction == "rle" then -- Right-to-Left Embedding -> TRT + top = top + 1 + stack[top] = { override, embedded } + embedded = -1 + obsolete[#obsolete+1] = current + elseif direction == "pdf" then -- Pop Directional Format + if top > 0 then + local s = stack[top] + override, embedded = s[1], s[2] + top = top - 1 + end + obsolete[#obsolete+1] = current + else + setcolor(current) + end + else + -- we do nothing + end + elseif id == whatsit_code then + local subtype = current.subtype + if subtype == localpar_code then + local dir = current.dir + if dir == 'TRT' then + autodir = -1 + elseif dir == 'TLT' then + autodir = 1 + end + pardir = autodir + textdir = pardir + elseif subtype == dir_code then + -- todo: also treat as lro|rlo and stack + if finish then + finish_auto_before() + end + local dir = current.dir + if dir == "+TRT" then + finish, autodir = "TRT", -1 + elseif dir == "-TRT" then + finish, autodir = nil, 0 + elseif dir == "+TLT" then + finish, autodir = "TLT", 1 + elseif dir == "-TLT" then + finish, autodir = nil, 0 + end + textdir = autodir + else + if finish then + finish_auto_before() + end + end + elseif finish then + finish_auto_before() + end + local cn = current.next + if cn then + -- we're okay + elseif finish then + finish_auto_after() + end + current = cn + end + end + + if done and strip then + local n = #obsolete + if n > 0 then + for i=1,n do + remove_node(head,obsolete[i],true) + end + report_directions("%s character nodes removed",n) + end + end + + return head, done + +end + +directions.installhandler(interfaces.variables.default,process) + diff --git a/tex/context/base/typo-dig.lua b/tex/context/base/typo-dig.lua index 62d17fa3b..ef05e62da 100644 --- a/tex/context/base/typo-dig.lua +++ b/tex/context/base/typo-dig.lua @@ -24,7 +24,7 @@ local traverse_id = node.traverse_id local insert_node_before = node.insert_before local insert_node_after = node.insert_after -local texattribute = tex.attribute +local texsetattribute = tex.setattribute local unsetvalue = attributes.unsetvalue local nodecodes = nodes.nodecodes @@ -55,7 +55,6 @@ digits.actions = { } local actions = digits.actions local a_digits = 
attributes.private("digits") -digits.attribute = a_digits -- at some point we can manipulate the glyph node so then i need -- to rewrite this then @@ -83,7 +82,7 @@ function nodes.aligned(head,start,stop,width,how) end end -actions[1] = function(head,start,attribute,attr) +actions[1] = function(head,start,attr) local font = start.font local char = start.char local unic = chardata[font][char].tounicode @@ -102,16 +101,16 @@ actions[1] = function(head,start,attribute,attr) return head, start, false end -local function process(namespace,attribute,head) +function digits.handler(head) local done, current, ok = false, head, false while current do if current.id == glyph_code then - local attr = current[attribute] + local attr = current[a_digits] if attr and attr > 0 then - current[attribute] = unsetvalue + current[a_digits] = unsetvalue local action = actions[attr%100] -- map back to low number if action then - head, current, ok = action(head,current,attribute,attr) + head, current, ok = action(head,current,attr) done = done and ok elseif trace_digits then report_digits("unknown digit trigger %a",attr) @@ -148,15 +147,9 @@ function digits.set(n) -- number or 'reset' n = unsetvalue end end - texattribute[a_digits] = n + texsetattribute(a_digits,n) end -digits.handler = nodes.installattributehandler { -- we could avoid this wrapper - name = "digits", - namespace = digits, - processor = process, -} - -- interface commands.setdigitsmanipulation = digits.set diff --git a/tex/context/base/typo-dir.lua b/tex/context/base/typo-dir.lua index 7e5f8c2d3..a04028452 100644 --- a/tex/context/base/typo-dir.lua +++ b/tex/context/base/typo-dir.lua @@ -6,37 +6,72 @@ if not modules then modules = { } end modules ['typo-dir'] = { license = "see context related readme files" } --- todo: also use end_of_math here? +-- When we started with this, there were some issues in luatex so we needed to take care of +-- intereferences. Some has been improved but we stil might end up with each node having a +-- dir property. Now, the biggest problem is that there is an official bidi algorithm but +-- some searching on the web shows that there are many confusing aspects and therefore +-- proposals circulate about (sometimes imcompatible ?) improvements. In the end it all boils +-- down to the lack of willingness to tag an input source. Of course tagging of each number +-- and fenced strip is somewhat over the top, but now it has to be captured in logic. Texies +-- normally have no problem with tagging but we need to handle any input. So, what we have +-- done here (over the years) is starting from what we expect to see happen, especially with +-- respect to punctation, numbers and fences. Eventually alternative algorithms will be provides +-- so that users can choose (the reason why suggestion sfor improvements circulate on the web +-- is that it is non trivial to predict the expected behaviour so one hopes that the ditor +-- and the rest of the machinery match somehow. Anyway, the fun of tex is that it has no hard +-- coded behavior. And ... we also want to have more debugging and extras and ... so we want +-- a flexible approach. 
In the end we will have: +-- +-- = full tagging (mechanism turned off) +-- = half tagging (the current implementation) +-- = unicode version x interpretation (several depending on the evolution) local next, type = next, type local format, insert, sub, find, match = string.format, table.insert, string.sub, string.find, string.match local utfchar = utf.char - --- vertical space handler +local formatters = string.formatters local nodes, node = nodes, node -local trace_directions = false trackers.register("typesetters.directions", function(v) trace_directions = v end) +local trace_textdirections = false trackers.register("typesetters.directions.text", function(v) trace_textdirections = v end) +local trace_mathdirections = false trackers.register("typesetters.directions.math", function(v) trace_mathdirections = v end) +local trace_directions = false trackers.register("typesetters.directions", function(v) trace_textdirections = v trace_mathdirections = v end) + +local report_textdirections = logs.reporter("typesetting","text directions") +local report_mathdirections = logs.reporter("typesetting","math directions") + + -local report_directions = logs.reporter("typesetting","directions") local traverse_id = node.traverse_id local insert_node_before = node.insert_before local insert_node_after = node.insert_after local remove_node = nodes.remove +local end_of_math = nodes.end_of_math -local texattribute = tex.attribute +local texsetattribute = tex.setattribute +local texsetcount = tex.setcount local unsetvalue = attributes.unsetvalue +local hasbit = number.hasbit + local nodecodes = nodes.nodecodes local whatcodes = nodes.whatcodes local mathcodes = nodes.mathcodes local tasks = nodes.tasks +local tracers = nodes.tracers +local setcolor = tracers.colors.set +local resetcolor = tracers.colors.reset local glyph_code = nodecodes.glyph local whatsit_code = nodecodes.whatsit local math_code = nodecodes.math +local penalty_code = nodecodes.penalty +local kern_code = nodecodes.kern +local glue_code = nodecodes.glue +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist local localpar_code = whatcodes.localpar local dir_code = whatcodes.dir @@ -45,404 +80,149 @@ local nodepool = nodes.pool local new_textdir = nodepool.textdir -local beginmath_code = mathcodes.beginmath -local endmath_code = mathcodes.endmath - local fonthashes = fonts.hashes local fontdata = fonthashes.identifiers local fontchar = fonthashes.characters -local chardata = characters.data -local chardirs = characters.directions -- maybe make a special mirror table - ---~ Analysis by Idris: ---~ ---~ 1. Assuming the reading- vs word-order distinction (bidi-char types) is governing; ---~ 2. Assuming that 'ARAB' represents an actual arabic string in raw input order, not word-order; ---~ 3. 
Assuming that 'BARA' represent the correct RL word order; ---~ ---~ Then we have, with input: LATIN ARAB ---~ ---~ \textdir TLT LATIN ARAB => LATIN BARA ---~ \textdir TRT LATIN ARAB => LATIN BARA ---~ \textdir TRT LRO LATIN ARAB => LATIN ARAB ---~ \textdir TLT LRO LATIN ARAB => LATIN ARAB ---~ \textdir TLT RLO LATIN ARAB => NITAL ARAB ---~ \textdir TRT RLO LATIN ARAB => NITAL ARAB - --- elseif d == "es" then -- European Number Separator --- elseif d == "et" then -- European Number Terminator --- elseif d == "cs" then -- Common Number Separator --- elseif d == "nsm" then -- Non-Spacing Mark --- elseif d == "bn" then -- Boundary Neutral --- elseif d == "b" then -- Paragraph Separator --- elseif d == "s" then -- Segment Separator --- elseif d == "ws" then -- Whitespace --- elseif d == "on" then -- Other Neutrals - -typesetters.directions = typesetters.directions or { } -local directions = typesetters.directions - -local a_state = attributes.private('state') -local a_directions = attributes.private('directions') - -local skipmath = true -local strip = false - --- todo: delayed inserts here --- todo: get rid of local functions here - --- beware, math adds whatsits afterwards so that will mess things up - -local finish, autodir, embedded, override, done = nil, 0, 0, 0, false -local list, glyphs = nil, false -local finished, finidir, finipos = nil, nil, 1 -local head, current, inserted = nil, nil, nil - -local function finish_auto_before() - head, inserted = insert_node_before(head,current,new_textdir("-"..finish)) - finished, finidir = inserted, finish - if trace_directions then - insert(list,#list,format("auto finish inserted before: %s",finish)) - finipos = #list-1 - end - finish, autodir, done = nil, 0, true -end +local chardirections = characters.directions +local charmirrors = characters.mirrors +local charclasses = characters.textclasses + +local directions = typesetters.directions or { } +typesetters.directions = directions + +local a_state = attributes.private('state') +local a_directions = attributes.private('directions') +local a_mathbidi = attributes.private('mathbidi') + +local strip = false -local function finish_auto_after() - head, current = insert_node_after(head,current,new_textdir("-"..finish)) - finished, finidir = current, finish - if trace_directions then - list[#list+1] = format("auto finish inserted after: %s",finish) - finipos = #list +local s_isol = fonts.analyzers.states.isol + +local variables = interfaces.variables +local v_global = variables["global"] +local v_local = variables["local"] +local v_on = variables.on +local v_yes = variables.yes + +local m_enabled = 2^6 -- 64 +local m_global = 2^7 +local m_fences = 2^8 + +local handlers = { } +local methods = { } +local lastmethod = 0 + +local function installhandler(name,handler) + local method = methods[name] + if not method then + lastmethod = lastmethod + 1 + method = lastmethod + methods[name] = method end - finish, autodir, done = nil, 0, true + handlers[method] = handler + return method end -local function force_auto_left_before() - if finish then - finish_auto_before() - end - if embedded >= 0 then - finish, autodir, done = "TLT", 1, true +directions.handlers = handlers +directions.installhandler = installhandler + +local function tomode(specification) + local scope = specification.scope + local mode + if scope == v_global or scope == v_on then + mode = m_enabled + m_global + elseif scope == v_local then + mode = m_enabled else - finish, autodir, done = "TRT", -1, true + return 0 end - if finidir == finish then - 
head = remove_node(head,finished,true) - if trace_directions then - list[finipos] = list[finipos] .. " (deleted afterwards)" - insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) - end + local method = methods[specification.method] + if method then + mode = mode + method else - head, inserted = insert_node_before(head,current,new_textdir("+"..finish)) - if trace_directions then - insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) - end + return 0 end + if specification.fences == v_yes then + mode = mode + m_fences + end + return mode end -local function force_auto_right_before() - if finish then - finish_auto_before() - end - if embedded <= 0 then - finish, autodir, done = "TRT", -1, true - else - finish, autodir, done = "TLT", 1, true - end - if finidir == finish then - head = remove_node(head,finished,true) - if trace_directions then - list[finipos] = list[finipos] .. " (deleted afterwards)" - insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) - end +local function getglobal(a) + return a and a > 0 and hasbit(a,m_global) +end + +local function getfences(a) + return a and a > 0 and hasbit(a,m_fences) +end + +local function getmethod(a) + return a and a > 0 and a % m_enabled or 0 +end + +directions.tomode = tomode +directions.getglobal = getglobal +directions.getfences = getfences +directions.getmethod = getmethod +directions.installhandler = installhandler + +-- beware: in dha we have character properties and in dua|b we have direction properties + +function directions.setcolor(current,direction,reversed,mirror) + if mirror then + setcolor(current,"bidi:mirrored") + elseif direction == "l" then + setcolor(current,reversed and "bidi:left:reversed" or "bidi:left:original") + elseif direction == "r" then + setcolor(current,reversed and "bidi:right:reversed" or "bidi:right:original") else - head, inserted = insert_node_before(head,current,new_textdir("+"..finish)) - if trace_directions then - insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded)) - end + resetcolor(current) end end --- todo: use new dir functions +function commands.getbidimode(specification) + context(tomode(specification)) -- hash at tex end +end -local s_isol = fonts.analyzers.states.isol +local enabled = false -function directions.process(namespace,attribute,start) -- todo: make faster - if not start.next then - return start, false - end - head, current, inserted = start, start, nil - finish, autodir, embedded, override, done = nil, 0, 0, 0, false - list, glyphs = trace_directions and { }, false - finished, finidir, finipos = nil, nil, 1 - local stack, top, obsolete = { }, 0, { } - local lro, rlo, prevattr, inmath = false, false, 0, false - while current do - local id = current.id - if skipmath and id == math_code then - local subtype = current.subtype - if subtype == beginmath_code then - inmath = true - elseif subtype == endmath_code then - inmath = false - else - -- todo - end - current = current.next - elseif inmath then - current = current.next - else - local attr = current[attribute] - if attr and attr > 0 then - -- current[attribute] = unsetvalue -- slow, needed? 
- if attr == 1 then - -- bidi parsing mode - elseif attr ~= prevattr then - -- no pop, grouped driven (2=normal,3=lro,4=rlo) - if attr == 3 then - if trace_directions then - list[#list+1] = format("override right -> left (lro) (bidi=%s)",attr) - end - lro, rlo = true, false - elseif attr == 4 then - if trace_directions then - list[#list+1] = format("override left -> right (rlo) (bidi=%s)",attr) - end - lro, rlo = false, true - else - if trace_directions and - current ~= head then list[#list+1] = format("override reset (bidi=%s)",attr) - end - lro, rlo = false, false - end - prevattr = attr - end - end - if id == glyph_code then - glyphs = true - if attr and attr > 0 then - local char = current.char - local d = chardirs[char] - if rlo or override > 0 then - if d == "l" then - if trace_directions then - list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to r (bidi=%s)",utfchar(char),char,char,d,attr) - end - d = "r" - elseif trace_directions then - if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal - list[#list+1] = format("override char of class %s (bidi=%s)",d,attr) - else -- todo: rle lre - list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr) - end - end - elseif lro or override < 0 then - if d == "r" or d == "al" then - current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo - if trace_directions then - list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to l (bidi=%s) (state=isol)",utfchar(char),char,char,d,attr) - end - d = "l" - elseif trace_directions then - if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal - list[#list+1] = format("override char of class %s (bidi=%s)",d,attr) - else -- todo: rle lre - list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr) - end - end - elseif trace_directions then - if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal - list[#list+1] = format("override char of class %s (bidi=%s)",d,attr) - else -- todo: rle lre - list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr) - end - end - if d == "on" then - local mirror = chardata[char].mirror -- maybe make a special mirror table - if mirror and fontchar[current.font][mirror] then - -- todo: set attribute - if autodir < 0 then - current.char = mirror - done = true - --~ elseif left or autodir > 0 then - --~ if not is_right(current.prev) then - --~ current.char = mirror - --~ done = true - --~ end - end - end - elseif d == "l" or d == "en" then -- european number - if autodir <= 0 then -- could be option - force_auto_left_before() - end - elseif d == "r" or d == "al" then -- arabic number - if autodir >= 0 then - force_auto_right_before() - end - elseif d == "an" then -- arabic number - -- actually this is language dependent ... 
--- if autodir <= 0 then --- force_auto_left_before() --- end - if autodir >= 0 then - force_auto_right_before() - end - elseif d == "lro" then -- Left-to-Right Override -> right becomes left - if trace_directions then - list[#list+1] = "override right -> left" - end - top = top + 1 - stack[top] = { override, embedded } - override = -1 - obsolete[#obsolete+1] = current - elseif d == "rlo" then -- Right-to-Left Override -> left becomes right - if trace_directions then - list[#list+1] = "override left -> right" - end - top = top + 1 - stack[top] = { override, embedded } - override = 1 - obsolete[#obsolete+1] = current - elseif d == "lre" then -- Left-to-Right Embedding -> TLT - if trace_directions then - list[#list+1] = "embedding left -> right" - end - top = top + 1 - stack[top] = { override, embedded } - embedded = 1 - obsolete[#obsolete+1] = current - elseif d == "rle" then -- Right-to-Left Embedding -> TRT - if trace_directions then - list[#list+1] = "embedding right -> left" - end - top = top + 1 - stack[top] = { override, embedded } - embedded = -1 -- was 1 - obsolete[#obsolete+1] = current - elseif d == "pdf" then -- Pop Directional Format - -- override = 0 - if top > 0 then - local s = stack[top] - override, embedded = s[1], s[2] - top = top - 1 - if trace_directions then - list[#list+1] = format("state: override: %s, embedded: %s, autodir: %s",override,embedded,autodir) - end - else - if trace_directions then - list[#list+1] = "pop (error, too many pops)" - end - end - obsolete[#obsolete+1] = current - end - elseif trace_directions then - local char = current.char - local d = chardirs[char] - list[#list+1] = format("char %s (%s / U+%04X) of class %s (no bidi)",utfchar(char),char,char,d or "?") - end - elseif id == whatsit_code then - if finish then - finish_auto_before() - end - local subtype = current.subtype - if subtype == localpar_code then - local dir = current.dir - local d = sub(dir,2,2) - if d == 'R' then -- find(dir,".R.") / dir == "TRT" - autodir = -1 - else - autodir = 1 - end - -- embedded = autodir - if trace_directions then - list[#list+1] = format("pardir %s",dir) - end - elseif subtype == dir_code then - local dir = current.dir - -- local sign = sub(dir,1,1) - -- local dire = sub(dir,3,3) - local sign, dire = match(dir,"^(.).(.)") - if dire == "R" then - if sign == "+" then - finish, autodir = "TRT", -1 - else - finish, autodir = nil, 0 - end - else - if sign == "+" then - finish, autodir = "TLT", 1 - else - finish, autodir = nil, 0 - end - end - if trace_directions then - list[#list+1] = format("textdir %s",dir) - end - end - else - if trace_directions then - list[#list+1] = format("node %s (subtype %s)",nodecodes[id],current.subtype) - end - if finish then - finish_auto_before() - end - end - local cn = current.next - if not cn then - if finish then - finish_auto_after() - end - end - current = cn - end +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming + +function directions.handler(head) -- ,_,_,_,direction) -- nodes not nuts | 5th arg is direction + if not head.next then + return head, false end - if trace_directions and glyphs then - report_directions("start log") - for i=1,#list do - report_directions("%02i: %s",i,list[i]) - end - report_directions("stop log") + local attr = head[a_directions] + if not attr or attr == 0 then + return head, false end - if done and strip then - local n = #obsolete - if n > 0 then - for i=1,n do - remove_node(head,obsolete[i],true) - end - report_directions("%s character nodes removed",n) - end 
+ local method = getmethod(attr) + local handler = handlers[method] + if not handler then + return head, false end + starttiming(directions) + local head, done = handler(head) + stoptiming(directions) return head, done end ---~ local function is_right(n) -- keep ! ---~ if n then ---~ local id = n.id ---~ if id == glyph_code then ---~ local attr = n[attribute] ---~ if attr and attr > 0 then ---~ local d = chardirs[n.char] ---~ if d == "r" or d == "al" then -- override ---~ return true ---~ end ---~ end ---~ end ---~ end ---~ return false ---~ end - ---~ function directions.enable() ---~ tasks.enableaction("processors","directions.handler") ---~ end +statistics.register("text directions", function() + if enabled then + return statistics.elapsedseconds(directions) + end +end) -local enabled = false +-- function directions.enable() +-- tasks.enableaction("processors","directions.handler") +-- end function directions.set(n) -- todo: names and numbers if not enabled then - if trace_directions then - report_breakpoints("enabling directions handler") + if trace_textdirections then + report_textdirections("enabling directions handler") end tasks.enableaction("processors","typesetters.directions.handler") enabled = true @@ -451,13 +231,7 @@ function directions.set(n) -- todo: names and numbers n = unsetvalue -- maybe tracing end - texattribute[a_directions] = n + texsetattribute(a_directions,n) end commands.setdirection = directions.set - -directions.handler = nodes.installattributehandler { - name = "directions", - namespace = directions, - processor = directions.process, -} diff --git a/tex/context/base/typo-dir.mkiv b/tex/context/base/typo-dir.mkiv index d35dfeb66..0362af56c 100644 --- a/tex/context/base/typo-dir.mkiv +++ b/tex/context/base/typo-dir.mkiv @@ -16,6 +16,9 @@ \unprotect \registerctxluafile{typo-dir}{1.001} +\registerctxluafile{typo-dha}{1.001} +\registerctxluafile{typo-dua}{1.001} +\registerctxluafile{typo-dub}{1.001} \definesystemattribute[directions][public] @@ -26,6 +29,9 @@ \installsimplecommandhandler \??directions {directions} \??directions % no \define... 
yet +\edef\lefttorightmark{\normalUchar"200E} \let\lrm\lefttorightmark +\edef\righttoleftmark{\normalUchar"200F} \let\rlm\righttoleftmark + \unexpanded\def\setdirection[#1]% todo: symbolic names {\ctxcommand{setdirection(\number#1)}} @@ -35,34 +41,42 @@ \newconstant\directionsbidimode % this one might become pivate -\letvalue{\??directionsbidimode\v!off }\zerocount -\letvalue{\??directionsbidimode\v!global}\plusone -\letvalue{\??directionsbidimode\v!local }\plustwo -\letvalue{\??directionsbidimode\v!on }\plustwo +% \setupdirections[bidi=global,method=default] +% \setupdirections[bidi=global,method=one] +% \setupdirections[bidi=global,method=two] +% \setupdirections[bidi=global,method=two,fences=no] + +\def\typo_dir_get_mode + {\def\currentbidimode{\ctxcommand{getbidimode { + scope = "\directionsparameter\c!bidi ", + method = "\directionsparameter\c!method", + fences = "\directionsparameter\c!fences", + }}}% + \expandafter\glet\csname\??directionsbidimode\currentbidistamp\endcsname\currentbidimode} \appendtoks - \directionsbidimode - \ifcsname\??directionsbidimode\directionsparameter\c!bidi\endcsname - \csname\??directionsbidimode\directionsparameter\c!bidi\endcsname - \else - \zerocount - \fi - \relax + \edef\currentbidistamp + {\directionsparameter\c!bidi + :\directionsparameter\c!method + :\directionsparameter\c!fences}% + \expandafter\let\expandafter\currentbidimode\csname\??directionsbidimode\currentbidistamp\endcsname + \ifx\currentbidimode\relax + \typo_dir_get_mode + \fi + \directionsbidimode\currentbidimode\relax \ifcase\directionsbidimode - \resetdirection - \or % 1 - \setdirection[1]% global, chars - \or % 2 - \setdirection[2]% local, attributes - \or % else - \setdirection[1]% default + \resetdirection + \else + \setdirection[\number\directionsbidimode]% \fi \to \everysetupdirections % bidi: local=obey grouping, global=ignore grouping (unicode has no grouping) \setupdirections % maybe start/stop - [\c!bidi=\v!off] + [\c!bidi=\v!off, + \c!method=\v!default, + \c!fences=\v!yes] \unexpanded\edef\bidilre{\normalUchar"202A} % maybe \edef's \unexpanded\edef\bidirle{\normalUchar"202B} @@ -77,6 +91,12 @@ % for the moment: \setdirection[\plusone] +\definecolor[bidi:left:original] [r=.6] +\definecolor[bidi:left:reversed] [g=.6] +\definecolor[bidi:right:original][b=.6] +\definecolor[bidi:right:reversed][r=.6,g=.6] +\definecolor[bidi:mirrored] [r=.6,b=.6] + \protect \endinput % bidi test @@ -145,4 +165,21 @@ {\typebuffer[bidi-setup] \getbuffer[bidi-setup] \getbuffer[bidi-sample]} +\startbuffer[bidi-sample] +\setupdirections[bidi=global] + + \hbox{\righttoleft\arabicfont (0001)}\par + \dontleavehmode\hbox{\righttoleft\arabicfont (0002)}\par + {\righttoleft\arabicfont (0003)\par} + {\righttoleft\arabicfont (0004)}\par + \dontleavehmode{\righttoleft\arabicfont (0005)\par} + \dontleavehmode{\righttoleft\arabicfont (0006)}\par + \rtlhbox{\arabicfont (0007)}\par + \ltrhbox{\arabicfont (0008)}\par +\dontleavehmode\rtlhbox{\arabicfont (0009)}\par +\dontleavehmode\ltrhbox{\arabicfont (0010)}\par +\stopsetups + +{\typebuffer[bidi-sample] \getbuffer[bidi-sample]} + \stoptext diff --git a/tex/context/base/typo-drp.lua b/tex/context/base/typo-drp.lua new file mode 100644 index 000000000..903140dae --- /dev/null +++ b/tex/context/base/typo-drp.lua @@ -0,0 +1,208 @@ +if not modules then modules = { } end modules ['typo-drp'] = { + version = 1.001, + comment = "companion to typo-drp.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = 
"see context related readme files" +} + +-- This ons is sensitive for order (e.g. when combined with first line +-- processing. + +local tonumber, type, next = tonumber, type, next +local ceil = math.ceil + +local utfbyte = utf.byte +local utfchar = utf.char + +local trace_initials = false trackers.register("typesetters.initials", function(v) trace_initials = v end) +local report_initials = logs.reporter("nodes","initials") + +local initials = typesetters.paragraphs or { } +typesetters.initials = initials or { } + +local nodes = nodes +local tasks = nodes.tasks + +local hpack_nodes = nodes.hpack +local nodecodes = nodes.nodecodes +local whatsitcodes = nodes.whatsitcodes + +local nodepool = nodes.pool +local new_kern = nodepool.kern + +local insert_before = nodes.insert_before +local insert_after = nodes.insert_after + +local variables = interfaces.variables +local v_default = variables.default +local v_margin = variables.margin + +local texget = tex.get +local texsetattribute = tex.setattribute +local unsetvalue = attributes.unsetvalue + +local glyph_code = nodecodes.glyph +local hlist_code = nodecodes.hlist +local kern_node = nodecodes.kern +local whatsit_code = nodecodes.whatsit +local localpar_code = whatsitcodes.localpar + +local actions = { } +initials.actions = actions + +local a_initial = attributes.private("initial") +local a_color = attributes.private('color') +local a_transparency = attributes.private('transparency') +local a_colorspace = attributes.private('colormodel') + +local settings = nil + +function initials.set(specification) + settings = specification or { } + settings.enabled = true + tasks.enableaction("processors","typesetters.initials.handler") + if trace_initials then + report_initials("enabling initials") + end + texsetattribute(a_initial,1) +end + +commands.setinitial = initials.set + +-- dropped caps experiment (will be done properly when luatex +-- stores the state in the local par node) .. btw, search still +-- works with dropped caps, as does an export + +-- we need a 'par' attribute and in fact for dropped caps we don't need +-- need an attribute ... dropit will become s state counter (or end up +-- in the localpar user data + +-- for the moment, each paragraph gets a number as id (attribute) ..problem +-- with nesting .. or anyhow, needed for tagging anyway + +-- todo: prevent linebreak .. but normally a initial ends up at the top of +-- a page so this has a low priority + +actions[v_default] = function(head,setting) + local done = false + if head.id == whatsit_code and head.subtype == localpar_code then + -- begin of par + local first = head.next + -- parbox .. 
needs to be set at 0 + if first and first.id == hlist_code then + first = first.next + end + -- we need to skip over kerns and glues (signals) + while first and first.id ~= glyph_code do + first = first.next + end + if first and first.id == glyph_code then + local char = first.char + local prev = first.prev + local next = first.next + -- if prev.id == hlist_code then + -- -- set the width to 0 + -- end + if next and next.id == kern_node then + next.kern = 0 + end + if setting.font then + first.font = setting.font + end + if setting.dynamic > 0 then + first[0] = setting.dynamic + end + -- can be a helper + local ma = setting.ma or 0 + local ca = setting.ca + local ta = setting.ta + if ca and ca > 0 then + first[a_colorspace] = ma == 0 and 1 or ma + first[a_color] = ca + end + if ta and ta > 0 then + first[a_transparency] = ta + end + -- + local width = first.width + local height = first.height + local depth = first.depth + local distance = setting.distance or 0 + local voffset = setting.voffset or 0 + local hoffset = setting.hoffset or 0 + local parindent = tex.parindent + local baseline = texget("baselineskip").width + local lines = tonumber(setting.n) or 0 + -- + first.xoffset = - width - hoffset - distance - parindent + first.yoffset = - voffset -- no longer - height here + -- We pack so that successive handling cannot touch the dropped cap. Packaging + -- in a hlist is also needed because we cannot locally adapt e.g. parindent (not + -- yet stored in with localpar). + first.prev = nil + first.next = nil + local h = hpack_nodes(first) + h.width = 0 + h.height = 0 + h.depth = 0 + prev.next = h + next.prev = h + h.next = next + h.prev = prev + + -- end of packaging + if setting.location == v_margin then + -- okay + else + if lines == 0 then -- safeguard, not too precise + lines = ceil((height+voffset) / baseline) + end + -- We cannot set parshape yet ... when we can I'll add a slope + -- option (positive and negative, in emwidth). 
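                    -- Illustrative aside with assumed numbers (not from the source): for a
                    -- dropped cap of height 31pt, voffset 0pt and a 14.4pt baselineskip the
                    -- fallback below gives lines = ceil(31/14.4) = 3, so hangafter becomes -3
                    -- and hangindent reserves width + distance + parindent in front of the
                    -- first three lines of the paragraph.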
+ local hangafter = - lines + local hangindent = width + distance + parindent + if trace_initials then + report_initials("setting hangafter to %i and hangindent to %p",hangafter,hangindent) + end + tex.hangafter = hangafter + tex.hangindent = hangindent + if parindent ~= 0 then + insert_after(first,first,new_kern(-parindent)) + end + end + done = true + end + end + return head, done +end + +function initials.handler(head) + local start = head + local attr = nil + while start do + attr = start[a_initial] + if attr then + break + elseif start.id == glyph then + break + else + start = start.next + end + end + if attr then + -- here as we can process nested boxes first so we need to keep state + tasks.disableaction("processors","typesetters.initials.handler") + -- texsetattribute(attribute,unsetvalue) + local alternative = settings.alternative or v_default + local action = actions[alternative] or actions[v_default] + if action then + if trace_initials then + report_initials("processing initials, alternative %a",alternative) + end + local head, done = action(head,settings) + return head, done + end + end + return head, false +end diff --git a/tex/context/base/typo-drp.mkiv b/tex/context/base/typo-drp.mkiv new file mode 100644 index 000000000..78f6df0a2 --- /dev/null +++ b/tex/context/base/typo-drp.mkiv @@ -0,0 +1,118 @@ +%D \module +%D [ file=typo-drp, % was typo-par +%D version=2011.10.27, +%D title=\CONTEXT\ Typesetting Macros, +%D subtitle=Initials, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\writestatus{loading}{ConTeXt Typesetting Macros / Initials} + +%D This will change ... par specific attribute set and such. + +\unprotect + +\registerctxluafile{typo-drp}{1.001} + +\definesystemattribute[initial][public] + +\appendtoks + \attribute\initialattribute\attributeunsetvalue +\to \everyforgetall + +%D For the moment here: dropped caps redone. In addition to the +%D parameters shown in the example you can use the \type {style} and +%D more low level \type {font} keys to set up the font where +%D the first one wins. If you don't like the behaviour you can always +%D roll out your own command. 
+%D +%D \starttyping +%D \placeinitial \input ward \par \input ward \par \placeinitial \input tufte +%D +%D \setupinitial +%D [location=text, +%D n=2, +%D color=darkred, +%D distance=-1em, +%D hoffset=1em, +%D voffset=-3ex, +%D before=\blank] +%D +%D \placeinitial \input ward \par \input ward \placeinitial \input tufte +%D \stoptyping + +% todo: angle (once we can set parshape at the Lua end) + +\installcorenamespace{initial} + +\installcommandhandler \??initial {initial} \??initial + +\definemeasure[initial:n][\lineheight*\initialparameter\c!n - \strutdp] + +\setupinitial + [\c!location=\v!text, + \c!n=3, + % \s!font=Bold sa 4, + % \s!font=Bold ht \measure{initial:n}, + \s!font=Bold cp \measure{initial:n}, + \c!distance=.125\emwidth, + \c!hoffset=\zeropoint, + \c!voffset=\v!line, % \dimexp\lineheight*\initialparameter\c!n-\lineheight\relax] + \c!style=, + \c!color=, + \c!before=\blank] + +\unexpanded\def\placeinitial + {\dosingleempty\typo_initials_place} + +\def\typo_initials_place[#1]% old command + {\par + \namedinitialparameter{#1}\c!before + \setinitial[#1]} + +\unexpanded\def\setinitial + {\dosingleempty\typo_initials_set} + +\unexpanded\def\typo_initials_set[#1]% + {\edef\typo_initial_handle{\typo_initial_handle_indeed{#1}}} + +\unexpanded\def\typo_initial_handle_indeed#1% + {\dontleavehmode + \begingroup + \edef\currentinitial{#1}% + \scratchcounter \initialparameter\c!n\relax + \scratchdistance\initialparameter\c!distance\relax + \scratchhoffset \initialparameter\c!hoffset \relax + \edef\p_voffset{\initialparameter\c!voffset}% + \scratchvoffset\dimexpr\ifx\p_voffset\v!line\scratchcounter\lineheight-\lineheight\else\p_voffset\fi\relax + \resetfontfeature % might be needed in more places + \doifelsenothing{\initialparameter\c!style} + {\definedfont[\initialparameter\s!font]} + {\useinitialstyleparameter\c!style}% + \useinitialcolorparameter\c!color + \ctxcommand{setinitial{ + location = "\initialparameter\c!location", + enabled = true, + n = \number\scratchcounter, + distance = \number\scratchdistance, + hoffset = \number\scratchhoffset, + voffset = \number\scratchvoffset, + ma = \the\attribute\colormodelattribute , + ca = \the\attribute\colorattribute , + ta = \the\attribute\transparencyattribute, + font = \fontid\font, + dynamic = \number\attribute\zerocount, % it's a bit over the top to support this here + }}% + \stopluacode + \kern\zeropoint % we need a node + \endgroup + \globallet\typo_initial_handle\relax} + +\let\typo_initial_handle\relax + +\protect \endinput diff --git a/tex/context/base/typo-dua.lua b/tex/context/base/typo-dua.lua new file mode 100644 index 000000000..ec85a3d9f --- /dev/null +++ b/tex/context/base/typo-dua.lua @@ -0,0 +1,758 @@ +if not modules then modules = { } end modules ['typo-dua'] = { + version = 1.001, + comment = "companion to typo-dir.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team / See below", + license = "see context related readme files / whatever applies", + comment = "Unicode bidi (sort of) variant a", + derived = "derived from t-bidi by Khaled Hosny who derived from minibidi.c by Arabeyes", +} + +-- Comment by Khaled Hosny: +-- +-- This code started as a line for line translation of Arabeyes' minibidi.c from C to Lua, +-- excluding parts that of no use to us like shaping. The C code is Copyright (c) 2004 +-- Ahmad Khalifa, and is distributed under the MIT Licence. The full license text can be +-- found at: http://svn.arabeyes.org/viewvc/projects/adawat/minibidi/LICENCE. 
+-- +-- Comment by Hans Hagen: +-- +-- The initial conversion to Lua has been done by Khaled Hosny. As a first step I optimized the +-- code (to suit todays context mkiv). Next I fixed the foreign object handling, for instance, +-- we can skip over math but we need to inject before the open math node and after the close node, +-- so we need to keep track of the endpoint. After I fixed that bit I realized that it was possible +-- to generalize the object skipper if only because it saves memory (and processing time). The +-- current implementation is about three times as fast (roughly measured) and I can probably squeeze +-- out some more, only to sacrifice soem when I start adding features. A next stage will be to have +-- more granularity in foreign objects. Of course all errors are mine. I'll also added the usual bit +-- of context tracing and reshuffled some code. A memory optimization is on the agenda (already sort +-- of prepared). It is no longer line by line. +-- +-- The first implementation of bidi in context started out from examples of mixed usage (including +-- more than text) with an at that point bugged r2l support. It has some alternatives for letting +-- the tex markup having a bit higher priority. I will probably add some local (style driven) +-- overrides to the following code as well. It also means that we can selectively enable and disable +-- the parser (because a document wide appliance migh tnot be what we want). This will bring a +-- slow down but not that much. (I need to check with Idris why we have things like isol there.) +-- +-- We'll probably keep multiple methods around (this is just a side track of improving the already +-- available scanner). I need to look into the changed unicode recomendations anyway as a first +-- impression is that some fuzzyness has been removed. I finally need to spend time on those specs. So, +-- there will be a third variant (written from scratch) so some point. The fun about TeX is that we +-- can provide alternative solutions (given that it doesn't bloat the engine!) +-- +-- A test with some hebrew, mixed with hboxes with latin/hebrew and simple math. In fact this triggered +-- playing with bidi again: +-- +-- 0.11 : nothing +-- 0.14 : 0.03 node list only, one pass +-- 0.23 : 0.12 close to unicode bidi, multipass +-- 0.44 : 0.33 original previous +-- +-- todo: check for introduced errors +-- todo: reuse list, we have size, so we can just change values (and auto allocate when not there) +-- todo: reuse the stack +-- todo: no need for a max check +-- todo: collapse bound similar ranges (not ok yet) +-- tood: combine some sweeps +-- +-- This one wil get frozen (or if needed in sync with basic t-bidi) and I will explore more options +-- in typo-dub.lua. There I might also be able to improve performance a bit. 
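-- Editorial outline, not part of the patch: the helpers defined below follow the
-- stages of the Unicode algorithm (P rules to build and classify the flat list,
-- X rules for explicit embeddings and overrides, then the W, N and I rules).
-- Roughly they chain as in this commented sketch; the outline name, the single
-- whole-list run and the sor guess are simplifications, and the real entry point
-- (which also splits runs, reorders and injects textdir nodes) is not shown in
-- this hunk.
--
-- local function outline(head)
--     local list, size = build_list(head)                  -- P1: flatten the node list
--     local baselevel  = get_baselevel(head,list,size)     -- P2/P3: find the base direction
--     local sor        = baselevel % 2 == 1 and "r" or "l" -- simplified run boundary type
--     resolve_explicit(list,size,baselevel)                -- X rules: embeddings, overrides
--     resolve_weak    (list,size,1,size,sor,sor)           -- W rules: numbers, separators
--     resolve_neutral (list,size,1,size,sor,sor)           -- N rules: neutrals take context
--     resolve_implicit(list,size,1,size,sor,sor)           -- I rules: final embedding levels
--     return list, size
-- end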
+ +local insert, remove, unpack, concat = table.insert, table.remove, table.unpack, table.concat +local utfchar = utf.char +local formatters = string.formatters + +local directiondata = characters.directions +local mirrordata = characters.mirrors + +local remove_node = nodes.remove +local insert_node_after = nodes.insert_after +local insert_node_before = nodes.insert_before + +local nodepool = nodes.pool +local new_textdir = nodepool.textdir + +local nodecodes = nodes.nodecodes +local whatsitcodes = nodes.whatsitcodes +local skipcodes = nodes.skipcodes + +local glyph_code = nodecodes.glyph +local glue_code = nodecodes.glue +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local math_code = nodecodes.math +local whatsit_code = nodecodes.whatsit +local dir_code = whatsitcodes.dir +local localpar_code = whatsitcodes.localpar +local parfillskip_code = skipcodes.skipcodes + +----- object_replacement = 0xFFFC -- object replacement character +local maximum_stack = 60 -- probably spec but not needed + +local directions = typesetters.directions +local setcolor = directions.setcolor + +local a_directions = attributes.private('directions') + +local remove_controls = true directives.register("typesetters.directions.one.removecontrols",function(v) remove_controls = v end) + +local trace_directions = false trackers .register("typesetters.directions.one", function(v) trace_directions = v end) +local trace_details = false trackers .register("typesetters.directions.one.details", function(v) trace_details = v end) + +local report_directions = logs.reporter("typesetting","directions one") + +local whitespace = { + lre = true, + rle = true, + lro = true, + rlo = true, + pdf = true, + bn = true, + ws = true, +} + +local b_s_ws_on = { + b = true, + s = true, + ws = true, + on = true +} + +-- tracing + +local function show_list(list,size,what) + local what = what or "direction" + local joiner = utfchar(0x200C) + local result = { } + for i=1,size do + local entry = list[i] + local character = entry.char + local direction = entry[what] + if character == 0xFFFC then + local first = entry.id + local last = entry.last + local skip = entry.skip + if last then + result[i] = formatters["%-3s:%s %s..%s (%i)"](direction,joiner,nodecodes[first],nodecodes[last],skip or 0) + else + result[i] = formatters["%-3s:%s %s (%i)"](direction,joiner,nodecodes[first],skip or 0) + end + elseif character >= 0x202A and character <= 0x202C then + result[i] = formatters["%-3s:%s %U"](direction,joiner,character) + else + result[i] = formatters["%-3s:%s %c %U"](direction,joiner,character,character) + end + end + return concat(result,joiner .. " | " .. 
joiner) +end + +-- preparation + +local function show_done(list,size) + local joiner = utfchar(0x200C) + local result = { } + for i=1,size do + local entry = list[i] + local character = entry.char + local begindir = entry.begindir + local enddir = entry.enddir + if begindir then + result[#result+1] = formatters["<%s>"](begindir) + end + if entry.remove then + -- continue + elseif character == 0xFFFC then + result[#result+1] = formatters["<%s>"]("?") + elseif character == 0x0020 then + result[#result+1] = formatters["<%s>"](" ") + elseif character >= 0x202A and character <= 0x202C then + result[#result+1] = formatters["<%s>"](entry.original) + else + result[#result+1] = utfchar(character) + end + if enddir then + result[#result+1] = formatters["<%s>"](enddir) + end + end + return concat(result,joiner) +end + +-- keeping the list and overwriting doesn't save much runtime, only a few percent +-- char is only used for mirror, so in fact we can as well only store it for +-- glyphs only + +local function build_list(head) -- todo: store node pointer ... saves loop + -- P1 + local current = head + local list = { } + local size = 0 + while current do + size = size + 1 + local id = current.id + if id == glyph_code then + local chr = current.char + local dir = directiondata[chr] + list[size] = { char = chr, direction = dir, original = dir, level = 0 } + current = current.next + elseif id == glue_code then + list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 } + current = current.next + elseif id == whatsit_code and current.subtype == dir_code then + local dir = current.dir + if dir == "+TLT" then + list[size] = { char = 0x202A, direction = "lre", original = "lre", level = 0 } + elseif dir == "+TRT" then + list[size] = { char = 0x202B, direction = "rle", original = "rle", level = 0 } + elseif dir == "-TLT" or dir == "-TRT" then + list[size] = { char = 0x202C, direction = "pdf", original = "pdf", level = 0 } + else + list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id } -- object replacement character + end + current = current.next + elseif id == math_code then + local skip = 0 + current = current.next + while current.id ~= math_code do + skip = skip + 1 + current = current.next + end + skip = skip + 1 + current = current.next + list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id } + else + local skip = 0 + local last = id + current = current.next + while n do + local id = current.id + if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and current.subtype == dir_code) then + skip = skip + 1 + last = id + current = current.next + else + break + end + end + if id == last then + list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id } + else + list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id, last = last } + end + end + end + return list, size +end + +-- the action + +-- local function find_run_limit_et(list,run_start,limit) +-- local run_limit = run_start +-- local i = run_start +-- while i <= limit and list[i].direction == "et" do +-- run_limit = i +-- i = i + 1 +-- end +-- return run_limit +-- end + +local function find_run_limit_et(list,start,limit) -- returns last match + for i=start,limit do + if list[i].direction == "et" then + start = i + else + return start + end + end + return start +end + +-- local function find_run_limit_b_s_ws_on(list,run_start,limit) +-- local run_limit = run_start +-- 
local i = run_start +-- while i <= limit and b_s_ws_on[list[i].direction] do +-- run_limit = i +-- i = i + 1 +-- end +-- return run_limit +-- end + +local function find_run_limit_b_s_ws_on(list,start,limit) + for i=start,limit do + if b_s_ws_on[list[i].direction] then + start = i + else + return start + end + end + return start +end + +local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for local_par) + if head.id == whatsit_code and head.subtype == localpar_code then + if head.dir == "TRT" then + return 1, "TRT", true + else + return 0, "TLT", true + end + else + -- P2, P3 + for i=1,size do + local entry = list[i] + local direction = entry.direction + if direction == "r" or direction == "al" then + return 1, "TRT", true + elseif direction == "l" then + return 0, "TLT", true + end + end + return 0, "TLT", false + end +end + +local function resolve_explicit(list,size,baselevel) + -- X1 + local level = baselevel + local override = "on" + local stack = { } + local nofstack = 0 + for i=1,size do + local entry = list[i] + local direction = entry.direction + -- X2 + if direction == "rle" then + if nofstack < maximum_stack then + nofstack = nofstack + 1 + stack[nofstack] = { level, override } + level = level + (level % 2 == 1 and 2 or 1) -- least_greater_odd(level) + override = "on" + entry.level = level + entry.direction = "bn" + entry.remove = true + elseif trace_directions then + report_directions("stack overflow at position %a with direction %a",i,direction) + end + -- X3 + elseif direction == "lre" then + if nofstack < maximum_stack then + nofstack = nofstack + 1 + stack[nofstack] = { level, override } + level = level + (level % 2 == 1 and 1 or 2) -- least_greater_even(level) + override = "on" + entry.level = level + entry.direction = "bn" + entry.remove = true + elseif trace_directions then + report_directions("stack overflow at position %a with direction %a",i,direction) + end + -- X4 + elseif direction == "rlo" then + if nofstack < maximum_stack then + nofstack = nofstack + 1 + stack[nofstack] = { level, override } + level = level + (level % 2 == 1 and 2 or 1) -- least_greater_odd(level) + override = "r" + entry.level = level + entry.direction = "bn" + entry.remove = true + elseif trace_directions then + report_directions("stack overflow at position %a with direction %a",i,direction) + end + -- X5 + elseif direction == "lro" then + if nofstack < maximum_stack then + nofstack = nofstack + 1 + stack[nofstack] = { level, override } + level = level + (level % 2 == 1 and 1 or 2) -- least_greater_even(level) + override = "l" + entry.level = level + entry.direction = "bn" + entry.remove = true + elseif trace_directions then + report_directions("stack overflow at position %a with direction %a",i,direction) + end + -- X7 + elseif direction == "pdf" then + if nofstack < maximum_stack then + local stacktop = stack[nofstack] + nofstack = nofstack - 1 + level = stacktop[1] + override = stacktop[2] + entry.level = level + entry.direction = "bn" + entry.remove = true + elseif trace_directions then + report_directions("stack overflow at position %a with direction %a",i,direction) + end + -- X6 + else + entry.level = level + if override ~= "on" then + entry.direction = override + end + end + end + -- X8 (reset states and overrides after paragraph) +end + +local function resolve_weak(list,size,start,limit,sor,eor) + -- W1 + for i=start,limit do + local entry = list[i] + if entry.direction == "nsm" then + if i == start then + entry.direction = sor + else + 
entry.direction = list[i-1].direction + end + end + end + -- W2 + for i=start,limit do + local entry = list[i] + if entry.direction == "en" then + for j=i-1,start,-1 do + local prev = list[j] + local direction = prev.direction + if direction == "al" then + entry.direction = "an" + break + elseif direction == "r" or direction == "l" then + break + end + end + end + end + -- W3 + for i=start,limit do + local entry = list[i] + if entry.direction == "al" then + entry.direction = "r" + end + end + -- W4 + for i=start+1,limit-1 do + local entry = list[i] + local direction = entry.direction + if direction == "es" then + if list[i-1].direction == "en" and list[i+1].direction == "en" then + entry.direction = "en" + end + elseif direction == "cs" then + local prevdirection = list[i-1].direction + if prevdirection == "en" then + if list[i+1].direction == "en" then + entry.direction = "en" + end + elseif prevdirection == "an" and list[i+1].direction == "an" then + entry.direction = "an" + end + end + end + -- W5 + local i = start + while i <= limit do + if list[i].direction == "et" then + local runstart = i + local runlimit = find_run_limit_et(list,runstart,limit) -- when moved inline we can probably collapse a lot + local rundirection = runstart == start and sor or list[runstart-1].direction + if rundirection ~= "en" then + rundirection = runlimit == limit and eor or list[runlimit+1].direction + end + if rundirection == "en" then + for j=runstart,runlimit do + list[j].direction = "en" + end + end + i = runlimit + end + i = i + 1 + end + -- W6 + for i=start,limit do + local entry = list[i] + local direction = entry.direction + if direction == "es" or direction == "et" or direction == "cs" then + entry.direction = "on" + end + end + -- W7 + for i=start,limit do + local entry = list[i] + if entry.direction == "en" then + local prev_strong = sor + for j=i-1,start,-1 do + local direction = list[j].direction + if direction == "l" or direction == "r" then + prev_strong = direction + break + end + end + if prev_strong == "l" then + entry.direction = "l" + end + end + end +end + +local function resolve_neutral(list,size,start,limit,sor,eor) + -- N1, N2 + for i=start,limit do + local entry = list[i] + if b_s_ws_on[entry.direction] then + local leading_direction, trailing_direction, resolved_direction + local runstart = i + local runlimit = find_run_limit_b_s_ws_on(list,runstart,limit) + if runstart == start then + leading_direction = sor + else + leading_direction = list[runstart-1].direction + if leading_direction == "en" or leading_direction == "an" then + leading_direction = "r" + end + end + if runlimit == limit then + trailing_direction = eor + else + trailing_direction = list[runlimit+1].direction + if trailing_direction == "en" or trailing_direction == "an" then + trailing_direction = "r" + end + end + if leading_direction == trailing_direction then + -- N1 + resolved_direction = leading_direction + else + -- N2 / does the weird period + resolved_direction = entry.level % 2 == 1 and "r" or "l" -- direction_of_level(entry.level) + end + for j=runstart,runlimit do + list[j].direction = resolved_direction + end + i = runlimit + end + i = i + 1 + end +end + +local function resolve_implicit(list,size,start,limit,sor,eor) + -- I1 + for i=start,limit do + local entry = list[i] + local level = entry.level + if level % 2 ~= 1 then -- not odd(level) + local direction = entry.direction + if direction == "r" then + entry.level = level + 1 + elseif direction == "an" or direction == "en" then + entry.level = level + 
2 + end + end + end + -- I2 + for i=start,limit do + local entry = list[i] + local level = entry.level + if level % 2 == 1 then -- odd(level) + local direction = entry.direction + if direction == "l" or direction == "en" or direction == "an" then + entry.level = level + 1 + end + end + end +end + +local function resolve_levels(list,size,baselevel) + -- X10 + local start = 1 + while start < size do + local level = list[start].level + local limit = start + 1 + while limit < size and list[limit].level == level do + limit = limit + 1 + end + local prev_level = start == 1 and baselevel or list[start-1].level + local next_level = limit == size and baselevel or list[limit+1].level + local sor = (level > prev_level and level or prev_level) % 2 == 1 and "r" or "l" -- direction_of_level(max(level,prev_level)) + local eor = (level > next_level and level or next_level) % 2 == 1 and "r" or "l" -- direction_of_level(max(level,next_level)) + -- W1 .. W7 + resolve_weak(list,size,start,limit,sor,eor) + -- N1 .. N2 + resolve_neutral(list,size,start,limit,sor,eor) + -- I1 .. I2 + resolve_implicit(list,size,start,limit,sor,eor) + start = limit + end + -- L1 + for i=1,size do + local entry = list[i] + local direction = entry.original + -- (1) + if direction == "s" or direction == "b" then + entry.level = baselevel + -- (2) + for j=i-1,1,-1 do + local entry = list[j] + if whitespace[entry.original] then + entry.level = baselevel + else + break + end + end + end + end + -- (3) + for i=size,1,-1 do + local entry = list[i] + if whitespace[entry.original] then + entry.level = baselevel + else + break + end + end + -- L4 + for i=1,size do + local entry = list[i] + if entry.level % 2 == 1 then -- odd(entry.level) + local mirror = mirrordata[entry.char] + if mirror then + entry.mirror = mirror + end + end + end +end + +local function insert_dir_points(list,size) + -- L2, but no actual reversion is done, we simply annotate where + -- begindir/endddir node will be inserted. + local maxlevel = 0 + local finaldir = false + for i=1,size do + local level = list[i].level + if level > maxlevel then + maxlevel = level + end + end + for level=0,maxlevel do + local started = false + local begindir = nil + local enddir = nil + if level % 2 == 1 then + begindir = "+TRT" + enddir = "-TRT" + else + begindir = "+TLT" + enddir = "-TLT" + end + for i=1,size do + local entry = list[i] + if entry.level >= level then + if not started then + entry.begindir = begindir + started = true + end + else + if started then + list[i-1].enddir = enddir + started = false + end + end + end + -- make sure to close the run at end of line + if started then + finaldir = enddir + end + end + if finaldir then + list[size].enddir = finaldir + end +end + +local function apply_to_list(list,size,head,pardir) + local index = 1 + local current = head + local done = false + while current do + if index > size then + report_directions("fatal error, size mismatch") + break + end + local id = current.id + local entry = list[index] + local begindir = entry.begindir + local enddir = entry.enddir + if id == glyph_code then + local mirror = entry.mirror + if mirror then + current.char = mirror + end + if trace_directions then + local direction = entry.direction + setcolor(current,direction,direction ~= entry.original,mirror) + end + elseif id == hlist_code or id == vlist_code then + current.dir = pardir -- is this really needed? 
+ elseif id == glue_code then + if enddir and current.subtype == parfillskip_code then + -- insert the last enddir before \parfillskip glue + head = insert_node_before(head,current,new_textdir(enddir)) + enddir = false + done = true + end + elseif id == whatsit_code then + if begindir and current.subtype == localpar_code then + -- local_par should always be the 1st node + head, current = insert_node_after(head,current,new_textdir(begindir)) + begindir = nil + done = true + end + end + if begindir then + head = insert_node_before(head,current,new_textdir(begindir)) + done = true + end + local skip = entry.skip + if skip and skip > 0 then + for i=1,skip do + current = current.next + end + end + if enddir then + head, current = insert_node_after(head,current,new_textdir(enddir)) + done = true + end + if not entry.remove then + current = current.next + elseif remove_controls then + -- X9 + head, current = remove_node(head,current,true) + done = true + else + current = current.next + end + index = index + 1 + end + return head, done +end + +local function process(head) + local list, size = build_list(head) + local baselevel, pardir, dirfound = get_baselevel(head,list,size) -- we always have an inline dir node in context + if not dirfound and trace_details then + report_directions("no initial direction found, gambling") + end + if trace_details then + report_directions("before : %s",show_list(list,size,"original")) + end + resolve_explicit(list,size,baselevel) + resolve_levels(list,size,baselevel) + insert_dir_points(list,size) + if trace_details then + report_directions("after : %s",show_list(list,size,"direction")) + report_directions("result : %s",show_done(list,size)) + end + head, done = apply_to_list(list,size,head,pardir) + return head, done +end + +directions.installhandler(interfaces.variables.one,process) diff --git a/tex/context/base/typo-dub.lua b/tex/context/base/typo-dub.lua new file mode 100644 index 000000000..3ecfce364 --- /dev/null +++ b/tex/context/base/typo-dub.lua @@ -0,0 +1,870 @@ +if not modules then modules = { } end modules ['typo-dub'] = { + version = 1.001, + comment = "companion to typo-dir.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", + comment = "Unicode bidi (sort of) variant b", +} + +-- This is a follow up on typo-uba which itself is a follow up on t-bidi by Khaled Hosny which +-- in turn is based on minibidi.c from Arabeyes. This is a further optimizations, as well as +-- an update on some recent unicode bidi developments. There is (and will) also be more control +-- added. As a consequence this module is somewhat slower than its precursor which itself is +-- slower than the one-pass bidi handler. This is also a playground and I might add some plugin +-- support. + +-- todo (cf html): +-- +-- normal The element does not offer a additional level of embedding with respect to the bidirectional algorithm. For inline elements implicit reordering works across element boundaries. +-- embed If the element is inline, this value opens an additional level of embedding with respect to the bidirectional algorithm. The direction of this embedding level is given by the direction property. +-- bidi-override For inline elements this creates an override. For block container elements this creates an override for inline-level descendants not within another block container element. 
This means that inside the element, reordering is strictly in sequence according to the direction property; the implicit part of the bidirectional algorithm is ignored.
+-- isolate This keyword indicates that the element's container directionality should be calculated without considering the content of this element. The element is therefore isolated from its siblings. When applying its bidirectional-resolution algorithm, its container element treats it as one or several U+FFFC Object Replacement Characters, i.e. like an image.
+-- isolate-override This keyword applies the isolation behavior of the isolate keyword to the surrounding content and the override behavior of the bidi-override keyword to the inner content.
+-- plaintext This keyword causes the element's directionality to be calculated without considering its parent bidirectional state or the value of the direction property. The directionality is calculated using the P2 and P3 rules of the Unicode Bidirectional Algorithm.
+-- This value allows one to display data which has already been formatted using a tool following the Unicode Bidirectional Algorithm.
+--
+-- todo: check for introduced errors
+-- todo: reuse list, we have size, so we can just change values (and auto allocate when not there)
+-- todo: reuse the stack
+-- todo: no need for a max check
+-- todo: collapse bound similar ranges (not ok yet)
+-- todo: combine some sweeps
+-- todo: removing is not needed when we inject at the same spot (only change the dir property)
+-- todo: isolated runs (isolating runs are similar to bidi=local in the basic analyzer)
+
+-- todo: check unicode addenda (from the draft):
+--
+-- Added support for canonical equivalents in BD16.
+-- Changed logic in N0 to not check forwards for context in the case of enclosed text opposite the embedding direction.
+-- Major extension of the algorithm to allow for the implementation of directional isolates and the introduction of new isolate-related values to the Bidi_Class property.
+-- Adds BD8, BD9, BD10, BD11, BD12, BD13, BD14, BD15, and BD16, Sections 2.4 and 2.5, and Rules X5a, X5b, X5c and X6a.
+-- Extensively revises Section 3.3.2, Explicit Levels and Directions and its existing X rules to formalize the algorithm for matching a PDF with the embedding or override initiator whose scope it terminates.
+-- Moves Rules X9 and X10 into a separate new Section 3.3.3, Preparations for Implicit Processing.
+-- Modifies Rule X10 to make the isolating run sequence the unit to which subsequent rules are applied.
+-- Modifies Rule W1 to change an NSM preceded by an isolate initiator or PDI into ON.
+-- Adds Rule N0 and makes other changes to Section 3.3.5, Resolving Neutral and Isolate Formatting Types to resolve bracket pairs to the same level.
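+
+-- The explicit and implicit passes below manipulate embedding levels with inlined arithmetic
+-- (see the "least_greater_odd", "least_greater_even" and "direction_of_level" remarks). As a
+-- reading aid, this is what those inlined expressions compute; a small reference sketch only,
+-- not used by the module itself:
+--
+-- local function least_greater_odd (level) return level % 2 == 1 and level + 2 or level + 1 end
+-- local function least_greater_even(level) return level % 2 == 1 and level + 1 or level + 2 end
+-- local function direction_of_level(level) return level % 2 == 1 and "r" or "l" end
+--
+-- so least_greater_odd(0) == 1, least_greater_even(1) == 2 and direction_of_level(3) == "r".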
+ +local insert, remove, unpack, concat = table.insert, table.remove, table.unpack, table.concat +local utfchar = utf.char +local setmetatable = setmetatable +local formatters = string.formatters + +local directiondata = characters.directions +local mirrordata = characters.mirrors +local textclassdata = characters.textclasses + +local remove_node = nodes.remove +local insert_node_after = nodes.insert_after +local insert_node_before = nodes.insert_before + +local nodepool = nodes.pool +local new_textdir = nodepool.textdir + +local nodecodes = nodes.nodecodes +local whatsitcodes = nodes.whatsitcodes +local skipcodes = nodes.skipcodes + +local glyph_code = nodecodes.glyph +local glue_code = nodecodes.glue +local hlist_code = nodecodes.hlist +local vlist_code = nodecodes.vlist +local math_code = nodecodes.math +local whatsit_code = nodecodes.whatsit +local dir_code = whatsitcodes.dir +local localpar_code = whatsitcodes.localpar +local parfillskip_code = skipcodes.skipcodes + +local maximum_stack = 0xFF -- unicode: 60, will be jumped to 125, we don't care too much + +local directions = typesetters.directions +local setcolor = directions.setcolor +local getfences = directions.getfences + +local a_directions = attributes.private('directions') +local a_textbidi = attributes.private('textbidi') +local a_state = attributes.private('state') + +local s_isol = fonts.analyzers.states.isol + +-- current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo + +local remove_controls = true directives.register("typesetters.directions.removecontrols",function(v) remove_controls = v end) +----- analyze_fences = true directives.register("typesetters.directions.analyzefences", function(v) analyze_fences = v end) + +local trace_directions = false trackers .register("typesetters.directions.two", function(v) trace_directions = v end) +local trace_details = false trackers .register("typesetters.directions.two.details", function(v) trace_details = v end) + +local report_directions = logs.reporter("typesetting","directions two") + +-- strong (old): +-- +-- l : left to right +-- r : right to left +-- lro : left to right override +-- rlo : left to left override +-- lre : left to right embedding +-- rle : left to left embedding +-- al : right to legt arabic (esp punctuation issues) + +-- weak: +-- +-- en : english number +-- es : english number separator +-- et : english number terminator +-- an : arabic number +-- cs : common number separator +-- nsm : nonspacing mark +-- bn : boundary neutral + +-- neutral: +-- +-- b : paragraph separator +-- s : segment separator +-- ws : whitespace +-- on : other neutrals + +-- interesting: this is indeed better (and more what we expect i.e. 
we already use this split +-- in the old original (also these isolates) + +-- strong (new): +-- +-- l : left to right +-- r : right to left +-- al : right to legt arabic (esp punctuation issues) + +-- explicit: (new) +-- +-- lro : left to right override +-- rlo : left to left override +-- lre : left to right embedding +-- rle : left to left embedding +-- pdf : pop dir format +-- lri : left to right isolate +-- rli : left to left isolate +-- fsi : first string isolate +-- pdi : pop directional isolate + +local whitespace = { + lre = true, + rle = true, + lro = true, + rlo = true, + pdf = true, + bn = true, + ws = true, +} + +local b_s_ws_on = { + b = true, + s = true, + ws = true, + on = true +} + +-- tracing + +local function show_list(list,size,what) + local what = what or "direction" + local joiner = utfchar(0x200C) + local result = { } + for i=1,size do + local entry = list[i] + local character = entry.char + local direction = entry[what] + if character == 0xFFFC then + local first = entry.id + local last = entry.last + local skip = entry.skip + if last then + result[i] = formatters["%-3s:%s %s..%s (%i)"](direction,joiner,nodecodes[first],nodecodes[last],skip or 0) + else + result[i] = formatters["%-3s:%s %s (%i)"](direction,joiner,nodecodes[first],skip or 0) + end + elseif character >= 0x202A and character <= 0x202C then + result[i] = formatters["%-3s:%s %U"](direction,joiner,character) + else + result[i] = formatters["%-3s:%s %c %U"](direction,joiner,character,character) + end + end + return concat(result,joiner .. " | " .. joiner) +end + +-- preparation + +local function show_done(list,size) + local joiner = utfchar(0x200C) + local result = { } + for i=1,size do + local entry = list[i] + local character = entry.char + local begindir = entry.begindir + local enddir = entry.enddir + if begindir then + result[#result+1] = formatters["<%s>"](begindir) + end + if entry.remove then + -- continue + elseif character == 0xFFFC then + result[#result+1] = formatters["<%s>"]("?") + elseif character == 0x0020 then + result[#result+1] = formatters["<%s>"](" ") + elseif character >= 0x202A and character <= 0x202C then + result[#result+1] = formatters["<%s>"](entry.original) + else + result[#result+1] = utfchar(character) + end + if enddir then + result[#result+1] = formatters["<%s>"](enddir) + end + end + return concat(result,joiner) +end + +-- keeping the list and overwriting doesn't save much runtime, only a few percent +-- char is only used for mirror, so in fact we can as well only store it for +-- glyphs only + +-- using metatable is slightly faster so maybe some day ... + +-- local space = { char = 0x0020, direction = "ws", original = "ws" } +-- local lre = { char = 0x202A, direction = "lre", original = "lre" } +-- local lre = { char = 0x202B, direction = "rle", original = "rle" } +-- local pdf = { char = 0x202C, direction = "pdf", original = "pdf" } +-- local object = { char = 0xFFFC, direction = "on", original = "on" } +-- +-- local t = { level = 0 } setmetatable(t,space) list[size] = t + +local function build_list(head) -- todo: store node pointer ... 
saves loop + -- P1 + local current = head + local list = { } + local size = 0 + while current do + size = size + 1 + local id = current.id + if id == glyph_code then + local chr = current.char + local dir = directiondata[chr] + list[size] = { char = chr, direction = dir, original = dir, level = 0 } + current = current.next + elseif id == glue_code then + list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 } + current = current.next + elseif id == whatsit_code and current.subtype == dir_code then + local dir = current.dir + if dir == "+TLT" then + list[size] = { char = 0x202A, direction = "lre", original = "lre", level = 0 } + elseif dir == "+TRT" then + list[size] = { char = 0x202B, direction = "rle", original = "rle", level = 0 } + elseif dir == "-TLT" or dir == "-TRT" then + list[size] = { char = 0x202C, direction = "pdf", original = "pdf", level = 0 } + else + list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id } -- object replacement character + end + current = current.next + elseif id == math_code then + local skip = 0 + current = current.next + while current.id ~= math_code do + skip = skip + 1 + current = current.next + end + skip = skip + 1 + current = current.next + list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id } + else + local skip = 0 + local last = id + current = current.next + while n do + local id = current.id + if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and current.subtype == dir_code) then + skip = skip + 1 + last = id + current = current.next + else + break + end + end + if id == last then + list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id } + else + list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id, last = last } + end + end + end + return list, size +end + +-- new + +-- we could support ( ] and [ ) and such ... 
+ +-- ש ) ל ( א 0-0 +-- ש ( ל ] א 0-0 +-- ש ( ל ) א 2-4 +-- ש ( ל [ א ) כ ] 2-6 +-- ש ( ל ] א ) כ 2-6 +-- ש ( ל ) א ) כ 2-4 +-- ש ( ל ( א ) כ 4-6 +-- ש ( ל ( א ) כ ) 2-8,4-6 +-- ש ( ל [ א ] כ ) 2-8,4-6 + +function resolve_fences(list,size,start,limit) + -- N0: funny effects, not always better, so it's an options + local stack = { } + local top = 0 + for i=start,limit do + local entry = list[i] + if entry.direction == "on" then + local char = entry.char + local mirror = mirrordata[char] + if mirror then + local class = textclassdata[char] + entry.mirror = mirror + entry.class = class + if class == "open" then + top = top + 1 + stack[top] = { mirror, i, false } + elseif top == 0 then + -- skip + elseif class == "close" then + while top > 0 do + local s = stack[top] + if s[1] == char then + local open = s[2] + local close = i + list[open ].paired = close + list[close].paired = open + break + else + -- do we mirror or not + end + top = top - 1 + end + end + end + end + end +end + +-- local function test_fences(str) +-- local list = { } +-- for s in string.gmatch(str,".") do +-- local b = utf.byte(s) +-- list[#list+1] = { c = s, char = b, direction = directiondata[b] } +-- end +-- resolve_fences(list,#list,1,#size) +-- inspect(list) +-- end +-- +-- test_fences("a(b)c(d)e(f(g)h)i") +-- test_fences("a(b[c)d]") + +-- the action + +local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for local_par) + if head.id == whatsit_code and head.subtype == localpar_code then + if head.dir == "TRT" then + return 1, "TRT", true + else + return 0, "TLT", true + end + else + -- P2, P3 + for i=1,size do + local entry = list[i] + local direction = entry.direction + if direction == "r" or direction == "al" then + return 1, "TRT", true + elseif direction == "l" then + return 0, "TLT", true + end + end + return 0, "TLT", false + end +end + +local function resolve_explicit(list,size,baselevel) + -- X1 + local level = baselevel + local override = "on" + local stack = { } + local nofstack = 0 + for i=1,size do + local entry = list[i] + local direction = entry.direction + -- X2 + if direction == "rle" then + if nofstack < maximum_stack then + nofstack = nofstack + 1 + stack[nofstack] = { level, override } + level = level + (level % 2 == 1 and 2 or 1) -- least_greater_odd(level) + override = "on" + entry.level = level + entry.direction = "bn" + entry.remove = true + elseif trace_directions then + report_directions("stack overflow at position %a with direction %a",i,direction) + end + -- X3 + elseif direction == "lre" then + if nofstack < maximum_stack then + nofstack = nofstack + 1 + stack[nofstack] = { level, override } + level = level + (level % 2 == 1 and 1 or 2) -- least_greater_even(level) + override = "on" + entry.level = level + entry.direction = "bn" + entry.remove = true + elseif trace_directions then + report_directions("stack overflow at position %a with direction %a",i,direction) + end + -- X4 + elseif direction == "rlo" then + if nofstack < maximum_stack then + nofstack = nofstack + 1 + stack[nofstack] = { level, override } + level = level + (level % 2 == 1 and 2 or 1) -- least_greater_odd(level) + override = "r" + entry.level = level + entry.direction = "bn" + entry.remove = true + elseif trace_directions then + report_directions("stack overflow at position %a with direction %a",i,direction) + end + -- X5 + elseif direction == "lro" then + if nofstack < maximum_stack then + nofstack = nofstack + 1 + stack[nofstack] = { level, override } + level = level + (level 
% 2 == 1 and 1 or 2) -- least_greater_even(level) + override = "l" + entry.level = level + entry.direction = "bn" + entry.remove = true + elseif trace_directions then + report_directions("stack overflow at position %a with direction %a",i,direction) + end + -- X7 + elseif direction == "pdf" then + if nofstack < maximum_stack then + local stacktop = stack[nofstack] + nofstack = nofstack - 1 + level = stacktop[1] + override = stacktop[2] + entry.level = level + entry.direction = "bn" + entry.remove = true + elseif trace_directions then + report_directions("stack overflow at position %a with direction %a",i,direction) + end + -- X6 + else + entry.level = level + if override ~= "on" then + entry.direction = override + end + end + end + -- X8 (reset states and overrides after paragraph) +end + +local function resolve_weak(list,size,start,limit,orderbefore,orderafter) + -- W1: non spacing marks get the direction of the previous character + for i=start,limit do + local entry = list[i] + if entry.direction == "nsm" then + if i == start then + entry.direction = orderbefore + else + entry.direction = list[i-1].direction + end + end + end + -- W2: mess with numbers and arabic + for i=start,limit do + local entry = list[i] + if entry.direction == "en" then + for j=i-1,start,-1 do + local prev = list[j] + local direction = prev.direction + if direction == "al" then + entry.direction = "an" + break + elseif direction == "r" or direction == "l" then + break + end + end + end + end + -- W3 + for i=start,limit do + local entry = list[i] + if entry.direction == "al" then + entry.direction = "r" + end + end + -- W4: make separators number + for i=start+1,limit-1 do + local entry = list[i] + local direction = entry.direction + if direction == "es" then + if list[i-1].direction == "en" and list[i+1].direction == "en" then + entry.direction = "en" + end + elseif direction == "cs" then + local prevdirection = list[i-1].direction + if prevdirection == "en" then + if list[i+1].direction == "en" then + entry.direction = "en" + end + elseif prevdirection == "an" and list[i+1].direction == "an" then + entry.direction = "an" + end + end + end + -- W5 + local i = start + while i <= limit do + if list[i].direction == "et" then + local runstart = i + local runlimit = runstart + for i=runstart,limit do + if list[i].direction == "et" then + runlimit = i + else + break + end + end + local rundirection = runstart == start and sor or list[runstart-1].direction + if rundirection ~= "en" then + rundirection = runlimit == limit and orderafter or list[runlimit+1].direction + end + if rundirection == "en" then + for j=runstart,runlimit do + list[j].direction = "en" + end + end + i = runlimit + end + i = i + 1 + end + -- W6 + for i=start,limit do + local entry = list[i] + local direction = entry.direction + if direction == "es" or direction == "et" or direction == "cs" then + entry.direction = "on" + end + end + -- W7 + for i=start,limit do + local entry = list[i] + if entry.direction == "en" then + local prev_strong = orderbefore + for j=i-1,start,-1 do + local direction = list[j].direction + if direction == "l" or direction == "r" then + prev_strong = direction + break + end + end + if prev_strong == "l" then + entry.direction = "l" + end + end + end +end + +local function resolve_neutral(list,size,start,limit,orderbefore,orderafter) + -- N1, N2 + for i=start,limit do + local entry = list[i] + if b_s_ws_on[entry.direction] then + local leading_direction, trailing_direction, resolved_direction + local runstart = i + local runlimit 
= runstart + for i=runstart,limit do + if b_s_ws_on[list[i].direction] then + runstart = i + else + break + end + end + if runstart == start then + leading_direction = sor + else + leading_direction = list[runstart-1].direction + if leading_direction == "en" or leading_direction == "an" then + leading_direction = "r" + end + end + if runlimit == limit then + trailing_direction = orderafter + else + trailing_direction = list[runlimit+1].direction + if trailing_direction == "en" or trailing_direction == "an" then + trailing_direction = "r" + end + end + if leading_direction == trailing_direction then + -- N1 + resolved_direction = leading_direction + else + -- N2 / does the weird period + resolved_direction = entry.level % 2 == 1 and "r" or "l" -- direction_of_level(entry.level) + end + for j=runstart,runlimit do + list[j].direction = resolved_direction + end + i = runlimit + end + i = i + 1 + end +end + +local function resolve_implicit(list,size,start,limit,orderbefore,orderafter) + -- I1 + for i=start,limit do + local entry = list[i] + local level = entry.level + if level % 2 ~= 1 then -- not odd(level) + local direction = entry.direction + if direction == "r" then + entry.level = level + 1 + elseif direction == "an" or direction == "en" then + entry.level = level + 2 + end + end + end + -- I2 + for i=start,limit do + local entry = list[i] + local level = entry.level + if level % 2 == 1 then -- odd(level) + local direction = entry.direction + if direction == "l" or direction == "en" or direction == "an" then + entry.level = level + 1 + end + end + end +end + +local function resolve_levels(list,size,baselevel,analyze_fences) + -- X10 + local start = 1 + while start < size do + local level = list[start].level + local limit = start + 1 + while limit < size and list[limit].level == level do + limit = limit + 1 + end + local prev_level = start == 1 and baselevel or list[start-1].level + local next_level = limit == size and baselevel or list[limit+1].level + local orderbefore = (level > prev_level and level or prev_level) % 2 == 1 and "r" or "l" -- direction_of_level(max(level,prev_level)) + local orderafter = (level > next_level and level or next_level) % 2 == 1 and "r" or "l" -- direction_of_level(max(level,next_level)) + -- W1 .. W7 + resolve_weak(list,size,start,limit,orderbefore,orderafter) + -- N0 + if analyze_fences then + resolve_fences(list,size,start,limit) + end + -- N1 .. N2 + resolve_neutral(list,size,start,limit,orderbefore,orderafter) + -- I1 .. 
I2 + resolve_implicit(list,size,start,limit,orderbefore,orderafter) + start = limit + end + -- L1 + for i=1,size do + local entry = list[i] + local direction = entry.original + -- (1) + if direction == "s" or direction == "b" then + entry.level = baselevel + -- (2) + for j=i-1,1,-1 do + local entry = list[j] + if whitespace[entry.original] then + entry.level = baselevel + else + break + end + end + end + end + -- (3) + for i=size,1,-1 do + local entry = list[i] + if whitespace[entry.original] then + entry.level = baselevel + else + break + end + end + -- L4 + if analyze_fences then + for i=1,size do + local entry = list[i] + if entry.level % 2 == 1 then -- odd(entry.level) + if entry.mirror and not entry.paired then + entry.mirror = false + end + -- okay + elseif entry.mirror then + entry.mirror = false + end + end + else + for i=1,size do + local entry = list[i] + if entry.level % 2 == 1 then -- odd(entry.level) + local mirror = mirrordata[entry.char] + if mirror then + entry.mirror = mirror + end + end + end + end +end + +local function insert_dir_points(list,size) + -- L2, but no actual reversion is done, we simply annotate where + -- begindir/endddir node will be inserted. + local maxlevel = 0 + local finaldir = false + for i=1,size do + local level = list[i].level + if level > maxlevel then + maxlevel = level + end + end + for level=0,maxlevel do + local started = false + local begindir = nil + local enddir = nil + if level % 2 == 1 then + begindir = "+TRT" + enddir = "-TRT" + else + begindir = "+TLT" + enddir = "-TLT" + end + for i=1,size do + local entry = list[i] + if entry.level >= level then + if not started then + entry.begindir = begindir + started = true + end + else + if started then + list[i-1].enddir = enddir + started = false + end + end + end + -- make sure to close the run at end of line + if started then + finaldir = enddir + end + end + if finaldir then + list[size].enddir = finaldir + end +end + +local function apply_to_list(list,size,head,pardir) + local index = 1 + local current = head + local done = false + while current do + if index > size then + report_directions("fatal error, size mismatch") + break + end + local id = current.id + local entry = list[index] + local begindir = entry.begindir + local enddir = entry.enddir + if id == glyph_code then + local mirror = entry.mirror + if mirror then + current.char = mirror + end + if trace_directions then + local direction = entry.direction + setcolor(current,direction,direction ~= entry.original,mirror) + end + elseif id == hlist_code or id == vlist_code then + current.dir = pardir -- is this really needed? 
+ elseif id == glue_code then + if enddir and current.subtype == parfillskip_code then + -- insert the last enddir before \parfillskip glue + head = insert_node_before(head,current,new_textdir(enddir)) + enddir = false + done = true + end + elseif id == whatsit_code then + if begindir and current.subtype == localpar_code then + -- local_par should always be the 1st node + head, current = insert_node_after(head,current,new_textdir(begindir)) + begindir = nil + done = true + end + end + if begindir then + head = insert_node_before(head,current,new_textdir(begindir)) + done = true + end + local skip = entry.skip + if skip and skip > 0 then + for i=1,skip do + current = current.next + end + end + if enddir then + head, current = insert_node_after(head,current,new_textdir(enddir)) + done = true + end + if not entry.remove then + current = current.next + elseif remove_controls then + -- X9 + head, current = remove_node(head,current,true) + done = true + else + current = current.next + end + index = index + 1 + end + return head, done +end + +local function process(head) + -- for the moment a whole paragraph property + local attr = head[a_directions] + local analyze_fences = getfences(attr) + -- + local list, size = build_list(head) + local baselevel, pardir, dirfound = get_baselevel(head,list,size) -- we always have an inline dir node in context + if not dirfound and trace_details then + report_directions("no initial direction found, gambling") + end + if trace_details then + report_directions("before : %s",show_list(list,size,"original")) + end + resolve_explicit(list,size,baselevel) + resolve_levels(list,size,baselevel,analyze_fences) + insert_dir_points(list,size) + if trace_details then + report_directions("after : %s",show_list(list,size,"direction")) + report_directions("result : %s",show_done(list,size)) + end + head, done = apply_to_list(list,size,head,pardir) + return head, done +end + +directions.installhandler(interfaces.variables.two,process) diff --git a/tex/context/base/typo-fln.lua b/tex/context/base/typo-fln.lua new file mode 100644 index 000000000..4c97af450 --- /dev/null +++ b/tex/context/base/typo-fln.lua @@ -0,0 +1,271 @@ +if not modules then modules = { } end modules ['typo-fln'] = { + version = 1.001, + comment = "companion to typo-fln.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- When I ran into the following experimental code again, I figured that it dated +-- from the early days of mkiv, so I updates it a bit to fit into todays context. +-- In the process I might have messed up things. For instance we had a diffent +-- wrapper then using head and tail. 
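+
+-- The mkiv companion calls commands.setfirstline with a specification table; the actions
+-- below only look at a handful of its fields. A rough sketch of such a call (the values
+-- shown here are made up):
+--
+-- firstlines.set {
+--     alternative = "line",         -- "line" or "word", selects the action
+--     n           = 1,              -- number of lines (or words) to treat
+--     font        = font.current(), -- font id to use for the first line(s)
+--     dynamic     = 0,              -- dynamic feature attribute, 0 means none
+--     ma          = 0,              -- color model attribute
+--     ca          = 0,              -- color attribute
+--     ta          = 0,              -- transparency attribute
+-- }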
+ +-- todo: only letters (no punctuation) +-- todo: nuts + +local trace_firstlines = false trackers.register("typesetters.firstlines", function(v) trace_firstlines = v end) +local report_firstlines = logs.reporter("nodes","firstlines") + +typesetters.firstlines = typesetters.firstlines or { } +local firstlines = typesetters.firstlines + +local nodes = nodes +local tasks = nodes.tasks + +local getbox = nodes.getbox +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc +local kern_code = nodecodes.kern + +local traverse_id = nodes.traverse_id +local free_node_list = nodes.flush_list +local free_node = nodes.flush_node +local copy_node_list = nodes.copy_list +local insert_node_after = nodes.insert_after +local insert_node_before = nodes.insert_before +local hpack_node_list = nodes.hpack +local remove_node = nodes.remove + +local nodepool = nodes.pool +local newpenalty = nodepool.penalty +local newkern = nodepool.kern +local tracerrule = nodes.tracers.pool.nodes.rule + +local actions = { } +firstlines.actions = actions + +local a_firstline = attributes.private('firstline') +local a_color = attributes.private('color') +local a_transparency = attributes.private('transparency') +local a_colorspace = attributes.private('colormodel') + +local texsetattribute = tex.setattribute +local unsetvalue = attributes.unsetvalue + +local variables = interfaces.variables +local v_default = variables.default +local v_line = variables.line +local v_word = variables.word + +----- is_letter = characters.is_letter +----- categories = characters.categories + +local settings = nil + +function firstlines.set(specification) + settings = specification or { } + tasks.enableaction("processors","typesetters.firstlines.handler") + if trace_firstlines then + report_firstlines("enabling firstlines") + end + texsetattribute(a_firstline,1) +end + +commands.setfirstline = firstlines.set + +actions[v_line] = function(head,setting) + -- local attribute = fonts.specifiers.contextnumber(setting.feature) -- was experimental + local dynamic = setting.dynamic + local font = setting.font + local noflines = setting.n or 1 + local ma = setting.ma or 0 + local ca = setting.ca + local ta = setting.ta + local hangafter = tex.hangafter + local hangindent = tex.hangindent + local parindent = tex.parindent + local nofchars = 0 + local n = 0 + local temp = copy_node_list(head) + local linebreaks = { } + for g in traverse_id(glyph_code,temp) do + if dynamic > 0 then + g[0] = dynamic + end + g.font = font + end + local start = temp + local list = temp + local prev = temp + for i=1,noflines do + local hsize = tex.hsize - tex.leftskip.width - tex.rightskip.width + if i == 1 then + hsize = hsize - parindent + end + if i <= - hangafter then + hsize = hsize - hangindent + end + while start do + local id = start.id + if id == glyph_code then + n = n + 1 + elseif id == disc_code then + -- this could be an option + elseif id == kern_code then -- todo: fontkern + -- this could be an option + elseif n > 0 then + local pack = hpack_node_list(copy_node_list(list,start)) + if pack.width > hsize then + free_node_list(pack) + list = prev + break + else + linebreaks[i] = n + prev = start + free_node_list(pack) + nofchars = n + end + end + start = start.next + end + if not linebreaks[i] then + linebreaks[i] = n + end + end + local start = head + local n = 0 + for i=1,noflines do + local linebreak = linebreaks[i] + while start and n < nofchars do + local id = start.id + if id == glyph_code then -- or id == 
disc_code then + if dynamic > 0 then + start[0] = dynamic + end + start.font = font + if ca and ca > 0 then + start[a_colorspace] = ma == 0 and 1 or ma + start[a_color] = ca + end + if ta and ta > 0 then + start[a_transparency] = ta + end + n = n + 1 + end + if linebreak == n then + if trace_firstlines then + head, start = insert_node_after(head,start,newpenalty(10000)) -- nobreak + head, start = insert_node_after(head,start,newkern(-65536)) + head, start = insert_node_after(head,start,tracerrule(65536,4*65536,2*65536,"darkblue")) + end + head, start = insert_node_after(head,start,newpenalty(-10000)) -- break + break + end + start = start.next + end + end + free_node_list(temp) + return head, true +end + +actions[v_word] = function(head,setting) + -- local attribute = fonts.specifiers.contextnumber(setting.feature) -- was experimental + local dynamic = setting.dynamic + local font = setting.font + local words = 0 + local nofwords = setting.n or 1 + local start = head + local ok = false + local ma = setting.ma or 0 + local ca = setting.ca + local ta = setting.ta + while start do + local id = start.id + -- todo: delete disc nodes + if id == glyph_code then + if not ok then + words = words + 1 + ok = true + end + if ca and ca > 0 then + start[a_colorspace] = ma == 0 and 1 or ma + start[a_color] = ca + end + if ta and ta > 0 then + start[a_transparency] = ta + end + if dynamic > 0 then + start[0] = dynamic + end + start.font = font + elseif id == disc_code then + -- continue + elseif id == kern_code then -- todo: fontkern + -- continue + else + ok = false + if words == nofwords then + break + end + end + start = start.next + end + return head, true +end + +actions[v_default] = actions[v_line] + +function firstlines.handler(head) + local start = head + local attr = nil + while start do + attr = start[a_firstline] + if attr then + break + elseif start.id == glyph then + break + else + start = start.next + end + end + if attr then + -- here as we can process nested boxes first so we need to keep state + tasks.disableaction("processors","typesetters.firstlines.handler") + -- texsetattribute(attribute,unsetvalue) + local alternative = settings.alternative or v_default + local action = actions[alternative] or actions[v_default] + if action then + if trace_firstlines then + report_firstlines("processing firstlines, alternative %a",alternative) + end + return action(head,settings) + end + end + return head, false +end + +-- goodie + +function commands.applytofirstcharacter(box,what) + local tbox = getbox(box) -- assumes hlist + local list = tbox.list + local done = nil + for n in traverse_id(glyph_code,list) do + list = remove_node(list,n) + done = n + break + end + if done then + tbox.list = list + local kind = type(what) + if kind == "string" then + context[what](done) + elseif kind == "function" then + what(done) + else + -- error + end + end +end diff --git a/tex/context/base/typo-fln.mkiv b/tex/context/base/typo-fln.mkiv new file mode 100644 index 000000000..d8651b459 --- /dev/null +++ b/tex/context/base/typo-fln.mkiv @@ -0,0 +1,112 @@ +%D \module +%D [ file=typo-fln, +%D version=2013.08.22, % comes from the few years older m-newstuff +%D title=\CONTEXT\ Typesetting Macros, +%D subtitle=First Lines, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. 
+ +\writestatus{loading}{ConTeXt Typesetting Macros / First Lines} + +%D I had this code laying around for a while \unknown\ probably as a side effect +%D of cleaning up the supp-fun modules. There is probably room for improvement +%D and more features. + +% \setupbodyfont[pagella] +% +% \starttext +% +% \setupindenting[medium,yes] +% +% \definefirstline +% [smallcaps] +% [alternative=line, +% color=darkred, +% style=\setfontfeature{smallcaps}] +% +% \setfirstline[smallcaps] \input tufte \par +% \setfirstline[smallcaps] \input ward \par +% \setfirstline[smallcaps] \input knuth \par +% \setfirstline[smallcaps] \input bryson \par +% +% \definefirstline +% [smallcaps] +% [alternative=word, +% color=darkblue, +% style=bold, +% n=2] +% +% \setfirstline[smallcaps] \input tufte \par +% \setfirstline[smallcaps] \input ward \par +% \setfirstline[smallcaps] \input knuth \par +% \setfirstline[smallcaps] \input bryson \par +% +% \stoptext + +\unprotect + +\registerctxluafile{typo-fln}{1.001} + +\definesystemattribute[firstline][public] + +\appendtoks + \attribute\firstlineattribute\attributeunsetvalue +\to \everyforgetall + +\installcorenamespace {firstline} + +\installcommandhandler \??firstline {firstline} \??firstline + +\setupfirstline + [\c!alternative=\v!line, + %\c!style=, + %\c!color=, + \c!n=1] + +\unexpanded\def\setfirstline + {\dosingleempty\typo_firstline_set} + +\unexpanded\def\typo_firstline_set[#1]% + {\edef\typo_firstline_handle{\typo_firstline_handle_indeed{#1}}} + +\unexpanded\def\typo_firstline_handle_indeed#1% + {\dontleavehmode + \begingroup + \edef\currentfirstline{#1}% + \usefirstlinestyleandcolor\c!style\c!color + \ctxlua{commands.setfirstline { + alternative = "\firstlineparameter\c!alternative", + ma = \the\attribute\colormodelattribute, + ca = \the\attribute\colorattribute, + ta = \the\attribute\transparencyattribute, + n = \number\firstlineparameter\c!n, + font = \fontid\font, + dynamic = \number\attribute\zerocount, + }}% + \kern\zeropoint % we need a node +% \hskip\zeropoint\s!plus\emwidth\relax % can be an option + \endgroup + \globallet\typo_firstline_handle\relax} + +\let\typo_firstline_handle\relax + +% goodie, question on list +% +% \defineframed[ChapterFramed][location=low,background=color,backgroundcolor=red,frame=off] +% +% \setuphead[chapter][deeptextcommand=\applytofirstcharacter\ChapterFramed] + +\unexpanded\def\applytofirstcharacter#1% + {\begingroup + \dowithnextbox + {\ctxcommand{applytofirstcharacter(\number\nextbox,"\strippedcsname#1")}% + \unhbox\nextbox + \endgroup}% + \hbox} + +\protect \endinput diff --git a/tex/context/base/typo-itc.lua b/tex/context/base/typo-itc.lua index b39ea2f23..452b623c8 100644 --- a/tex/context/base/typo-itc.lua +++ b/tex/context/base/typo-itc.lua @@ -28,7 +28,8 @@ local insert_node_after = node.insert_after local delete_node = nodes.delete local end_of_math = node.end_of_math -local texattribute = tex.attribute +local texgetattribute = tex.getattribute +local texsetattribute = tex.setattribute local a_italics = attributes.private("italics") local unsetvalue = attributes.unsetvalue @@ -81,7 +82,7 @@ end -- todo: clear attribute -local function process(namespace,attribute,head) +function italics.handler(head) local done = false local italic = 0 local lastfont = nil @@ -120,7 +121,7 @@ local function process(namespace,attribute,head) lastfont = font end if data then - local attr = forcedvariant or current[attribute] + local attr = forcedvariant or current[a_italics] if attr and attr > 0 then local cd = data[char] if not cd then 
@@ -199,22 +200,16 @@ function italics.set(n) enable() end if n == variables.reset then - texattribute[a_italics] = unsetvalue + texsetattribute(a_italics,unsetvalue) else - texattribute[a_italics] = tonumber(n) or unsetvalue + texsetattribute(a_italics,tonumber(n) or unsetvalue) end end function italics.reset() - texattribute[a_italics] = unsetvalue + texsetattribute(a_italics,unsetvalue) end -italics.handler = nodes.installattributehandler { - name = "italics", - namespace = italics, - processor = process, -} - local variables = interfaces.variables local settings_to_hash = utilities.parsers.settings_to_hash @@ -231,10 +226,10 @@ function commands.setupitaliccorrection(option) -- no grouping ! end if options[variables.global] then forcedvariant = variant - texattribute[a_italics] = unsetvalue + texsetattribute(a_italics,unsetvalue) else forcedvariant = false - texattribute[a_italics] = variant + texsetattribute(a_italics,variant) end if trace_italics then report_italics("forcing %a, variant %a",forcedvariant,variant ~= unsetvalue and variant) @@ -246,11 +241,11 @@ end local stack = { } function commands.pushitaliccorrection() - table.insert(stack,{forcedvariant, texattribute[a_italics] }) + table.insert(stack,{forcedvariant, texgetattribute(a_italics) }) end function commands.popitaliccorrection() local top = table.remove(stack) forcedvariant = top[1] - texattribute[a_italics] = top[2] + texsetattribute(a_italics,top[2]) end diff --git a/tex/context/base/typo-krn.lua b/tex/context/base/typo-krn.lua index fb28d3b2d..56f58bb73 100644 --- a/tex/context/base/typo-krn.lua +++ b/tex/context/base/typo-krn.lua @@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['typo-krn'] = { license = "see context related readme files" } +-- glue is still somewhat suboptimal + local next, type, tonumber = next, type, tonumber local utfchar = utf.char @@ -20,7 +22,7 @@ local insert_node_before = node.insert_before local insert_node_after = node.insert_after local end_of_math = node.end_of_math -local texattribute = tex.attribute +local texsetattribute = tex.setattribute local unsetvalue = attributes.unsetvalue local nodepool = nodes.pool @@ -33,6 +35,7 @@ local new_glue = nodepool.glue local nodecodes = nodes.nodecodes local kerncodes = nodes.kerncodes local skipcodes = nodes.skipcodes +local disccodes = nodes.disccodes local glyph_code = nodecodes.glyph local kern_code = nodecodes.kern @@ -42,6 +45,7 @@ local hlist_code = nodecodes.hlist local vlist_code = nodecodes.vlist local math_code = nodecodes.math +local discretionary_code = disccodes.discretionary local kerning_code = kerncodes.kerning local userkern_code = kerncodes.userkern local userskip_code = skipcodes.userskip @@ -53,8 +57,16 @@ local fontdata = fonthashes.identifiers local chardata = fonthashes.characters local quaddata = fonthashes.quads local markdata = fonthashes.marks +local fontproperties = fonthashes.properties +local fontdescriptions = fonthashes.descriptions +local fontfeatures = fonthashes.features + +local tracers = nodes.tracers +local setcolor = tracers.colors.set +local resetcolor = tracers.colors.reset local v_max = interfaces.variables.max +local v_auto = interfaces.variables.auto typesetters = typesetters or { } local typesetters = typesetters @@ -62,11 +74,15 @@ local typesetters = typesetters typesetters.kerns = typesetters.kerns or { } local kerns = typesetters.kerns +local report = logs.reporter("kerns") +local trace_ligatures = false trackers.register("typesetters.kerns.ligatures",function(v) trace_ligatures = v 
end) + kerns.mapping = kerns.mapping or { } kerns.factors = kerns.factors or { } local a_kerns = attributes.private("kern") local a_fontkern = attributes.private('fontkern') -kerns.attribute = kerns.attribute + +local contextsetups = fonts.specifiers.contextsetups storage.register("typesetters/kerns/mapping", kerns.mapping, "typesetters.kerns.mapping") storage.register("typesetters/kerns/factors", kerns.factors, "typesetters.kerns.factors") @@ -79,12 +95,75 @@ local factors = kerns.factors -- make sure it runs after all others -- there will be a width adaptor field in nodes so this will change -- todo: interchar kerns / disc nodes / can be made faster +-- todo: use insert_before etc local gluefactor = 4 -- assumes quad = .5 enspace kerns.keepligature = false -- just for fun (todo: control setting with key/value) kerns.keeptogether = false -- just for fun (todo: control setting with key/value) +-- red : kept by dynamic feature +-- green : kept by static feature +-- blue : keep by goodie + +function kerns.keepligature(n) -- might become default + local f = n.font + local a = n[0] or 0 + if trace_ligatures then + local c = n.char + local d = fontdescriptions[f][c].name + if a > 0 and contextsetups[a].keepligatures == v_auto then + report("font %!font:name!, glyph %a, slot %X -> ligature %s, by %s feature %a",f,d,c,"kept","dynamic","keepligatures") + setcolor(n,"darkred") + return true + end + local k = fontfeatures[f].keepligatures + if k == v_auto then + report("font %!font:name!, glyph %a, slot %X -> ligature %s, by %s feature %a",f,d,c,"kept","static","keepligatures") + setcolor(n,"darkgreen") + return true + end + if not k then + report("font %!font:name!, glyph %a, slot %X -> ligature %s, by %s feature %a",f,d,c,"split","static","keepligatures") + resetcolor(n) + return false + end + local k = fontproperties[f].keptligatures + if not k then + report("font %!font:name!, glyph %a, slot %X -> ligature %s, %s goodie specification",f,d,c,"split","no") + resetcolor(n) + return false + end + if k and k[c] then + report("font %!font:name!, glyph %a, slot %X -> ligature %s, %s goodie specification",f,d,c,"kept","by") + setcolor(n,"darkblue") + return true + else + report("font %!font:name!, glyph %a, slot %X -> ligature %s, %s goodie specification",f,d,c,"split","by") + resetcolor(n) + return false + end + else + if a > 0 and contextsetups[a].keepligatures == v_auto then + return true + end + local k = fontfeatures[f].keepligatures + if k == v_auto then + return true + end + if not k then + return false + end + local k = fontproperties[f].keptligatures + if not k then + return false + end + if k and k[c] then + return true + end + end +end + -- can be optimized .. the prev thing .. but hardly worth the effort local function kern_injector(fillup,kern) @@ -109,18 +188,18 @@ local function spec_injector(fillup,width,stretch,shrink) end end --- needs checking ... base mode / node mode +-- needs checking ... 
base mode / node mode -- also use insert_before/after etc -local function do_process(namespace,attribute,head,force) -- todo: glue so that we can fully stretch +local function do_process(head,force) -- todo: glue so that we can fully stretch local start, done, lastfont = head, false, nil local keepligature = kerns.keepligature local keeptogether = kerns.keeptogether local fillup = false while start do -- faster to test for attr first - local attr = force or start[attribute] + local attr = force or start[a_kerns] if attr and attr > 0 then - start[attribute] = unsetvalue + start[a_kerns] = unsetvalue local krn = mapping[attr] if krn == v_max then krn = .25 @@ -133,30 +212,30 @@ local function do_process(namespace,attribute,head,force) -- todo: glue so that if id == glyph_code then lastfont = start.font local c = start.components - if c then - if keepligature and keepligature(start) then - -- keep 'm + if not c then + -- fine + elseif keepligature and keepligature(start) then + -- keep 'm + else + c = do_process(c,attr) + local s = start + local p, n = s.prev, s.next + local tail = find_node_tail(c) + if p then + p.next = c + c.prev = p else - c = do_process(namespace,attribute,c,attr) - local s = start - local p, n = s.prev, s.next - local tail = find_node_tail(c) - if p then - p.next = c - c.prev = p - else - head = c - end - if n then - n.prev = tail - end - tail.next = n - start = c - s.components = nil - -- we now leak nodes ! - -- free_node(s) - done = true + head = c + end + if n then + n.prev = tail end + tail.next = n + start = c + s.components = nil + -- we now leak nodes ! + -- free_node(s) + done = true end local prev = start.prev if not prev then @@ -199,49 +278,63 @@ local function do_process(namespace,attribute,head,force) -- todo: glue so that -- a bit too complicated, we can best not copy and just calculate -- but we could have multiple glyphs involved so ... 
local disc = prev -- disc - local pre, post, replace = disc.pre, disc.post, disc.replace local prv, nxt = disc.prev, disc.next - if pre and prv then -- must pair with start.prev - -- this one happens in most cases - local before = copy_node(prv) - pre.prev = before - before.next = pre - before.prev = nil - pre = do_process(namespace,attribute,before,attr) - pre = pre.next - pre.prev = nil - disc.pre = pre - free_node(before) - end - if post and nxt then -- must pair with start - local after = copy_node(nxt) - local tail = find_node_tail(post) - tail.next = after - after.prev = tail - after.next = nil - post = do_process(namespace,attribute,post,attr) - tail.next = nil - disc.post = post - free_node(after) - end - if replace and prv and nxt then -- must pair with start and start.prev - local before = copy_node(prv) - local after = copy_node(nxt) - local tail = find_node_tail(replace) - replace.prev = before - before.next = replace - before.prev = nil - tail.next = after - after.prev = tail - after.next = nil - replace = do_process(namespace,attribute,before,attr) - replace = replace.next - replace.prev = nil - after.prev.next = nil - disc.replace = replace - free_node(after) - free_node(before) + if disc.subtype == discretionary_code then + -- maybe we should forget about this variant as there is no glue + -- possible + local pre, post, replace = disc.pre, disc.post, disc.replace + if pre and prv then -- must pair with start.prev + -- this one happens in most cases + local before = copy_node(prv) + pre.prev = before + before.next = pre + before.prev = nil + pre = do_process(before,attr) + pre = pre.next + pre.prev = nil + disc.pre = pre + free_node(before) + end + if post and nxt then -- must pair with start + local after = copy_node(nxt) + local tail = find_node_tail(post) + tail.next = after + after.prev = tail + after.next = nil + post = do_process(post,attr) + tail.next = nil + disc.post = post + free_node(after) + end + if replace and prv and nxt then -- must pair with start and start.prev + local before = copy_node(prv) + local after = copy_node(nxt) + local tail = find_node_tail(replace) + replace.prev = before + before.next = replace + before.prev = nil + tail.next = after + after.prev = tail + after.next = nil + replace = do_process(before,attr) + replace = replace.next + replace.prev = nil + after.prev.next = nil + disc.replace = replace + free_node(after) + free_node(before) + elseif prv and prv.id == glyph_code and prv.font == lastfont then + local prevchar, lastchar = prv.char, start.char + local kerns = chardata[lastfont][prevchar].kerns + local kern = kerns and kerns[lastchar] or 0 + krn = kern + quaddata[lastfont]*krn -- here + disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue + else + krn = quaddata[lastfont]*krn -- here + disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue + end else + -- this one happens in most cases: automatic (-), explicit (\-), regular (patterns) if prv and prv.id == glyph_code and prv.font == lastfont then local prevchar, lastchar = prv.char, start.char local kerns = chardata[lastfont][prevchar].kerns @@ -250,7 +343,7 @@ local function do_process(namespace,attribute,head,force) -- todo: glue so that else krn = quaddata[lastfont]*krn -- here end - disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue + insert_node_before(head,start,kern_injector(fillup,krn)) end end end @@ -316,20 +409,14 @@ function kerns.set(factor) else factor = unsetvalue end - texattribute[a_kerns] = factor + 
texsetattribute(a_kerns,factor) return factor end -local function process(namespace,attribute,head) - return do_process(namespace,attribute,head) -- no direct map, because else fourth argument is tail == true +function kerns.handler(head) + return do_process(head) -- no direct map, because else fourth argument is tail == true end -kerns.handler = nodes.installattributehandler { - name = "kern", - namespace = kerns, - processor = process, -} - -- interface commands.setcharacterkerning = kerns.set diff --git a/tex/context/base/typo-krn.mkiv b/tex/context/base/typo-krn.mkiv index a47bd2ac5..3522c02fc 100644 --- a/tex/context/base/typo-krn.mkiv +++ b/tex/context/base/typo-krn.mkiv @@ -40,6 +40,7 @@ \def\typo_kerning_set {\usecharacterkerningstyleandcolor\c!style\c!color % goodie, maybe also strut + \useaddfontfeatureparameter\characterkerningparameter \ctxcommand{setcharacterkerning("\characterkerningparameter\c!factor")}} \unexpanded\def\resetcharacterkerning % fast one @@ -62,29 +63,55 @@ %D then we always would get a command defined which is not beforehand %D a good idea. -\definecharacterkerning [\v!kerncharacters] [\c!factor=.125] - % Here we need to keep the groupedcommand solution as it is % used as modifier. -\unexpanded\def\kerncharacters - {\doifnextoptionalelse\typo_kerning_apply_yes\typo_kerning_apply_nop} +% \definecharacterkerning [\v!kerncharacters] [\c!factor=.125] +% \definecharacterkerning [\v!letterspacing ] [\v!kerncharacters] [\c!features=letterspacing] +% +% \unexpanded\def\kerncharacters +% {\doifnextoptionalelse\typo_kerning_apply_yes\typo_kerning_apply_nop} +% +% \def\typo_kerning_apply_yes[#1]% +% {\groupedcommand{\typo_kerning_apply_yes_indeed{#1}}\donothing} +% +% \def\typo_kerning_apply_nop +% {\groupedcommand\typo_kerning_apply_nop_indeed\donothing} +% +% \def\typo_kerning_apply_yes_indeed#1% +% {\let\currentcharacterkerning\v!kerncharacters +% \setupcurrentcharacterkerning[\c!factor=#1]% +% \typo_kerning_set} +% +% \def\typo_kerning_apply_nop_indeed +% {\let\currentcharacterkerning\v!kerncharacters +% \typo_kerning_set} -\def\typo_kerning_apply_yes[#1]% - {\groupedcommand{\typo_kerning_apply_yes_indeed{#1}}\donothing} +\appendtoks + \setuevalue{\currentcharacterkerning}% + {\doifnextoptionalelse + {\typo_kerning_apply_yes{\currentcharacterkerning}}% + {\typo_kerning_apply_nop{\currentcharacterkerning}}} +\to \everydefinecharacterkerning + +\unexpanded\def\typo_kerning_apply_yes#1[#2]% + {\groupedcommand{\typo_kerning_apply_yes_indeed{#1}{#2}}\donothing} -\def\typo_kerning_apply_nop - {\groupedcommand\typo_kerning_apply_nop_indeed\donothing} +\unexpanded\def\typo_kerning_apply_nop#1% + {\groupedcommand{\typo_kerning_apply_nop_indeed{#1}}\donothing} -\def\typo_kerning_apply_yes_indeed#1% - {\let\currentcharacterkerning\v!kerncharacters - \setupcurrentcharacterkerning[\c!factor=#1]% +\def\typo_kerning_apply_yes_indeed#1#2% + {\edef\currentcharacterkerning{#1}% + \setupcurrentcharacterkerning[\c!factor=#2]% \typo_kerning_set} -\def\typo_kerning_apply_nop_indeed - {\let\currentcharacterkerning\v!kerncharacters +\def\typo_kerning_apply_nop_indeed#1% + {\edef\currentcharacterkerning{#1}% \typo_kerning_set} +\definecharacterkerning [\v!kerncharacters] [\c!factor=.125] +\definecharacterkerning [\v!letterspacing ] [\v!kerncharacters] [\c!features=letterspacing] + %D \macros %D {stretched} %D @@ -133,6 +160,7 @@ \fi \bgroup \usecharacterkerningstyleandcolor\c!style\c!color + \useaddfontfeatureparameter\characterkerningparameter \typo_kerning_set #2% \egroup diff --git 
a/tex/context/base/typo-mar.lua b/tex/context/base/typo-mar.lua index ec827883d..85d5c85a8 100644 --- a/tex/context/base/typo-mar.lua +++ b/tex/context/base/typo-mar.lua @@ -123,8 +123,6 @@ local free_node_list = node.flush_list local insert_node_after = node.insert_after local insert_node_before = node.insert_before -local concat_nodes = nodes.concat - local nodecodes = nodes.nodecodes local listcodes = nodes.listcodes local gluecodes = nodes.gluecodes @@ -155,9 +153,10 @@ local new_stretch = nodepool.stretch local new_usernumber = nodepool.usernumber local new_latelua = nodepool.latelua -local texcount = tex.count -local texdimen = tex.dimen -local texbox = tex.box +local texgetcount = tex.getcount +local texgetdimen = tex.getdimen +local texgetbox = tex.getbox +local texget = tex.get local points = number.points @@ -243,7 +242,7 @@ end function margins.save(t) setmetatable(t,defaults) - local content = texbox[t.number] + local content = texgetbox(t.number) local location = t.location local category = t.category local inline = t.inline @@ -309,23 +308,24 @@ function margins.save(t) t.n = nofsaved -- used later (we will clean up this natural mess later) -- nice is to make a special status table mechanism - local leftmargindistance = texdimen.naturalleftmargindistance - local rightmargindistance = texdimen.naturalrightmargindistance - t.strutdepth = texbox.strutbox.depth - t.strutheight = texbox.strutbox.height - t.leftskip = tex.leftskip.width -- we're not in forgetall - t.rightskip = tex.rightskip.width -- we're not in forgetall + local leftmargindistance = texgetdimen("naturalleftmargindistance") + local rightmargindistance = texgetdimen("naturalrightmargindistance") + local strutbox = texgetbox("strutbox") + t.strutdepth = strutbox.depth + t.strutheight = strutbox.height + t.leftskip = texget("leftskip").width -- we're not in forgetall + t.rightskip = texget("rightskip").width -- we're not in forgetall t.leftmargindistance = leftmargindistance -- todo:layoutstatus table t.rightmargindistance = rightmargindistance - t.leftedgedistance = texdimen.naturalleftedgedistance - + texdimen.leftmarginwidth + t.leftedgedistance = texgetdimen("naturalleftedgedistance") + + texgetdimen("leftmarginwidth") + leftmargindistance - t.rightedgedistance = texdimen.naturalrightedgedistance - + texdimen.rightmarginwidth + t.rightedgedistance = texgetdimen("naturalrightedgedistance") + + texgetdimen("rightmarginwidth") + rightmargindistance - t.lineheight = texdimen.lineheight + t.lineheight = texgetdimen("lineheight") -- - -- t.realpageno = texcount.realpageno + -- t.realpageno = texgetcount("realpageno") if inline then context(new_usernumber(inline_mark,nofsaved)) store[nofsaved] = t -- no insert @@ -447,7 +447,7 @@ local function realign(current,candidate) end end - current.list = hpack_nodes(concat_nodes{anchornode,new_kern(-delta),current.list,new_kern(delta)}) + current.list = hpack_nodes(anchornode .. new_kern(-delta) .. current.list .. new_kern(delta)) current.width = 0 end @@ -490,7 +490,7 @@ local function markovershoot(current) v_anchors = v_anchors + 1 cache[v_anchors] = stacked local anchor = new_latelua(format("typesetters.margins.ha(%s)",v_anchors)) -- todo: alleen als offset > line - current.list = hpack_nodes(concat_nodes{anchor,current.list}) + current.list = hpack_nodes(anchor .. 
current.list) end local function getovershoot(location) @@ -623,7 +623,7 @@ local function inject(parent,head,candidate) elseif head.id == whatsit_code and head.subtype == localpar_code then -- experimental if head.dir == "TRT" then - box.list = hpack_nodes(concat_nodes{new_kern(candidate.hsize),box.list,new_kern(-candidate.hsize)}) + box.list = hpack_nodes(new_kern(candidate.hsize) .. box.list .. new_kern(-candidate.hsize)) end insert_node_after(head,head,box) else diff --git a/tex/context/base/typo-par.lua b/tex/context/base/typo-par.lua deleted file mode 100644 index b25ae4a5b..000000000 --- a/tex/context/base/typo-par.lua +++ /dev/null @@ -1,181 +0,0 @@ -if not modules then modules = { } end modules ['typo-par'] = { - version = 1.001, - comment = "companion to typo-par.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - --- A playground for experiments. - -local utfbyte = utf.byte -local utfchar = utf.char - -local trace_paragraphs = false trackers.register("typesetters.paragraphs", function(v) trace_paragraphs = v end) -local trace_dropper = false trackers.register("typesetters.paragraphs.dropper",function(v) trace_dropper = v end) - -local report_paragraphs = logs.reporter("nodes","paragraphs") -local report_dropper = logs.reporter("nodes","dropped") - -typesetters.paragraphs = typesetters.paragraphs or { } -local paragraphs = typesetters.paragraphs - -local nodecodes = nodes.nodecodes -local whatsitcodes = nodes.whatsitcodes -local tasks = nodes.tasks - -local variables = interfaces.variables - -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue - -local glyph_code = nodecodes.glyph -local hlist_code = nodecodes.hlist -local kern_node = nodecodes.kern -local whatsit_code = nodecodes.whatsit -local localpar_code = whatsitcodes.localpar - -local a_paragraph = attributes.private("paragraphspecial") -local a_color = attributes.private('color') -local a_transparency = attributes.private('transparency') -local a_colorspace = attributes.private('colormodel') - -local dropper = { - enabled = false, - -- font = 0, - -- n = 0, - -- distance = 0, - -- hoffset = 0, - -- voffset = 0, -} - -local droppers = { } - -typesetters.paragraphs.droppers = droppers - -function droppers.set(specification) - dropper = specification or { } -end - -function droppers.freeze() - if dropper.enabled then - dropper.font = font.current() - end -end - --- dropped caps experiment (will be done properly when luatex --- stores the state in the local par node) .. btw, search still --- works with dropped caps, as does an export - --- we need a 'par' attribute and in fact for dropped caps we don't need --- need an attribute ... dropit will become s state counter (or end up --- in the localpar user data - --- for the moment, each paragraph gets a number as id (attribute) ..problem --- with nesting .. or anyhow, needed for tagging anyway - --- todo: prevent linebreak .. but normally a dropper ends up atthe top of --- a page so this has a low priority - -local function process(namespace,attribute,head) - local done = false - if head.id == whatsit_code and head.subtype == localpar_code then - -- begin of par - local a = head[attribute] - if a and a > 0 then - if dropper.enabled then - dropper.enabled = false -- dangerous for e.g. nested || in tufte - local first = head.next - if first and first.id == hlist_code then - -- parbox .. 
needs to be set at 0 - first = first.next - end - if first and first.id == glyph_code then --- if texattribute[a_paragraph] >= 0 then --- texattribute[a_paragraph] = unsetvalue --- end - local char = first.char - local prev = first.prev - local next = first.next - -- if prev.id == hlist_code then - -- -- set the width to 0 - -- end - if next and next.id == kern_node then - next.kern = 0 - end - first.font = dropper.font or first.font - -- can be a helper - local ma = dropper.ma or 0 - local ca = dropper.ca - local ta = dropper.ta - if ca and ca > 0 then - first[a_colorspace] = ma == 0 and 1 or ma - first[a_color] = ca - end - if ta and ta > 0 then - first[a_transparency] = ta - end - -- - local width = first.width - local height = first.height - local depth = first.depth - local distance = dropper.distance or 0 - local voffset = dropper.voffset or 0 - local hoffset = dropper.hoffset or 0 - first.xoffset = - width - hoffset - distance - first.yoffset = - height - voffset - if true then - -- needed till we can store parindent with localpar - first.prev = nil - first.next = nil - local h = node.hpack(first) - h.width = 0 - h.height = 0 - h.depth = 0 - prev.next = h - next.prev = h - h.next = next - h.prev = prev - end - if dropper.location == variables.margin then - -- okay - else - local lines = tonumber(dropper.n) or 0 - if lines == 0 then -- safeguard, not too precise - lines = math.ceil((height+voffset) / tex.baselineskip.width) - end - tex.hangafter = - lines - tex.hangindent = width + distance - end - done = true - end - end - end - end - return head, done -end - -local enabled = false - -function paragraphs.set(n) - if n == variables.reset or not tonumber(n) or n == 0 then - texattribute[a_paragraph] = unsetvalue - else - if not enabled then - tasks.enableaction("processors","typesetters.paragraphs.handler") - if trace_paragraphs then - report_paragraphs("enabling paragraphs") - end - enabled = true - end - texattribute[a_paragraph] = n - end -end - -paragraphs.attribute = a_paragraph - -paragraphs.handler = nodes.installattributehandler { - name = "paragraphs", - namespace = paragraphs, - processor = process, -} diff --git a/tex/context/base/typo-par.mkiv b/tex/context/base/typo-par.mkiv deleted file mode 100644 index 7153d1f8a..000000000 --- a/tex/context/base/typo-par.mkiv +++ /dev/null @@ -1,107 +0,0 @@ -%D \module -%D [ file=typo-par, -%D version=2011.10.27, -%D title=\CONTEXT\ Typesetting Macros, -%D subtitle=Paragraphs, -%D author=Hans Hagen, -%D date=\currentdate, -%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. - -\writestatus{loading}{ConTeXt Typesetting Macros / Paragraphs} - -%D This will change ... par specific attribute set and such. - -\unprotect - -\registerctxluafile{typo-par}{1.001} - -\definesystemattribute[paragraphspecial][public] - -\unexpanded\def\setparagraphspecial[#1]% - {\ctxlua{typesetters.paragraphs.set("#1")}} - -\appendtoks - \attribute\paragraphspecialattribute\attributeunsetvalue -\to \everyforgetall - -%D For the moment here: dropped caps redone. In addition to the -%D parameters shown in the example you can use the \type {style} and -%D more low level \type {font} keys to set up the font where -%D the first one wins. If you don't like the behaviour you can always -%D roll out your own command. 
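-- [editor's aside, illustration only, not part of the patch] The dropped-caps
-- handler removed in this commit derived the paragraph shape from the size of
-- the initial: hang ceil((height+voffset)/baselineskip) lines and indent by
-- width+distance. A standalone recap of that calculation, with hypothetical
-- dimensions in scaled points:

local ceil = math.ceil

local function droppershape(width,height,voffset,distance,baselineskip,requested)
    local lines = tonumber(requested) or 0
    if lines == 0 then                            -- safeguard, as in the removed code
        lines = ceil((height + voffset) / baselineskip)
    end
    return -lines, width + distance               -- values for \hangafter and \hangindent
end

print(droppershape(1966080,1703936,0,65536,786432,0))   -- -3   2031616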
-%D -%D \starttyping -%D \placeinitial \input ward \par \input ward \par \placeinitial \input tufte -%D -%D \setupinitial -%D [location=text, -%D n=2, -%D color=darkred, -%D distance=-1em, -%D hoffset=1em, -%D voffset=-3ex, -%D before=\blank] -%D -%D \placeinitial \input ward \par \input ward \placeinitial \input tufte -%D \stoptyping - -\installcorenamespace{initial} - -\installsimplecommandhandler \??initial {initial} \??initial - -\appendtoks - \checkinitialparent % this might become automatic -\to \everysetupinitial - -\def\v_typo_initials_auto_scale % magic - {\the\dimexpr\lineheight*\initialparameter\c!n-\initialparameter\c!voffset\relax} - -\setupinitial - [\c!location=\v!text, - \c!n=3, - % \s!font=Bold sa 4, - \s!font=Bold at \v_typo_initials_auto_scale, - \c!distance=.125em, - \c!hoffset=\zeropoint, - \c!voffset=-1ex, - \c!style=, - \c!color=, - \c!before=\blank] - -\unexpanded\def\placeinitial - {\dosingleempty\typo_initials_place} - -\def\typo_initials_place[#1]% todo: optional text - {\par - \begingroup - \edef\currentinitial{#1}% - \begingroup - \useinitialcolorparameter\c!color - \ctxlua{typesetters.paragraphs.droppers.set { - location = "\initialparameter\c!location", - enabled = true, - n = \number\initialparameter\c!n, - distance = \number\dimexpr\initialparameter\c!distance, - hoffset = \number\dimexpr\initialparameter\c!hoffset, - voffset = \number\dimexpr\initialparameter\c!voffset, - ma = \the\attribute\colormodelattribute , - ca = \the\attribute\colorattribute , - ta = \the\attribute\transparencyattribute - }}% - \stopluacode - \endgroup - \begingroup - \doifelsenothing{\initialparameter\c!style} - {\definedfont[\initialparameter\s!font]} - {\useinitialstyleparameter\c!style}% - \ctxlua{typesetters.paragraphs.droppers.freeze()}% - \endgroup - \endgroup - \setparagraphspecial[1]% - \namedinitialparameter{#1}\c!before} - -\protect \endinput diff --git a/tex/context/base/typo-prc.lua b/tex/context/base/typo-prc.lua index 5b74abd0b..a6c27ede6 100644 --- a/tex/context/base/typo-prc.lua +++ b/tex/context/base/typo-prc.lua @@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['typo-prc'] = { -- moved from strc-ini.lua - +local context, commands = context, commands local formatters = string.formatters local lpegmatch, patterns, P, C, Cs = lpeg.match, lpeg.patterns, lpeg.P, lpeg.C, lpeg.Cs diff --git a/tex/context/base/typo-rep.lua b/tex/context/base/typo-rep.lua index 8451ce52b..01868f490 100644 --- a/tex/context/base/typo-rep.lua +++ b/tex/context/base/typo-rep.lua @@ -17,23 +17,23 @@ local report_stripping = logs.reporter("fonts","stripping") local nodes, node = nodes, node -local delete_node = nodes.delete -local replace_node = nodes.replace -local copy_node = node.copy +local delete_node = nodes.delete +local replace_node = nodes.replace +local copy_node = node.copy -local chardata = characters.data -local collected = false -local a_stripping = attributes.private("stripping") -local fontdata = fonts.hashes.identifiers -local tasks = nodes.tasks +local chardata = characters.data +local collected = false +local a_stripping = attributes.private("stripping") +local fontdata = fonts.hashes.identifiers +local tasks = nodes.tasks -local texattribute = tex.attribute -local unsetvalue = attributes.unsetvalue +local texsetattribute = tex.setattribute +local unsetvalue = attributes.unsetvalue -local v_reset = interfaces.variables.reset +local v_reset = interfaces.variables.reset -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph +local nodecodes = 
nodes.nodecodes +local glyph_code = nodecodes.glyph -- todo: other namespace -> typesetters @@ -115,7 +115,7 @@ function stripping.set(n) -- number or 'reset' n = unsetvalue end end - texattribute[a_stripping] = n + texsetattribute(a_stripping,n) end -- why not in task-ini? diff --git a/tex/context/base/typo-spa.lua b/tex/context/base/typo-spa.lua index 5eba22889..c3f50fe98 100644 --- a/tex/context/base/typo-spa.lua +++ b/tex/context/base/typo-spa.lua @@ -24,7 +24,7 @@ local fonthashes = fonts.hashes local fontdata = fonthashes.identifiers local quaddata = fonthashes.quads -local texattribute = tex.attribute +local texsetattribute = tex.setattribute local unsetvalue = attributes.unsetvalue local v_reset = interfaces.variables.reset @@ -66,7 +66,7 @@ end -- todo cache lastattr -local function process(namespace,attribute,head) +function spacings.handler(head) local done = false local start = head -- head is always begin of par (whatsit), so we have at least two prev nodes @@ -74,13 +74,13 @@ local function process(namespace,attribute,head) while start do local id = start.id if id == glyph_code then - local attr = start[attribute] + local attr = start[a_spacings] if attr and attr > 0 then local data = mapping[attr] if data then local char = start.char local map = data.characters[char] - start[attribute] = unsetvalue -- needed? + start[a_spacings] = unsetvalue -- needed? if map then local left = map.left local right = map.right @@ -209,19 +209,13 @@ function spacings.set(name) n = data.number or unsetvalue end end - texattribute[a_spacings] = n + texsetattribute(a_spacings,n) end function spacings.reset() - texattribute[a_spacings] = unsetvalue + texsetattribute(a_spacings,unsetvalue) end -spacings.handler = nodes.installattributehandler { - name = "spacing", - namespace = spacings, - processor = process, -} - -- interface commands.definecharacterspacing = spacings.define diff --git a/tex/context/base/typo-tal.lua b/tex/context/base/typo-tal.lua new file mode 100644 index 000000000..3df8dd00a --- /dev/null +++ b/tex/context/base/typo-tal.lua @@ -0,0 +1,265 @@ +if not modules then modules = { } end modules ['typo-tal'] = { + version = 1.001, + comment = "companion to typo-tal.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- I'll make it a bit more efficient and provide named instances too. 
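-- [editor's aside, not part of the patch] The character alignment handler in
-- this new file receives the cell position packed into a single attribute
-- value, column*100 + row, and decodes it with div/mod (the file pulls div
-- from math.div, a ConTeXt helper; math.floor is used below so the snippet
-- runs in plain Lua):

local function encode(column,row)
    return column * 100 + row
end

local function decode(a)
    return math.floor(a/100), a % 100             -- column, row
end

print(decode(encode(1,23)))                       -- 1   23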
+ +local next, type = next, type +local div = math.div +local utfbyte = utf.byte + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local glue_code = nodecodes.glue + +local fontcharacters = fonts.hashes.characters +local categories = characters.categories -- nd + +local insert_node_before = nodes.insert_before +local insert_node_after = nodes.insert_after +local traverse_list_by_id = nodes.traverse_id +local dimensions_of_list = nodes.dimensions +local first_glyph = nodes.first_glyph + +local nodepool = nodes.pool +local new_kern = nodepool.kern +local new_gluespec = nodepool.gluespec + +local tracers = nodes.tracers +local setcolor = tracers.colors.set +local tracedrule = tracers.pool.nodes.rule + +local characteralign = { } +typesetters.characteralign = characteralign + +local trace_split = false trackers.register("typesetters.characteralign", function(v) trace_split = true end) +local report = logs.reporter("aligning") + +local a_characteralign = attributes.private("characteralign") +local a_character = attributes.private("characters") + +local enabled = false + +local datasets = false + +local comma = 0x002C +local period = 0x002E +local punctuationspace = 0x2008 + +local validseparators = { + [comma] = true, + [period] = true, + [punctuationspace] = true, +} + +local validsigns = { + [0x002B] = 0x002B, -- plus + [0x002D] = 0x2212, -- hyphen + [0x00B1] = 0x00B1, -- plusminus + [0x2212] = 0x2212, -- minus + [0x2213] = 0x2213, -- minusplus +} + +local function traced_kern(w) + return tracedrule(w,nil,nil,"darkgray") +end + +function characteralign.handler(head,where) + if not datasets then + return head, false + end + local first = first_glyph(head) -- we could do that once + if not first then + return head, false + end + local a = first[a_characteralign] + if not a or a == 0 then + return head, false + end + local column = div(a,100) + local row = a % 100 + local dataset = datasets and datasets[column] or setcharacteralign(column) + local separator = dataset.separator + local list = dataset.list + local b_start = nil + local b_stop = nil + local a_start = nil + local a_stop = nil + local c = nil + local current = first + local sign = nil + -- we can think of constraints + while current do + local id = current.id + if id == glyph_code then + local char = current.char + if char == separator then + c = current + if trace_split then + setcolor(current,"darkred") + end + elseif categories[char] == "nd" or validseparators[char] then + if c then + if not a_start then + a_start = current + end + a_stop = current + if trace_split then + setcolor(current,"darkgreen") + end + else + if not b_start then + if sign then + b_start = sign + local new = validsigns[sign.char] + if char == new or not fontcharacters[sign.font][new] then + if trace_split then + setcolor(sign,"darkyellow") + end + else + sign.char = new + if trace_split then + setcolor(sign,"darkmagenta") + end + end + sign = nil + else + b_start = current + end + end + b_stop = current + if trace_split then + setcolor(current,"darkblue") + end + end + elseif not b_start then + sign = validsigns[char] and current + end + elseif (b_start or a_start) and id == glue_code then + -- somewhat inefficient + local next = current.next + local prev = current.prev + if next and prev and next.id == glyph_code and prev.id == glyph_code then -- too much checking + local width = fontcharacters[b_start.font][period].width + -- local spec = current.spec + -- nodes.free(spec) -- hm, we leak but not that many specs + current.spec = 
new_gluespec(width) + current[a_character] = punctuationspace + end + end + current = current.next + end + local entry = list[row] + if entry then + if not dataset.collected then + -- print("[maxbefore] [maxafter]") + local maxbefore = 0 + local maxafter = 0 + for k, v in next, list do + local before = v.before + local after = v.after + if before and before > maxbefore then + maxbefore = before + end + if after and after > maxafter then + maxafter = after + end + end + dataset.maxafter = maxafter + dataset.maxbefore = maxbefore + dataset.collected = true + end + local maxafter = dataset.maxafter + local maxbefore = dataset.maxbefore + local before = entry.before or 0 + local after = entry.after or 0 + local new_kern = trace_split and traced_kern or new_kern + if b_start then + if before < maxbefore then + head = insert_node_before(head,b_start,new_kern(maxbefore-before)) + end + if not c then + -- print("[before]") + local width = fontcharacters[b_stop.font][separator].width + insert_node_after(head,b_stop,new_kern(maxafter+width)) + elseif a_start then + -- print("[before] [separator] [after]") + if after < maxafter then + insert_node_after(head,a_stop,new_kern(maxafter-after)) + end + else + -- print("[before] [separator]") + if maxafter > 0 then + insert_node_after(head,c,new_kern(maxafter)) + end + end + elseif a_start then + if c then + -- print("[separator] [after]") + if maxbefore > 0 then + head = insert_node_before(head,c,new_kern(maxbefore)) + end + else + -- print("[after]") + local width = fontcharacters[b_stop.font][separator].width + head = insert_node_before(head,a_start,new_kern(maxbefore+width)) + end + if after < maxafter then + insert_node_after(head,a_stop,new_kern(maxafter-after)) + end + elseif c then + -- print("[separator]") + if maxbefore > 0 then + head = insert_node_before(head,c,new_kern(maxbefore)) + end + if maxafter > 0 then + insert_node_after(head,c,new_kern(maxafter)) + end + end + else + entry = { + before = b_start and dimensions_of_list(b_start,b_stop.next) or 0, + after = a_start and dimensions_of_list(a_start,a_stop.next) or 0, + } + list[row] = entry + end + return head, true +end + +function setcharacteralign(column,separator) + if not enabled then + nodes.tasks.enableaction("processors","typesetters.characteralign.handler") + enabled = true + end + if not datasets then + datasets = { } + end + local dataset = datasets[column] -- we can use a metatable + if not dataset then + dataset = { + separator = separator and utfbyte(separator) or comma, + list = { }, + maxafter = 0, + maxbefore = 0, + collected = false, + } + datasets[column] = dataset + used = true + end + return dataset +end + +local function resetcharacteralign() + datasets = false +end + +characteralign.setcharacteralign = setcharacteralign +characteralign.resetcharacteralign = resetcharacteralign + +commands.setcharacteralign = setcharacteralign +commands.resetcharacteralign = resetcharacteralign + diff --git a/tex/context/base/typo-tal.mkiv b/tex/context/base/typo-tal.mkiv new file mode 100644 index 000000000..a34cb8bb6 --- /dev/null +++ b/tex/context/base/typo-tal.mkiv @@ -0,0 +1,112 @@ +%D \module +%D [ file=typo-tal, % spac-cha (2012.06.08) supp-ali (2000.04.17) +%D version=2013.10.04, +%D title=\CONTEXT\ Typesetting Macros, +%D subtitle=Character Alignment, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. 
See mreadme.pdf for +%C details. + +\writestatus{loading}{ConTeXt Typesetting Macros / Character Alignment} + +%D This module replaces the \MKII\ character alignment code which hooked into +%D table mechanisms but used parsing. In fact, this might be one of these cases +%D where a \TEX\ based solution is faster, but a \LUA\ one a bit more robust. +%D Anyway, as I had to fix something (to fit the newer table mechanisms) I +%D decided to go the mixed route, a rather easy going effort in the aftermath of +%D the 2013 \CONTEXT\ meeting. + +\unprotect + +\registerctxluafile{typo-tal}{1.001} + +\definesystemattribute[characteralign][public] + +%D This mechanism is mostly meant for tables: +%D +%D \startbuffer +%D \starttabulate[|l|g{,}|r|] +%D \NC test \NC 1.234.456,99 \NC \NC test \NR +%D \NC test \NC 234.456,9 \NC \NC test \NR +%D \NC test \NC 234.456 \NC \NC test \NR +%D \NC test \NC 456 \NC \NC test \NR +%D \NC test \NC \bf whatever \NC \NC test \NR +%D \stoptabulate +%D \stopbuffer +%D +%D \typebuffer \blank \getbuffer \blank + +%D \startbuffer +%D \bTABLE +%D \bTR \bTD[aligncharacter=yes] € 1,1 \eTD \eTR +%D \bTR \bTD[aligncharacter=yes] € 11,11 \eTD \eTR +%D \bTR \bTD[aligncharacter=yes] € 12\punctuationspace111,11 \eTD \eTR +%D \bTR \bTD[aligncharacter=yes] € 12 111,11 \eTD \eTR +%D \bTR \bTD[aligncharacter=yes] € 1.234.451,22222 \eTD \eTR +%D \bTR \bTD[aligncharacter=yes] € 234.451,2 \eTD \eTR +%D \bTR \bTD[aligncharacter=yes] € 234.451 \eTD \eTR +%D \bTR \bTD[aligncharacter=yes] € 451 \eTD \eTR +%D \bTR \bTD \bf some text \eTD \eTR +%D \eTABLE +%D \stopbuffer +%D +%D \typebuffer \blank \getbuffer \blank + +\unexpanded\def\signalcharacteralign#1#2{\attribute\characteralignattribute=\numexpr#1*\plushundred+#2\relax} +\unexpanded\def\setcharacteralign #1#2{\ctxcommand{setcharacteralign(\number#1,"#2")}} +\unexpanded\def\resetcharacteralign {\ctxcommand{resetcharacteralign()}} + +%D Mostly downward compatible: +%D +%D \startbuffer +%D \startcharacteralign +%D \checkcharacteralign{123.456,78} +%D \checkcharacteralign{456} +%D \checkcharacteralign{23.456} +%D \checkcharacteralign{78,9} +%D \stopcharacteralign +%D \stopbuffer +%D +%D \typebuffer \blank \getbuffer \blank + +\def\alignmentcharacter{,} + +\unexpanded\def\typo_charalign_pass_one + {\advance\scratchcounter\plusone + \setbox\scratchbox\typo_charalign_pass} + +\unexpanded\def\typo_charalign_pass_two + {\advance\scratchcounter\plusone + \typo_charalign_pass} + +\def\typo_charalign_pass + {\hbox\bgroup\signalcharacteralign\plusone\scratchcounter\let\next} + +\unexpanded\def\startcharacteralign#1\stopcharacteralign + {\bgroup + \setcharacteralign\plusone\alignmentcharacter + \begingroup + \scratchcounter\zerocount + \let\checkcharacteralign\typo_charalign_pass_one + \settrialtypesetting + #1\relax + \endgroup + \begingroup + \scratchcounter\zerocount + \let\checkcharacteralign\typo_charalign_pass_two + #1\relax + \endgroup + \resetcharacteralign + \egroup} + +\let\stopcharacteralign \relax +\let\checkcharacteralign\gobbleoneargument + +\def\setfirstpasscharacteralign {\let\checkcharacteralign\gobbleoneargument} +\def\setsecondpasscharacteralign{\let\checkcharacteralign\firstofoneargument} + +\protect \endinput diff --git a/tex/context/base/typo-txt.mkvi b/tex/context/base/typo-txt.mkvi index f1c80c1bd..57f4e5f42 100644 --- a/tex/context/base/typo-txt.mkvi +++ b/tex/context/base/typo-txt.mkvi @@ -36,7 +36,7 @@ %D Consider for instance: %D %D \startbuffer[a] -%D \NormalizeFontHeight \TempFont {X} {2\baselineskip} {Serif} +%D 
\normalizefontheight \TempFont {X} {2\baselineskip} {Serif} %D \stopbuffer %D %D \startbuffer[b] @@ -54,7 +54,7 @@ %D The horizontal counterpart is: %D %D \startbuffer[a] -%D \NormalizeFontWidth \TempFont {This Line Fits} {\hsize} {Serif} +%D \normalizefontwidth \TempFont {This Line Fits} {\hsize} {Serif} %D \stopbuffer %D %D \startbuffer[b] diff --git a/tex/context/base/util-dim.lua b/tex/context/base/util-dim.lua index 47b2706b7..69061495f 100644 --- a/tex/context/base/util-dim.lua +++ b/tex/context/base/util-dim.lua @@ -22,6 +22,8 @@ local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex local formatters = string.formatters +local texget = tex and tex.get or function() return 65536*10*100 end + --this might become another namespace number = number or { } @@ -137,7 +139,7 @@ capture takes place.

--ldx]]-- local amount = (S("+-")^0 * R("09")^0 * P(".")^0 * R("09")^0) + Cc("0") -local unit = R("az")^1 +local unit = R("az")^1 + P("%") local dimenpair = amount/tonumber * (unit^1/dimenfactors + Cc(1)) -- tonumber is new @@ -376,10 +378,10 @@ function dimen(a) a = k else local value, unit = lpegmatch(dimenpair,a) - if type(unit) == "function" then - k = value/unit() + if value and unit then + k = value/unit -- to be considered: round else - k = value/unit + k = 0 end known[a] = k a = k @@ -412,16 +414,16 @@ function string.todimen(str) -- maybe use tex.sp when available end end ---~ local known = { } - ---~ function string.todimen(str) -- maybe use tex.sp ---~ local k = known[str] ---~ if not k then ---~ k = tex.sp(str) ---~ known[str] = k ---~ end ---~ return k ---~ end +-- local known = { } +-- +-- function string.todimen(str) -- maybe use tex.sp +-- local k = known[str] +-- if not k then +-- k = tex.sp(str) +-- known[str] = k +-- end +-- return k +-- end stringtodimen = string.todimen -- local variable defined earlier @@ -439,7 +441,7 @@ probably use a hash instead of a one-element table.

--ldx]]-- function number.percent(n,d) -- will be cleaned up once luatex 0.30 is out - d = d or tex.hsize + d = d or texget("hsize") if type(d) == "string" then d = stringtodimen(d) end diff --git a/tex/context/base/util-env.lua b/tex/context/base/util-env.lua index f4f3ef69f..1184c1d5b 100644 --- a/tex/context/base/util-env.lua +++ b/tex/context/base/util-env.lua @@ -57,6 +57,7 @@ environment.basicengines = basicengines -- instead we could set ranges if not arg then + environment.used_as_library = true -- used as library elseif luaengines[file.removesuffix(arg[-1])] then -- arg[-1] = arg[0] diff --git a/tex/context/base/util-jsn.lua b/tex/context/base/util-jsn.lua index 29587cd38..bbe25d89d 100644 --- a/tex/context/base/util-jsn.lua +++ b/tex/context/base/util-jsn.lua @@ -42,8 +42,20 @@ local dquote = P('"') local whitespace = lpeg.patterns.whitespace local optionalws = whitespace^0 -local escape = C(P("\\u") / "0x" * S("09","AF","af")) / function(s) return utfchar(tonumber(s)) end -local jstring = dquote * Cs((escape + (1-dquote))^0) * dquote +local escapes = { + -- ["\\"] = "\\", -- lua will escape these + -- ["/"] = "/", -- no need to escape this one + ["b"] = "\010", + ["f"] = "\014", + ["n"] = "\n", + ["r"] = "\r", + ["t"] = "\t", +} + +local escape_un = C(P("\\u") / "0x" * S("09","AF","af")) / function(s) return utfchar(tonumber(s)) end +local escape_bs = P([[\]]) / "" * (P(1) / escapes) -- if not found then P(1) is returned i.e. the to be escaped char + +local jstring = dquote * Cs((escape_un + escape_bs + (1-dquote))^0) * dquote local jtrue = P("true") * Cc(true) local jfalse = P("false") * Cc(false) local jnull = P("null") * Cc(nil) diff --git a/tex/context/base/util-lua.lua b/tex/context/base/util-lua.lua index f3be9dcd2..e1dcdc94d 100644 --- a/tex/context/base/util-lua.lua +++ b/tex/context/base/util-lua.lua @@ -41,291 +41,100 @@ luautilities.suffixes = { -- environment.loadpreprocessedfile can be set to a preprocessor -if jit or status.luatex_version >= 74 then - - local function register(name) - if tracestripping then - report_lua("stripped bytecode from %a",name or "unknown") - end - strippedchunks[#strippedchunks+1] = name - luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1 - end - - local function stupidcompile(luafile,lucfile,strip) - local code = io.loaddata(luafile) - if code and code ~= "" then - code = load(code) - if code then - code = dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode) - if code and code ~= "" then - register(name) - io.savedata(lucfile,code) - return true, 0 - end - else - report_lua("fatal error %a in file %a",1,luafile) - end - else - report_lua("fatal error %a in file %a",2,luafile) - end - return false, 0 +local function register(name) + if tracestripping then + report_lua("stripped bytecode from %a",name or "unknown") end + strippedchunks[#strippedchunks+1] = name + luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1 +end - -- quite subtle ... doing this wrong incidentally can give more bytes - - function luautilities.loadedluacode(fullname,forcestrip,name) - -- quite subtle ... 
doing this wrong incidentally can give more bytes - name = name or fullname - local code = environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname) +local function stupidcompile(luafile,lucfile,strip) + local code = io.loaddata(luafile) + if code and code ~= "" then + code = load(code) if code then - code() - end - if forcestrip and luautilities.stripcode then - if type(forcestrip) == "function" then - forcestrip = forcestrip(fullname) - end - if forcestrip or luautilities.alwaysstripcode then + code = dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode) + if code and code ~= "" then register(name) - return load(dump(code,true)), 0 - else - return code, 0 + io.savedata(lucfile,code) + return true, 0 end - elseif luautilities.alwaysstripcode then - register(name) - return load(dump(code,true)), 0 else - return code, 0 - end - end - - function luautilities.strippedloadstring(code,forcestrip,name) -- not executed - if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then - code = load(code) - if not code then - report_lua("fatal error %a in file %a",3,name) - end - register(name) - code = dump(code,true) - end - return load(code), 0 - end - - function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true - report_lua("compiling %a into %a",luafile,lucfile) - os.remove(lucfile) - local done = stupidcompile(luafile,lucfile,strip ~= false) - if done then - report_lua("dumping %a into %a stripped",luafile,lucfile) - if cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then - report_lua("removing %a",luafile) - os.remove(luafile) - end - end - return done - end - - function luautilities.loadstripped(...) - local l = load(...) - if l then - return load(dump(l,true)) + report_lua("fatal error %a in file %a",1,luafile) end + else + report_lua("fatal error %a in file %a",2,luafile) end + return false, 0 +end -else - - -- The next function was posted by Peter Cawley on the lua list and strips line - -- number information etc. from the bytecode data blob. We only apply this trick - -- when we store data tables. Stripping makes the compressed format file about - -- 1MB smaller (and uncompressed we save at least 6MB). - -- - -- You can consider this feature an experiment, so it might disappear. There is - -- no noticeable gain in runtime although the memory footprint should be somewhat - -- smaller (and the file system has a bit less to deal with). - -- - -- Begin of borrowed code ... works for Lua 5.1 which LuaTeX currently uses ... +-- quite subtle ... doing this wrong incidentally can give more bytes - local function register(name,before,after) - local delta = before - after - if tracestripping then - report_lua("bytecodes stripped from %a, # before %s, # after %s, delta %s",name,before,after,delta) - end - strippedchunks[#strippedchunks+1] = name - luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1 - luautilities.nofstrippedbytes = luautilities.nofstrippedbytes + delta - return delta +function luautilities.loadedluacode(fullname,forcestrip,name) + -- quite subtle ... 
doing this wrong incidentally can give more bytes + name = name or fullname + local code, message + if environment.loadpreprocessedfile then + code, message = environment.loadpreprocessedfile(fullname) + else + code, message = loadfile(fullname) end - - local strip_code_pc - - if _MAJORVERSION == 5 and _MINORVERSION == 1 then - - strip_code_pc = function(dump,name) - local before = #dump - local version, format, endian, int, size, ins, num = byte(dump,5,11) - local subint - if endian == 1 then - subint = function(dump, i, l) - local val = 0 - for n = l, 1, -1 do - val = val * 256 + byte(dump,i + n - 1) - end - return val, i + l - end - else - subint = function(dump, i, l) - local val = 0 - for n = 1, l, 1 do - val = val * 256 + byte(dump,i + n - 1) - end - return val, i + l - end - end - local strip_function - strip_function = function(dump) - local count, offset = subint(dump, 1, size) - local stripped, dirty = rep("\0", size), offset + count - offset = offset + count + int * 2 + 4 - offset = offset + int + subint(dump, offset, int) * ins - count, offset = subint(dump, offset, int) - for n = 1, count do - local t - t, offset = subint(dump, offset, 1) - if t == 1 then - offset = offset + 1 - elseif t == 4 then - offset = offset + size + subint(dump, offset, size) - elseif t == 3 then - offset = offset + num - end - end - count, offset = subint(dump, offset, int) - stripped = stripped .. sub(dump,dirty, offset - 1) - for n = 1, count do - local proto, off = strip_function(sub(dump,offset, -1)) - stripped, offset = stripped .. proto, offset + off - 1 - end - offset = offset + subint(dump, offset, int) * int + int - count, offset = subint(dump, offset, int) - for n = 1, count do - offset = offset + subint(dump, offset, size) + size + int * 2 - end - count, offset = subint(dump, offset, int) - for n = 1, count do - offset = offset + subint(dump, offset, size) + size - end - stripped = stripped .. rep("\0", int * 3) - return stripped, offset - end - dump = sub(dump,1,12) .. strip_function(sub(dump,13,-1)) - local after = #dump - local delta = register(name,before,after) - return dump, delta - end - + if code then + code() else - - strip_code_pc = function(dump,name) - return dump, 0 - end - + report_lua("loading of file %a failed:\n\t%s",fullname,message or "no message") end - - -- ... end of borrowed code. - - -- quite subtle ... doing this wrong incidentally can give more bytes - - function luautilities.loadedluacode(fullname,forcestrip,name) - -- quite subtle ... 
doing this wrong incidentally can give more bytes - local code = environment.loadpreprocessedfile and environment.preprocessedloadfile(fullname) or loadfile(fullname) - if code then - code() + if forcestrip and luautilities.stripcode then + if type(forcestrip) == "function" then + forcestrip = forcestrip(fullname) end - if forcestrip and luautilities.stripcode then - if type(forcestrip) == "function" then - forcestrip = forcestrip(fullname) - end - if forcestrip then - local code, n = strip_code_pc(dump(code),name) - return load(code), n - elseif luautilities.alwaysstripcode then - return load(strip_code_pc(dump(code),name)) - else - return code, 0 - end - elseif luautilities.alwaysstripcode then - return load(strip_code_pc(dump(code),name)) + if forcestrip or luautilities.alwaysstripcode then + register(name) + return load(dump(code,true)), 0 else return code, 0 end + elseif luautilities.alwaysstripcode then + register(name) + return load(dump(code,true)), 0 + else + return code, 0 end +end - function luautilities.strippedloadstring(code,forcestrip,name) -- not executed - local n = 0 - if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then - code = load(code) - if not code then - report_lua("fatal error in file %a",name) - end - code, n = strip_code_pc(dump(code),name) - end - return load(code), n +function luautilities.strippedloadstring(code,forcestrip,name) -- not executed + local code, message = load(code) + if not code then + report_lua("loading of file %a failed:\n\t%s",name,message or "no message") end - - local function stupidcompile(luafile,lucfile,strip) - local code = io.loaddata(luafile) - local n = 0 - if code and code ~= "" then - code = load(code) - if not code then - report_lua("fatal error in file %a",luafile) - end - code = dump(code) - if strip then - code, n = strip_code_pc(code,luautilities.stripcode or luautilities.alwaysstripcode,luafile) -- last one is reported - end - if code and code ~= "" then - io.savedata(lucfile,code) - end - end - return n + if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then + register(name) + return load(dump(code,true)), 0 -- not yet executes + else + return code, 0 end +end - local luac_normal = "texluac -o %q %q" - local luac_strip = "texluac -s -o %q %q" - - function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true - report_lua("compiling %a into %a",luafile,lucfile) - os.remove(lucfile) - local done = false - if strip ~= false then - strip = true - end - if forcestupidcompile then - fallback = true - elseif strip then - done = os.spawn(format(luac_strip, lucfile,luafile)) == 0 - else - done = os.spawn(format(luac_normal,lucfile,luafile)) == 0 - end - if not done and fallback then - local n = stupidcompile(luafile,lucfile,strip) - if n > 0 then - report_lua("%a dumped into %a (%i bytes stripped)",luafile,lucfile,n) - else - report_lua("%a dumped into %a (unstripped)",luafile,lucfile) - end - cleanup = false -- better see how bad it is - done = true -- hm - end - if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then +function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true + report_lua("compiling %a into %a",luafile,lucfile) + os.remove(lucfile) + local done = stupidcompile(luafile,lucfile,strip ~= false) + if done then + report_lua("dumping %a into %a stripped",luafile,lucfile) + if cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then 
report_lua("removing %a",luafile) os.remove(luafile) end - return done end + return done +end - luautilities.loadstripped = loadstring - +function luautilities.loadstripped(...) + local l = load(...) + if l then + return load(dump(l,true)) + end end -- local getmetatable, type = getmetatable, type diff --git a/tex/context/base/util-prs.lua b/tex/context/base/util-prs.lua index 9d2ffcc3e..7a8c3ce39 100644 --- a/tex/context/base/util-prs.lua +++ b/tex/context/base/util-prs.lua @@ -9,8 +9,9 @@ if not modules then modules = { } end modules ['util-prs'] = { local lpeg, table, string = lpeg, table, string local P, R, V, S, C, Ct, Cs, Carg, Cc, Cg, Cf, Cp = lpeg.P, lpeg.R, lpeg.V, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Cp local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns -local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find +local concat, gmatch, find = table.concat, string.gmatch, string.find local tostring, type, next, rawset = tostring, type, next, rawset +local mod, div = math.mod, math.div utilities = utilities or {} local parsers = utilities.parsers or { } @@ -260,6 +261,16 @@ function parsers.simple_hash_to_string(h, separator) return concat(t,separator or ",") end +-- for mtx-context etc: aaaa bbbb cccc=dddd eeee=ffff + +local str = C((1-whitespace-equal)^1) +local setting = Cf( Carg(1) * (whitespace^0 * Cg(str * whitespace^0 * (equal * whitespace^0 * str + Cc(""))))^1,rawset) +local splitter = setting^1 + +function utilities.parsers.options_to_hash(str,target) + return str and lpegmatch(splitter,str,1,target or { }) or { } +end + -- for chem (currently one level) local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) @@ -409,7 +420,7 @@ function parsers.csvsplitter(specification) end whatever = quotedata + whatever end - local parser = Ct((Ct(whatever * (separator * whatever)^0) * S("\n\r"))^0 ) + local parser = Ct((Ct(whatever * (separator * whatever)^0) * S("\n\r")^1)^0 ) return function(data) return lpegmatch(parser,data) end @@ -568,7 +579,7 @@ local function fetch(t,name) return t[name] or { } end -function process(result,more) +local function process(result,more) for k, v in next, more do result[k] = v end @@ -590,3 +601,16 @@ end -- } -- -- inspect(utilities.parsers.mergehashes(t,"aa, bb, cc")) + +function utilities.parsers.runtime(time) + if not time then + time = os.runtime() + end + local days = div(time,24*60*60) + time = mod(time,24*60*60) + local hours = div(time,60*60) + time = mod(time,60*60) + local minutes = div(time,60) + local seconds = mod(time,60) + return days, hours, minutes, seconds +end diff --git a/tex/context/base/util-seq.lua b/tex/context/base/util-seq.lua index 27f95f0ee..35e693285 100644 --- a/tex/context/base/util-seq.lua +++ b/tex/context/base/util-seq.lua @@ -296,7 +296,7 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug args = ",one" elseif nofarguments == 2 then args = ",one,two" - elseif nofarguments == 3 then + elseif nofarguments == 3 then -- from here on probably slower than ... args = ",one,two,three" elseif nofarguments == 4 then args = ",one,two,three,four" @@ -321,6 +321,8 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug else calls[n] = format(" head, ok = %s(head%s) done = done or ok",localized,args) end +-- local s = " print('" .. tostring(group) .. " " .. tostring(action) .. " : ' .. tostring(head)) " +-- calls[n] = s .. calls[n] .. 
s end end end diff --git a/tex/context/base/util-soc.lua b/tex/context/base/util-soc.lua index 30301c510..3a52ee86d 100644 --- a/tex/context/base/util-soc.lua +++ b/tex/context/base/util-soc.lua @@ -25,17 +25,17 @@ function mail.send(specification) local server = specification.server or "" if not server then report_mail("no server specified") - return false + return false, "invalid server" end local to = specification.to or specification.recepient or "" if to == "" then - report_mail("no recepient specified") - return false + report_mail("no recipient specified") + return false, "invalid recipient" end local from = specification.from or specification.sender or "" if from == "" then report_mail("no sender specified") - return false + return false, "invalid sender" end local message = { } local body = specification.body @@ -68,11 +68,13 @@ function mail.send(specification) end end end + local user = specification.user + local password = specification.password local result, detail = smtp.send { server = specification.server, port = specification.port, - user = specification.user, - password = specification.password, + user = user ~= "" and user or nil, + password = password ~= "" and password or nil, from = from, rcpt = to, source = smtp.message { @@ -87,7 +89,9 @@ function mail.send(specification) } if detail then report_mail("error: %s",detail) + return false, detail else report_mail("message sent") + return true end end diff --git a/tex/context/base/util-sql-imp-swiglib.lua b/tex/context/base/util-sql-imp-swiglib.lua index 758c81746..5d64c02d4 100644 --- a/tex/context/base/util-sql-imp-swiglib.lua +++ b/tex/context/base/util-sql-imp-swiglib.lua @@ -453,7 +453,8 @@ local function execute(specification) end local wraptemplate = [[ -local mysql = require("swigluamysql") -- will be stored in method +----- mysql = require("swigluamysql") -- will be stored in method +local mysql = require("swiglib.mysql.core") -- will be stored in method ----- mysql_fetch_row = mysql.mysql_fetch_row ----- mysql_fetch_lengths = mysql.mysql_fetch_lengths diff --git a/tex/context/base/util-sql-users.lua b/tex/context/base/util-sql-users.lua index ea8fb4e07..a1f433946 100644 --- a/tex/context/base/util-sql-users.lua +++ b/tex/context/base/util-sql-users.lua @@ -17,7 +17,7 @@ local sql = utilities.sql local format, upper, find, gsub, topattern = string.format, string.upper, string.find, string.gsub, string.topattern local sumhexa = md5.sumhexa -local booleanstring = string.booleanstring +local toboolean = string.toboolean local sql = utilities.sql local users = { } @@ -244,7 +244,7 @@ function users.add(db,specification) fullname = name or fullname, password = encryptpassword(specification.password or ""), group = groupnumbers[specification.group] or groupnumbers.guest, - enabled = booleanstring(specification.enabled) and "1" or "0", + enabled = toboolean(specification.enabled) and "1" or "0", email = specification.email, address = specification.address, theme = specification.theme, @@ -354,7 +354,7 @@ function users.save(db,id,specification) fullname = fullname, password = encryptpassword(password), group = groupnumbers[group], - enabled = booleanstring(enabled) and "1" or "0", + enabled = toboolean(enabled) and "1" or "0", email = email, address = address, theme = theme, diff --git a/tex/context/base/util-sto.lua b/tex/context/base/util-sto.lua index 191d6cd73..8aafca425 100644 --- a/tex/context/base/util-sto.lua +++ b/tex/context/base/util-sto.lua @@ -103,12 +103,22 @@ end local function f_empty () return "" end 
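-- [editor's aside, not part of the patch] The util-sto change that starts here
-- replaces the if/elseif chains in table.setmetatableindex with small lookup
-- tables keyed by the preset name ("empty", "self", "table", "number"), so the
-- dispatch becomes table driven. The "table" preset, for instance, yields
-- auto-vivifying subtables; a standalone imitation of just that preset:

local function settableindex(t)
    return setmetatable(t,{ __index = function(t,k) local v = { } t[k] = v return v end })
end

local cache = settableindex({ })
cache.fonts.serif = true                          -- no explicit cache.fonts = { } needed
print(cache.fonts.serif)                          -- true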
-- t,k local function f_self (t,k) t[k] = k return k end local function f_table (t,k) local v = { } t[k] = v return v end +local function f_number(t,k) t[k] = 0 return 0 end -- t,k,v local function f_ignore() end -- t,k,v -local t_empty = { __index = f_empty } -local t_self = { __index = f_self } -local t_table = { __index = f_table } -local t_ignore = { __newindex = f_ignore } +local f_index = { + ["empty"] = f_empty, + ["self"] = f_self, + ["table"] = f_table, + ["number"] = f_number, +} + +local t_index = { + ["empty"] = { __index = f_empty }, + ["self"] = { __index = f_self }, + ["table"] = { __index = f_table }, + ["number"] = { __index = f_number }, +} function table.setmetatableindex(t,f) if type(t) ~= "table" then @@ -116,46 +126,30 @@ function table.setmetatableindex(t,f) end local m = getmetatable(t) if m then - if f == "empty" then - m.__index = f_empty - elseif f == "key" then - m.__index = f_self - elseif f == "table" then - m.__index = f_table - else - m.__index = f - end + m.__index = f_index[f] or f else - if f == "empty" then - setmetatable(t, t_empty) - elseif f == "key" then - setmetatable(t, t_self) - elseif f == "table" then - setmetatable(t, t_table) - else - setmetatable(t,{ __index = f }) - end + setmetatable(t,t_index[f] or { __index = f }) end return t end +local f_index = { + ["ignore"] = f_ignore, +} + +local t_index = { + ["ignore"] = { __newindex = f_ignore }, +} + function table.setmetatablenewindex(t,f) if type(t) ~= "table" then f, t = t, { } end local m = getmetatable(t) if m then - if f == "ignore" then - m.__newindex = f_ignore - else - m.__newindex = f - end + m.__newindex = f_index[f] or f else - if f == "ignore" then - setmetatable(t, t_ignore) - else - setmetatable(t,{ __newindex = f }) - end + setmetatable(t,t_index[f] or { __newindex = f }) end return t end diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua index 4890a11d6..295fc0044 100644 --- a/tex/context/base/util-str.lua +++ b/tex/context/base/util-str.lua @@ -195,12 +195,14 @@ end -- points %p number (scaled points) -- basepoints %b number (scaled points) -- table concat %...t table +-- table concat %{.}t table -- serialize %...T sequenced (no nested tables) +-- serialize %{.}T sequenced (no nested tables) -- boolean (logic) %l boolean -- BOOLEAN %L boolean -- whitespace %...w -- automatic %...a 'whatever' (string, table, ...) --- automatic %...a "whatever" (string, table, ...) +-- automatic %...A "whatever" (string, table, ...) 
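-- [editor's aside, not part of the patch] Further down in this util-str hunk
-- the %m/%M directives and number.formatted are added, producing digit-grouped
-- numbers such as "12.345.678,90". The patch builds this with an lpeg splitter;
-- the simplified stand-in below (non-negative numbers only) uses plain gsub,
-- just to show the intended output:

local function grouped(n,thousands,decimal)
    thousands, decimal = thousands or ".", decimal or ","
    local int, frac = string.format("%0.2f",n):match("^(%d+)%.(%d+)$")
    while true do
        local count
        int, count = int:gsub("^(%d+)(%d%d%d)","%1" .. thousands .. "%2")
        if count == 0 then break end
    end
    return int .. decimal .. frac
end

print(grouped(12345678.9))                        -- 12.345.678,90
print(grouped(1234.56,",","."))                   -- 1,234.56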
local n = 0 @@ -279,6 +281,7 @@ local tracedchar = string.tracedchar local autosingle = string.autosingle local autodouble = string.autodouble local sequenced = table.sequenced +local formattednumber = number.formatted ]] local template = [[ @@ -298,7 +301,7 @@ setmetatable(arguments, { __index = }) local prefix_any = C((S("+- .") + R("09"))^0) -local prefix_tab = C((1-R("az","AZ","09","%%"))^0) +local prefix_tab = P("{") * C((1-P("}"))^0) * P("}") + C((1-R("az","AZ","09","%%"))^0) -- we've split all cases as then we can optimize them (let's omit the fuzzy u) @@ -337,7 +340,7 @@ local format_i = function(f) if f and f ~= "" then return format("format('%%%si',a%s)",f,n) else - return format("a%s",n) + return format("format('%%i',a%s)",n) end end @@ -518,6 +521,61 @@ local format_W = function(f) -- handy when doing depth related indent return format("nspaces[%s]",tonumber(f) or 0) end +-- maybe to util-num + +local digit = patterns.digit +local period = patterns.period +local three = digit * digit * digit + +local splitter = Cs ( + (((1 - (three^1 * period))^1 + C(three)) * (Carg(1) * three)^1 + C((1-period)^1)) + * (P(1)/"" * Carg(2)) * C(2) +) + +patterns.formattednumber = splitter + +function number.formatted(n,sep1,sep2) + local s = type(s) == "string" and n or format("%0.2f",n) + if sep1 == true then + return lpegmatch(splitter,s,1,".",",") + elseif sep1 == "." then + return lpegmatch(splitter,s,1,sep1,sep2 or ",") + elseif sep1 == "," then + return lpegmatch(splitter,s,1,sep1,sep2 or ".") + else + return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".") + end +end + +-- print(number.formatted(1)) +-- print(number.formatted(12)) +-- print(number.formatted(123)) +-- print(number.formatted(1234)) +-- print(number.formatted(12345)) +-- print(number.formatted(123456)) +-- print(number.formatted(1234567)) +-- print(number.formatted(12345678)) +-- print(number.formatted(12345678,true)) +-- print(number.formatted(1234.56,"!","?")) + +local format_m = function(f) + n = n + 1 + if not f or f == "" then + f = "," + end + return format([[formattednumber(a%s,%q,".")]],n,f) +end + +local format_M = function(f) + n = n + 1 + if not f or f == "" then + f = "." + end + return format([[formattednumber(a%s,%q,",")]],n,f) +end + +-- + local format_rest = function(s) return format("%q",s) -- catches " and \n and such end @@ -572,6 +630,7 @@ local builder = Cs { "start", + V("W") -- new + V("a") -- new + V("A") -- new + + V("m") + V("M") -- new -- + V("*") -- ignores probably messed up % ) @@ -608,17 +667,20 @@ local builder = Cs { "start", ["b"] = (prefix_any * P("b")) / format_b, -- %b => 12.342bp / maybe: B (and more units) ["t"] = (prefix_tab * P("t")) / format_t, -- %t => concat ["T"] = (prefix_tab * P("T")) / format_T, -- %t => sequenced - ["l"] = (prefix_tab * P("l")) / format_l, -- %l => boolean - ["L"] = (prefix_tab * P("L")) / format_L, -- %L => BOOLEAN + ["l"] = (prefix_any * P("l")) / format_l, -- %l => boolean + ["L"] = (prefix_any * P("L")) / format_L, -- %L => BOOLEAN ["I"] = (prefix_any * P("I")) / format_I, -- %I => signed integer -- ["w"] = (prefix_any * P("w")) / format_w, -- %w => n spaces (optional prefix is added) ["W"] = (prefix_any * P("W")) / format_W, -- %W => mandate prefix, no specifier -- + ["m"] = (prefix_tab * P("m")) / format_m, -- %m => xxx.xxx.xxx,xx (optional prefix instead of .) + ["M"] = (prefix_tab * P("M")) / format_M, -- %M => xxx,xxx,xxx.xx (optional prefix instead of ,) + -- ["a"] = (prefix_any * P("a")) / format_a, -- %a => '...' 
(forces tostring) ["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." (forces tostring) -- - ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%%%")^1) / format_rest, -- rest (including %%) + ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%")^1) / format_rest, -- rest (including %%) -- ["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension, } @@ -645,7 +707,7 @@ local function make(t,str) p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n if n > 0 then p = format(template,preamble,t._preamble_,arguments[n],p) --- print("builder>",p) +-- print("builder>",p) f = loadstripped(p)() else f = function() return str end @@ -733,11 +795,17 @@ strings.formatters.add = add -- registered in the default instance (should we fall back on this one?) -lpeg.patterns.xmlescape = Cs((P("<")/"<" + P(">")/">" + P("&")/"&" + P('"')/""" + P(1))^0) -lpeg.patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0) +patterns.xmlescape = Cs((P("<")/"<" + P(">")/">" + P("&")/"&" + P('"')/""" + P(1))^0) +patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0) +patterns.luaescape = Cs(((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"')^0) -- maybe also \0 +patterns.luaquoted = Cs(Cc('"') * ((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"')^0 * Cc('"')) + +-- escaping by lpeg is faster for strings without quotes, slower on a string with quotes, but +-- faster again when other q-escapables are found (the ones we don't need to escape) -add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]]) -add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]]) +add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]]) +add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]]) +add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]]) -- -- yes or no: -- diff --git a/tex/context/base/util-tab.lua b/tex/context/base/util-tab.lua index a47c0cb77..ae44269bb 100644 --- a/tex/context/base/util-tab.lua +++ b/tex/context/base/util-tab.lua @@ -10,13 +10,14 @@ utilities = utilities or {} utilities.tables = utilities.tables or { } local tables = utilities.tables -local format, gmatch, gsub = string.format, string.gmatch, string.gsub +local format, gmatch, gsub, sub = string.format, string.gmatch, string.gsub, string.sub local concat, insert, remove = table.concat, table.insert, table.remove local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring local type, next, rawset, tonumber, tostring, load, select = type, next, rawset, tonumber, tostring, load, select local lpegmatch, P, Cs, Cc = lpeg.match, lpeg.P, lpeg.Cs, lpeg.Cc -local serialize, sortedkeys, sortedpairs = table.serialize, table.sortedkeys, table.sortedpairs +local sortedkeys, sortedpairs = table.sortedkeys, table.sortedpairs local formatters = string.formatters +local utftoeight = utf.toeight local splitter = lpeg.tsplitat(".") @@ -293,100 +294,90 @@ function tables.encapsulate(core,capsule,protect) end end -local function fastserialize(t,r,outer) -- no mixes - r[#r+1] = "{" - local n = #t - if n > 0 then - for i=1,n do - local v = t[i] - local tv = type(v) - if tv == "string" then - r[#r+1] = formatters["%q,"](v) - elseif tv == "number" then - r[#r+1] = formatters["%s,"](v) - elseif tv == "table" then - fastserialize(v,r) - elseif tv == "boolean" then - r[#r+1] = formatters["%S,"](v) +-- best keep [%q] keys (as we have 
some in older applications i.e. saving user data + +local f_hashed_string = formatters["[%q]=%q,"] +local f_hashed_number = formatters["[%q]=%s,"] +local f_hashed_boolean = formatters["[%q]=%l,"] +local f_hashed_table = formatters["[%q]="] + +local f_indexed_string = formatters["[%s]=%q,"] +local f_indexed_number = formatters["[%s]=%s,"] +local f_indexed_boolean = formatters["[%s]=%l,"] +local f_indexed_table = formatters["[%s]="] + +local f_ordered_string = formatters["%q,"] +local f_ordered_number = formatters["%s,"] +local f_ordered_boolean = formatters["%l,"] + +function table.fastserialize(t,prefix) + + -- prefix should contain the = + -- not sorted + -- only number and string indices (currently) + + local r = { prefix or "return" } + local m = 1 + + local function fastserialize(t,outer) -- no mixes + local n = #t + m = m + 1 + r[m] = "{" + if n > 0 then + for i=0,n do + local v = t[i] + local tv = type(v) + if tv == "string" then + m = m + 1 r[m] = f_ordered_string(v) + elseif tv == "number" then + m = m + 1 r[m] = f_ordered_number(v) + elseif tv == "table" then + fastserialize(v) + elseif tv == "boolean" then + m = m + 1 r[m] = f_ordered_boolean(v) + end end end - else for k, v in next, t do - local tv = type(v) - if tv == "string" then - r[#r+1] = formatters["[%q]=%q,"](k,v) - elseif tv == "number" then - r[#r+1] = formatters["[%q]=%s,"](k,v) - elseif tv == "table" then - r[#r+1] = formatters["[%q]="](k) - fastserialize(v,r) - elseif tv == "boolean" then - r[#r+1] = formatters["[%q]=%S,"](k,v) + local tk = type(k) + if tk == "number" then + if k > n or k < 0 then + local tv = type(v) + if tv == "string" then + m = m + 1 r[m] = f_indexed_string(k,v) + elseif tv == "number" then + m = m + 1 r[m] = f_indexed_number(k,v) + elseif tv == "table" then + m = m + 1 r[m] = f_indexed_table(k) + fastserialize(v) + elseif tv == "boolean" then + m = m + 1 r[m] = f_indexed_boolean(k,v) + end + end + else + local tv = type(v) + if tv == "string" then + m = m + 1 r[m] = f_hashed_string(k,v) + elseif tv == "number" then + m = m + 1 r[m] = f_hashed_number(k,v) + elseif tv == "table" then + m = m + 1 r[m] = f_hashed_table(k) + fastserialize(v) + elseif tv == "boolean" then + m = m + 1 r[m] = f_hashed_boolean(k,v) + end end end + m = m + 1 + if outer then + r[m] = "}" + else + r[m] = "}," + end + return r end - if outer then - r[#r+1] = "}" - else - r[#r+1] = "}," - end - return r -end --- local f_hashed_string = formatters["[%q]=%q,"] --- local f_hashed_number = formatters["[%q]=%s,"] --- local f_hashed_table = formatters["[%q]="] --- local f_hashed_true = formatters["[%q]=true,"] --- local f_hashed_false = formatters["[%q]=false,"] --- --- local f_indexed_string = formatters["%q,"] --- local f_indexed_number = formatters["%s,"] --- ----- f_indexed_true = formatters["true,"] --- ----- f_indexed_false = formatters["false,"] --- --- local function fastserialize(t,r,outer) -- no mixes --- r[#r+1] = "{" --- local n = #t --- if n > 0 then --- for i=1,n do --- local v = t[i] --- local tv = type(v) --- if tv == "string" then --- r[#r+1] = f_indexed_string(v) --- elseif tv == "number" then --- r[#r+1] = f_indexed_number(v) --- elseif tv == "table" then --- fastserialize(v,r) --- elseif tv == "boolean" then --- -- r[#r+1] = v and f_indexed_true(k) or f_indexed_false(k) --- r[#r+1] = v and "true," or "false," --- end --- end --- else --- for k, v in next, t do --- local tv = type(v) --- if tv == "string" then --- r[#r+1] = f_hashed_string(k,v) --- elseif tv == "number" then --- r[#r+1] = f_hashed_number(k,v) 
--- elseif tv == "table" then --- r[#r+1] = f_hashed_table(k) --- fastserialize(v,r) --- elseif tv == "boolean" then --- r[#r+1] = v and f_hashed_true(k) or f_hashed_false(k) --- end --- end --- end --- if outer then --- r[#r+1] = "}" --- else --- r[#r+1] = "}," --- end --- return r --- end - -function table.fastserialize(t,prefix) -- so prefix should contain the = - return concat(fastserialize(t,{ prefix or "return" },true)) + return concat(fastserialize(t,true)) end function table.deserialize(str) @@ -410,6 +401,7 @@ function table.load(filename,loader) if filename then local t = (loader or io.loaddata)(filename) if t and t ~= "" then + local t = utftoeight(t) t = load(t) if type(t) == "function" then t = t() @@ -422,10 +414,14 @@ function table.load(filename,loader) end function table.save(filename,t,n,...) - io.savedata(filename,serialize(t,n == nil and true or n,...)) + io.savedata(filename,table.serialize(t,n == nil and true or n,...)) -- no frozen table.serialize end -local function slowdrop(t) +local f_key_value = formatters["%s=%q"] +local f_add_table = formatters[" {%t},\n"] +local f_return_table = formatters["return {\n%t}"] + +local function slowdrop(t) -- maybe less memory (intermediate concat) local r = { } local l = { } for i=1,#t do @@ -433,28 +429,30 @@ local function slowdrop(t) local j = 0 for k, v in next, ti do j = j + 1 - l[j] = formatters["%s=%q"](k,v) + l[j] = f_key_value(k,v) end - r[i] = formatters[" {%t},\n"](l) + r[i] = f_add_table(l) end - return formatters["return {\n%st}"](r) + return f_return_table(r) end local function fastdrop(t) local r = { "return {\n" } + local m = 1 for i=1,#t do local ti = t[i] - r[#r+1] = " {" + m = m + 1 r[m] = " {" for k, v in next, ti do - r[#r+1] = formatters["%s=%q"](k,v) + m = m + 1 r[m] = f_key_value(k,v) end - r[#r+1] = "},\n" + m = m + 1 r[m] = "},\n" end - r[#r+1] = "}" + m = m + 1 + r[m] = "}" return concat(r) end -function table.drop(t,slow) -- only { { a=2 }, {a=3} } +function table.drop(t,slow) -- only { { a=2 }, {a=3} } -- for special cases if #t == 0 then return "return { }" elseif slow == true then @@ -464,6 +462,9 @@ function table.drop(t,slow) -- only { { a=2 }, {a=3} } end end +-- inspect(table.drop({ { a=2 }, {a=3} })) +-- inspect(table.drop({ { a=2 }, {a=3} },true)) + function table.autokey(t,k) local v = { } t[k] = v @@ -491,3 +492,248 @@ function table.twowaymapper(t) return t end +-- The next version is somewhat faster, although in practice one will seldom +-- serialize a lot using this one. Often the above variants are more efficient. +-- If we would really need this a lot, we could hash q keys. 
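-- An illustrative (hypothetical) call of the serializer defined below, not
-- part of the patch; the exact layout may differ a little:
--
-- print(table.serialize({ a = 1, b = { "x", "y" } }, "test"))
--
-- is expected to produce something along the lines of:
--
-- test={
--  ["a"]=1,
--  ["b"]={ "x", "y" },
-- }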
+ +-- char-def.lua : 0.53 -> 0.38 +-- husayni.tma : 0.28 -> 0.19 + +local f_start_key_idx = formatters["%w{"] +local f_start_key_num = formatters["%w[%s]={"] +local f_start_key_str = formatters["%w[%q]={"] +local f_start_key_boo = formatters["%w[%l]={"] +local f_start_key_nop = formatters["%w{"] + +local f_stop = formatters["%w},"] + +local f_key_num_value_num = formatters["%w[%s]=%s,"] +local f_key_str_value_num = formatters["%w[%q]=%s,"] +local f_key_boo_value_num = formatters["%w[%l]=%s,"] + +local f_key_num_value_str = formatters["%w[%s]=%q,"] +local f_key_str_value_str = formatters["%w[%q]=%q,"] +local f_key_boo_value_str = formatters["%w[%l]=%q,"] + +local f_key_num_value_boo = formatters["%w[%s]=%l,"] +local f_key_str_value_boo = formatters["%w[%q]=%l,"] +local f_key_boo_value_boo = formatters["%w[%l]=%l,"] + +local f_key_num_value_not = formatters["%w[%s]={},"] +local f_key_str_value_not = formatters["%w[%q]={},"] +local f_key_boo_value_not = formatters["%w[%l]={},"] + +local f_key_num_value_seq = formatters["%w[%s]={ %, t },"] +local f_key_str_value_seq = formatters["%w[%q]={ %, t },"] +local f_key_boo_value_seq = formatters["%w[%l]={ %, t },"] + +local f_val_num = formatters["%w%s,"] +local f_val_str = formatters["%w%q,"] +local f_val_boo = formatters["%w%l,"] +local f_val_not = formatters["%w{},"] +local f_val_seq = formatters["%w{ %, t },"] + +local f_table_return = formatters["return {"] +local f_table_name = formatters["%s={"] +local f_table_direct = formatters["{"] +local f_table_entry = formatters["[%q]={"] +local f_table_finish = formatters["}"] + +----- f_string = formatters["%q"] + +local spaces = utilities.strings.newrepeater(" ") + +local serialize = table.serialize -- the extensive one, the one we started with + +-- there is still room for optimization: index run, key run, but i need to check with the +-- latest lua for the value of #n (with holes) .. anyway for tracing purposes we want +-- indices / keys being sorted, so it will never be real fast + +function table.serialize(root,name,specification) + + if type(specification) == "table" then + return serialize(root,name,specification) -- the original one + end + + local t -- = { } + local n = 1 + + local function simple_table(t) + if #t > 0 then + local n = 0 + for _, v in next, t do + n = n + 1 + if type(v) == "table" then + return nil + end + end + if n == #t then + local tt = { } + local nt = 0 + for i=1,#t do + local v = t[i] + local tv = type(v) + nt = nt + 1 + if tv == "number" then + tt[nt] = v + elseif tv == "string" then + tt[nt] = format("%q",v) -- f_string(v) + elseif tv == "boolean" then + tt[nt] = v and "true" or "false" + else + return nil + end + end + return tt + end + end + return nil + end + + local function do_serialize(root,name,depth,level,indexed) + if level > 0 then + n = n + 1 + if indexed then + t[n] = f_start_key_idx(depth) + else + local tn = type(name) + if tn == "number" then + t[n] = f_start_key_num(depth,name) + elseif tn == "string" then + t[n] = f_start_key_str(depth,name) + elseif tn == "boolean" then + t[n] = f_start_key_boo(depth,name) + else + t[n] = f_start_key_nop(depth) + end + end + depth = depth + 1 + end + -- we could check for k (index) being number (cardinal) + if root and next(root) then + local first = nil + local last = 0 + last = #root + for k=1,last do + if root[k] == nil then + last = k - 1 + break + end + end + if last > 0 then + first = 1 + end + local sk = sortedkeys(root) -- inline fast version? 
+ for i=1,#sk do + local k = sk[i] + local v = root[k] + local tv = type(v) + local tk = type(k) + if first and tk == "number" and k >= first and k <= last then + if tv == "number" then + n = n + 1 t[n] = f_val_num(depth,v) + elseif tv == "string" then + n = n + 1 t[n] = f_val_str(depth,v) + elseif tv == "table" then + if not next(v) then + n = n + 1 t[n] = f_val_not(depth) + else + local st = simple_table(v) + if st then + n = n + 1 t[n] = f_val_seq(depth,st) + else + do_serialize(v,k,depth,level+1,true) + end + end + elseif tv == "boolean" then + n = n + 1 t[n] = f_val_boo(depth,v) + end + elseif tv == "number" then + if tk == "number" then + n = n + 1 t[n] = f_key_num_value_num(depth,k,v) + elseif tk == "string" then + n = n + 1 t[n] = f_key_str_value_num(depth,k,v) + elseif tk == "boolean" then + n = n + 1 t[n] = f_key_boo_value_num(depth,k,v) + end + elseif tv == "string" then + if tk == "number" then + n = n + 1 t[n] = f_key_num_value_str(depth,k,v) + elseif tk == "string" then + n = n + 1 t[n] = f_key_str_value_str(depth,k,v) + elseif tk == "boolean" then + n = n + 1 t[n] = f_key_boo_value_str(depth,k,v) + end + elseif tv == "table" then + if not next(v) then + if tk == "number" then + n = n + 1 t[n] = f_key_num_value_not(depth,k,v) + elseif tk == "string" then + n = n + 1 t[n] = f_key_str_value_not(depth,k,v) + elseif tk == "boolean" then + n = n + 1 t[n] = f_key_boo_value_not(depth,k,v) + end + else + local st = simple_table(v) + if not st then + do_serialize(v,k,depth,level+1) + elseif tk == "number" then + n = n + 1 t[n] = f_key_num_value_seq(depth,k,st) + elseif tk == "string" then + n = n + 1 t[n] = f_key_str_value_seq(depth,k,st) + elseif tk == "boolean" then + n = n + 1 t[n] = f_key_boo_value_seq(depth,k,st) + end + end + elseif tv == "boolean" then + if tk == "number" then + n = n + 1 t[n] = f_key_num_value_boo(depth,k,v) + elseif tk == "string" then + n = n + 1 t[n] = f_key_str_value_boo(depth,k,v) + elseif tk == "boolean" then + n = n + 1 t[n] = f_key_boo_value_boo(depth,k,v) + end + end + end + end + if level > 0 then + n = n + 1 t[n] = f_stop(depth-1) + end + end + + local tname = type(name) + + if tname == "string" then + if name == "return" then + t = { f_table_return() } + else + t = { f_table_name(name) } + end + elseif tname == "number" then + t = { f_table_entry(name) } + elseif tname == "boolean" then + if name then + t = { f_table_return() } + else + t = { f_table_direct() } + end + else + t = { f_table_name("t") } + end + + if root then + -- The dummy access will initialize a table that has a delayed initialization + -- using a metatable. (maybe explicitly test for metatable) + if getmetatable(root) then -- todo: make this an option, maybe even per subtable + local dummy = root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_ = nil + end + -- Let's forget about empty tables. 
+ if next(root) then + do_serialize(root,name,1,0) + end + end + n = n + 1 + t[n] = f_table_finish() + return concat(t,"\n") +end diff --git a/tex/context/base/util-tpl.lua b/tex/context/base/util-tpl.lua index 7a6abefd6..67d058221 100644 --- a/tex/context/base/util-tpl.lua +++ b/tex/context/base/util-tpl.lua @@ -17,8 +17,8 @@ local trace_template = false trackers.register("templates.trace",function(v) t local report_template = logs.reporter("template") local tostring = tostring -local format, sub = string.format, string.sub -local P, C, Cs, Carg, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.Carg, lpeg.match +local format, sub, byte = string.format, string.sub, string.byte +local P, C, R, Cs, Cc, Carg, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Cc, lpeg.Carg, lpeg.match, lpeg.patterns -- todo: make installable template.new @@ -52,7 +52,10 @@ local sqlescape = lpeg.replacer { -- { "\t", "\\t" }, } -local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'")) +local sqlquoted = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'")) + +lpegpatterns.sqlescape = sqlescape +lpegpatterns.sqlquoted = sqlquoted -- escapeset : \0\1\2\3\4\5\6\7\8\9\10\11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31\"\\\127 -- test string: [[1\0\31test23"\\]] .. string.char(19) .. "23" @@ -68,7 +71,7 @@ local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'")) -- } -- -- slightly faster: --- + -- local luaescape = Cs (( -- P('"' ) / [[\"]] + -- P('\\') / [[\\]] + @@ -78,9 +81,16 @@ local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'")) -- P(1) -- )^0) +----- xmlescape = lpegpatterns.xmlescape +----- texescape = lpegpatterns.texescape +local luaescape = lpegpatterns.luaescape +----- sqlquoted = lpegpatterns.sqlquoted +----- luaquoted = lpegpatterns.luaquoted + local escapers = { lua = function(s) - return sub(format("%q",s),2,-2) + -- return sub(format("%q",s),2,-2) + return lpegmatch(luaescape,s) end, sql = function(s) return lpegmatch(sqlescape,s) @@ -89,16 +99,14 @@ local escapers = { local quotedescapers = { lua = function(s) + -- return lpegmatch(luaquoted,s) return format("%q",s) end, sql = function(s) - return lpegmatch(sqlquotedescape,s) + return lpegmatch(sqlquoted,s) end, } -lpeg.patterns.sqlescape = sqlescape -lpeg.patterns.sqlescape = sqlquotedescape - local luaescaper = escapers.lua local quotedluaescaper = quotedescapers.lua @@ -151,6 +159,14 @@ end templates.replace = replace +function templates.replacer(str,how,recurse) -- reads nicer + return function(mapping) + return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str + end +end + +-- local cmd = templates.replacer([[foo %bar%]]) print(cmd { bar = "foo" }) + function templates.load(filename,mapping,how,recurse) local data = io.loaddata(filename) or "" if mapping and next(mapping) then diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua index 5ef741ce3..992c37eae 100644 --- a/tex/context/base/x-asciimath.lua +++ b/tex/context/base/x-asciimath.lua @@ -140,6 +140,8 @@ local reserved = { } +table.setmetatableindex(reserved,characters.entities) + local postmapper = Cs ( ( P("\\mathoptext ") * spaces * (P("\\bgroup ")/"{") * (1-P("\\egroup "))^1 * (P("\\egroup ")/"}") + diff --git a/tex/context/base/x-mathml.lua b/tex/context/base/x-mathml.lua index 31483bbea..d19e1eebf 100644 --- a/tex/context/base/x-mathml.lua +++ b/tex/context/base/x-mathml.lua @@ -62,6 +62,7 @@ local o_replacements = { -- in main table ["{"] = "\\mmlleftdelimiter 
\\lbrace", ["}"] = "\\mmlrightdelimiter\\rbrace", ["|"] = "\\mmlleftorrightdelimiter\\vert", + ["/"] = "\\mmlleftorrightdelimiter\\solidus", [doublebar] = "\\mmlleftorrightdelimiter\\Vert", ["("] = "\\mmlleftdelimiter(", [")"] = "\\mmlrightdelimiter)", diff --git a/tex/context/base/x-mathml.mkiv b/tex/context/base/x-mathml.mkiv index a4eb09467..1a2099805 100644 --- a/tex/context/base/x-mathml.mkiv +++ b/tex/context/base/x-mathml.mkiv @@ -1,4 +1,4 @@ -%D \module +%D \modul %D [ file=x-mathml, %D version=2008.05.29, %D title=\CONTEXT\ XML Modules, @@ -74,17 +74,30 @@ } } { - \MMLhack\xmlflush{#1} + \math_fences_checked_start + \MMLhack + \xmlflush{#1} + \math_fences_checked_stop } \endgroup \stopxmlsetups \startxmlsetups mml:imath - \inlinemathematics{\MMLhack\xmlflush{#1}} + \inlinemathematics { + \math_fences_checked_start + \MMLhack + \xmlflush{#1} + \math_fences_checked_stop + } \stopxmlsetups \startxmlsetups mml:dmath - \displaymathematics{\MMLhack\xmlflush{#1}} + \displaymathematics { + \math_fences_checked_start + \MMLhack + \xmlflush{#1} + \math_fences_checked_stop + } \stopxmlsetups %D First we define some general formula elements. @@ -96,24 +109,45 @@ \startformula\MMLhack\xmlfirst{#1}{/mml:math}\stopformula \stopxmlsetups -\setfalse\mmlignoredelimiter -\settrue \mmlsomeleftdelimiter +% old delimiter hacks +% +% \setfalse\mmlignoredelimiter +% \settrue \mmlsomeleftdelimiter +% +% \def\MMLleftorright +% {\ifconditional\mmlsomeleftdelimiter +% \setfalse\mmlsomeleftdelimiter\expandafter\MMLleft +% \else +% \settrue \mmlsomeleftdelimiter\expandafter\MMLright +% \fi} +% +% \ifx\MMLleft \undefined \let\MMLleft \firstofoneargument \fi +% \ifx\MMLright \undefined \let\MMLright \firstofoneargument \fi +% \ifx\MMLmiddle\undefined \let\MMLmiddle\firstofoneargument \fi +% +% \def\mmlleftdelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLleft #1}\fi} +% \def\mmlrightdelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLright #1}\fi} +% \def\mmlmiddledelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLmiddle #1}\fi} +% \def\mmlleftorrightdelimiter#1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLleftorright#1}\fi} + +% new delimiter hacks (assumes wrapping) +% +% \math_fences_checked_start +% \math_fences_checked_stop +% +% \math_fences_checked_left +% \math_fences_checked_middle +% \math_fences_checked_right +% \math_fences_checked_left_or_right -\def\MMLleftorright - {\ifconditional\mmlsomeleftdelimiter - \setfalse\mmlsomeleftdelimiter\expandafter\MMLleft - \else - \settrue \mmlsomeleftdelimiter\expandafter\MMLright - \fi} +\setfalse\mmlignoredelimiter % alternatively we could turn it on/off inside the start/stop and ignore \left\right\middle otherwise -\ifx\MMLleft \undefined \let\MMLleft \firstofoneargument \fi -\ifx\MMLright \undefined \let\MMLright \firstofoneargument \fi -\ifx\MMLmiddle\undefined \let\MMLmiddle\firstofoneargument \fi +\def\mmlleftdelimiter {\ifconditional\mmlignoredelimiter\else\expandafter\math_fences_checked_left \fi} +\def\mmlrightdelimiter {\ifconditional\mmlignoredelimiter\else\expandafter\math_fences_checked_right \fi} +\def\mmlmiddledelimiter {\ifconditional\mmlignoredelimiter\else\expandafter\math_fences_checked_middle \fi} +\def\mmlleftorrightdelimiter{\ifconditional\mmlignoredelimiter\else\expandafter\math_fences_checked_left_or_right\fi} -\def\mmlleftdelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLleft #1}\fi} 
-\def\mmlrightdelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLright #1}\fi} -\def\mmlmiddledelimiter #1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLmiddle #1}\fi} -\def\mmlleftorrightdelimiter#1{\ifconditional\mmlignoredelimiter#1\else\normalordelimiter{#1}{\MMLleftorright#1}\fi} +% end of delimiter mess \def\mmlchar#1{\char#1 } % used in lua code @@ -201,6 +235,10 @@ \def\mmlprelast#1{\xmlelement{#1}{-2}} \def\mmllast #1{\xmlelement{#1}{-1}} +\unexpanded\def\mmlunexpandedfirst #1{\xmlelement{#1}{1}} % we can move these inline if needed +\unexpanded\def\mmlunexpandedsecond #1{\xmlelement{#1}{2}} +\unexpanded\def\mmlunexpandedthird #1{\xmlelement{#1}{3}} + \starttexdefinition doifelsemmlfunction #1 \xmldoifelse {#1} {/mml:fn} { \firstoftwoarguments @@ -217,19 +255,6 @@ } \stoptexdefinition -%D Special features: - - \newtoks \@@postponedMMLactions \setfalse \somepostponedMMLactions - - \def\postponeMMLactions#1% - {\global\settrue\somepostponedMMLactions - \global\@@postponedMMLactions\expandafter{\the\@@postponedMMLactions#1}} - - \def\postponedMMLactions - {\global\setfalse\somepostponedMMLactions - \@EA\global\@EA\@@postponedMMLactions\@EA\emptytoks - \the\@@postponedMMLactions} - %D A couple of lists: \convertargument @@ -301,6 +326,17 @@ \newcount\mmlapplydepth \def\MMLcreset{\mmlapplydepth\zerocount} +% \newtoks \@@postponedMMLactions \setfalse \somepostponedMMLactions +% +% \def\postponeMMLactions#1% +% {\global\settrue\somepostponedMMLactions +% \global\@@postponedMMLactions\expandafter{\the\@@postponedMMLactions#1}} +% +% \def\postponedMMLactions +% {\global\setfalse\somepostponedMMLactions +% \@EA\global\@EA\@@postponedMMLactions\@EA\emptytoks +% \the\@@postponedMMLactions} + \startxmlsetups mml:apply \MMLmathinner { \xmldoif {#1} {/(\MMLcmainresetlist\string|\MMLctempresetlist)} { @@ -1785,25 +1821,23 @@ \stopxmlsetups % PRESENTATION MATHML -% -% there are some rough edges that need to be sorted out - -% helpers - -\xmlmapvalue {mml} {normal} {\tf} -\xmlmapvalue {mml} {double-struck} {\bf} -\xmlmapvalue {mml} {italic} {\it} -\xmlmapvalue {mml} {fraktur} {\bf} -\xmlmapvalue {mml} {script} {\tf} -\xmlmapvalue {mml} {bold} {\bf} -\xmlmapvalue {mml} {bold-italic} {\bi} -\xmlmapvalue {mml} {bold-fraktur} {\bf} -\xmlmapvalue {mml} {bold-script} {\bf} -\xmlmapvalue {mml} {sans-serif} {\ss} -\xmlmapvalue {mml} {bold-sans-serif} {\ss\bf} -\xmlmapvalue {mml} {sans-serif-italic} {\ss\it} -\xmlmapvalue {mml} {sans-serif-bold-italic} {\ss\bi} -\xmlmapvalue {mml} {monospace} {\tt} + +% helpers: maybe we can need a setting for the uprights + +\xmlmapvalue {mml} {normal} {\mathupright} % {\mathtf} +\xmlmapvalue {mml} {double-struck} {\mathblackboard} +\xmlmapvalue {mml} {italic} {\mathit} +\xmlmapvalue {mml} {fraktur} {\mathfraktur} +\xmlmapvalue {mml} {script} {\mathscript} +\xmlmapvalue {mml} {bold} {\mb} % {\mathbf} +\xmlmapvalue {mml} {bold-italic} {\mathbi} +\xmlmapvalue {mml} {bold-fraktur} {\mathfraktur\mathbf} +\xmlmapvalue {mml} {bold-script} {\mathscript\mathbf} +\xmlmapvalue {mml} {sans-serif} {\mathss} +\xmlmapvalue {mml} {bold-sans-serif} {\mathss\mathbf} +\xmlmapvalue {mml} {sans-serif-italic} {\mathss\mathit} +\xmlmapvalue {mml} {sans-serif-bold-italic} {\mathss\mathbi} +\xmlmapvalue {mml} {monospace} {\mathtt} % todo: displaystyle=true/false (or whatever else shows up) @@ -1863,15 +1897,15 @@ % setups -\startxmlsetups mml:mi % todo: mathvariant mathsize mathcolor mathbackground - \ctxmodulemathml{mi("#1")} +\startxmlsetups 
mml:mi % todo: mathsize (unlikely) mathcolor (easy) mathbackground (easy) + \begingroup + \setmmlmathstyle{#1} + \ctxmodulemathml{mi("#1")} + \endgroup \stopxmlsetups -\startxmlsetups mml:mn % todo: mathvariant mathsize mathcolor mathbackground -% \begingroup -% \mr - \ctxmodulemathml{mn("#1")}% no \hbox, would be ok for . , but spoils rest -% \endgroup +\startxmlsetups mml:mn + \ctxmodulemathml{mn("#1")}% no \hbox, would be ok for . , but spoils rest \stopxmlsetups % -2 and 1-2 @@ -1885,13 +1919,20 @@ \setfalse\mmlignoredelimiter \stopxmlsetups +% \startxmlsetups mml:mfenced % {} around separator is needed for spacing +% \def\MMLleft {\left }% weird +% \def\MMLright {\right} +% \def\MMLmiddle{\middle} +% \ctxmodulemathml{mfenced("#1")} +% \stopxmlsetups + \startxmlsetups mml:mfenced % {} around separator is needed for spacing - \def\MMLleft {\left }% weird - \def\MMLright {\right} - \def\MMLmiddle{\middle} + \math_fences_checked_start \ctxmodulemathml{mfenced("#1")} + \math_fences_checked_stop \stopxmlsetups + \defineoverlay [mml:enclose:box] [\useMPgraphic{mml:enclose:box}] \defineoverlay [mml:enclose:roundedbox] [\useMPgraphic{mml:enclose:roundedbox}] \defineoverlay [mml:enclose:circle] [\useMPgraphic{mml:enclose:circle}] @@ -2103,18 +2144,32 @@ % % fails on { ... so we need +% \startxmlsetups mml:mrow +% \begingroup +% \xmldoifelse {#1} {/mml:mo[first() or last()]} {% we need a {} +% \def\MMLleft {\left } +% \def\MMLright {\right} +% \def\MMLmiddle{\middle} +% \enabledelimiter +% \checkdelimiters{\xmlall{#1}{/mml:mo}} +% \fakeleftdelimiter +% \xmlflush{#1} +% \fakerightdelimiter +% \disabledelimiter +% } { +% \xmlflush{#1} +% } +% \endgroup +% \stopxmlsetups +% +% more modern: + \startxmlsetups mml:mrow \begingroup \xmldoifelse {#1} {/mml:mo[first() or last()]} {% we need a {} - \def\MMLleft {\left } - \def\MMLright {\right} - \def\MMLmiddle{\middle} - \enabledelimiter - \checkdelimiters{\xmlall{#1}{/mml:mo}} - \fakeleftdelimiter - \xmlflush{#1} - \fakerightdelimiter - \disabledelimiter + \math_fences_checked_start + \xmlflush{#1} + \math_fences_checked_stop } { \xmlflush{#1} } @@ -2134,6 +2189,8 @@ % brrr no { } when limop .. 
todo: better in lua % speed up with ifx and setups or just in lua +\let\mmlnucleus\relax + \startxmlsetups mml:msub \edef\mmlnucleus{\xmlraw{#1}{/mml:*[1]}} \doifelse {\utfmathclass\mmlnucleus} {limop} { @@ -2183,65 +2240,129 @@ \fi\fi {\csname#1\endcsname}} +% todo: combine topaccent/over/bottomaccent/under check + +\definemathextensible [\v!mathematics] [mml:overleftarrow] ["2190] % ["27F5] +\definemathextensible [\v!mathematics] [mml:overrightarrow] ["2192] % ["27F6] +\definemathextensible [\v!mathematics] [mml:overleftrightarrow] ["27F7] +\definemathextensible [\v!mathematics] [mml:overtwoheadrightarrow] ["27F9] +\definemathextensible [\v!mathematics] [mml:overleftharpoondown] ["21BD] +\definemathextensible [\v!mathematics] [mml:overleftharpoonup] ["21BC] +\definemathextensible [\v!mathematics] [mml:overrightharpoondown] ["21C1] +\definemathextensible [\v!mathematics] [mml:overrightharpoonup] ["21C0] + +\definemathextensible [\v!mathematics] [mml:underleftarrow] ["2190] % ["27F5] +\definemathextensible [\v!mathematics] [mml:underrightarrow] ["2192] % ["27F6] +\definemathextensible [\v!mathematics] [mml:underleftrightarrow] ["27F7] +\definemathextensible [\v!mathematics] [mml:undertwoheadrightarrow] ["27F9] +\definemathextensible [\v!mathematics] [mml:underleftharpoondown] ["21BD] +\definemathextensible [\v!mathematics] [mml:underleftharpoonup] ["21BC] +\definemathextensible [\v!mathematics] [mml:underrightharpoondown] ["21C1] +\definemathextensible [\v!mathematics] [mml:underrightharpoonup] ["21C0] + +\definemathtriplet [\v!mathematics] [mmlovertriplet] +\definemathtriplet [\v!mathematics] [mmlundertriplet] +\definemathtriplet [\v!mathematics] [mmldoubletriplet] + +% alternative: +% +% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2190}] ["2190] % ["27F5] +% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2192}] ["2192] % ["27F6] +% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F5}] ["2190] % ["27F5] +% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F6}] ["2192] % ["27F6] +% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F7}] ["27F7] +% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F9}] ["27F9] +% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BD}] ["21BD] +% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BC}] ["21BC] +% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C1}] ["21C1] +% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C0}] ["21C0] + +\unexpanded\def\mmloverof#1{\mmlexecuteifdefined\mmlovercommand\relax{\mmlunexpandedfirst {#1}}\relax} +\unexpanded\def\mmloveros#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedsecond{#1}}\relax} +\unexpanded\def\mmloverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax} +\unexpanded\def\mmloverbs#1{\mmlexecuteifdefined\mmlbasecommand\relax{\mmlunexpandedsecond{#1}}\relax} + \startxmlsetups mml:mover - \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}} - \doifelseutfmathaccentfiltered\mmlovertoken{topaccent} {% not ok - \edef\mmlovercommand{\utfmathcommandfiltered\mmlovertoken{topaccent}} - \mmlexecuteifdefined\mmlovercommand\mathematics{\mmlfirst{#1}} + \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text() + \doifelseutfmathabove\mmlovertoken { + \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken} + \mmloverof{#1} } { - \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}} - \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken} - \edef\mmlovercommand{\utfmathfiller\mmlovertoken} - \vbox { - \mathsurround\zeropoint - \ialign { - 
\hss$\alignmark\alignmark$\hss - \crcr - \noalign{\kern3\onepoint}% - \mmlexecuteifdefined\mmlovercommand{\mmlsecond{#1}}{}% extra {} is safeguard - \crcr - \noalign{\kern3\onepoint\nointerlineskip}% - \mmlexecuteifdefined\mmlbasecommand{\mmlfirst{#1}}{}% extra {} is safeguard - \crcr - } + \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text() + \doifelseutfmathabove\mmlbasetoken { + \edef\mmlbasecommand{mml:\utfmathcommandabove\mmlbasetoken} + \mmloverbs{#1} + } { + \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken} + \edef\mmlovercommand{\utfmathfiller\mmlovertoken} + \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax } } -% \limits % spoils spacing + % \limits % spoils spacing \stopxmlsetups -% messy: (_ +% alternative: +% +% \startxmlsetups mml:mover +% \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text() +% \doifelseutfmathabove\mmlovertoken { +% \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken} +% \mmloverof{#1} +% } { +% \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]/text()}} +% \ifcsname mml:\mmlbasetoken\endcsname +% \csname mml:\mmlbasetoken\endcsname{\mmlunexpandedsecond{#1}}\relax +% \else +% \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken} +% \edef\mmlovercommand{\utfmathfiller\mmlovertoken} +% \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax +% \fi +% } +% % \limits % spoils spacing +% \stopxmlsetups + +\unexpanded\def\mmlunderuf#1{\mmlexecuteifdefined\mmlundercommand\relax {\mmlunexpandedfirst {#1}}\relax} +\unexpanded\def\mmlunderus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax} +\unexpanded\def\mmlunderbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax} +\unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\mmlunexpandedsecond{#1}}\relax} \startxmlsetups mml:munder -% \mathop { - \edef\mmlundertoken{\xmlraw{#1}{/mml:*[2]}} - \doifelseutfmathaccentfiltered\mmlundertoken{botaccent} { - \edef\mmlundercommand{\utfmathcommandfiltered\mmlundertoken{botaccent}} - \mmlexecuteifdefined\mmlundercommand\mathematics{\mmlfirst{#1}} + \edef\mmlundertoken{\xmlraw{#1}{/mml:*[2]}}% /text() + \doifelseutfmathbelow\mmlundertoken {% + \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken} + \mmlunderuf{#1} + } { + \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text() + \doifelseutfmathbelow\mmlbasetoken { + \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken} + \mmlunderbs{#1} } { - \edef\mmlbasetoken {\xmlraw{#1}{/mml:*[1]}} \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken} \edef\mmlundercommand{\utfmathfiller\mmlundertoken} - \vtop { - \mathsurround\zeropoint \ialign { - \hss$##$\hss - \crcr - \mmlexecuteifdefined\mmlbasecommand {\mmlfirst{#1}} - \crcr - \noalign{\kern3\onepoint\nointerlineskip}% - \mmlexecuteifdefined\mmlundercommand{\mmlsecond{#1}} - \crcr - \noalign{\kern3\onepoint} - } - } + \mmlundertriplet{\mmlunderus{#1}}{\mmlunderbf{#1}}\relax } -% } -% \limits % spoils spacing + } + % \limits % spoils spacing \stopxmlsetups +\unexpanded\def\mmlunderoverst#1{\mmlexecuteifdefined\mmlbasecommand \relax{\mmlunexpandedsecond{#1}}{\mmlunexpandedthird{#1}}\relax} +\unexpanded\def\mmlunderoverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax} +\unexpanded\def\mmlunderoverus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax} +\unexpanded\def\mmlunderoverot#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedthird {#1}}\relax} + \startxmlsetups mml:munderover - \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}} - \edef\mmlbasecommand{\utfmathcommand\mmlbasetoken} - 
\mmlexecuteifdefined\mmlbasecommand{\mathematics{\mmlfirst{#1}}}\normalsubscript{\mmlsecond{#1}}\normalsuperscript{\mmlthird{#1}} + \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text() + \doifelseutfmathbelow\mmlbasetoken { + \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken} + \mmlunderoverst{#1} + } { + \edef\mmlundertoken {\xmlraw{#1}{/mml:*[2]}}% /text() + \edef\mmlovertoken {\xmlraw{#1}{/mml:*[3]}}% /text() + \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken} + \edef\mmlundercommand{\utfmathfiller\mmlundertoken} + \edef\mmlovercommand {\utfmathfiller\mmlovertoken} + \mmldoubletriplet{\mmlunderoverbf{#1}}{\mmlunderoverus{#1}}{\mmlunderoverot{#1}}\relax + } \stopxmlsetups % tables (mml:mtable, mml:mtr, mml:mlabledtr, mml:mtd) diff --git a/tex/context/base/x-set-12.mkiv b/tex/context/base/x-set-12.mkiv index bfeb0ab54..6590bfe9e 100644 --- a/tex/context/base/x-set-12.mkiv +++ b/tex/context/base/x-set-12.mkiv @@ -12,6 +12,19 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. +% included loading overhead +% +% 2.55 / 2.40 (luatex) +% 1.90 / 1.80 (luajittex) + +% \newif\ifcachedcommand +% \newif\ifcalledcommand +% +% \cachedcommandtrue +% \calledcommandtrue +% +% \usemodule[speedtest] + \usemodule[set-11] \unprotect diff --git a/tex/context/fonts/ebgaramond.lfg b/tex/context/fonts/ebgaramond.lfg new file mode 100644 index 000000000..43cc13c51 --- /dev/null +++ b/tex/context/fonts/ebgaramond.lfg @@ -0,0 +1,53 @@ +return { + name = "eb garamond", + version = "1.00", + comment = "Goodies that complement eb garamond.", + author = "Hans Hagen", + copyright = "ConTeXt development team", + designsizes = { + ["EBGaramond-Italic"] = { + ["8pt"] = "file:EBGaramond08-Italic", + ["9pt"] = "file:EBGaramond08-Italic", + ["9.5pt"] = "file:EBGaramond08-Italic", + ["10pt"] = "file:EBGaramond12-Italic", + ["11pt"] = "file:EBGaramond12-Italic", + ["12pt"] = "file:EBGaramond12-Italic", + default = "file:EBGaramond12-Italic", + }, + ["EBGaramond-Regular"] = { + ["8pt"] = "file:EBGaramond08-Regular", + ["9pt"] = "file:EBGaramond08-Regular", + ["9.5pt"] = "file:EBGaramond08-Regular", + ["10pt"] = "file:EBGaramond12-Regular", + ["11pt"] = "file:EBGaramond12-Regular", + ["12pt"] = "file:EBGaramond12-Regular", + default = "file:EBGaramond12-Regular", + }, + ["EBGaramond-SC"] = { + ["8pt"] = "file:EBGaramond08-SC", + ["9pt"] = "file:EBGaramond08-SC", + ["9.5pt"] = "file:EBGaramond08-SC", + ["10pt"] = "file:EBGaramond12-SC", + ["11pt"] = "file:EBGaramond12-SC", + ["12pt"] = "file:EBGaramond12-SC", + default = "file:EBGaramond12-SC", + }, + ["EBGaramond-Bold"] = { + default = "file:EBGaramond12-Bold", + }, + ["EBGaramond-AllSC"] = { + default = "file:EBGaramond12-AllSC", + }, + ["EBGaramond-Initials"] = { + default = "file:EBGaramondInitials", + }, + ["EBGaramond-InitialsF1"] = { + default = "file:EBGaramondInitialsF1", + }, + ["EBGaramond-InitialsF2"] = { + default = "file:EBGaramondInitialsF2", + }, + } +} + + diff --git a/tex/context/fonts/euler-math.lfg b/tex/context/fonts/euler-math.lfg new file mode 100644 index 000000000..da7647c53 --- /dev/null +++ b/tex/context/fonts/euler-math.lfg @@ -0,0 +1,23 @@ +-- this file might go away and is for experiments only + +return { + name = "euler-math", + version = "1.00", + comment = "Goodies that complement euler math.", + author = "Hans Hagen", + copyright = "ConTeXt development team", + mathematics = { + -- virtuals = { + -- ["euler-nova"] = { + -- { name = "texgyrepagella-math.otf", main = true, parameters = true }, + -- { name = "euler.otf", 
overlay = true }, -- first = 0x1234, last = 0x1256 + -- + -- -- { name = "euler.otf", main = true, parameters = true }, + -- -- { name = "texgyrepagella-math.otf", overlay = true }, + -- + -- }, + -- } + } +} + + diff --git a/tex/context/fonts/lm.lfg b/tex/context/fonts/lm.lfg index 792e723e8..8d7614718 100644 --- a/tex/context/fonts/lm.lfg +++ b/tex/context/fonts/lm.lfg @@ -1,4 +1,4 @@ --- In order to be ale to use beta math fonts, we use our own file name and +-- In order to be able to use beta math fonts, we use our own file name and -- always remap. return { @@ -11,12 +11,13 @@ return { tweaks = { aftercopying = { mathematics.tweaks.fixbadprime, -- prime is too low + -- mathematics.tweaks.fixoverline, }, }, dimensions = { -- always applied --- default = { --- }, + -- default = { + -- }, -- driven by 'mathdimensions' feature signs = { -- set dimensions diff --git a/tex/context/fonts/px-math.lfg b/tex/context/fonts/px-math.lfg index 2996a55e5..14f71dad3 100644 --- a/tex/context/fonts/px-math.lfg +++ b/tex/context/fonts/px-math.lfg @@ -10,7 +10,7 @@ return { }, virtuals = { ["px-math"] = { - { name = "texgyrepagella-regular.otf", features = "virtualmath", main = true }, + { name = "texgyre-pagella-math-regular.otf", features = "virtualmath", main = true }, { name = "texgyrepagella-regular.otf", features = "virtualmath", vector = "tex-mr-missing" } , { name = "rpxr.tfm", vector = "tex-mr" } , { name = "rpxmi.tfm", vector = "tex-mi", skewchar=0x7F }, diff --git a/tex/context/fonts/treatments.lfg b/tex/context/fonts/treatments.lfg index 22706d6e9..b973906c3 100644 --- a/tex/context/fonts/treatments.lfg +++ b/tex/context/fonts/treatments.lfg @@ -2,6 +2,47 @@ -- the order specified by tree order access. The first treatment of a file -- always wins, so one can overload. These files are not (to be) loaded with -- font definitions. (Experiment as part of writing the font manual.) +-- +-- So there are several ways to fix a font: add a patcher to a goodie file and +-- load that one. Such a patch can end up in the cached file. Treatments are +-- applied at runtime. An experimental auto-loaded goodie approach is not yet +-- enabled and will never be if treatments can do the job. 
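-- A hypothetical sketch, not part of the patch, of what an entry in the
-- 'files' table further down boils down to; the filename and comment are made
-- up, the real cases follow below:
--
-- ["somebrokenfont.ttf"] = {
--     comment = "why this font is skipped or patched",
--     ignored = true,                  -- or: fixes = function(data) ... end
-- },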
+ +local report = fonts.treatments.report + +local fix_unifraktur = { + comment = "suspicious x height", + fixes = function(data) + local pfminfo = data.metadata.pfminfo + if pfminfo then + local os2_xheight = pfminfo.os2_xheight + if os2_xheight and os2_xheight < 350 then + report("suspicious x-height %a, nilling",os2_xheight) + pfminfo.os2_xheight_original = os2_xheight + pfminfo.os2_xheight = nil + end + end + end, +} + +local fix_lmmonoregular = { + comment = "wrong widths of some glyphs", + fixes = function(data) + report("fixing some wrong widths") + local unicodes = data.resources.unicodes + local descriptions = data.descriptions + local defaultwidth = descriptions[unicodes["zero"]].width + descriptions[unicodes["six"] ].width = defaultwidth + descriptions[unicodes["nine"] ].width = defaultwidth + descriptions[unicodes["caron"] ].width = defaultwidth + descriptions[unicodes["perthousand"] ].width = defaultwidth + descriptions[unicodes["numero"] ].width = defaultwidth + descriptions[unicodes["caron.cap"] ].width = defaultwidth + descriptions[unicodes["six.taboldstyle"] ].width = defaultwidth + descriptions[unicodes["nine.taboldstyle"]].width = defaultwidth + descriptions[unicodes["dollar.oldstyle" ]].width = defaultwidth + end +} return { name = "treatments", @@ -13,18 +54,26 @@ return { -- we need to complete this list in order to be able to warn -- users not to include these files unless permitted ["adobeheitistd-regular.otf"] = { + comment = "this font is part of acrobat", ignored = false, -- included = false, -- not yet - comment = "this font is part of acrobat", }, -- just an experiment .. normally no big deal but I ran into -- such case ["crap.ttf"] = { - ignored = true, comment = "a text file with suffix ttf", -- used in test file + ignored = true, }, - ["latinmodern-math.otf"] = { - comment = "experimental", - } + -- harmless example + -- ["copperplatethirtythreebc.ttf"] = { + -- comment = "hangs and has no hyphen", + -- ignored = true, + -- }, + -- ["latinmodern-math.otf"] = { + -- comment = "experimental", + -- }, + ["lmmono12regular.otf"] = fix_lmmonoregular, + ["unifrakturcook.ttf"] = fix_unifraktur, + ["unifrakturmaguntia.ttf"] = fix_unifraktur, }, } diff --git a/tex/context/fonts/unifraktur.lfg b/tex/context/fonts/unifraktur.lfg new file mode 100644 index 000000000..32ffed928 --- /dev/null +++ b/tex/context/fonts/unifraktur.lfg @@ -0,0 +1,23 @@ +-- moved to treatments.lfg +-- +-- fonts.handlers.otf.enhancers.patches.register("after","check metadata","unifraktur*", function(data,filename) +-- data.metadata.pfminfo.os2_xheight = nil +-- end) + +return { + name = "unicode fraktur", + version = "1.00", + comment = "Goodies that complement unicode fraktur.", + author = "Hans Hagen", + copyright = "ConTeXt development team", + letterspacing = { + -- watch it: zwnj's are used (in the tounicodes too) + keptligatures = { + ["c_afii301_k.ccmp"] = true, -- ck + ["c_afii301_h.ccmp"] = true, -- ch + ["t_afii301_z.ccmp"] = true, -- tz + ["uniFB05"] = true, -- ſt + }, + } +} + diff --git a/tex/context/interface/keys-cs.xml b/tex/context/interface/keys-cs.xml index ad0cf2dca..d36f969f3 100644 --- a/tex/context/interface/keys-cs.xml +++ b/tex/context/interface/keys-cs.xml @@ -284,6 +284,7 @@ + @@ -417,12 +418,14 @@ + + @@ -599,6 +602,12 @@ + + + + + + @@ -614,6 +623,7 @@ + @@ -636,6 +646,7 @@ + @@ -688,6 +699,7 @@ + @@ -717,6 +729,7 @@ + @@ -753,6 +766,8 @@ + + @@ -804,9 +819,12 @@ + + + @@ -824,8 +842,13 @@ + + + + + @@ -865,6 +888,7 @@ + @@ -932,6 +956,8 @@ + + @@ -978,6 +1004,8 @@ + 
+ @@ -988,7 +1016,11 @@ + + + + diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml index 5d107ca40..c5ba364e3 100644 --- a/tex/context/interface/keys-de.xml +++ b/tex/context/interface/keys-de.xml @@ -284,6 +284,7 @@ + @@ -417,12 +418,14 @@ + + @@ -599,6 +602,12 @@ + + + + + + @@ -614,6 +623,7 @@ + @@ -636,6 +646,7 @@ + @@ -688,6 +699,7 @@ + @@ -717,6 +729,7 @@ + @@ -753,6 +766,8 @@ + + @@ -804,9 +819,12 @@ + + + @@ -824,8 +842,13 @@ + + + + + @@ -865,6 +888,7 @@ + @@ -932,6 +956,8 @@ + + @@ -978,6 +1004,8 @@ + + @@ -988,7 +1016,11 @@ + + + + diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml index d9166d107..be59542e7 100644 --- a/tex/context/interface/keys-en.xml +++ b/tex/context/interface/keys-en.xml @@ -284,6 +284,7 @@ + @@ -417,12 +418,14 @@ + + @@ -599,6 +602,12 @@ + + + + + + @@ -614,6 +623,7 @@ + @@ -636,6 +646,7 @@ + @@ -688,6 +699,7 @@ + @@ -717,6 +729,7 @@ + @@ -753,6 +766,8 @@ + + @@ -804,9 +819,12 @@ + + + @@ -824,8 +842,13 @@ + + + + + @@ -865,6 +888,7 @@ + @@ -932,6 +956,8 @@ + + @@ -978,6 +1004,8 @@ + + @@ -988,7 +1016,11 @@ + + + + diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml index c98826cf3..43c47d578 100644 --- a/tex/context/interface/keys-fr.xml +++ b/tex/context/interface/keys-fr.xml @@ -284,6 +284,7 @@ + @@ -417,12 +418,14 @@ + + @@ -599,6 +602,12 @@ + + + + + + @@ -614,6 +623,7 @@ + @@ -636,6 +646,7 @@ + @@ -688,6 +699,7 @@ + @@ -717,6 +729,7 @@ + @@ -753,6 +766,8 @@ + + @@ -804,9 +819,12 @@ + + + @@ -824,8 +842,13 @@ + + + + + @@ -865,6 +888,7 @@ + @@ -932,6 +956,8 @@ + + @@ -978,6 +1004,8 @@ + + @@ -988,7 +1016,11 @@ + + + + diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml index afe3b8360..95c2d8aa5 100644 --- a/tex/context/interface/keys-it.xml +++ b/tex/context/interface/keys-it.xml @@ -284,6 +284,7 @@ + @@ -417,12 +418,14 @@ + + @@ -599,6 +602,12 @@ + + + + + + @@ -614,6 +623,7 @@ + @@ -636,6 +646,7 @@ + @@ -688,6 +699,7 @@ + @@ -717,6 +729,7 @@ + @@ -753,6 +766,8 @@ + + @@ -804,9 +819,12 @@ + + + @@ -824,8 +842,13 @@ + + + + + @@ -865,6 +888,7 @@ + @@ -932,6 +956,8 @@ + + @@ -978,6 +1004,8 @@ + + @@ -988,7 +1016,11 @@ + + + + diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml index 226c96839..bc940ebc4 100644 --- a/tex/context/interface/keys-nl.xml +++ b/tex/context/interface/keys-nl.xml @@ -284,6 +284,7 @@ + @@ -417,12 +418,14 @@ + + @@ -599,6 +602,12 @@ + + + + + + @@ -614,6 +623,7 @@ + @@ -636,6 +646,7 @@ + @@ -688,6 +699,7 @@ + @@ -717,6 +729,7 @@ + @@ -753,6 +766,8 @@ + + @@ -804,9 +819,12 @@ + + + @@ -824,8 +842,13 @@ + + + + + @@ -865,6 +888,7 @@ + @@ -932,6 +956,8 @@ + + @@ -978,6 +1004,8 @@ + + @@ -988,7 +1016,11 @@ + + + + diff --git a/tex/context/interface/keys-pe.xml b/tex/context/interface/keys-pe.xml index 9303c29fd..75e3a17c2 100644 --- a/tex/context/interface/keys-pe.xml +++ b/tex/context/interface/keys-pe.xml @@ -284,6 +284,7 @@ + @@ -417,12 +418,14 @@ + + @@ -599,6 +602,12 @@ + + + + + + @@ -614,6 +623,7 @@ + @@ -636,6 +646,7 @@ + @@ -688,6 +699,7 @@ + @@ -717,6 +729,7 @@ + @@ -753,6 +766,8 @@ + + @@ -804,9 +819,12 @@ + + + @@ -824,8 +842,13 @@ + + + + + @@ -865,6 +888,7 @@ + @@ -932,6 +956,8 @@ + + @@ -978,6 +1004,8 @@ + + @@ -988,7 +1016,11 @@ + + + + diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml index 29368c9bc..e83d145d0 100644 --- a/tex/context/interface/keys-ro.xml +++ b/tex/context/interface/keys-ro.xml @@ -284,6 +284,7 @@ + @@ 
-417,12 +418,14 @@ + + @@ -599,6 +602,12 @@ + + + + + + @@ -614,6 +623,7 @@ + @@ -636,6 +646,7 @@ + @@ -688,6 +699,7 @@ + @@ -717,6 +729,7 @@ + @@ -753,6 +766,8 @@ + + @@ -804,9 +819,12 @@ + + + @@ -824,8 +842,13 @@ + + + + + @@ -865,6 +888,7 @@ + @@ -932,6 +956,8 @@ + + @@ -978,6 +1004,8 @@ + + @@ -988,7 +1016,11 @@ + + + + diff --git a/tex/context/patterns/word-xx.lua b/tex/context/patterns/word-xx.lua new file mode 100644 index 000000000..f8b38fe75 --- /dev/null +++ b/tex/context/patterns/word-xx.lua @@ -0,0 +1,14 @@ +return { + ["comment"]="test", + ["copyright"]="not relevant", + ["language"]="xx", + ["lists"]={ + { +-- ["data"]="we thrive information in thick worlds because of our marvelous and everyday capacity to select edit single out structure highlight group pair merge harmonize synthesize focus organize condense reduce boil down choose categorize catalog classify list abstract scan look into idealize isolate discriminate distinguish screen pigeonhole pick over sort integrate blend inspect filter lump skip smooth chunk average approximate cluster aggregate outline summarize itemize review dip into flip through browse glance into leaf through skim refine enumerate glean synopsize winnow the wheat from the chaff and separate the sheep from the goats", + ["data"]="abstract aggregate and approximate average because blend boil browse capacity catalog categorize chaff choose chunk classify cluster condense dip discriminate distinguish down edit enumerate everyday filter flip focus from glance glean goats group harmonize highlight idealize in information inspect integrate into isolate itemize leaf list look lump marvelous merge of organize our out outline over pair pick pigeonhole reduce refine review scan screen select separate sheep single skim skip smooth sort structure summarize synopsize synthesize the thick thrive through to we wheat winnow worlds", + }, + }, + ["timestamp"]="2013-05-20 14:15:21", + ["version"]="1.00", +} + diff --git a/tex/generic/context/luatex/luatex-basics-gen.lua b/tex/generic/context/luatex/luatex-basics-gen.lua index 4a46fbb07..9cf5b9317 100644 --- a/tex/generic/context/luatex/luatex-basics-gen.lua +++ b/tex/generic/context/luatex/luatex-basics-gen.lua @@ -89,6 +89,7 @@ local remapper = { fea = "font feature files", pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! 
+ afm = "afm", } function resolvers.findfile(name,fileformat) @@ -117,6 +118,11 @@ end resolvers.findbinfile = resolvers.findfile +function resolvers.loadbinfile(filename,filetype) + local data = io.loaddata(filename) + return true, data, #data +end + function resolvers.resolve(s) return s end @@ -149,19 +155,29 @@ do local cachepaths = kpse.expand_var('$TEXMFCACHE') or "" - -- quite like tex live or so + -- quite like tex live or so (the weird $TEXMFCACHE test seems to be needed on miktex) - if cachepaths == "" then + if cachepaths == "" or cachepaths == "$TEXMFCACHE" then cachepaths = kpse.expand_var('$TEXMFVAR') or "" end - -- this also happened to be used + -- this also happened to be used (the weird $TEXMFVAR test seems to be needed on miktex) - if cachepaths == "" then + if cachepaths == "" or cachepaths == "$TEXMFVAR" then cachepaths = kpse.expand_var('$VARTEXMF') or "" end - -- and this is a last resort + -- and this is a last resort (hm, we could use TEMP or TEMPDIR) + + if cachepaths == "" then + local fallbacks = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } + for i=1,#fallbacks do + cachepaths = os.getenv(fallbacks[i]) or "" + if cachepath ~= "" and lfs.isdir(cachepath) then + break + end + end + end if cachepaths == "" then cachepaths = "." @@ -267,7 +283,7 @@ function caches.savedata(path,name,data) local luaname, lucname = makefullname(path,name) if luaname then texio.write(string.format("(save: %s)",luaname)) - table.tofile(luaname,data,true,{ reduce = true }) + table.tofile(luaname,data,true) if lucname and type(caches.compile) == "function" then os.remove(lucname) -- better be safe texio.write(string.format("(save: %s)",lucname)) diff --git a/tex/generic/context/luatex/luatex-basics-nod.lua b/tex/generic/context/luatex/luatex-basics-nod.lua index 5ab9df7f9..50a1e8627 100644 --- a/tex/generic/context/luatex/luatex-basics-nod.lua +++ b/tex/generic/context/luatex/luatex-basics-nod.lua @@ -88,17 +88,80 @@ function nodes.delete(head,current) return nodes.remove(head,current,true) end -nodes.before = node.insert_before -nodes.after = node.insert_after - function nodes.pool.kern(k) local n = new_node("kern",1) n.kern = k return n end -function nodes.endofmath(n) - for n in traverse_id(math_code,n.next) do - return n - end -end +-- experimental + +local getfield = node.getfield or function(n,tag) return n[tag] end +local setfield = node.setfield or function(n,tag,value) n[tag] = value end + +nodes.getfield = getfield +nodes.setfield = setfield + +nodes.getattr = getfield +nodes.setattr = setfield + +if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end +if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end +if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end +if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end +if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end +if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end +if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end + +function nodes.tonut (n) return n end +function nodes.tonode(n) return n end + +-- being lazy ... just copy a bunch ... 
not all needed in generic but we assume +-- nodes to be kind of private anyway + +nodes.tostring = node.tostring or tostring +nodes.copy = node.copy +nodes.copy_list = node.copy_list +nodes.delete = node.delete +nodes.dimensions = node.dimensions +nodes.end_of_math = node.end_of_math +nodes.flush_list = node.flush_list +nodes.flush_node = node.flush_node +nodes.free = node.free +nodes.insert_after = node.insert_after +nodes.insert_before = node.insert_before +nodes.hpack = node.hpack +nodes.new = node.new +nodes.tail = node.tail +nodes.traverse = node.traverse +nodes.traverse_id = node.traverse_id +nodes.slide = node.slide +nodes.vpack = node.vpack + +nodes.first_glyph = node.first_glyph +nodes.first_character = node.first_character +nodes.has_glyph = node.has_glyph or node.first_glyph + +nodes.current_attr = node.current_attr +nodes.do_ligature_n = node.do_ligature_n +nodes.has_field = node.has_field +nodes.last_node = node.last_node +nodes.usedlist = node.usedlist +nodes.protrusion_skippable = node.protrusion_skippable +nodes.write = node.write + +nodes.has_attribute = node.has_attribute +nodes.set_attribute = node.set_attribute +nodes.unset_attribute = node.unset_attribute + +nodes.protect_glyphs = node.protect_glyphs +nodes.unprotect_glyphs = node.unprotect_glyphs +nodes.kerning = node.kerning +nodes.ligaturing = node.ligaturing +nodes.mlist_to_hlist = node.mlist_to_hlist + +-- in generic code, at least for some time, we stay nodes, while in context +-- we can go nuts (e.g. experimental); this split permits us us keep code +-- used elsewhere stable but at the same time play around in context + +nodes.nuts = nodes diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua index cf5862ca9..17c5b6b22 100644 --- a/tex/generic/context/luatex/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 05/28/13 00:34:00 +-- merge date : 10/20/13 07:09:03 do -- begin closure to overcome local limits and interference @@ -95,6 +95,7 @@ if not modules then modules={} end modules ['l-lpeg']={ license="see context related readme files" } lpeg=require("lpeg") +if not lpeg.print then function lpeg.print(...) 
print(lpeg.pcode(...)) end end local type,next,tostring=type,next,tostring local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format local floor=math.floor @@ -110,28 +111,46 @@ patterns.anything=anything patterns.endofstring=endofstring patterns.beginofstring=alwaysmatched patterns.alwaysmatched=alwaysmatched -local digit,sign=R('09'),S('+-') +local sign=S('+-') +local zero=P('0') +local digit=R('09') +local octdigit=R("07") +local lowercase=R("az") +local uppercase=R("AZ") +local underscore=P("_") +local hexdigit=digit+lowercase+uppercase local cr,lf,crlf=P("\r"),P("\n"),P("\r\n") local newline=crlf+S("\r\n") local escaped=P("\\")*anything local squote=P("'") local dquote=P('"') local space=P(" ") -local utfbom_32_be=P('\000\000\254\255') -local utfbom_32_le=P('\255\254\000\000') -local utfbom_16_be=P('\255\254') -local utfbom_16_le=P('\254\255') -local utfbom_8=P('\239\187\191') +local period=P(".") +local comma=P(",") +local utfbom_32_be=P('\000\000\254\255') +local utfbom_32_le=P('\255\254\000\000') +local utfbom_16_be=P('\254\255') +local utfbom_16_le=P('\255\254') +local utfbom_8=P('\239\187\191') local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8 local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8") +local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8") local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0) local utf8next=R("\128\191") +patterns.utfbom_32_be=utfbom_32_be +patterns.utfbom_32_le=utfbom_32_le +patterns.utfbom_16_be=utfbom_16_be +patterns.utfbom_16_le=utfbom_16_le +patterns.utfbom_8=utfbom_8 +patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n") +patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000") patterns.utf8one=R("\000\127") patterns.utf8two=R("\194\223")*utf8next patterns.utf8three=R("\224\239")*utf8next*utf8next patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next patterns.utfbom=utfbom patterns.utftype=utftype +patterns.utfstricttype=utfstricttype patterns.utfoffset=utfoffset local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false) @@ -155,23 +174,8 @@ local stripper=spacer^0*C((spacer^0*nonspacer^1)^0) local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0)) patterns.stripper=stripper patterns.collapser=collapser -patterns.digit=digit -patterns.sign=sign -patterns.cardinal=sign^0*digit^1 -patterns.integer=sign^0*digit^1 -patterns.unsigned=digit^0*P('.')*digit^1 -patterns.float=sign^0*patterns.unsigned -patterns.cunsigned=digit^0*P(',')*digit^1 -patterns.cfloat=sign^0*patterns.cunsigned -patterns.number=patterns.float+patterns.integer -patterns.cnumber=patterns.cfloat+patterns.integer -patterns.oct=P("0")*R("07")^1 -patterns.octal=patterns.oct -patterns.HEX=P("0x")*R("09","AF")^1 -patterns.hex=P("0x")*R("09","af")^1 -patterns.hexadecimal=P("0x")*R("09","AF","af")^1 -patterns.lowercase=R("az") -patterns.uppercase=R("AZ") +patterns.lowercase=lowercase +patterns.uppercase=uppercase patterns.letter=patterns.lowercase+patterns.uppercase patterns.space=space patterns.tab=P("\t") @@ -179,12 +183,12 @@ patterns.spaceortab=patterns.space+patterns.tab patterns.newline=newline patterns.emptyline=newline^1 
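-- [editor's note, not part of the commit] In this revision the 16-bit BOM patterns
-- are corrected (utf-16-be now starts with FE FF and utf-16-le with FF FE; the old
-- merged code had the two swapped) and the individual BOM patterns plus the utf-16
-- newline patterns are exported on lpeg.patterns. A minimal detection sketch,
-- assuming the patterns table is reachable as lpeg.patterns:
--
--   local patterns = lpeg.patterns
--   print(lpeg.match(patterns.utftype, "\239\187\191hello"))  -- "utf-8" (BOM found)
--   print(lpeg.match(patterns.utftype, "hello"))              -- "utf-8" (fallback)
--   print(lpeg.match(patterns.utfstricttype, "hello"))        -- nil (strict: no BOM)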
patterns.equal=P("=") -patterns.comma=P(",") -patterns.commaspacer=P(",")*spacer^0 -patterns.period=P(".") +patterns.comma=comma +patterns.commaspacer=comma*spacer^0 +patterns.period=period patterns.colon=P(":") patterns.semicolon=P(";") -patterns.underscore=P("_") +patterns.underscore=underscore patterns.escaped=escaped patterns.squote=squote patterns.dquote=dquote @@ -197,10 +201,29 @@ patterns.unspacer=((patterns.spacer^1)/"")^0 patterns.singlequoted=squote*patterns.nosquote*squote patterns.doublequoted=dquote*patterns.nodquote*dquote patterns.quoted=patterns.doublequoted+patterns.singlequoted -patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1) +patterns.digit=digit +patterns.octdigit=octdigit +patterns.hexdigit=hexdigit +patterns.sign=sign +patterns.cardinal=digit^1 +patterns.integer=sign^-1*digit^1 +patterns.unsigned=digit^0*period*digit^1 +patterns.float=sign^-1*patterns.unsigned +patterns.cunsigned=digit^0*comma*digit^1 +patterns.cfloat=sign^-1*patterns.cunsigned +patterns.number=patterns.float+patterns.integer +patterns.cnumber=patterns.cfloat+patterns.integer +patterns.oct=zero*octdigit^1 +patterns.octal=patterns.oct +patterns.HEX=zero*P("X")*(digit+uppercase)^1 +patterns.hex=zero*P("x")*(digit+lowercase)^1 +patterns.hexadecimal=zero*S("xX")*hexdigit^1 +patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1 +patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1 +patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring patterns.somecontent=(anything-newline-space)^1 patterns.beginline=#(1-newline) -patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0)) +patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0)) local function anywhere(pattern) return P { P(pattern)+1*V(1) } end @@ -372,7 +395,7 @@ function lpeg.replacer(one,two,makefunction,isutf) return pattern end end -function lpeg.finder(lst,makefunction) +function lpeg.finder(lst,makefunction) local pattern if type(lst)=="table" then pattern=P(false) @@ -401,8 +424,8 @@ local splitters_f,splitters_s={},{} function lpeg.firstofsplit(separator) local splitter=splitters_f[separator] if not splitter then - separator=P(separator) - splitter=C((1-separator)^0) + local pattern=P(separator) + splitter=C((1-pattern)^0) splitters_f[separator]=splitter end return splitter @@ -410,12 +433,31 @@ end function lpeg.secondofsplit(separator) local splitter=splitters_s[separator] if not splitter then - separator=P(separator) - splitter=(1-separator)^0*separator*C(anything^0) + local pattern=P(separator) + splitter=(1-pattern)^0*pattern*C(anything^0) splitters_s[separator]=splitter end return splitter end +local splitters_s,splitters_p={},{} +function lpeg.beforesuffix(separator) + local splitter=splitters_s[separator] + if not splitter then + local pattern=P(separator) + splitter=C((1-pattern)^0)*pattern*endofstring + splitters_s[separator]=splitter + end + return splitter +end +function lpeg.afterprefix(separator) + local splitter=splitters_p[separator] + if not splitter then + local pattern=P(separator) + splitter=pattern*C(anything^0) + splitters_p[separator]=splitter + end + return splitter +end function lpeg.balancer(left,right) left,right=P(left),P(right) return P { left*((1-left-right)+V(1))^0*right } @@ -647,9 +689,6 @@ 
end function lpeg.times(pattern,n) return P(nextstep(n,2^16,{ "start",["1"]=pattern })) end -local digit=R("09") -local period=P(".") -local zero=P("0") local trailingzeros=zero^0*-digit local case_1=period*trailingzeros/"" local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"") @@ -1037,6 +1076,7 @@ local noquotes,hexify,handle,reduce,compact,inline,functions local reserved=table.tohash { 'and','break','do','else','elseif','end','false','for','function','if', 'in','local','nil','not','or','repeat','return','then','true','until','while', + 'NaN','goto', } local function simple_table(t) if #t>0 then @@ -1056,12 +1096,12 @@ local function simple_table(t) else tt[nt]=tostring(v) end - elseif tv=="boolean" then - nt=nt+1 - tt[nt]=tostring(v) elseif tv=="string" then nt=nt+1 tt[nt]=format("%q",v) + elseif tv=="boolean" then + nt=nt+1 + tt[nt]=v and "true" or "false" else tt=nil break @@ -1094,7 +1134,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s[%q]={",depth,name)) end elseif tn=="boolean" then - handle(format("%s[%s]={",depth,tostring(name))) + handle(format("%s[%s]={",depth,name and "true" or "false")) else handle(format("%s{",depth)) end @@ -1118,21 +1158,21 @@ local function do_serialize(root,name,depth,level,indexed) for i=1,#sk do local k=sk[i] local v=root[k] - local t,tk=type(v),type(k) + local tv,tk=type(v),type(k) if compact and first and tk=="number" and k>=first and k<=last then - if t=="number" then + if tv=="number" then if hexify then handle(format("%s 0x%04X,",depth,v)) else handle(format("%s %s,",depth,v)) end - elseif t=="string" then + elseif tv=="string" then if reduce and tonumber(v) then handle(format("%s %s,",depth,v)) else handle(format("%s %q,",depth,v)) end - elseif t=="table" then + elseif tv=="table" then if not next(v) then handle(format("%s {},",depth)) elseif inline then @@ -1145,11 +1185,11 @@ local function do_serialize(root,name,depth,level,indexed) else do_serialize(v,k,depth,level+1,true) end - elseif t=="boolean" then - handle(format("%s %s,",depth,tostring(v))) - elseif t=="function" then + elseif tv=="boolean" then + handle(format("%s %s,",depth,v and "true" or "false")) + elseif tv=="function" then if functions then - handle(format('%s load(%q),',depth,dump(v))) + handle(format('%s load(%q),',depth,dump(v))) else handle(format('%s "function",',depth)) end @@ -1160,7 +1200,7 @@ local function do_serialize(root,name,depth,level,indexed) if false then handle(format("%s __p__=nil,",depth)) end - elseif t=="number" then + elseif tv=="number" then if tk=="number" then if hexify then handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) @@ -1169,9 +1209,9 @@ local function do_serialize(root,name,depth,level,indexed) end elseif tk=="boolean" then if hexify then - handle(format("%s [%s]=0x%04X,",depth,tostring(k),v)) + handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v)) else - handle(format("%s [%s]=%s,",depth,tostring(k),v)) + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) end elseif noquotes and not reserved[k] and lpegmatch(propername,k) then if hexify then @@ -1186,7 +1226,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%q]=%s,",depth,k,v)) end end - elseif t=="string" then + elseif tv=="string" then if reduce and tonumber(v) then if tk=="number" then if hexify then @@ -1195,7 +1235,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]=%s,",depth,k,v)) end elseif tk=="boolean" then - handle(format("%s 
[%s]=%s,",depth,tostring(k),v)) + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=%s,",depth,k,v)) else @@ -1209,14 +1249,14 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]=%q,",depth,k,v)) end elseif tk=="boolean" then - handle(format("%s [%s]=%q,",depth,tostring(k),v)) + handle(format("%s [%s]=%q,",depth,k and "true" or "false",v)) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=%q,",depth,k,v)) else handle(format("%s [%q]=%q,",depth,k,v)) end end - elseif t=="table" then + elseif tv=="table" then if not next(v) then if tk=="number" then if hexify then @@ -1225,7 +1265,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]={},",depth,k)) end elseif tk=="boolean" then - handle(format("%s [%s]={},",depth,tostring(k))) + handle(format("%s [%s]={},",depth,k and "true" or "false")) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s={},",depth,k)) else @@ -1241,7 +1281,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) end elseif tk=="boolean" then - handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", "))) + handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", "))) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s={ %s },",depth,k,concat(st,", "))) else @@ -1253,21 +1293,21 @@ local function do_serialize(root,name,depth,level,indexed) else do_serialize(v,k,depth,level+1) end - elseif t=="boolean" then + elseif tv=="boolean" then if tk=="number" then if hexify then - handle(format("%s [0x%04X]=%s,",depth,k,tostring(v))) + handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false")) else - handle(format("%s [%s]=%s,",depth,k,tostring(v))) + handle(format("%s [%s]=%s,",depth,k,v and "true" or "false")) end elseif tk=="boolean" then - handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v))) + handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false")) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,tostring(v))) + handle(format("%s %s=%s,",depth,k,v and "true" or "false")) else - handle(format("%s [%q]=%s,",depth,k,tostring(v))) + handle(format("%s [%q]=%s,",depth,k,v and "true" or "false")) end - elseif t=="function" then + elseif tv=="function" then if functions then local f=getinfo(v).what=="C" and dump(dummy) or dump(v) if tk=="number" then @@ -1277,7 +1317,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]=load(%q),",depth,k,f)) end elseif tk=="boolean" then - handle(format("%s [%s]=load(%q),",depth,tostring(k),f)) + handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f)) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=load(%q),",depth,k,f)) else @@ -1292,7 +1332,7 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s [%s]=%q,",depth,k,tostring(v))) end elseif tk=="boolean" then - handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v))) + handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v))) elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=%q,",depth,k,tostring(v))) else @@ -1662,6 +1702,7 @@ local function readall(f) return f:read('*all') else local 
done=f:seek("set",0) + local step if size<1024*1024 then step=1024*1024 elseif size>16*1024*1024 then @@ -2185,17 +2226,24 @@ end function file.joinpath(tab,separator) return tab and concat(tab,separator or io.pathseparator) end +local someslash=S("\\/") local stripper=Cs(P(fwslash)^0/""*reslasher) -local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon +local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon local isroot=fwslash^1*-1 local hasroot=fwslash^1 +local reslasher=lpeg.replacer(S("\\/"),"/") local deslasher=lpeg.replacer(S("\\/")^1,"/") function file.join(...) local lst={... } local one=lst[1] if lpegmatch(isnetwork,one) then + local one=lpegmatch(reslasher,one) local two=lpegmatch(deslasher,concat(lst,"/",2)) - return one.."/"..two + if lpegmatch(hasroot,two) then + return one..two + else + return one.."/"..two + end elseif lpegmatch(isroot,one) then local two=lpegmatch(deslasher,concat(lst,"/",2)) if lpegmatch(hasroot,two) then @@ -2212,7 +2260,9 @@ end local drivespec=R("az","AZ")^1*colon local anchors=fwslash+drivespec local untouched=periods+(1-period)^1*P(-1) -local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1)) +local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0) +local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//") +local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1)) local absolute=fwslash function file.collapsepath(str,anchor) if not str then @@ -2375,9 +2425,9 @@ function string.booleanstring(str) end function string.is_boolean(str,default) if type(str)=="string" then - if str=="true" or str=="yes" or str=="on" or str=="t" then + if str=="true" or str=="yes" or str=="on" or str=="t" or str=="1" then return true - elseif str=="false" or str=="no" or str=="off" or str=="f" then + elseif str=="false" or str=="no" or str=="off" or str=="f" or str=="0" then return false end end @@ -2580,6 +2630,7 @@ local tracedchar = string.tracedchar local autosingle = string.autosingle local autodouble = string.autodouble local sequenced = table.sequenced +local formattednumber = number.formatted ]] local template=[[ %s @@ -2594,7 +2645,7 @@ setmetatable(arguments,{ __index=function(t,k) end }) local prefix_any=C((S("+- .")+R("09"))^0) -local prefix_tab=C((1-R("az","AZ","09","%%"))^0) +local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0) local format_s=function(f) n=n+1 if f and f~="" then @@ -2624,7 +2675,7 @@ local format_i=function(f) if f and f~="" then return format("format('%%%si',a%s)",f,n) else - return format("a%s",n) + return format("format('%%i',a%s)",n) end end local format_d=format_i @@ -2776,6 +2827,39 @@ end local format_W=function(f) return format("nspaces[%s]",tonumber(f) or 0) end +local digit=patterns.digit +local period=patterns.period +local three=digit*digit*digit +local splitter=Cs ( + (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2) +) +patterns.formattednumber=splitter +function number.formatted(n,sep1,sep2) + local s=type(s)=="string" and n or format("%0.2f",n) + if sep1==true then + return lpegmatch(splitter,s,1,".",",") + elseif sep1=="." 
then + return lpegmatch(splitter,s,1,sep1,sep2 or ",") + elseif sep1=="," then + return lpegmatch(splitter,s,1,sep1,sep2 or ".") + else + return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".") + end +end +local format_m=function(f) + n=n+1 + if not f or f=="" then + f="," + end + return format([[formattednumber(a%s,%q,".")]],n,f) +end +local format_M=function(f) + n=n+1 + if not f or f=="" then + f="." + end + return format([[formattednumber(a%s,%q,",")]],n,f) +end local format_rest=function(s) return format("%q",s) end @@ -2813,7 +2897,8 @@ local builder=Cs { "start", +V("w") +V("W") +V("a") -+V("A") ++V("A") ++V("m")+V("M") +V("*") )+V("*") )*(P(-1)+Carg(1)) @@ -2844,14 +2929,16 @@ local builder=Cs { "start", ["b"]=(prefix_any*P("b"))/format_b, ["t"]=(prefix_tab*P("t"))/format_t, ["T"]=(prefix_tab*P("T"))/format_T, - ["l"]=(prefix_tab*P("l"))/format_l, - ["L"]=(prefix_tab*P("L"))/format_L, + ["l"]=(prefix_any*P("l"))/format_l, + ["L"]=(prefix_any*P("L"))/format_L, ["I"]=(prefix_any*P("I"))/format_I, ["w"]=(prefix_any*P("w"))/format_w, ["W"]=(prefix_any*P("W"))/format_W, + ["m"]=(prefix_tab*P("m"))/format_m, + ["M"]=(prefix_tab*P("M"))/format_M, ["a"]=(prefix_any*P("a"))/format_a, ["A"]=(prefix_any*P("A"))/format_A, - ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest, + ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest, ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension, } local direct=Cs ( @@ -2897,10 +2984,13 @@ local function add(t,name,template,preamble) end end strings.formatters.add=add -lpeg.patterns.xmlescape=Cs((P("<")/"<"+P(">")/">"+P("&")/"&"+P('"')/"""+P(1))^0) -lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0) +patterns.xmlescape=Cs((P("<")/"<"+P(">")/">"+P("&")/"&"+P('"')/"""+P(1))^0) +patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0) +patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0) +patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"')) add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]]) add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]]) +add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]]) end -- closure @@ -2979,6 +3069,7 @@ local remapper={ fea="font feature files", pfa="type1 fonts", pfb="type1 fonts", + afm="afm", } function resolvers.findfile(name,fileformat) name=string.gsub(name,"\\","/") @@ -2997,6 +3088,10 @@ function resolvers.findfile(name,fileformat) return found end resolvers.findbinfile=resolvers.findfile +function resolvers.loadbinfile(filename,filetype) + local data=io.loaddata(filename) + return true,data,#data +end function resolvers.resolve(s) return s end @@ -3012,12 +3107,21 @@ if not caches.namespace or caches.namespace=="" or caches.namespace=="context" t end do local cachepaths=kpse.expand_var('$TEXMFCACHE') or "" - if cachepaths=="" then + if cachepaths=="" or cachepaths=="$TEXMFCACHE" then cachepaths=kpse.expand_var('$TEXMFVAR') or "" end - if cachepaths=="" then + if cachepaths=="" or cachepaths=="$TEXMFVAR" then cachepaths=kpse.expand_var('$VARTEXMF') or "" end + if cachepaths=="" then + local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" } + for i=1,#fallbacks do + cachepaths=os.getenv(fallbacks[i]) or "" + if cachepath~="" and lfs.isdir(cachepath) then + break + end + end + end if cachepaths=="" then cachepaths="." 
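-- [editor's note, not part of the commit] The cache lookup above now also treats the
-- literal results "$TEXMFCACHE" / "$TEXMFVAR" (as returned by some miktex setups) as
-- empty and then falls back to a list of generic environment variables. Note that the
-- fallback loop tests the undefined global `cachepath` instead of `cachepaths`, which
-- looks like a typo: depending on how the lfs.isdir wrapper handles a nil argument it
-- either raises an error or skips the intended early break, so the loop ends up with
-- the last candidate rather than the first existing directory. The same construct
-- appears in luatex-basics-gen.lua earlier in this patch. A corrected sketch:
--
--   for i=1,#fallbacks do
--     cachepaths = os.getenv(fallbacks[i]) or ""
--     if cachepaths ~= "" and lfs.isdir(cachepaths) then
--       break
--     end
--   end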
end @@ -3111,7 +3215,7 @@ function caches.savedata(path,name,data) local luaname,lucname=makefullname(path,name) if luaname then texio.write(string.format("(save: %s)",luaname)) - table.tofile(luaname,data,true,{ reduce=true }) + table.tofile(luaname,data,true) if lucname and type(caches.compile)=="function" then os.remove(lucname) texio.write(string.format("(save: %s)",lucname)) @@ -3323,18 +3427,63 @@ end function nodes.delete(head,current) return nodes.remove(head,current,true) end -nodes.before=node.insert_before -nodes.after=node.insert_after function nodes.pool.kern(k) local n=new_node("kern",1) n.kern=k return n end -function nodes.endofmath(n) - for n in traverse_id(math_code,n.next) do - return n - end -end +local getfield=node.getfield or function(n,tag) return n[tag] end +local setfield=node.setfield or function(n,tag,value) n[tag]=value end +nodes.getfield=getfield +nodes.setfield=setfield +nodes.getattr=getfield +nodes.setattr=setfield +if node.getid then nodes.getid=node.getid else function nodes.getid (n) return getfield(n,"id") end end +if node.getsubtype then nodes.getsubtype=node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end +if node.getnext then nodes.getnext=node.getnext else function nodes.getnext (n) return getfield(n,"next") end end +if node.getprev then nodes.getprev=node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end +if node.getchar then nodes.getchar=node.getchar else function nodes.getchar (n) return getfield(n,"char") end end +if node.getfont then nodes.getfont=node.getfont else function nodes.getfont (n) return getfield(n,"font") end end +if node.getlist then nodes.getlist=node.getlist else function nodes.getlist (n) return getfield(n,"list") end end +function nodes.tonut (n) return n end +function nodes.tonode(n) return n end +nodes.tostring=node.tostring or tostring +nodes.copy=node.copy +nodes.copy_list=node.copy_list +nodes.delete=node.delete +nodes.dimensions=node.dimensions +nodes.end_of_math=node.end_of_math +nodes.flush_list=node.flush_list +nodes.flush_node=node.flush_node +nodes.free=node.free +nodes.insert_after=node.insert_after +nodes.insert_before=node.insert_before +nodes.hpack=node.hpack +nodes.new=node.new +nodes.tail=node.tail +nodes.traverse=node.traverse +nodes.traverse_id=node.traverse_id +nodes.slide=node.slide +nodes.vpack=node.vpack +nodes.first_glyph=node.first_glyph +nodes.first_character=node.first_character +nodes.has_glyph=node.has_glyph or node.first_glyph +nodes.current_attr=node.current_attr +nodes.do_ligature_n=node.do_ligature_n +nodes.has_field=node.has_field +nodes.last_node=node.last_node +nodes.usedlist=node.usedlist +nodes.protrusion_skippable=node.protrusion_skippable +nodes.write=node.write +nodes.has_attribute=node.has_attribute +nodes.set_attribute=node.set_attribute +nodes.unset_attribute=node.unset_attribute +nodes.protect_glyphs=node.protect_glyphs +nodes.unprotect_glyphs=node.unprotect_glyphs +nodes.kerning=node.kerning +nodes.ligaturing=node.ligaturing +nodes.mlist_to_hlist=node.mlist_to_hlist +nodes.nuts=nodes end -- closure @@ -3578,6 +3727,7 @@ function constructors.scale(tfmdata,specification) if tonumber(specification) then specification={ size=specification } end + target.specification=specification local scaledpoints=specification.size local relativeid=specification.relativeid local properties=tfmdata.properties or {} @@ -3629,7 +3779,7 @@ function constructors.scale(tfmdata,specification) targetproperties.script=properties.script or "dflt" 
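-- [editor's note, not part of the commit] In the scaling code below, the original
-- specification is now threaded through to constructors.calculatescale (it is also
-- stored above as target.specification), and two magic numbers are rewritten in
-- terms of factors.pt (65536 scaled points per point): the slant ratio taken from
-- the raw font data is multiplied by factors.pt so it lands in the fixed-point form
-- TeX expects for \fontdimen1, and the default design size becomes factors.pt*10,
-- i.e. the same value as the old hard-coded 655360 (= 10pt). Quick check:
--
--   assert(65536 * 10 == 655360)   -- factors.pt * 10 matches the old literal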
targetproperties.mode=properties.mode or "base" local askedscaledpoints=scaledpoints - local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints) + local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification) local hdelta=delta local vdelta=delta target.designsize=parameters.designsize @@ -3703,7 +3853,7 @@ function constructors.scale(tfmdata,specification) end target.type=isvirtual and "virtual" or "real" target.postprocessors=tfmdata.postprocessors - local targetslant=(parameters.slant or parameters[1] or 0) + local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt local targetspace=(parameters.space or parameters[2] or 0)*hdelta local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta @@ -4021,7 +4171,7 @@ function constructors.finalize(tfmdata) parameters.slantfactor=tfmdata.slant or 0 end if not parameters.designsize then - parameters.designsize=tfmdata.designsize or 655360 + parameters.designsize=tfmdata.designsize or (factors.pt*10) end if not parameters.units then parameters.units=tfmdata.units_per_em or 1000 @@ -4145,11 +4295,11 @@ function constructors.hashinstance(specification,force) size=math.round(constructors.scaled(size,designsizes[hash])) specification.size=size end - if fallbacks then - return hash..' @ '..tostring(size)..' @ '..fallbacks - else - return hash..' @ '..tostring(size) - end + if fallbacks then + return hash..' @ '..tostring(size)..' @ '..fallbacks + else + return hash..' @ '..tostring(size) + end end function constructors.setname(tfmdata,specification) if constructors.namemode=="specification" then @@ -4383,7 +4533,8 @@ function constructors.collectprocessors(what,tfmdata,features,trace,report) local whathandler=handlers[what] local whatfeatures=whathandler.features local whatprocessors=whatfeatures.processors - local processors=whatprocessors[properties.mode] + local mode=properties.mode + local processors=whatprocessors[mode] if processors then for i=1,#processors do local step=processors[i] @@ -4400,7 +4551,7 @@ function constructors.collectprocessors(what,tfmdata,features,trace,report) end end elseif trace then - report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname) + report("no feature processors for mode %a for font %a",mode,properties.fullname) end end return processes @@ -4411,7 +4562,8 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report) local whathandler=handlers[what] local whatfeatures=whathandler.features local whatmanipulators=whatfeatures.manipulators - local manipulators=whatmanipulators[properties.mode] + local mode=properties.mode + local manipulators=whatmanipulators[mode] if manipulators then for i=1,#manipulators do local step=manipulators[i] @@ -4420,7 +4572,7 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report) if value then local action=step.action if trace then - report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) + report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname) end if action then action(tfmdata,feature,value) @@ -4780,33 +4932,38 @@ function mappings.addtounicode(data,filename) if not unicode or unicode=="" then local split=lpegmatch(namesplitter,name) local nsplit=split and #split or 0 - if nsplit>=2 then - local t,n={},0 - for l=1,nsplit do - local base=split[l] - local 
u=unicodes[base] or unicodevector[base] - if not u then + local t,n={},0 + unicode=true + for l=1,nsplit do + local base=split[l] + local u=unicodes[base] or unicodevector[base] + if not u then + break + elseif type(u)=="table" then + if u[1]>=private then + unicode=false break - elseif type(u)=="table" then - n=n+1 - t[n]=u[1] - else - n=n+1 - t[n]=u end - end - if n==0 then - elseif n==1 then - originals[index]=t[1] - tounicode[index]=tounicode16(t[1],name) + n=n+1 + t[n]=u[1] else - originals[index]=t - tounicode[index]=tounicode16sequence(t) + if u>=private then + unicode=false + break + end + n=n+1 + t[n]=u end - nl=nl+1 - unicode=true + end + if n==0 then + elseif n==1 then + originals[index]=t[1] + tounicode[index]=tounicode16(t[1],name) else + originals[index]=t + tounicode[index]=tounicode16sequence(t) end + nl=nl+1 end if not unicode or unicode=="" then local foundcodes,multiple=lpegmatch(uparser,name) @@ -4895,33 +5052,1131 @@ function fonts.names.resolve(name,sub) end loaded=true end - if type(data)=="table" and data.version==fonts.names.version then - local condensed=string.gsub(string.lower(name),"[^%a%d]","") - local found=data.mappings and data.mappings[condensed] - if found then - local fontname,filename,subfont=found[1],found[2],found[3] - if subfont then - return filename,fontname - else - return filename,false + if type(data)=="table" and data.version==fonts.names.version then + local condensed=string.gsub(string.lower(name),"[^%a%d]","") + local found=data.mappings and data.mappings[condensed] + if found then + local fontname,filename,subfont=found[1],found[2],found[3] + if subfont then + return filename,fontname + else + return filename,false + end + elseif fonts.names.reportmissingname then + fonts.names.reportmissingname() + return name,false + end + elseif fonts.names.reportmissingbase then + fonts.names.reportmissingbase() + end +end +fonts.names.resolvespec=fonts.names.resolve +function fonts.names.getfilename(askedname,suffix) + return "" +end +function fonts.names.ignoredfile(filename) + return true +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-tfm']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local next=next +local match=string.match +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) +local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end) +local report_defining=logs.reporter("fonts","defining") +local report_tfm=logs.reporter("fonts","tfm loading") +local findbinfile=resolvers.findbinfile +local fonts=fonts +local handlers=fonts.handlers +local readers=fonts.readers +local constructors=fonts.constructors +local encodings=fonts.encodings +local tfm=constructors.newhandler("tfm") +local tfmfeatures=constructors.newfeatures("tfm") +local registertfmfeature=tfmfeatures.register +constructors.resolvevirtualtoo=false +fonts.formats.tfm="type1" +function tfm.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) + if okay then + return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm) + else + return {} + end +end +local function read_from_tfm(specification) + local filename=specification.filename + local size=specification.size + 
if trace_defining then + report_defining("loading tfm file %a at size %s",filename,size) + end + local tfmdata=font.read_tfm(filename,size) + if tfmdata then + local features=specification.features and specification.features.normal or {} + local resources=tfmdata.resources or {} + local properties=tfmdata.properties or {} + local parameters=tfmdata.parameters or {} + local shared=tfmdata.shared or {} + properties.name=tfmdata.name + properties.fontname=tfmdata.fontname + properties.psname=tfmdata.psname + properties.filename=specification.filename + parameters.size=size + shared.rawdata={} + shared.features=features + shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil + tfmdata.properties=properties + tfmdata.resources=resources + tfmdata.parameters=parameters + tfmdata.shared=shared + parameters.slant=parameters.slant or parameters[1] or 0 + parameters.space=parameters.space or parameters[2] or 0 + parameters.space_stretch=parameters.space_stretch or parameters[3] or 0 + parameters.space_shrink=parameters.space_shrink or parameters[4] or 0 + parameters.x_height=parameters.x_height or parameters[5] or 0 + parameters.quad=parameters.quad or parameters[6] or 0 + parameters.extra_space=parameters.extra_space or parameters[7] or 0 + constructors.enhanceparameters(parameters) + if constructors.resolvevirtualtoo then + fonts.loggers.register(tfmdata,file.suffix(filename),specification) + local vfname=findbinfile(specification.name,'ovf') + if vfname and vfname~="" then + local vfdata=font.read_vf(vfname,size) + if vfdata then + local chars=tfmdata.characters + for k,v in next,vfdata.characters do + chars[k].commands=v.commands + end + properties.virtualized=true + tfmdata.fonts=vfdata.fonts + end + end + end + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) + if not features.encoding then + local encoding,filename=match(properties.filename,"^(.-)%-(.*)$") + if filename and encoding and encodings.known and encodings.known[encoding] then + features.encoding=encoding + end + end + return tfmdata + end +end +local function check_tfm(specification,fullname) + local foundname=findbinfile(fullname,'tfm') or "" + if foundname=="" then + foundname=findbinfile(fullname,'ofm') or "" + end + if foundname=="" then + foundname=fonts.names.getfilename(fullname,"tfm") or "" + end + if foundname~="" then + specification.filename=foundname + specification.format="ofm" + return read_from_tfm(specification) + elseif trace_defining then + report_defining("loading tfm with name %a fails",specification.name) + end +end +readers.check_tfm=check_tfm +function readers.tfm(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + return check_tfm(specification,fullname) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-afm']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers +local next,type,tonumber=next,type,tonumber +local 
format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip +local abs=math.abs +local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns +local derivetable=table.derive +local trace_features=false trackers.register("afm.features",function(v) trace_features=v end) +local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end) +local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end) +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) +local report_afm=logs.reporter("fonts","afm loading") +local findbinfile=resolvers.findbinfile +local definers=fonts.definers +local readers=fonts.readers +local constructors=fonts.constructors +local afm=constructors.newhandler("afm") +local pfb=constructors.newhandler("pfb") +local afmfeatures=constructors.newfeatures("afm") +local registerafmfeature=afmfeatures.register +afm.version=1.410 +afm.cache=containers.define("fonts","afm",afm.version,true) +afm.autoprefixed=true +afm.helpdata={} +afm.syncspace=true +afm.addligatures=true +afm.addtexligatures=true +afm.addkerns=true +local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode=lower(value) + end +end +registerafmfeature { + name="mode", + description="mode", + initializers={ + base=setmode, + node=setmode, + } +} +local comment=P("Comment") +local spacing=patterns.spacer +local lineend=patterns.newline +local words=C((1-lineend)^1) +local number=C((R("09")+S("."))^1)/tonumber*spacing^0 +local data=lpeg.Carg(1) +local pattern=( + comment*spacing*( + data*( + ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end + )+(1-lineend)^0 + )+(1-comment)^1 +)^0 +local function scan_comment(str) + local fd={} + lpegmatch(pattern,str,1,fd) + return fd +end +local keys={} +function keys.FontName (data,line) data.metadata.fontname=strip (line) + data.metadata.fullname=strip (line) end +function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end +function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end +function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end +function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end +function keys.Descender (data,line) data.metadata.descender=tonumber (line) end +function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end +function keys.Comment (data,line) + line=lower(line) + local 
designsize=match(line,"designsize[^%d]*(%d+)") + if designsize then data.metadata.designsize=tonumber(designsize) end +end +local function get_charmetrics(data,charmetrics,vector) + local characters=data.characters + local chr,ind={},0 + for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do + if k=='C' then + v=tonumber(v) + if v<0 then + ind=ind+1 + else + ind=v + end + chr={ + index=ind + } + elseif k=='WX' then + chr.width=tonumber(v) + elseif k=='N' then + characters[v]=chr + elseif k=='B' then + local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$") + chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) } + elseif k=='L' then + local plus,becomes=match(v,"^(.-) +(.-)$") + local ligatures=chr.ligatures + if ligatures then + ligatures[plus]=becomes + else + chr.ligatures={ [plus]=becomes } + end + end + end +end +local function get_kernpairs(data,kernpairs) + local characters=data.characters + for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do + local chr=characters[one] + if chr then + local kerns=chr.kerns + if kerns then + kerns[two]=tonumber(value) + else + chr.kerns={ [two]=tonumber(value) } + end + end + end +end +local function get_variables(data,fontmetrics) + for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do + local keyhandler=keys[key] + if keyhandler then + keyhandler(data,rest) + end + end +end +local function get_indexes(data,pfbname) + data.resources.filename=resolvers.unresolve(pfbname) + local pfbblob=fontloader.open(pfbname) + if pfbblob then + local characters=data.characters + local pfbdata=fontloader.to_table(pfbblob) + if pfbdata then + local glyphs=pfbdata.glyphs + if glyphs then + if trace_loading then + report_afm("getting index data from %a",pfbname) + end + for index,glyph in next,glyphs do + local name=glyph.name + if name then + local char=characters[name] + if char then + if trace_indexing then + report_afm("glyph %a has index %a",name,index) + end + char.index=index + end + end + end + elseif trace_loading then + report_afm("no glyph data in pfb file %a",pfbname) + end + elseif trace_loading then + report_afm("no data in pfb file %a",pfbname) + end + fontloader.close(pfbblob) + elseif trace_loading then + report_afm("invalid pfb file %a",pfbname) + end +end +local function readafm(filename) + local ok,afmblob,size=resolvers.loadbinfile(filename) + if ok and afmblob then + local data={ + resources={ + filename=resolvers.unresolve(filename), + version=afm.version, + creator="context mkiv", + }, + properties={ + hasitalics=false, + }, + goodies={}, + metadata={ + filename=file.removesuffix(file.basename(filename)) + }, + characters={ + }, + descriptions={ + }, + } + afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics) + if trace_loading then + report_afm("loading char metrics") + end + get_charmetrics(data,charmetrics,vector) + return "" + end) + afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs) + if trace_loading then + report_afm("loading kern pairs") + end + get_kernpairs(data,kernpairs) + return "" + end) + afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics) + if trace_loading then + report_afm("loading variables") + end + data.afmversion=version + get_variables(data,fontmetrics) + data.fontdimens=scan_comment(fontmetrics) + return "" + end) + return data + else + if trace_loading then + report_afm("no valid afm file %a",filename) + end + return nil + end +end +local 
addkerns,addligatures,addtexligatures,unify,normalize +function afm.load(filename) + filename=resolvers.findfile(filename,'afm') or "" + if filename~="" and not fonts.names.ignoredfile(filename) then + local name=file.removesuffix(file.basename(filename)) + local data=containers.read(afm.cache,name) + local attr=lfs.attributes(filename) + local size,time=attr.size or 0,attr.modification or 0 + local pfbfile=file.replacesuffix(name,"pfb") + local pfbname=resolvers.findfile(pfbfile,"pfb") or "" + if pfbname=="" then + pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or "" + end + local pfbsize,pfbtime=0,0 + if pfbname~="" then + local attr=lfs.attributes(pfbname) + pfbsize=attr.size or 0 + pfbtime=attr.modification or 0 + end + if not data or data.size~=size or data.time~=time or data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then + report_afm("reading %a",filename) + data=readafm(filename) + if data then + if pfbname~="" then + get_indexes(data,pfbname) + elseif trace_loading then + report_afm("no pfb file for %a",filename) + end + report_afm("unifying %a",filename) + unify(data,filename) + if afm.addligatures then + report_afm("add ligatures") + addligatures(data) + end + if afm.addtexligatures then + report_afm("add tex ligatures") + addtexligatures(data) + end + if afm.addkerns then + report_afm("add extra kerns") + addkerns(data) + end + normalize(data) + report_afm("add tounicode data") + fonts.mappings.addtounicode(data,filename) + data.size=size + data.time=time + data.pfbsize=pfbsize + data.pfbtime=pfbtime + report_afm("saving %a in cache",name) + data=containers.write(afm.cache,name,data) + data=containers.read(afm.cache,name) + end + if applyruntimefixes and data then + applyruntimefixes(filename,data) + end + end + return data + else + return nil + end +end +local uparser=fonts.mappings.makenameparser() +unify=function(data,filename) + local unicodevector=fonts.encodings.agl.unicodes + local unicodes,names={},{} + local private=constructors.privateoffset + local descriptions=data.descriptions + for name,blob in next,data.characters do + local code=unicodevector[name] + if not code then + code=lpegmatch(uparser,name) + if not code then + code=private + private=private+1 + report_afm("assigning private slot %U for unknown glyph name %a",code,name) + end + end + local index=blob.index + unicodes[name]=code + names[name]=index + blob.name=name + descriptions[code]={ + boundingbox=blob.boundingbox, + width=blob.width, + kerns=blob.kerns, + index=index, + name=name, + } + end + for unicode,description in next,descriptions do + local kerns=description.kerns + if kerns then + local krn={} + for name,kern in next,kerns do + local unicode=unicodes[name] + if unicode then + krn[unicode]=kern + else + print(unicode,name) + end + end + description.kerns=krn + end + end + data.characters=nil + local resources=data.resources + local filename=resources.filename or file.removesuffix(file.basename(filename)) + resources.filename=resolvers.unresolve(filename) + resources.unicodes=unicodes + resources.marks={} + resources.names=names + resources.private=private +end +normalize=function(data) +end +local addthem=function(rawdata,ligatures) + if ligatures then + local descriptions=rawdata.descriptions + local resources=rawdata.resources + local unicodes=resources.unicodes + local names=resources.names + for ligname,ligdata in next,ligatures do + local one=descriptions[unicodes[ligname]] + if one then + for _,pair in next,ligdata do + local two,three=unicodes[pair[1]],unicodes[pair[2]] + if two 
and three then + local ol=one.ligatures + if ol then + if not ol[two] then + ol[two]=three + end + else + one.ligatures={ [two]=three } + end + end + end + end + end + end +end +addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end +addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end +addkerns=function(rawdata) + local descriptions=rawdata.descriptions + local resources=rawdata.resources + local unicodes=resources.unicodes + local function do_it_left(what) + if what then + for unicode,description in next,descriptions do + local kerns=description.kerns + if kerns then + local extrakerns + for complex,simple in next,what do + complex=unicodes[complex] + simple=unicodes[simple] + if complex and simple then + local ks=kerns[simple] + if ks and not kerns[complex] then + if extrakerns then + extrakerns[complex]=ks + else + extrakerns={ [complex]=ks } + end + end + end + end + if extrakerns then + description.extrakerns=extrakerns + end + end + end + end + end + local function do_it_copy(what) + if what then + for complex,simple in next,what do + complex=unicodes[complex] + simple=unicodes[simple] + if complex and simple then + local complexdescription=descriptions[complex] + if complexdescription then + local simpledescription=descriptions[complex] + if simpledescription then + local extrakerns + local kerns=simpledescription.kerns + if kerns then + for unicode,kern in next,kerns do + if extrakerns then + extrakerns[unicode]=kern + else + extrakerns={ [unicode]=kern } + end + end + end + local extrakerns=simpledescription.extrakerns + if extrakerns then + for unicode,kern in next,extrakerns do + if extrakerns then + extrakerns[unicode]=kern + else + extrakerns={ [unicode]=kern } + end + end + end + if extrakerns then + complexdescription.extrakerns=extrakerns + end + end + end + end + end + end + end + do_it_left(afm.helpdata.leftkerned) + do_it_left(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.rightkerned) +end +local function adddimensions(data) + if data then + for unicode,description in next,data.descriptions do + local bb=description.boundingbox + if bb then + local ht,dp=bb[4],-bb[2] + if ht==0 or ht<0 then + else + description.height=ht + end + if dp==0 or dp<0 then + else + description.depth=dp + end + end + end + end +end +local function copytotfm(data) + if data and data.descriptions then + local metadata=data.metadata + local resources=data.resources + local properties=derivetable(data.properties) + local descriptions=derivetable(data.descriptions) + local goodies=derivetable(data.goodies) + local characters={} + local parameters={} + local unicodes=resources.unicodes + for unicode,description in next,data.descriptions do + characters[unicode]={} + end + local filename=constructors.checkedfilename(resources) + local fontname=metadata.fontname or metadata.fullname + local fullname=metadata.fullname or metadata.fontname + local endash=unicodes['space'] + local emdash=unicodes['emdash'] + local spacer="space" + local spaceunits=500 + local monospaced=metadata.isfixedpitch + local charwidth=metadata.charwidth + local italicangle=metadata.italicangle + local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight + properties.monospaced=monospaced + parameters.italicangle=italicangle + parameters.charwidth=charwidth + parameters.charxheight=charxheight + if properties.monospaced then + if descriptions[endash] then + spaceunits,spacer=descriptions[endash].width,"space" + end + if 
not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width,"emdash" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + else + if descriptions[endash] then + spaceunits,spacer=descriptions[endash].width,"space" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + end + spaceunits=tonumber(spaceunits) + if spaceunits<200 then + end + parameters.slant=0 + parameters.space=spaceunits + parameters.space_stretch=500 + parameters.space_shrink=333 + parameters.x_height=400 + parameters.quad=1000 + if italicangle and italicangle~=0 then + parameters.italicangle=italicangle + parameters.italicfactor=math.cos(math.rad(90+italicangle)) + parameters.slant=- math.tan(italicangle*math.pi/180) + end + if monospaced then + parameters.space_stretch=0 + parameters.space_shrink=0 + elseif afm.syncspace then + parameters.space_stretch=spaceunits/2 + parameters.space_shrink=spaceunits/3 + end + parameters.extra_space=parameters.space_shrink + if charxheight then + parameters.x_height=charxheight + else + local x=unicodes['x'] + if x then + local x=descriptions[x] + if x then + parameters.x_height=x.height + end + end + end + local fd=data.fontdimens + if fd and fd[8] and fd[9] and fd[10] then + for k,v in next,fd do + parameters[k]=v + end + end + parameters.designsize=(metadata.designsize or 10)*65536 + parameters.ascender=abs(metadata.ascender or 0) + parameters.descender=abs(metadata.descender or 0) + parameters.units=1000 + properties.spacer=spacer + properties.encodingbytes=2 + properties.format=fonts.formats[filename] or "type1" + properties.filename=filename + properties.fontname=fontname + properties.fullname=fullname + properties.psname=fullname + properties.name=filename or fullname or fontname + if next(characters) then + return { + characters=characters, + descriptions=descriptions, + parameters=parameters, + resources=resources, + properties=properties, + goodies=goodies, + } + end + end + return nil +end +function afm.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) + if okay then + return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) + else + return {} + end +end +local function checkfeatures(specification) +end +local function afmtotfm(specification) + local afmname=specification.filename or specification.name + if specification.forced=="afm" or specification.format=="afm" then + if trace_loading then + report_afm("forcing afm format for %a",afmname) + end + else + local tfmname=findbinfile(afmname,"ofm") or "" + if tfmname~="" then + if trace_loading then + report_afm("fallback from afm to tfm for %a",afmname) + end + return + end + end + if afmname~="" then + local features=constructors.checkedfeatures("afm",specification.features.normal) + specification.features.normal=features + constructors.hashinstance(specification,true) + specification=definers.resolve(specification) + local cache_id=specification.hash + local tfmdata=containers.read(constructors.cache,cache_id) + if not tfmdata then + local rawdata=afm.load(afmname) + if rawdata and next(rawdata) then + adddimensions(rawdata) + tfmdata=copytotfm(rawdata) + if tfmdata and next(tfmdata) then + local shared=tfmdata.shared + if not shared then + shared={} + tfmdata.shared=shared + end + shared.rawdata=rawdata + shared.features=features + shared.processes=afm.setfeatures(tfmdata,features) + end + elseif 
trace_loading then + report_afm("no (valid) afm file found with name %a",afmname) + end + tfmdata=containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata + end +end +local function read_from_afm(specification) + local tfmdata=afmtotfm(specification) + if tfmdata then + tfmdata.properties.name=specification.name + tfmdata=constructors.scale(tfmdata,specification) + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) + fonts.loggers.register(tfmdata,'afm',specification) + end + return tfmdata +end +local function prepareligatures(tfmdata,ligatures,value) + if value then + local descriptions=tfmdata.descriptions + for unicode,character in next,tfmdata.characters do + local description=descriptions[unicode] + local dligatures=description.ligatures + if dligatures then + local cligatures=character.ligatures + if not cligatures then + cligatures={} + character.ligatures=cligatures + end + for unicode,ligature in next,dligatures do + cligatures[unicode]={ + char=ligature, + type=0 + } + end + end + end + end +end +local function preparekerns(tfmdata,kerns,value) + if value then + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local unicodes=resources.unicodes + local descriptions=tfmdata.descriptions + for u,chr in next,tfmdata.characters do + local d=descriptions[u] + local newkerns=d[kerns] + if newkerns then + local kerns=chr.kerns + if not kerns then + kerns={} + chr.kerns=kerns + end + for k,v in next,newkerns do + local uk=unicodes[k] + if uk then + kerns[uk]=v + end + end + end + end + end +end +local list={ + [0x0027]=0x2019, +} +local function texreplacements(tfmdata,value) + local descriptions=tfmdata.descriptions + local characters=tfmdata.characters + for k,v in next,list do + characters [k]=characters [v] + descriptions[k]=descriptions[v] + end +end +local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end +local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end +local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end +local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end +registerafmfeature { + name="liga", + description="traditional ligatures", + initializers={ + base=ligatures, + node=ligatures, + } +} +registerafmfeature { + name="kern", + description="intercharacter kerning", + initializers={ + base=kerns, + node=kerns, + } +} +registerafmfeature { + name="extrakerns", + description="additional intercharacter kerning", + initializers={ + base=extrakerns, + node=extrakerns, + } +} +registerafmfeature { + name='tlig', + description='tex ligatures', + initializers={ + base=texligatures, + node=texligatures, + } +} +registerafmfeature { + name='trep', + description='tex replacements', + initializers={ + base=texreplacements, + node=texreplacements, + } +} +local check_tfm=readers.check_tfm +fonts.formats.afm="type1" +fonts.formats.pfb="type1" +local function check_afm(specification,fullname) + local foundname=findbinfile(fullname,'afm') or "" + if foundname=="" then + foundname=fonts.names.getfilename(fullname,"afm") or "" + end + if foundname=="" and afm.autoprefixed then + local encoding,shortname=match(fullname,"^(.-)%-(.*)$") + if encoding and shortname and fonts.encodings.known[encoding] then + shortname=findbinfile(shortname,'afm') or "" + if shortname~="" then + foundname=shortname + if 
trace_defining then + report_afm("stripping encoding prefix from filename %a",afmname) + end + end + end + end + if foundname~="" then + specification.filename=foundname + specification.format="afm" + return read_from_afm(specification) + end +end +function readers.afm(specification,method) + local fullname,tfmdata=specification.filename or "",nil + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + tfmdata=check_afm(specification,specification.name.."."..forced) + end + if not tfmdata then + method=method or definers.method or "afm or tfm" + if method=="tfm" then + tfmdata=check_tfm(specification,specification.name) + elseif method=="afm" then + tfmdata=check_afm(specification,specification.name) + elseif method=="tfm or afm" then + tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name) + else + tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name) end - elseif fonts.names.reportmissingname then - fonts.names.reportmissingname() - return name,false end - elseif fonts.names.reportmissingbase then - fonts.names.reportmissingbase() + else + tfmdata=check_afm(specification,fullname) end + return tfmdata end -fonts.names.resolvespec=fonts.names.resolve -function fonts.names.getfilename(askedname,suffix) - return "" +function readers.pfb(specification,method) + local original=specification.specification + if trace_defining then + report_afm("using afm reader for %a",original) + end + specification.specification=gsub(original,"%.pfb",".afm") + specification.forced="afm" + return readers.afm(specification,method) end end -- closure do -- begin closure to overcome local limits and interference +if not modules then modules={} end modules ['font-afk']={ + version=1.001, + comment="companion to font-afm.lua", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", + dataonly=true, +} +local allocate=utilities.storage.allocate +fonts.handlers.afm.helpdata={ + ligatures=allocate { + ['f']={ + { 'f','ff' }, + { 'i','fi' }, + { 'l','fl' }, + }, + ['ff']={ + { 'i','ffi' } + }, + ['fi']={ + { 'i','fii' } + }, + ['fl']={ + { 'i','fli' } + }, + ['s']={ + { 't','st' } + }, + ['i']={ + { 'j','ij' } + }, + }, + texligatures=allocate { + ['quoteleft']={ + { 'quoteleft','quotedblleft' } + }, + ['quoteright']={ + { 'quoteright','quotedblright' } + }, + ['hyphen']={ + { 'hyphen','endash' } + }, + ['endash']={ + { 'hyphen','emdash' } + } + }, + leftkerned=allocate { + AEligature="A",aeligature="a", + OEligature="O",oeligature="o", + IJligature="I",ijligature="i", + AE="A",ae="a", + OE="O",oe="o", + IJ="I",ij="i", + Ssharp="S",ssharp="s", + }, + rightkerned=allocate { + AEligature="E",aeligature="e", + OEligature="E",oeligature="e", + IJligature="J",ijligature="j", + AE="E",ae="e", + OE="E",oe="e", + IJ="J",ij="j", + Ssharp="S",ssharp="s", + }, + bothkerned=allocate { + Acircumflex="A",acircumflex="a", + Ccircumflex="C",ccircumflex="c", + Ecircumflex="E",ecircumflex="e", + Gcircumflex="G",gcircumflex="g", + Hcircumflex="H",hcircumflex="h", + Icircumflex="I",icircumflex="i", + Jcircumflex="J",jcircumflex="j", + Ocircumflex="O",ocircumflex="o", + Scircumflex="S",scircumflex="s", + Ucircumflex="U",ucircumflex="u", + Wcircumflex="W",wcircumflex="w", + Ycircumflex="Y",ycircumflex="y", + Agrave="A",agrave="a", + Egrave="E",egrave="e", + Igrave="I",igrave="i", + Ograve="O",ograve="o", + Ugrave="U",ugrave="u", + 
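-- Illustrative sketch, not part of the patched sources: readers.afm above
-- tries the afm and tfm readers in the order given by 'method' (defaulting,
-- as in the code, to "afm or tfm"). With dummy readers standing in for
-- check_afm and check_tfm the decision table is:

local function resolve(name, method, check_afm, check_tfm)
  method = method or "afm or tfm"
  if method == "tfm" then
    return check_tfm(name)
  elseif method == "afm" then
    return check_afm(name)
  elseif method == "tfm or afm" then
    return check_tfm(name) or check_afm(name)
  else                                        -- "afm or tfm", the default
    return check_afm(name) or check_tfm(name)
  end
end

print(resolve("demo", nil,
  function() return nil    end,               -- no afm found
  function() return "tfm!" end))              -- so the tfm reader wins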
Ygrave="Y",ygrave="y", + Atilde="A",atilde="a", + Itilde="I",itilde="i", + Otilde="O",otilde="o", + Utilde="U",utilde="u", + Ntilde="N",ntilde="n", + Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a", + Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e", + Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i", + Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o", + Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u", + Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y", + Aacute="A",aacute="a", + Cacute="C",cacute="c", + Eacute="E",eacute="e", + Iacute="I",iacute="i", + Lacute="L",lacute="l", + Nacute="N",nacute="n", + Oacute="O",oacute="o", + Racute="R",racute="r", + Sacute="S",sacute="s", + Uacute="U",uacute="u", + Yacute="Y",yacute="y", + Zacute="Z",zacute="z", + Dstroke="D",dstroke="d", + Hstroke="H",hstroke="h", + Tstroke="T",tstroke="t", + Cdotaccent="C",cdotaccent="c", + Edotaccent="E",edotaccent="e", + Gdotaccent="G",gdotaccent="g", + Idotaccent="I",idotaccent="i", + Zdotaccent="Z",zdotaccent="z", + Amacron="A",amacron="a", + Emacron="E",emacron="e", + Imacron="I",imacron="i", + Omacron="O",omacron="o", + Umacron="U",umacron="u", + Ccedilla="C",ccedilla="c", + Kcedilla="K",kcedilla="k", + Lcedilla="L",lcedilla="l", + Ncedilla="N",ncedilla="n", + Rcedilla="R",rcedilla="r", + Scedilla="S",scedilla="s", + Tcedilla="T",tcedilla="t", + Ohungarumlaut="O",ohungarumlaut="o", + Uhungarumlaut="U",uhungarumlaut="u", + Aogonek="A",aogonek="a", + Eogonek="E",eogonek="e", + Iogonek="I",iogonek="i", + Uogonek="U",uogonek="u", + Aring="A",aring="a", + Uring="U",uring="u", + Abreve="A",abreve="a", + Ebreve="E",ebreve="e", + Gbreve="G",gbreve="g", + Ibreve="I",ibreve="i", + Obreve="O",obreve="o", + Ubreve="U",ubreve="u", + Ccaron="C",ccaron="c", + Dcaron="D",dcaron="d", + Ecaron="E",ecaron="e", + Lcaron="L",lcaron="l", + Ncaron="N",ncaron="n", + Rcaron="R",rcaron="r", + Scaron="S",scaron="s", + Tcaron="T",tcaron="t", + Zcaron="Z",zcaron="z", + dotlessI="I",dotlessi="i", + dotlessJ="J",dotlessj="j", + AEligature="AE",aeligature="ae",AE="AE",ae="ae", + OEligature="OE",oeligature="oe",OE="OE",oe="oe", + IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij", + Lstroke="L",lstroke="l",Lslash="L",lslash="l", + Ostroke="O",ostroke="o",Oslash="O",oslash="o", + Ssharp="SS",ssharp="ss", + Aumlaut="A",aumlaut="a", + Eumlaut="E",eumlaut="e", + Iumlaut="I",iumlaut="i", + Oumlaut="O",oumlaut="o", + Uumlaut="U",uumlaut="u", + } +} + +end -- closure + +do -- begin closure to overcome local limits and interference + if not modules then modules={} end modules ['luatex-fonts-tfm']={ version=1.001, comment="companion to luatex-*.tex", @@ -5079,7 +6334,7 @@ local report_otf=logs.reporter("fonts","otf loading") local fonts=fonts local otf=fonts.handlers.otf otf.glists={ "gsub","gpos" } -otf.version=2.743 +otf.version=2.745 otf.cache=containers.define("fonts","otf",otf.version,true) local fontdata=fonts.hashes.identifiers local chardata=characters and characters.data @@ -5099,17 +6354,28 @@ local packdata=true local syncspace=true local forcenotdef=false local includesubfonts=false +local overloadkerns=false +local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes local wildcard="*" local default="dflt" local fontloaderfields=fontloader.fields local mainfields=nil local glyphfields=nil +local formats=fonts.formats +formats.otf="opentype" +formats.ttf="truetype" +formats.ttc="truetype" +formats.dfont="truetype" registerdirective("fonts.otf.loader.cleanup",function(v) 
cleanup=tonumber(v) or (v and 1) or 0 end) registerdirective("fonts.otf.loader.force",function(v) forceload=v end) registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end) registerdirective("fonts.otf.loader.pack",function(v) packdata=v end) registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end) registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end) +registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end) +local function otf_format(filename) + return formats[lower(file.suffix(filename))] +end local function load_featurefile(raw,featurefile) if featurefile and featurefile~="" then if trace_loading then @@ -5296,7 +6562,7 @@ end function enhancers.register(what,action) actions[what]=action end -function otf.load(filename,format,sub,featurefile) +function otf.load(filename,sub,featurefile) local base=file.basename(file.removesuffix(filename)) local name=file.removesuffix(base) local attr=lfs.attributes(filename) @@ -5394,7 +6660,7 @@ function otf.load(filename,format,sub,featurefile) data={ size=size, time=time, - format=format, + format=otf_format(filename), featuredata=featurefiles, resources={ filename=resolvers.unresolve(filename), @@ -5460,6 +6726,9 @@ function otf.load(filename,format,sub,featurefile) report_otf("loading from cache using hash %a",hash) end enhance("unpack",data,filename,nil,false) + if applyruntimefixes then + applyruntimefixes(filename,data) + end enhance("add dimensions",data,filename,nil,false) if trace_sequences then showfeatureorder(data,filename) @@ -6390,74 +7659,93 @@ actions["merge kern classes"]=function(data,filename,raw) local resources=data.resources local unicodes=resources.unicodes local splitter=data.helpers.tounicodetable + local ignored=0 + local blocked=0 for gp=1,#gposlist do local gpos=gposlist[gp] local subtables=gpos.subtables if subtables then + local first_done={} + local split={} for s=1,#subtables do local subtable=subtables[s] local kernclass=subtable.kernclass + local lookup=subtable.lookup or subtable.name if kernclass then - local split={} - for k=1,#kernclass do - local kcl=kernclass[k] - local firsts=kcl.firsts - local seconds=kcl.seconds - local offsets=kcl.offsets - local lookups=kcl.lookup - if type(lookups)~="table" then - lookups={ lookups } - end - for n,s in next,firsts do - split[s]=split[s] or lpegmatch(splitter,s) - end - local maxseconds=0 - for n,s in next,seconds do - if n>maxseconds then - maxseconds=n - end - split[s]=split[s] or lpegmatch(splitter,s) + if #kernclass>0 then + kernclass=kernclass[1] + lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup + report_otf("fixing kernclass table of lookup %a",lookup) + end + local firsts=kernclass.firsts + local seconds=kernclass.seconds + local offsets=kernclass.offsets + for n,s in next,firsts do + split[s]=split[s] or lpegmatch(splitter,s) + end + local maxseconds=0 + for n,s in next,seconds do + if n>maxseconds then + maxseconds=n end - for l=1,#lookups do - local lookup=lookups[l] - for fk=1,#firsts do - local fv=firsts[fk] - local splt=split[fv] - if splt then - local extrakerns={} - local baseoffset=(fk-1)*maxseconds - for sk=2,maxseconds do - local sv=seconds[sk] - local splt=split[sv] - if splt then - local offset=offsets[baseoffset+sk] - if offset then - for i=1,#splt do - extrakerns[splt[i]]=offset - end - end + split[s]=split[s] or lpegmatch(splitter,s) + end + for fk=1,#firsts do + local fv=firsts[fk] + local splt=split[fv] + if splt then + local 
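-- Illustrative sketch, not part of the patched sources: the reworked "merge
-- kern classes" action above (continued just below) flattens class based
-- kerning into per-glyph kern tables; class pair (fk,sk) indexes the flat
-- offsets array as offsets[(fk-1)*maxseconds+sk]. The data here is made up:

local firsts     = { [1] = "A V", [2] = "T" }           -- glyph names per first class
local seconds    = { [1] = "",    [2] = "o e", [3] = "A" }
local offsets    = { 0, -30, -80, 0, -70, 0 }           -- 2 x 3 matrix, flattened
local maxseconds = 3

local kerns = {}                                        -- kerns[first][second] = value
for fk, fglyphs in pairs(firsts) do
  local baseoffset = (fk-1)*maxseconds
  for sk = 2, maxseconds do                             -- the loop starts at 2, as above
    local offset = offsets[baseoffset+sk]
    if offset and offset ~= 0 then
      for first in fglyphs:gmatch("%S+") do
        for second in seconds[sk]:gmatch("%S+") do
          kerns[first] = kerns[first] or {}
          kerns[first][second] = offset
        end
      end
    end
  end
end

print(kerns.A.o, kerns.T.o)                             --> -30   -70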
extrakerns={} + local baseoffset=(fk-1)*maxseconds + for sk=2,maxseconds do + local sv=seconds[sk] + local splt=split[sv] + if splt then + local offset=offsets[baseoffset+sk] + if offset then + for i=1,#splt do + extrakerns[splt[i]]=offset end end - for i=1,#splt do - local first_unicode=splt[i] - local description=descriptions[first_unicode] - if description then - local kerns=description.kerns - if not kerns then - kerns={} - description.kerns=kerns - end - local lookupkerns=kerns[lookup] - if not lookupkerns then - lookupkerns={} - kerns[lookup]=lookupkerns - end + end + end + for i=1,#splt do + local first_unicode=splt[i] + if first_done[first_unicode] then + report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode) + blocked=blocked+1 + else + first_done[first_unicode]=true + local description=descriptions[first_unicode] + if description then + local kerns=description.kerns + if not kerns then + kerns={} + description.kerns=kerns + end + local lookupkerns=kerns[lookup] + if not lookupkerns then + lookupkerns={} + kerns[lookup]=lookupkerns + end + if overloadkerns then for second_unicode,kern in next,extrakerns do lookupkerns[second_unicode]=kern end - elseif trace_loading then - report_otf("no glyph data for %U",first_unicode) + else + for second_unicode,kern in next,extrakerns do + local k=lookupkerns[second_unicode] + if not k then + lookupkerns[second_unicode]=kern + elseif k~=kern then + if trace_loading then + report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern) + end + ignored=ignored+1 + end + end end + elseif trace_loading then + report_otf("no glyph data for %U",first_unicode) end end end @@ -6468,6 +7756,12 @@ actions["merge kern classes"]=function(data,filename,raw) end end end + if ignored>0 then + report_otf("%s kern overloads ignored",ignored) + end + if blocked>0 then + report_otf("%s succesive kerns blocked",blocked) + end end end actions["check glyphs"]=function(data,filename,raw) @@ -6681,10 +7975,19 @@ local function copytotfm(data,cache_id) end end end + local filename=constructors.checkedfilename(resources) + local fontname=metadata.fontname + local fullname=metadata.fullname or fontname + local units=metadata.units_per_em or 1000 + if units==0 then + units=1000 + metadata.units_per_em=1000 + report_otf("changing %a units to %a",0,units) + end local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced") local charwidth=pfminfo.avgwidth - local italicangle=metadata.italicangle local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight + local italicangle=metadata.italicangle properties.monospaced=monospaced parameters.italicangle=italicangle parameters.charwidth=charwidth @@ -6713,14 +8016,6 @@ local function copytotfm(data,cache_id) end end spaceunits=tonumber(spaceunits) or 500 - local filename=constructors.checkedfilename(resources) - local fontname=metadata.fontname - local fullname=metadata.fullname or fontname - local units=metadata.units_per_em or 1000 - if units==0 then - units=1000 - metadata.units_per_em=1000 - end parameters.slant=0 parameters.space=spaceunits parameters.space_stretch=units/2 @@ -6729,10 +8024,10 @@ local function copytotfm(data,cache_id) parameters.quad=units if spaceunits<2*units/5 then end - if italicangle then + if italicangle and italicangle~=0 then parameters.italicangle=italicangle parameters.italicfactor=math.cos(math.rad(90+italicangle)) - parameters.slant=- 
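-- Illustrative sketch, not part of the patched sources: the branch above
-- implements the new fonts.otf.loader.overloadkerns policy. Without the
-- directive an already stored kern wins and a conflicting later value is only
-- counted (and reported as "kern overloads ignored"); with it the later value
-- overwrites the earlier one.

local function merge_kerns(lookupkerns, extrakerns, overload)
  local ignored = 0
  for second, kern in pairs(extrakerns) do
    local k = lookupkerns[second]
    if overload or k == nil then
      lookupkerns[second] = kern
    elseif k ~= kern then
      ignored = ignored + 1
    end
  end
  return ignored
end

local stored = { [0x006F] = -30 }                        -- existing kern to 'o'
local extra  = { [0x006F] = -10, [0x0065] = -20 }        -- one conflict, one new pair
print(merge_kerns(stored, extra, false), stored[0x006F]) --> 1   -30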
math.round(math.tan(italicangle*math.pi/180)) + parameters.slant=- math.tan(italicangle*math.pi/180) end if monospaced then parameters.space_stretch=0 @@ -6759,7 +8054,7 @@ local function copytotfm(data,cache_id) parameters.units=units properties.space=spacer properties.encodingbytes=2 - properties.format=data.format or fonts.formats[filename] or "opentype" + properties.format=data.format or otf_format(filename) or formats.otf properties.noglyphnames=true properties.filename=filename properties.fontname=fontname @@ -6784,9 +8079,8 @@ local function otftotfm(specification) local name=specification.name local sub=specification.sub local filename=specification.filename - local format=specification.format local features=specification.features.normal - local rawdata=otf.load(filename,format,sub,features and features.featurefile) + local rawdata=otf.load(filename,sub,features and features.featurefile) if rawdata and next(rawdata) then rawdata.lookuphash={} tfmdata=copytotfm(rawdata,cache_id) @@ -6868,41 +8162,33 @@ function otf.collectlookups(rawdata,kind,script,language) end return nil,nil end -local function check_otf(forced,specification,suffix,what) +local function check_otf(forced,specification,suffix) local name=specification.name if forced then - name=file.addsuffix(name,suffix,true) + name=specification.forcedname end local fullname=findbinfile(name,suffix) or "" if fullname=="" then fullname=fonts.names.getfilename(name,suffix) or "" end - if fullname~="" then + if fullname~="" and not fonts.names.ignoredfile(fullname) then specification.filename=fullname - specification.format=what return read_from_otf(specification) end end -local function opentypereader(specification,suffix,what) +local function opentypereader(specification,suffix) local forced=specification.forced or "" - if forced=="otf" then - return check_otf(true,specification,forced,"opentype") - elseif forced=="ttf" or forced=="ttc" or forced=="dfont" then - return check_otf(true,specification,forced,"truetype") + if formats[forced] then + return check_otf(true,specification,forced) else - return check_otf(false,specification,suffix,what) + return check_otf(false,specification,suffix) end end -readers.opentype=opentypereader -local formats=fonts.formats -formats.otf="opentype" -formats.ttf="truetype" -formats.ttc="truetype" -formats.dfont="truetype" -function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end -function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end -function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end -function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end +readers.opentype=opentypereader +function readers.otf (specification) return opentypereader(specification,"otf") end +function readers.ttf (specification) return opentypereader(specification,"ttf") end +function readers.ttc (specification) return opentypereader(specification,"ttf") end +function readers.dfont(specification) return opentypereader(specification,"ttf") end function otf.scriptandlanguage(tfmdata,attr) local properties=tfmdata.properties return properties.script or "dflt",properties.language or "dflt" @@ -7494,11 +8780,24 @@ local injections=nodes.injections local nodecodes=nodes.nodecodes local glyph_code=nodecodes.glyph local kern_code=nodecodes.kern -local nodepool=nodes.pool +local nuts=nodes.nuts +local nodepool=nuts.pool local newkern=nodepool.kern -local 
traverse_id=node.traverse_id -local insert_node_before=node.insert_before -local insert_node_after=node.insert_after +local tonode=nuts.tonode +local tonut=nuts.tonut +local getfield=nuts.getfield +local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getattr=nuts.getattr +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local getchar=nuts.getchar +local setfield=nuts.setfield +local setattr=nuts.setattr +local traverse_id=nuts.traverse_id +local insert_node_before=nuts.insert_before +local insert_node_after=nuts.insert_after local a_kernpair=attributes.private('kernpair') local a_ligacomp=attributes.private('ligacomp') local a_markbase=attributes.private('markbase') @@ -7517,21 +8816,21 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2]) local ws,wn=tfmstart.width,tfmnext.width local bound=#cursives+1 - start[a_cursbase]=bound - nxt[a_curscurs]=bound + setattr(start,a_cursbase,bound) + setattr(nxt,a_curscurs,bound) cursives[bound]={ rlmode,dx,dy,ws,wn } return dx,dy,bound end function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4] if x~=0 or w~=0 or y~=0 or h~=0 then - local bound=current[a_kernpair] + local bound=getattr(current,a_kernpair) if bound then local kb=kerns[bound] kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h else bound=#kerns+1 - current[a_kernpair]=bound + setattr(current,a_kernpair,bound) kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width } end return x,y,w,h,bound @@ -7542,35 +8841,35 @@ function injections.setkern(current,factor,rlmode,x,tfmchr) local dx=factor*x if dx~=0 then local bound=#kerns+1 - current[a_kernpair]=bound + setattr(current,a_kernpair,bound) kerns[bound]={ rlmode,dx } return dx,bound else return 0,0 end end -function injections.setmark(start,base,factor,rlmode,ba,ma,index) +function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2]) - local bound=base[a_markbase] + local bound=getattr(base,a_markbase) local index=1 if bound then local mb=marks[bound] if mb then index=#mb+1 mb[index]={ dx,dy,rlmode } - start[a_markmark]=bound - start[a_markdone]=index + setattr(start,a_markmark,bound) + setattr(start,a_markdone,index) return dx,dy,bound else - report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound) + report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound) end end index=index or 1 bound=#marks+1 - base[a_markbase]=bound - start[a_markmark]=bound - start[a_markdone]=index - marks[bound]={ [index]={ dx,dy,rlmode } } + setattr(base,a_markbase,bound) + setattr(start,a_markmark,bound) + setattr(start,a_markdone,index) + marks[bound]={ [index]={ dx,dy,rlmode,baseismark } } return dx,dy,bound end local function dir(n) @@ -7579,15 +8878,15 @@ end local function trace(head) report_injections("begin run") for n in traverse_id(glyph_code,head) do - if n.subtype<256 then - local kp=n[a_kernpair] - local mb=n[a_markbase] - local mm=n[a_markmark] - local md=n[a_markdone] - local cb=n[a_cursbase] - local cc=n[a_curscurs] - local char=n.char - report_injections("font %s, char %U, glyph %c",n.font,char,char) + if getsubtype(n)<256 then + local kp=getattr(n,a_kernpair) + local mb=getattr(n,a_markbase) + local mm=getattr(n,a_markmark) + local 
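-- Illustrative sketch, not part of the patched sources: the block above trades
-- direct node field access (n.char, n.font, n.next, n[attribute]) for the
-- indirect nodes.nuts accessors (getchar, getfont, getnext, getattr, setfield,
-- ...) with tonut/tonode at the borders. A rough plain-table stand-in for such
-- an accessor layer (real nodes are not plain tables, so this only shows the
-- calling convention):

local nuts = {}

function nuts.tonut(n)             return n end   -- identity in this stand-in
function nuts.tonode(n)            return n end
function nuts.getfield(n, name)    return n[name] end
function nuts.setfield(n, name, v) n[name] = v end
function nuts.getnext(n)           return n.next end
function nuts.getchar(n)           return n.char end
function nuts.getfont(n)           return n.font end

-- the same calling code then works no matter how nodes are represented:
local glyph = { char = 0x66, font = 1 }
nuts.setfield(glyph, "xoffset", 1234)
print(nuts.getchar(glyph), nuts.getfield(glyph, "xoffset"))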
md=getattr(n,a_markdone) + local cb=getattr(n,a_cursbase) + local cc=getattr(n,a_curscurs) + local char=getchar(n) + report_injections("font %s, char %U, glyph %c",getfont(n),char,char) if kp then local k=kerns[kp] if k[3] then @@ -7628,21 +8927,23 @@ local function show_result(head) local current=head local skipping=false while current do - local id=current.id + local id=getid(current) if id==glyph_code then - report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset) + report_injections("char: %C, width %p, xoffset %p, yoffset %p", + getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset")) skipping=false elseif id==kern_code then - report_injections("kern: %p",current.kern) + report_injections("kern: %p",getfield(current,"kern")) skipping=false elseif not skipping then report_injections() skipping=true end - current=current.next + current=getnext(current) end end function injections.handler(head,where,keep) + head=tonut(head) local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns) if has_marks or has_cursives then if trace_injections then @@ -7652,17 +8953,18 @@ function injections.handler(head,where,keep) if has_kerns then local nf,tm=nil,nil for n in traverse_id(glyph_code,head) do - if n.subtype<256 then + if getsubtype(n)<256 then nofvalid=nofvalid+1 valid[nofvalid]=n - if n.font~=nf then - nf=n.font - tm=fontdata[nf].resources.marks + local f=getfont(n) + if f~=nf then + nf=f + tm=fontdata[nf].resources.marks end if tm then - mk[n]=tm[n.char] + mk[n]=tm[getchar(n)] end - local k=n[a_kernpair] + local k=getattr(n,a_kernpair) if k then local kk=kerns[k] if kk then @@ -7682,15 +8984,16 @@ function injections.handler(head,where,keep) else local nf,tm=nil,nil for n in traverse_id(glyph_code,head) do - if n.subtype<256 then + if getsubtype(n)<256 then nofvalid=nofvalid+1 valid[nofvalid]=n - if n.font~=nf then - nf=n.font - tm=fontdata[nf].resources.marks + local f=getfont(n) + if f~=nf then + nf=f + tm=fontdata[nf].resources.marks end if tm then - mk[n]=tm[n.char] + mk[n]=tm[getchar(n)] end end end @@ -7699,7 +9002,7 @@ function injections.handler(head,where,keep) local cx={} if has_kerns and next(ky) then for n,k in next,ky do - n.yoffset=k + setfield(n,"yoffset",k) end end if has_cursives then @@ -7708,9 +9011,9 @@ function injections.handler(head,where,keep) for i=1,nofvalid do local n=valid[i] if not mk[n] then - local n_cursbase=n[a_cursbase] + local n_cursbase=getattr(n,a_cursbase) if p_cursbase then - local n_curscurs=n[a_curscurs] + local n_curscurs=getattr(n,a_curscurs) if p_cursbase==n_curscurs then local c=cursives[n_curscurs] if c then @@ -7733,20 +9036,20 @@ function injections.handler(head,where,keep) end end elseif maxt>0 then - local ny=n.yoffset + local ny=getfield(n,"yoffset") for i=maxt,1,-1 do ny=ny+d[i] local ti=t[i] - ti.yoffset=ti.yoffset+ny + setfield(ti,"yoffset",getfield(ti,"yoffset")+ny) end maxt=0 end if not n_cursbase and maxt>0 then - local ny=n.yoffset + local ny=getfield(n,"yoffset") for i=maxt,1,-1 do ny=ny+d[i] local ti=t[i] - ti.yoffset=ny + setfield(ti,"yoffset",ny) end maxt=0 end @@ -7754,11 +9057,11 @@ function injections.handler(head,where,keep) end end if maxt>0 then - local ny=n.yoffset + local ny=getfield(n,"yoffset") for i=maxt,1,-1 do ny=ny+d[i] local ti=t[i] - ti.yoffset=ny + setfield(ti,"yoffset",ny) end maxt=0 end @@ -7769,52 +9072,66 @@ function injections.handler(head,where,keep) if has_marks then for i=1,nofvalid 
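-- Illustrative sketch, not part of the patched sources: the injection code
-- above follows a record-then-resolve pattern. setkern/setpair/setmark only
-- store data in side tables, keyed through a private attribute on the node,
-- and injections.handler later turns those records into offsets and kerns in
-- one pass (resetting the tables unless 'keep' is set). Plain-table stand-in:

local kerns, bound = {}, 0

local function setkern(glyph, dx)            -- record only, no node surgery yet
  bound = bound + 1
  glyph.kernpair = bound                     -- stands in for setattr(n, a_kernpair, ...)
  kerns[bound] = dx
end

local function handler(glyphs)               -- the later pass applies the records
  for _, g in ipairs(glyphs) do
    local b = g.kernpair
    if b then
      g.xadvance = (g.xadvance or 0) + kerns[b]
    end
  end
  kerns, bound = {}, 0                       -- cf. "if not keep then kerns={} end"
end

local line = { { char = 0x54 }, { char = 0x6F } }
setkern(line[2], -30)
handler(line)
print(line[2].xadvance)                      --> -30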
do local p=valid[i] - local p_markbase=p[a_markbase] + local p_markbase=getattr(p,a_markbase) if p_markbase then local mrks=marks[p_markbase] local nofmarks=#mrks - for n in traverse_id(glyph_code,p.next) do - local n_markmark=n[a_markmark] + for n in traverse_id(glyph_code,getnext(p)) do + local n_markmark=getattr(n,a_markmark) if p_markbase==n_markmark then - local index=n[a_markdone] or 1 + local index=getattr(n,a_markdone) or 1 local d=mrks[index] if d then local rlmode=d[3] local k=wx[p] + local px=getfield(p,"xoffset") + local ox=0 if k then local x=k[2] local w=k[4] if w then if rlmode and rlmode>=0 then - n.xoffset=p.xoffset-p.width+d[1]-(w-x) + ox=px-getfield(p,"width")+d[1]-(w-x) else - n.xoffset=p.xoffset-d[1]-x + ox=px-d[1]-x end else if rlmode and rlmode>=0 then - n.xoffset=p.xoffset-p.width+d[1] + ox=px-getfield(p,"width")+d[1] else - n.xoffset=p.xoffset-d[1]-x + ox=px-d[1]-x end end else + local wp=getfield(p,"width") + local wn=getfield(n,"width") if rlmode and rlmode>=0 then - n.xoffset=p.xoffset-p.width+d[1] + ox=px-wp+d[1] else - n.xoffset=p.xoffset-d[1] + ox=px-d[1] + end + if wn~=0 then + insert_node_before(head,n,newkern(-wn/2)) + insert_node_after(head,n,newkern(-wn/2)) end end + setfield(n,"xoffset",ox) + local py=getfield(p,"yoffset") + local oy=0 if mk[p] then - n.yoffset=p.yoffset+d[2] + oy=py+d[2] else - n.yoffset=n.yoffset+p.yoffset+d[2] + oy=getfield(n,"yoffset")+py+d[2] end + setfield(n,"yoffset",oy) if nofmarks==1 then break else nofmarks=nofmarks-1 end end + elseif not n_markmark then + break else end end @@ -7866,6 +9183,7 @@ function injections.handler(head,where,keep) if not keep then kerns={} end +head=tonode(head) return head,true elseif not keep then kerns,cursives,marks={},{},{} @@ -7875,14 +9193,14 @@ function injections.handler(head,where,keep) trace(head) end for n in traverse_id(glyph_code,head) do - if n.subtype<256 then - local k=n[a_kernpair] + if getsubtype(n)<256 then + local k=getattr(n,a_kernpair) if k then local kk=kerns[k] if kk then local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4] if y and y~=0 then - n.yoffset=y + setfield(n,"yoffset",y) end if w then local wx=w-x @@ -7913,10 +9231,10 @@ function injections.handler(head,where,keep) if not keep then kerns={} end - return head,true + return tonode(head),true else end - return head,false + return tonode(head),false end end -- closure @@ -7944,6 +9262,7 @@ analyzers.useunicodemarks=false local a_state=attributes.private('state') local nodecodes=nodes.nodecodes local glyph_code=nodecodes.glyph +local disc_code=nodecodes.disc local math_code=nodecodes.math local traverse_id=node.traverse_id local traverse_node_list=node.traverse @@ -7976,6 +9295,11 @@ local features={ medi=s_medi, fina=s_fina, isol=s_isol, + rphf=s_rphf, + half=s_half, + pref=s_pref, + blwf=s_blwf, + pstf=s_pstf, } analyzers.states=states analyzers.features=features @@ -8010,7 +9334,7 @@ function analyzers.setstate(head,font) first,last,n=nil,nil,0 end elseif id==disc_code then - current[a_state]=s_midi + current[a_state]=s_medi last=current else if first and first==last then @@ -8062,7 +9386,7 @@ local function analyzeprocessor(head,font,attr) end registerotffeature { name="analyze", - description="analysis of (for instance) character classes", + description="analysis of character classes", default=true, initializers={ node=analyzeinitializer, @@ -8325,12 +9649,25 @@ registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") registertracker("otf.actions","otf.replacements,otf.positions") 
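-- Illustrative sketch, not part of the patched sources: a simplified reading
-- of the mark placement arithmetic above, where d[1]/d[2] are the anchor
-- deltas recorded by setmark, px/py the base glyph offsets and wp its width.
-- The new code also neutralizes a nonzero mark width by surrounding the mark
-- with two kerns of -width/2. Values below are scaled points and made up.

local function mark_offsets(px, py, wp, dx, dy, rlmode, base_is_mark, ny)
  local ox
  if rlmode and rlmode >= 0 then
    ox = px - wp + dx            -- left-to-right: measured from the right edge
  else
    ox = px - dx                 -- right-to-left
  end
  local oy = base_is_mark and (py + dy) or ((ny or 0) + py + dy)
  return ox, oy
end

print(mark_offsets(0, 0, 655360, 327680, 131072, 0, false, 0))
--> -327680   131072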
registertracker("otf.injections","nodes.injections") registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") -local insert_node_after=node.insert_after -local delete_node=nodes.delete -local copy_node=node.copy -local find_node_tail=node.tail or node.slide -local flush_node_list=node.flush_list -local end_of_math=node.end_of_math +local nuts=nodes.nuts +local tonode=nuts.tonode +local tonut=nuts.tonut +local getfield=nuts.getfield +local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getattr=nuts.getattr +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local getchar=nuts.getchar +local setfield=nuts.setfield +local setattr=nuts.setattr +local insert_node_after=nuts.insert_after +local delete_node=nuts.delete +local copy_node=nuts.copy +local find_node_tail=nuts.tail +local flush_node_list=nuts.flush_list +local end_of_math=nuts.end_of_math local setmetatableindex=table.setmetatableindex local zwnj=0x200C local zwj=0x200D @@ -8339,6 +9676,7 @@ local default="dflt" local nodecodes=nodes.nodecodes local whatcodes=nodes.whatcodes local glyphcodes=nodes.glyphcodes +local disccodes=nodes.disccodes local glyph_code=nodecodes.glyph local glue_code=nodecodes.glue local disc_code=nodecodes.disc @@ -8346,6 +9684,7 @@ local whatsit_code=nodecodes.whatsit local math_code=nodecodes.math local dir_code=whatcodes.dir local localpar_code=whatcodes.localpar +local discretionary_code=disccodes.discretionary local ligature_code=glyphcodes.ligature local privateattribute=attributes.private local a_state=privateattribute('state') @@ -8439,83 +9778,83 @@ local function pref(kind,lookupname) return formatters["feature %a, lookup %a"](kind,lookupname) end local function copy_glyph(g) - local components=g.components + local components=getfield(g,"components") if components then - g.components=nil + setfield(g,"components",nil) local n=copy_node(g) - g.components=components + setfield(g,"components",components) return n else return copy_node(g) end end local function markstoligature(kind,lookupname,head,start,stop,char) - if start==stop and start.char==char then + if start==stop and getchar(start)==char then return head,start else - local prev=start.prev - local next=stop.next - start.prev=nil - stop.next=nil + local prev=getprev(start) + local next=getnext(stop) + setfield(start,"prev",nil) + setfield(stop,"next",nil) local base=copy_glyph(start) if head==start then head=base end - base.char=char - base.subtype=ligature_code - base.components=start + setfield(base,"char",char) + setfield(base,"subtype",ligature_code) + setfield(base,"components",start) if prev then - prev.next=base + setfield(prev,"next",base) end if next then - next.prev=base + setfield(next,"prev",base) end - base.next=next - base.prev=prev + setfield(base,"next",next) + setfield(base,"prev",prev) return head,base end end local function getcomponentindex(start) - if start.id~=glyph_code then + if getid(start)~=glyph_code then return 0 - elseif start.subtype==ligature_code then + elseif getsubtype(start)==ligature_code then local i=0 - local components=start.components + local components=getfield(start,"components") while components do i=i+getcomponentindex(components) - components=components.next + components=getnext(components) end return i - elseif not marks[start.char] then + elseif not marks[getchar(start)] then return 1 else return 0 end end local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) - if start==stop and start.char==char then - start.char=char 
+ if start==stop and getchar(start)==char then + setfield(start,"char",char) return head,start end - local prev=start.prev - local next=stop.next - start.prev=nil - stop.next=nil + local prev=getprev(start) + local next=getnext(stop) + setfield(start,"prev",nil) + setfield(stop,"next",nil) local base=copy_glyph(start) if start==head then head=base end - base.char=char - base.subtype=ligature_code - base.components=start + setfield(base,"char",char) + setfield(base,"subtype",ligature_code) + setfield(base,"components",start) if prev then - prev.next=base + setfield(prev,"next",base) end if next then - next.prev=base + setfield(next,"prev",base) end - base.next=next - base.prev=prev + setfield(base,"next",next) + setfield(base,"prev",prev) if not discfound then local deletemarks=markflag~="mark" local components=start @@ -8524,42 +9863,42 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun local head=base local current=base while start do - local char=start.char + local char=getchar(start) if not marks[char] then baseindex=baseindex+componentindex componentindex=getcomponentindex(start) elseif not deletemarks then - start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex) + setattr(start,a_ligacomp,baseindex+(getattr(start,a_ligacomp) or componentindex)) if trace_marks then - logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) + logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp)) end head,current=insert_node_after(head,current,copy_node(start)) elseif trace_marks then logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) end - start=start.next + start=getnext(start) end - local start=current.next - while start and start.id==glyph_code do - local char=start.char + local start=getnext(current) + while start and getid(start)==glyph_code do + local char=getchar(start) if marks[char] then - start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex) + setattr(start,a_ligacomp,baseindex+(getattr(start,a_ligacomp) or componentindex)) if trace_marks then - logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) + logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp)) end else break end - start=start.next + start=getnext(start) end end return head,base end function handlers.gsub_single(head,start,kind,lookupname,replacement) if trace_singles then - logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement)) + logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement)) end - start.char=replacement + setfield(start,"char",replacement) return head,start,true end local function get_alternative_glyph(start,alternatives,value,trace_alternatives) @@ -8585,7 +9924,7 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") end elseif value==0 then - return start.char,trace_alternatives and formatters["invalid value %a, %s"](value,"no change") + return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change") elseif value<1 then return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1) else @@ -8593,28 +9932,28 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives 
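-- Illustrative sketch, not part of the patched sources: toligature above gives
-- every mark that travels into a ligature a component index (the a_ligacomp
-- attribute) so that gpos_mark2ligature can later select the matching
-- 'baselig' anchor. A much simplified counting over a flat character list:

local marks = { [0x0301] = true }            -- combining acute, as an example

local function component_indices(chars)
  local baseindex, result = 0, {}
  for i, c in ipairs(chars) do
    if not marks[c] then
      baseindex = baseindex + 1              -- next base component
    else
      result[i] = baseindex                  -- the mark rides on that component
    end
  end
  return result
end

-- 'f' + 'i' + combining acute: the mark belongs to component 2
local t = component_indices({ 0x66, 0x69, 0x0301 })
print(t[3])                                  --> 2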
end end end -local function multiple_glyphs(head,start,multiple) +local function multiple_glyphs(head,start,multiple,ignoremarks) local nofmultiples=#multiple if nofmultiples>0 then - start.char=multiple[1] + setfield(start,"char",multiple[1]) if nofmultiples>1 then - local sn=start.next - for k=2,nofmultiples do + local sn=getnext(start) + for k=2,nofmultiples do local n=copy_node(start) - n.char=multiple[k] - n.next=sn - n.prev=start + setfield(n,"char",multiple[k]) + setfield(n,"next",sn) + setfield(n,"prev",start) if sn then - sn.prev=n + setfield(sn,"prev",n) end - start.next=n + setfield(start,"next",n) start=n end end return head,start,true else if trace_multiples then - logprocess("no multiple for %s",gref(start.char)) + logprocess("no multiple for %s",gref(getchar(start))) end return head,start,false end @@ -8624,34 +9963,34 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives) if choice then if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment) + logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment) end - start.char=choice + setfield(start,"char",choice) else if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment) + logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment) end end return head,start,true end -function handlers.gsub_multiple(head,start,kind,lookupname,multiple) +function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) if trace_multiples then - logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple)) + logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple)) end - return multiple_glyphs(head,start,multiple) + return multiple_glyphs(head,start,multiple,sequence.flags[1]) end function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) - local s,stop,discfound=start.next,nil,false - local startchar=start.char + local s,stop,discfound=getnext(start),nil,false + local startchar=getchar(start) if marks[startchar] then while s do - local id=s.id - if id==glyph_code and s.font==currentfont and s.subtype<256 then - local lg=ligature[s.char] + local id=getid(s) + if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then + local lg=ligature[getchar(s)] if lg then stop=s ligature=lg - s=s.next + s=getnext(s) else break end @@ -8663,9 +10002,9 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) local lig=ligature.ligature if lig then if trace_ligatures then - local stopchar=stop.char + local stopchar=getchar(stop) head,start=markstoligature(kind,lookupname,head,start,stop,lig) - logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) + logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) else head,start=markstoligature(kind,lookupname,head,start,stop,lig) end @@ -8676,18 +10015,18 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) else local skipmark=sequence.flags[1] while s do - local id=s.id - if id==glyph_code and 
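-- Illustrative sketch, not part of the patched sources: multiple_glyphs above
-- handles a one-to-many substitution by reusing the original node for the
-- first replacement and linking in copies for the rest. Doubly linked plain
-- tables as stand-ins for glyph nodes:

local function copy_node(n) return { char = n.char } end

local function multiple(start, replacements)
  start.char = replacements[1]
  for k = 2, #replacements do
    local sn = start.next
    local n  = copy_node(start)
    n.char, n.next, n.prev = replacements[k], sn, start
    if sn then sn.prev = n end
    start.next = n
    start = n
  end
  return start
end

local a = { char = 0x01 }                    -- pretend this decomposes into "f","i"
multiple(a, { 0x66, 0x69 })
print(a.char, a.next.char)                   --> 102   105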
s.subtype<256 then - if s.font==currentfont then - local char=s.char + local id=getid(s) + if id==glyph_code and getsubtype(s)<256 then + if getfont(s)==currentfont then + local char=getchar(s) if skipmark and marks[char] then - s=s.next + s=getnext(s) else local lg=ligature[char] if lg then stop=s ligature=lg - s=s.next + s=getnext(s) else break end @@ -8697,7 +10036,7 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) end elseif id==disc_code then discfound=true - s=s.next + s=getnext(s) else break end @@ -8706,9 +10045,9 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) local lig=ligature.ligature if lig then if trace_ligatures then - local stopchar=stop.char + local stopchar=getchar(stop) head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) + logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) else head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) end @@ -8720,16 +10059,16 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) return head,start,false end function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) - local markchar=start.char + local markchar=getchar(start) if marks[markchar] then - local base=start.prev - if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then - local basechar=base.char + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) if marks[basechar] then while true do - base=base.prev - if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then - basechar=base.char + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) if not marks[basechar] then break end @@ -8778,16 +10117,16 @@ function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence return head,start,false end function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) - local markchar=start.char + local markchar=getchar(start) if marks[markchar] then - local base=start.prev - if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then - local basechar=base.char + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) if marks[basechar] then while true do - base=base.prev - if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then - basechar=base.char + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) if not marks[basechar] then break end @@ -8799,7 +10138,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ end end end - local index=start[a_ligacomp] + local index=getattr(start,a_ligacomp) local baseanchors=descriptions[basechar] if baseanchors then baseanchors=baseanchors.anchors @@ -8844,22 +10183,22 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ return head,start,false end function 
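-- Illustrative sketch, not part of the patched sources: the ligature data that
-- gsub_ligature above walks is a trie keyed by the following characters, with
-- the resulting glyph stored in a 'ligature' field where a match ends. The
-- entries below are made up for the example:

local trie = {
  [0x66] = {                                  -- 'f'
    [0x69] = { ligature = 0xFB01 },           -- f + i
    [0x66] = {                                -- f + f
      [0x69] = { ligature = 0xFB03 },         -- f + f + i
    },
  },
}

local function longest_ligature(chars, i)
  local node, last, lastpos = trie[chars[i]], nil, i
  while node do
    if node.ligature then last, lastpos = node.ligature, i end
    i = i + 1
    node = chars[i] and node[chars[i]] or nil
  end
  return last, lastpos
end

print(longest_ligature({ 0x66, 0x66, 0x69, 0x78 }, 1))   --> 64259   3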
handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) - local markchar=start.char + local markchar=getchar(start) if marks[markchar] then - local base=start.prev - local slc=start[a_ligacomp] + local base=getprev(start) + local slc=getattr(start,a_ligacomp) if slc then while base do - local blc=base[a_ligacomp] + local blc=getattr(base,a_ligacomp) if blc and blc~=slc then - base=base.prev + base=getprev(base) else break end end end - if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then - local basechar=base.char + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) local baseanchors=descriptions[basechar] if baseanchors then baseanchors=baseanchors.anchors @@ -8871,7 +10210,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence if al[anchor] then local ma=markanchors[anchor] if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -8897,20 +10236,20 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence return head,start,false end function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) - local alreadydone=cursonce and start[a_cursbase] + local alreadydone=cursonce and getattr(start,a_cursbase) if not alreadydone then local done=false - local startchar=start.char + local startchar=getchar(start) if marks[startchar] then if trace_cursive then logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) end else - local nxt=start.next - while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do - local nextchar=nxt.char + local nxt=getnext(start) + while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do + local nextchar=getchar(nxt) if marks[nextchar] then - nxt=nxt.next + nxt=getnext(nxt) else local entryanchors=descriptions[nextchar] if entryanchors then @@ -8944,13 +10283,13 @@ function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) return head,start,done else if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) end return head,start,false end end function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) - local startchar=start.char + local startchar=getchar(start) local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) if trace_kerns then logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) @@ -8958,34 +10297,33 @@ function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) return head,start,false end function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) - local snext=start.next + local snext=getnext(start) if not snext then return head,start,false else local prev,done=start,false local factor=tfmdata.parameters.factor local lookuptype=lookuptypes[lookupname] - while snext and snext.id==glyph_code and 
snext.font==currentfont and snext.subtype<256 do - local nextchar=snext.char + while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do + local nextchar=getchar(snext) local krn=kerns[nextchar] if not krn and marks[nextchar] then prev=snext - snext=snext.next + snext=getnext(snext) else - local krn=kerns[nextchar] if not krn then elseif type(krn)=="table" then if lookuptype=="pair" then local a,b=krn[2],krn[3] if a and #a>0 then - local startchar=start.char + local startchar=getchar(start) local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) if trace_kerns then logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) end end if b and #b>0 then - local startchar=start.char + local startchar=getchar(start) local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) if trace_kerns then logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) @@ -8998,7 +10336,7 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) elseif krn~=0 then local k=setkern(snext,factor,rlmode,krn) if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) + logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) end done=true end @@ -9033,46 +10371,18 @@ function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,looku return head,start,false end function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) - local char=start.char + local char=getchar(start) local replacement=replacements[char] if replacement then if trace_singles then logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) end - start.char=replacement + setfield(start,"char",replacement) return head,start,true else return head,start,false end end -local function delete_till_stop(start,stop,ignoremarks) - local n=1 - if start==stop then - elseif ignoremarks then - repeat - local next=start.next - if not marks[next.char] then - local components=next.components - if components then - flush_node_list(components) - end - delete_node(start,next) - end - n=n+1 - until next==stop - else - repeat - local next=start.next - local components=next.components - if components then - flush_node_list(components) - end - delete_node(start,next) - n=n+1 - until next==stop - end - return n -end function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) local current=start local subtables=currentlookup.subtables @@ -9080,8 +10390,8 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) end while current do - if current.id==glyph_code then - local currentchar=current.char + if getid(current)==glyph_code then + local currentchar=getchar(current) local lookupname=subtables[1] local replacement=lookuphash[lookupname] if not replacement then @@ -9098,22 +10408,21 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo if trace_singles then logprocess("%s: replacing single %s by 
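-- Illustrative sketch, not part of the patched sources: a gpos_pair entry, as
-- handled above, is either a plain number (a simple kern between the pair) or
-- a table whose second and third slots carry value records for the first and
-- second glyph. The two callbacks stand in for setkern and setpair:

local function apply_pair(krn, apply_kern, apply_value)
  if not krn or krn == 0 then
    return false
  elseif type(krn) == "table" then
    local a, b = krn[2], krn[3]               -- first/second value records
    if a and #a > 0 then apply_value("first",  a) end
    if b and #b > 0 then apply_value("second", b) end
    return true
  else
    apply_kern(krn)                           -- simple kern, one number
    return true
  end
end

apply_pair(-50,
  function(k)      print("kern", k)               end,
  function(who, v) print("value record for", who) end)
--> kern   -50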
%s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) end - current.char=replacement + setfield(current,"char",replacement) end end return head,start,true elseif current==stop then break else - current=current.next + current=getnext(current) end end return head,start,false end chainmores.gsub_single=chainprocs.gsub_single function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - delete_till_stop(start,stop) - local startchar=start.char + local startchar=getchar(start) local subtables=currentlookup.subtables local lookupname=subtables[1] local replacements=lookuphash[lookupname] @@ -9131,7 +10440,7 @@ function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext, if trace_multiples then logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) end - return multiple_glyphs(head,start,replacements) + return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) end end return head,start,false @@ -9142,8 +10451,8 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext local subtables=currentlookup.subtables local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue while current do - if current.id==glyph_code then - local currentchar=current.char + if getid(current)==glyph_code then + local currentchar=getchar(current) local lookupname=subtables[1] local alternatives=lookuphash[lookupname] if not alternatives then @@ -9158,7 +10467,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext if trace_alternatives then logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) end - start.char=choice + setfield(start,"char",choice) else if trace_alternatives then logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) @@ -9172,14 +10481,14 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext elseif current==stop then break else - current=current.next + current=getnext(current) end end return head,start,false end chainmores.gsub_alternate=chainprocs.gsub_alternate function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - local startchar=start.char + local startchar=getchar(start) local subtables=currentlookup.subtables local lookupname=subtables[1] local ligatures=lookuphash[lookupname] @@ -9194,20 +10503,20 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext, logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) end else - local s=start.next + local s=getnext(start) local discfound=false local last=stop local nofreplacements=0 local skipmark=currentlookup.flags[1] while s do - local id=s.id + local id=getid(s) if id==disc_code then - s=s.next + s=getnext(s) discfound=true else - local schar=s.char + local schar=getchar(s) if skipmark and marks[schar] then - s=s.next + s=getnext(s) else local lg=ligatures[schar] if lg then @@ -9215,7 +10524,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext, if s==stop then break else - s=s.next + s=getnext(s) end else break @@ -9232,7 +10541,7 @@ function 
chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext, if start==stop then logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) else - logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2)) + logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2)) end end head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) @@ -9241,7 +10550,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext, if start==stop then logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) else - logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char)) + logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop))) end end end @@ -9250,7 +10559,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext, end chainmores.gsub_ligature=chainprocs.gsub_ligature function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=start.char + local markchar=getchar(start) if marks[markchar] then local subtables=currentlookup.subtables local lookupname=subtables[1] @@ -9259,14 +10568,14 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext markanchors=markanchors[markchar] end if markanchors then - local base=start.prev - if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then - local basechar=base.char + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) if marks[basechar] then while true do - base=base.prev - if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then - basechar=base.char + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) if not marks[basechar] then break end @@ -9313,7 +10622,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext return head,start,false end function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=start.char + local markchar=getchar(start) if marks[markchar] then local subtables=currentlookup.subtables local lookupname=subtables[1] @@ -9322,14 +10631,14 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon markanchors=markanchors[markchar] end if markanchors then - local base=start.prev - if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then - local basechar=base.char + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) if marks[basechar] then while true do - base=base.prev - if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then - 
basechar=base.char + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) if not marks[basechar] then break end @@ -9341,7 +10650,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon end end end - local index=start[a_ligacomp] + local index=getattr(start,a_ligacomp) local baseanchors=descriptions[basechar].anchors if baseanchors then local baseanchors=baseanchors['baselig'] @@ -9380,7 +10689,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon return head,start,false end function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=start.char + local markchar=getchar(start) if marks[markchar] then local subtables=currentlookup.subtables local lookupname=subtables[1] @@ -9389,20 +10698,20 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext markanchors=markanchors[markchar] end if markanchors then - local base=start.prev - local slc=start[a_ligacomp] + local base=getprev(start) + local slc=getattr(start,a_ligacomp) if slc then while base do - local blc=base[a_ligacomp] + local blc=getattr(base,a_ligacomp) if blc and blc~=slc then - base=base.prev + base=getprev(base) else break end end end - if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then - local basechar=base.char + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) local baseanchors=descriptions[basechar].anchors if baseanchors then baseanchors=baseanchors['basemark'] @@ -9412,7 +10721,7 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext if al[anchor] then local ma=markanchors[anchor] if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -9438,9 +10747,9 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext return head,start,false end function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local alreadydone=cursonce and start[a_cursbase] + local alreadydone=cursonce and getattr(start,a_cursbase) if not alreadydone then - local startchar=start.char + local startchar=getchar(start) local subtables=currentlookup.subtables local lookupname=subtables[1] local exitanchors=lookuphash[lookupname] @@ -9454,11 +10763,11 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) end else - local nxt=start.next - while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do - local nextchar=nxt.char + local nxt=getnext(start) + while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do + local nextchar=getchar(nxt) if marks[nextchar] then - nxt=nxt.next + nxt=getnext(nxt) else local entryanchors=descriptions[nextchar] if entryanchors then @@ -9492,7 +10801,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l 
return head,start,done else if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) end return head,start,false end @@ -9500,7 +10809,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l return head,start,false end function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local startchar=start.char + local startchar=getchar(start) local subtables=currentlookup.subtables local lookupname=subtables[1] local kerns=lookuphash[lookupname] @@ -9515,10 +10824,11 @@ function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lo end return head,start,false end +chainmores.gpos_single=chainprocs.gpos_single function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local snext=start.next + local snext=getnext(start) if snext then - local startchar=start.char + local startchar=getchar(start) local subtables=currentlookup.subtables local lookupname=subtables[1] local kerns=lookuphash[lookupname] @@ -9528,26 +10838,26 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look local lookuptype=lookuptypes[lookupname] local prev,done=start,false local factor=tfmdata.parameters.factor - while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do - local nextchar=snext.char + while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do + local nextchar=getchar(snext) local krn=kerns[nextchar] if not krn and marks[nextchar] then prev=snext - snext=snext.next + snext=getnext(snext) else if not krn then elseif type(krn)=="table" then if lookuptype=="pair" then local a,b=krn[2],krn[3] if a and #a>0 then - local startchar=start.char + local startchar=getchar(start) local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) if trace_kerns then logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) end end if b and #b>0 then - local startchar=start.char + local startchar=getchar(start) local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) if trace_kerns then logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) @@ -9559,7 +10869,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look if a and a~=0 then local k=setkern(snext,factor,rlmode,a) if trace_kerns then - logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) + logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) end end if b and b~=0 then @@ -9570,7 +10880,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look elseif krn~=0 then local k=setkern(snext,factor,rlmode,krn) if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) + logprocess("%s: inserting kern %s between %s and 
%s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) end done=true end @@ -9583,6 +10893,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look end return head,start,false end +chainmores.gpos_pair=chainprocs.gpos_pair local function show_skip(kind,chainname,char,ck,class) if ck[9] then logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) @@ -9607,7 +10918,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq local seq=ck[3] local s=#seq if s==1 then - match=current.id==glyph_code and current.font==currentfont and current.subtype<256 and seq[1][current.char] + match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and seq[1][getchar(current)] else local f,l=ck[4],ck[5] if f==1 and f==l then @@ -9615,13 +10926,13 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq if f==l then else local n=f+1 - last=last.next + last=getnext(last) while n<=l do if last then - local id=last.id + local id=getid(last) if id==glyph_code then - if last.font==currentfont and last.subtype<256 then - local char=last.char + if getfont(last)==currentfont and getsubtype(last)<256 then + local char=getchar(last) local ccd=descriptions[char] if ccd then local class=ccd.class @@ -9630,10 +10941,10 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq if trace_skips then show_skip(kind,chainname,char,ck,class) end - last=last.next + last=getnext(last) elseif seq[n][char] then if n1 then - local prev=start.prev + local prev=getprev(start) if prev then local n=f-1 while n>=1 do if prev then - local id=prev.id + local id=getid(prev) if id==glyph_code then - if prev.font==currentfont and prev.subtype<256 then - local char=prev.char + if getfont(prev)==currentfont and getsubtype(prev)<256 then + local char=getchar(prev) local ccd=descriptions[char] if ccd then local class=ccd.class @@ -9700,7 +11011,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq match=false break end - prev=prev.prev + prev=getprev(prev) elseif seq[n][32] then n=n -1 else @@ -9720,15 +11031,15 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end end if match and s>l then - local current=last and last.next + local current=last and getnext(last) if current then local n=l+1 while n<=s do if current then - local id=current.id + local id=getid(current) if id==glyph_code then - if current.font==currentfont and current.subtype<256 then - local char=current.char + if getfont(current)==currentfont and getsubtype(current)<256 then + local char=getchar(current) local ccd=descriptions[char] if ccd then local class=ccd.class @@ -9758,7 +11069,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq match=false break end - current=current.next + current=getnext(current) elseif seq[n][32] then n=n+1 else @@ -9781,7 +11092,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq if match then if trace_contexts then local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5] - local char=start.char + local char=getchar(start) if ck[9] then logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) @@ -9799,7 +11110,11 @@ local function 
normal_handle_contextchain(head,start,kind,chainname,contexts,seq if chainlookup then local cp=chainprocs[chainlookup.type] if cp then - head,start,done=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) + local ok + head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) + if ok then + done=true + end else logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) end @@ -9811,12 +11126,12 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq repeat if skipped then while true do - local char=start.char + local char=getchar(start) local ccd=descriptions[char] if ccd then local class=ccd.class if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - start=start.next + start=getnext(start) else break end @@ -9826,22 +11141,27 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end end local chainlookupname=chainlookups[i] - local chainlookup=lookuptable[chainlookupname] - local cp=chainlookup and chainmores[chainlookup.type] - if cp then - local ok,n - head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) - if ok then - done=true - i=i+(n or 1) - else + local chainlookup=lookuptable[chainlookupname] + if not chainlookup then + i=i+1 + else + local cp=chainmores[chainlookup.type] + if not cp then + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) i=i+1 + else + local ok,n + head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) + if ok then + done=true + i=i+(n or 1) + else + i=i+1 + end end - else - i=i+1 end if start then - start=start.next + start=getnext(start) else end until i>nofchainlookups @@ -9968,6 +11288,7 @@ local function featuresprocessor(head,font,attr) if not lookuphash then return head,false end + head=tonut(head) if trace_steps then checkstep(head) end @@ -9985,228 +11306,344 @@ local function featuresprocessor(head,font,attr) local done=false local datasets=otf.dataset(tfmdata,font,attr) local dirstack={} -for s=1,#datasets do - local dataset=datasets[s] - featurevalue=dataset[1] - local sequence=dataset[5] - local rlparmode=0 - local topstack=0 - local success=false - local attribute=dataset[2] - local chain=dataset[3] - local typ=sequence.type - local subtables=sequence.subtables - if chain<0 then - local handler=handlers[typ] - local start=find_node_tail(head) + for s=1,#datasets do + local dataset=datasets[s] + featurevalue=dataset[1] + local sequence=dataset[5] + local rlparmode=0 + local topstack=0 + local success=false + local attribute=dataset[2] + local chain=dataset[3] + local typ=sequence.type + local subtables=sequence.subtables + if chain<0 then + local handler=handlers[typ] + local start=find_node_tail(head) + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=a==attr + else + a=true + end + if a then + for i=1,#subtables do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if success then + break + end + end + else + 
report_missing_cache(typ,lookupname) + end + end + if start then start=getprev(start) end + else + start=getprev(start) + end + else + start=getprev(start) + end + else + start=getprev(start) + end + end + else + local handler=handlers[typ] + local ns=#subtables + local start=head + rlmode=0 + if ns==1 then + local lookupname=subtables[1] + local lookupcache=lookuphash[lookupname] + if not lookupcache then + report_missing_cache(typ,lookupname) + else + local function subrun(start) + local head=start + local done=false + while start do + local id=getid(start) + if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getattr(start,a_state)==attribute) + else + a=not attribute or getattr(start,a_state)==attribute + end + if a then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done=true + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + end + if done then + success=true + return head + end + end while start do - local id=start.id + local id=getid(start) if id==glyph_code then - if start.font==font and start.subtype<256 then - local a=start[0] + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) if a then - a=a==attr + a=(a==attr) and (not attribute or getattr(start,a_state)==attribute) else - a=true + a=not attribute or getattr(start,a_state)==attribute end if a then - for i=1,#subtables do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[start.char] - if lookupmatch then - head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if success then - break - end - end - else - report_missing_cache(typ,lookupname) + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + success=true end end - if start then start=start.prev end + if start then start=getnext(start) end else - start=start.prev + start=getnext(start) end else - start=start.prev + start=getnext(start) + end + elseif id==disc_code then + if getsubtype(start)==discretionary_code then + local pre=getfield(start,"pre") + if pre then + local new=subrun(pre) + if new then setfield(start,"pre",new) end + end + local post=getfield(start,"post") + if post then + local new=subrun(post) + if new then setfield(start,"post",new) end + end + local replace=getfield(start,"replace") + if replace then + local new=subrun(replace) + if new then setfield(start,"replace",new) end + end + end + start=getnext(start) + elseif id==whatsit_code then + local subtype=getsubtype(start) + if subtype==dir_code then + local dir=getfield(start,"dir") + if dir=="+TRT" or dir=="+TLT" then + topstack=topstack+1 + dirstack[topstack]=dir + elseif dir=="-TRT" or dir=="-TLT" then + topstack=topstack-1 + end + local newdir=dirstack[topstack] + if newdir=="+TRT" then + rlmode=-1 + elseif newdir=="+TLT" then + rlmode=1 + else + rlmode=rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype==localpar_code then + local dir=getfield(start,"dir") + if 
dir=="TRT" then + rlparmode=-1 + elseif dir=="TLT" then + rlparmode=1 + else + rlparmode=0 + end + rlmode=rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end end + start=getnext(start) + elseif id==math_code then + start=getnext(end_of_math(start)) else - start=start.prev + start=getnext(start) end end - else - local handler=handlers[typ] - local ns=#subtables - local start=head - rlmode=0 - if ns==1 then - local lookupname=subtables[1] - local lookupcache=lookuphash[lookupname] - if not lookupcache then - report_missing_cache(typ,lookupname) - else - while start do - local id=start.id - if id==glyph_code then - if start.font==font and start.subtype<256 then - local a=start[0] - if a then - a=(a==attr) and (not attribute or start[a_state]==attribute) - else - a=not attribute or start[a_state]==attribute - end - if a then - local lookupmatch=lookupcache[start.char] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - success=true - end + end + else + local function subrun(start) + local head=start + local done=false + while start do + local id=getid(start) + if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getattr(start,a_state)==attribute) + else + a=not attribute or getattr(start,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done=true + break + elseif not start then + break end - if start then start=start.next end - else - start=start.next end - elseif id==math_code then - start=end_of_math(start).next else - start=start.next - end - elseif id==whatsit_code then - local subtype=start.subtype - if subtype==dir_code then - local dir=start.dir - if dir=="+TRT" or dir=="+TLT" then - topstack=topstack+1 - dirstack[topstack]=dir - elseif dir=="-TRT" or dir=="-TLT" then - topstack=topstack-1 - end - local newdir=dirstack[topstack] - if newdir=="+TRT" then - rlmode=-1 - elseif newdir=="+TLT" then - rlmode=1 - else - rlmode=rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype==localpar_code then - local dir=start.dir - if dir=="TRT" then - rlparmode=-1 - elseif dir=="TLT" then - rlparmode=1 - else - rlparmode=0 - end - rlmode=rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end + report_missing_cache(typ,lookupname) end - start=start.next - elseif id==math_code then - start=end_of_math(start).next - else - start=start.next end + if start then start=getnext(start) end + else + start=getnext(start) end + else + start=getnext(start) end - else - while start do - local id=start.id - if id==glyph_code then - if start.font==font and start.subtype<256 then - local a=start[0] - if a then - a=(a==attr) and (not attribute or start[a_state]==attribute) - else - a=not attribute or start[a_state]==attribute - end - if a then - for i=1,ns do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then 
- local lookupmatch=lookupcache[start.char] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - success=true - break - elseif not start then - break - end - end - else - report_missing_cache(typ,lookupname) + end + if done then + success=true + return head + end + end + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getattr(start,a_state)==attribute) + else + a=not attribute or getattr(start,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + success=true + break + elseif not start then + break end end - if start then start=start.next end - else - start=start.next - end - else - start=start.next - end - elseif id==whatsit_code then - local subtype=start.subtype - if subtype==dir_code then - local dir=start.dir - if dir=="+TRT" or dir=="+TLT" then - topstack=topstack+1 - dirstack[topstack]=dir - elseif dir=="-TRT" or dir=="-TLT" then - topstack=topstack-1 - end - local newdir=dirstack[topstack] - if newdir=="+TRT" then - rlmode=-1 - elseif newdir=="+TLT" then - rlmode=1 else - rlmode=rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype==localpar_code then - local dir=start.dir - if dir=="TRT" then - rlparmode=-1 - elseif dir=="TLT" then - rlparmode=1 - else - rlparmode=0 - end - rlmode=rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + report_missing_cache(typ,lookupname) end end - start=start.next - elseif id==math_code then - start=end_of_math(start).next + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + elseif id==disc_code then + if getsubtype(start)==discretionary_code then + local pre=getfield(start,"pre") + if pre then + local new=subrun(pre) + if new then setfield(start,"pre",new) end + end + local post=getfield(start,"post") + if post then + local new=subrun(post) + if new then setfield(start,"post",new) end + end + local replace=getfield(start,"replace") + if replace then + local new=subrun(replace) + if new then setfield(start,"replace",new) end + end + end + start=getnext(start) + elseif id==whatsit_code then + local subtype=getsubtype(start) + if subtype==dir_code then + local dir=getfield(start,"dir") + if dir=="+TRT" or dir=="+TLT" then + topstack=topstack+1 + dirstack[topstack]=dir + elseif dir=="-TRT" or dir=="-TLT" then + topstack=topstack-1 + end + local newdir=dirstack[topstack] + if newdir=="+TRT" then + rlmode=-1 + elseif newdir=="+TLT" then + rlmode=1 + else + rlmode=rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype==localpar_code then + local dir=getfield(start,"dir") + if dir=="TRT" then + rlparmode=-1 + elseif dir=="TLT" then + rlparmode=1 else - start=start.next + rlparmode=0 + end + rlmode=rlparmode + if 
trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) end end + start=getnext(start) + elseif id==math_code then + start=getnext(end_of_math(start)) + else + start=getnext(start) end end - if success then - done=true - end - if trace_steps then - registerstep(head) - end + end + end + if success then + done=true + end + if trace_steps then + registerstep(head) + end end + head=tonode(head) return head,done end local function generic(lookupdata,lookupname,unicode,lookuphash) @@ -11315,6 +12752,7 @@ if not modules then modules={} end modules ['font-def']={ local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub local tostring,next=tostring,next local lpegmatch=lpeg.match +local suffixonly,removesuffix=file.suffix,file.removesuffix local allocate=utilities.storage.allocate local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end) local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end) @@ -11362,7 +12800,7 @@ addlookup("file") addlookup("name") addlookup("spec") local function getspecification(str) - return lpegmatch(splitter,str) + return lpegmatch(splitter,str or "") end definers.getspecification=getspecification function definers.registersplit(symbol,action,verbosename) @@ -11404,10 +12842,11 @@ definers.resolvers=definers.resolvers or {} local resolvers=definers.resolvers function resolvers.file(specification) local name=resolvefile(specification.name) - local suffix=file.suffix(name) + local suffix=lower(suffixonly(name)) if fonts.formats[suffix] then specification.forced=suffix - specification.name=file.removesuffix(name) + specification.forcedname=name + specification.name=removesuffix(name) else specification.name=name end @@ -11419,10 +12858,11 @@ function resolvers.name(specification) if resolved then specification.resolved=resolved specification.sub=sub - local suffix=file.suffix(resolved) + local suffix=lower(suffixonly(resolved)) if fonts.formats[suffix] then specification.forced=suffix - specification.name=file.removesuffix(resolved) + specification.forcedname=resolved + specification.name=removesuffix(resolved) else specification.name=resolved end @@ -11438,8 +12878,9 @@ function resolvers.spec(specification) if resolved then specification.resolved=resolved specification.sub=sub - specification.forced=file.suffix(resolved) - specification.name=file.removesuffix(resolved) + specification.forced=lower(suffixonly(resolved)) + specification.forcedname=resolved + specification.name=removesuffix(resolved) end else resolvers.name(specification) @@ -11454,8 +12895,7 @@ function definers.resolve(specification) end if specification.forced=="" then specification.forced=nil - else - specification.forced=specification.forced + specification.forcedname=nil end specification.hash=lower(specification.name..' 
@ '..constructors.hashfeatures(specification)) if specification.sub and specification.sub~="" then @@ -11500,7 +12940,7 @@ function definers.loadfont(specification) if not tfmdata then local forced=specification.forced or "" if forced~="" then - local reader=readers[lower(forced)] + local reader=readers[lower(forced)] tfmdata=reader and reader(specification) if not tfmdata then report_defining("forced type %a of %a not found",forced,specification.name) diff --git a/tex/generic/context/luatex/luatex-fonts-syn.lua b/tex/generic/context/luatex/luatex-fonts-syn.lua index ea6e3cab5..60dd2c063 100644 --- a/tex/generic/context/luatex/luatex-fonts-syn.lua +++ b/tex/generic/context/luatex/luatex-fonts-syn.lua @@ -100,3 +100,7 @@ fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv return "" end + +function fonts.names.ignoredfile(filename) -- only supported in mkiv + return true -- will be overloaded +end diff --git a/tex/generic/context/luatex/luatex-fonts.lua b/tex/generic/context/luatex/luatex-fonts.lua index 89592fcac..7995be33e 100644 --- a/tex/generic/context/luatex/luatex-fonts.lua +++ b/tex/generic/context/luatex/luatex-fonts.lua @@ -192,7 +192,7 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then -- with context. The mtx-fonts script can be used to genate this file (using the --names option). -- In 2013/14 I will merge/move some generic files into luatex-fonts-* files (copies) so that - -- intermediate updates of context not interfere. We can then also use the general merger and + -- intermediate updates of context don't interfere. We can then also use the general merger and -- consider stripping debug code. loadmodule('font-ini.lua') @@ -201,6 +201,11 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then loadmodule('font-cid.lua') loadmodule('font-map.lua') -- for loading lum file (will be stripped) loadmodule('luatex-fonts-syn.lua') -- deals with font names (synonyms) + -- begin of test + loadmodule('font-tfm.lua') -- optional + loadmodule('font-afm.lua') -- optional + loadmodule('font-afk.lua') -- optional + -- end of test loadmodule('luatex-fonts-tfm.lua') loadmodule('font-oti.lua') loadmodule('font-otf.lua') @@ -209,7 +214,6 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then loadmodule('font-ota.lua') loadmodule('font-otn.lua') loadmodule('font-otp.lua') -- optional - ----------('luatex-fonts-chr.lua') loadmodule('luatex-fonts-lua.lua') loadmodule('font-def.lua') loadmodule('luatex-fonts-def.lua') diff --git a/tex/generic/context/luatex/luatex-test.tex b/tex/generic/context/luatex/luatex-test.tex index fcc837e70..fbf8ce3cf 100644 --- a/tex/generic/context/luatex/luatex-test.tex +++ b/tex/generic/context/luatex/luatex-test.tex @@ -80,4 +80,8 @@ $$\left( { {1} \over { {1} \over {x} } } \right) $$ $$\sqrt {2} { { {1} \over { {1} \over {x} } } } $$ +\font\cows=file:koeieletters.afm at 50pt + +\cows Hello World! + \end -- cgit v1.2.3
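
A note on the recurring pattern in the font-otn.lua hunks above: direct node field reads such as start.char, start.next and start.prev are replaced by accessor calls (getchar(start), getnext(start), getprev(start), getfield(start,"dir")), with tonut/tonode converting the list at the entry and exit of featuresprocessor. The sketch below only illustrates that indirection with plain Lua tables as mock nodes; the id value and node layout are hypothetical stand-ins for illustration, not LuaTeX's actual node API, and the snippet is not part of the patch itself.

    -- illustrative sketch only: mock nodes, hypothetical glyph id
    local glyph_code = 37                    -- stand-in value, for this mock only

    -- accessor layer: the only code that knows how a "node" stores its data
    local function getid   (n) return n.id   end
    local function getchar (n) return n.char end
    local function getnext (n) return n.next end

    -- a tiny mock node list spelling "Hi"
    local n2 = { id = glyph_code, char = string.byte("i") }
    local n1 = { id = glyph_code, char = string.byte("H"), next = n2 }

    -- traversal written against the accessors, in the style of the patched code
    local start = n1
    while start do
        if getid(start) == glyph_code then
            io.write(string.char(getchar(start)))
        end
        start = getnext(start)
    end
    io.write("\n")

The presumed benefit of routing every field access through such accessors is that the OpenType handlers no longer depend on how a node stores its data, so the same processing code can be pointed at either the regular userdata node interface or a faster direct representation simply by swapping the accessor set at the top of the file.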